From 1684b05b07b104165c2c6916ee45b6289c29740c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 13 Jul 2022 13:55:44 +0000 Subject: [PATCH 01/94] first commit for yolov7 --- fastdeploy/vision.h | 1 + fastdeploy/vision/wongkinyiu/__init__.py | 116 +++++++++ .../vision/wongkinyiu/wongkinyiu_pybind.cc | 41 ++++ fastdeploy/vision/wongkinyiu/yolov7.cc | 230 ++++++++++++++++++ fastdeploy/vision/wongkinyiu/yolov7.h | 87 +++++++ model_zoo/vision/yolov7/cpp/CMakeLists.txt | 18 ++ model_zoo/vision/yolov7/cpp/README.md | 30 +++ model_zoo/vision/yolov7/cpp/yolov7.cc | 40 +++ model_zoo/vision/yolov7/yolov7.py | 23 ++ 9 files changed, 586 insertions(+) create mode 100644 fastdeploy/vision/wongkinyiu/__init__.py create mode 100644 fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc create mode 100644 fastdeploy/vision/wongkinyiu/yolov7.cc create mode 100644 fastdeploy/vision/wongkinyiu/yolov7.h create mode 100644 model_zoo/vision/yolov7/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/yolov7/cpp/README.md create mode 100644 model_zoo/vision/yolov7/cpp/yolov7.cc create mode 100644 model_zoo/vision/yolov7/yolov7.py diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index ca2b9a618a..821f3689e5 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -17,6 +17,7 @@ #ifdef ENABLE_VISION #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/ultralytics/yolov5.h" +#include "fastdeploy/vision/wongkinyiu/yolov7.h" #endif #include "fastdeploy/vision/visualize/visualize.h" diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py new file mode 100644 index 0000000000..e3ed7730e6 --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -0,0 +1,116 @@ +# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from __future__ import absolute_import +import logging +from ... import FastDeployModel, Frontend +from ... import fastdeploy_main as C + + +class YOLOv7(FastDeployModel): + def __init__(self, + model_file, + params_file="", + runtime_option=None, + model_format=Frontend.ONNX): + # 调用基函数进行backend_option的初始化 + # 初始化后的option保存在self._runtime_option + super(YOLOv7, self).__init__(runtime_option) + + self._model = C.vision.yongkinyiu.YOLOv7( + model_file, params_file, self._runtime_option, model_format) + # 通过self.initialized判断整个模型的初始化是否成功 + assert self.initialized, "YOLOv7 initialize failed." + + def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5): + return self._model.predict(input_image, conf_threshold, + nms_iou_threshold) + + # 一些跟YOLOv7模型有关的属性封装 + # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持) + @property + def size(self): + return self.model.size + + @property + def padding_value(self): + return self.model.padding_value + + @property + def is_no_pad(self): + return self.model.is_no_pad + + @property + def is_mini_pad(self): + return self.model.is_mini_pad + + @property + def is_scale_up(self): + return self.model.is_scale_up + + @property + def stride(self): + return self.model.stride + + @property + def max_wh(self): + return self.model.max_wh + + @size.setter + def size(self, wh): + assert isinstance(wh, [list, tuple]),\ + "The value to set `size` must be type of tuple or list." 
+ assert len(wh) == 2,\ + "The value to set `size` must contatins 2 elements means [width, height], but now it contains {} elements.".format( + len(wh)) + self.model.size = wh + + @padding_value.setter + def padding_value(self, value): + assert isinstance( + value, + list), "The value to set `padding_value` must be type of list." + self.model.padding_value = value + + @is_no_pad.setter + def is_no_pad(self, value): + assert isinstance( + value, bool), "The value to set `is_no_pad` must be type of bool." + self.model.is_no_pad = value + + @is_mini_pad.setter + def is_mini_pad(self, value): + assert isinstance( + value, + bool), "The value to set `is_mini_pad` must be type of bool." + self.model.is_mini_pad = value + + @is_scale_up.setter + def is_scale_up(self, value): + assert isinstance( + value, + bool), "The value to set `is_scale_up` must be type of bool." + self.model.is_scale_up = value + + @stride.setter + def stride(self, value): + assert isinstance( + value, int), "The value to set `stride` must be type of int." + self.model.stride = value + + @max_wh.setter + def max_wh(self, value): + assert isinstance( + value, float), "The value to set `max_wh` must be type of float." + self.model.max_wh = value diff --git a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc new file mode 100644 index 0000000000..99f0aab628 --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc @@ -0,0 +1,41 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/pybind/main.h" + +namespace fastdeploy { +void BindWongkinyiu(pybind11::module& m) { + auto yongkinyiu_module = + m.def_submodule("WongKinYiu", "https://github.com/WongKinYiu/yolov7"); + pybind11::class_( + yongkinyiu_module, "YOLOv7") + .def(pybind11::init()) + .def("predict", + [](vision::yongkinyiu::YOLOv7& self, pybind11::array& data, + float conf_threshold, float nms_iou_threshold) { + auto mat = PyArrayToCvMat(data); + vision::DetectionResult res; + self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); + return res; + }) + .def_readwrite("size", &vision::yongkinyiu::YOLOv7::size) + .def_readwrite("padding_value", + &vision::yongkinyiu::YOLOv7::padding_value) + .def_readwrite("is_mini_pad", &vision::yongkinyiu::YOLOv7::is_mini_pad) + .def_readwrite("is_no_pad", &vision::yongkinyiu::YOLOv7::is_no_pad) + .def_readwrite("is_scale_up", &vision::yongkinyiu::YOLOv7::is_scale_up) + .def_readwrite("stride", &vision::yongkinyiu::YOLOv7::stride) + .def_readwrite("max_wh", &vision::yongkinyiu::YOLOv7::max_wh); +} +} // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc new file mode 100644 index 0000000000..09004b5c3c --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -0,0 +1,230 @@ +#include "fastdeploy/vision/WongKinYiu/yolov7.h" +#include "fastdeploy/utils/perf.h" +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace wongkinyiu { + +void LetterBox(Mat* mat, std::vector size, std::vector color, + bool 
_auto, bool scale_fill = false, bool scale_up = true, + int stride = 32) { + float scale = + std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); + if (!scale_up) { + scale = std::min(scale, 1.0f); + } + + int resize_h = int(round(mat->Height() * scale)); + int resize_w = int(round(mat->Width() * scale)); + + int pad_w = size[0] - resize_w; + int pad_h = size[1] - resize_h; + if (_auto) { + pad_h = pad_h % stride; + pad_w = pad_w % stride; + } else if (scale_fill) { + pad_h = 0; + pad_w = 0; + resize_h = size[1]; + resize_w = size[0]; + } + Resize::Run(mat, resize_w, resize_h); + if (pad_h > 0 || pad_w > 0) { + float half_h = pad_h * 1.0 / 2; + int top = int(round(half_h - 0.1)); + int bottom = int(round(half_h + 0.1)); + float half_w = pad_w * 1.0 / 2; + int left = int(round(half_w - 0.1)); + int right = int(round(half_w + 0.1)); + Pad::Run(mat, top, bottom, left, right, color); + } +} + +YOLOv7::YOLOv7(const std::string& model_file, const std::string& params_file, + const RuntimeOption& custom_option, + const Frontend& model_format) { + if (model_format == Frontend::ONNX) { + valid_cpu_backends = {Backend::ORT}; // 指定可用的CPU后端 + valid_gpu_backends = {Backend::ORT, Backend::TRT}; // 指定可用的GPU后端 + } else { + valid_cpu_backends = {Backend::PDINFER, Backend::ORT}; + valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT}; + } + runtime_option = custom_option; + runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool YOLOv7::Initialize() { + // parameters for preprocess + size = {640, 640}; + padding_value = {114.0, 114.0, 114.0}; + is_mini_pad = false; + is_no_pad = false; + is_scale_up = false; + stride = 32; + max_wh = 7680.0; + + if (!InitRuntime()) { + FDERROR << "Failed to initialize fastdeploy backend." 
<< std::endl; + return false; + } + return true; +} + +bool YOLOv7::Preprocess(Mat* mat, FDTensor* output, + std::map>* im_info) { + // process after image load + double ratio = (size[0] * 1.0) / std::max(static_cast(mat->Height()), + static_cast(mat->Width())); + if (ratio != 1.0) { + int interp = cv::INTER_AREA; + if (ratio > 1.0) { + interp = cv::INTER_LINEAR; + } + int resize_h = int(mat->Height() * ratio); + int resize_w = int(mat->Width() * ratio); + Resize::Run(mat, resize_w, resize_h, -1, -1, interp); + } + // yolov7's preprocess steps + // 1. letterbox + // 2. BGR->RGB + // 3. HWC->CHW + LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, + stride); + BGR2RGB::Run(mat); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); + + // Record output shape of preprocessed image + (*im_info)["output_shape"] = {static_cast(mat->Height()), + static_cast(mat->Width())}; + + HWC2CHW::Run(mat); + Cast::Run(mat, "float"); + mat->ShareWithTensor(output); + output->shape.insert(output->shape.begin(), 1); // reshape to n, h, w, c + return true; +} + +bool YOLOv7::Postprocess( + FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold) { + FDASSERT(infer_result.shape[0] == 1, "Only support batch =1 now."); + result->Clear(); + result->Reserve(infer_result.shape[1]); + if (infer_result.dtype != FDDataType::FP32) { + FDERROR << "Only support post process with float32 data." 
<< std::endl; + return false; + } + float* data = static_cast(infer_result.Data()); + for (size_t i = 0; i < infer_result.shape[1]; ++i) { + int s = i * infer_result.shape[2]; + float confidence = data[s + 4]; + float* max_class_score = + std::max_element(data + s + 5, data + s + infer_result.shape[2]); + confidence *= (*max_class_score); + // filter boxes by conf_threshold + if (confidence <= conf_threshold) { + continue; + } + int32_t label_id = std::distance(data + s + 5, max_class_score); + // convert from [x, y, w, h] to [x1, y1, x2, y2] + result->boxes.emplace_back(std::array{ + data[s] - data[s + 2] / 2.0f + label_id * max_wh, + data[s + 1] - data[s + 3] / 2.0f + label_id * max_wh, + data[s + 0] + data[s + 2] / 2.0f + label_id * max_wh, + data[s + 1] + data[s + 3] / 2.0f + label_id * max_wh}); + result->label_ids.push_back(label_id); + result->scores.push_back(confidence); + } + utils::NMS(result, nms_iou_threshold); + + // scale the boxes to the origin image shape + auto iter_out = im_info.find("output_shape"); + auto iter_ipt = im_info.find("input_shape"); + FDASSERT(iter_out != im_info.end() && iter_ipt != im_info.end(), + "Cannot find input_shape or output_shape from im_info."); + float out_h = iter_out->second[0]; + float out_w = iter_out->second[1]; + float ipt_h = iter_ipt->second[0]; + float ipt_w = iter_ipt->second[1]; + float scale = std::min(out_h / ipt_h, out_w / ipt_w); + for (size_t i = 0; i < result->boxes.size(); ++i) { + float pad_h = (out_h - ipt_h * scale) / 2; + float pad_w = (out_w - ipt_w * scale) / 2; + int32_t label_id = (result->label_ids)[i]; + // clip box + result->boxes[i][0] = result->boxes[i][0] - max_wh * label_id; + result->boxes[i][1] = result->boxes[i][1] - max_wh * label_id; + result->boxes[i][2] = result->boxes[i][2] - max_wh * label_id; + result->boxes[i][3] = result->boxes[i][3] - max_wh * label_id; + result->boxes[i][0] = std::max((result->boxes[i][0] - pad_w) / scale, 0.0f); + result->boxes[i][1] = 
std::max((result->boxes[i][1] - pad_h) / scale, 0.0f); + result->boxes[i][2] = std::max((result->boxes[i][2] - pad_w) / scale, 0.0f); + result->boxes[i][3] = std::max((result->boxes[i][3] - pad_h) / scale, 0.0f); + result->boxes[i][0] = std::min(result->boxes[i][0], ipt_w); + result->boxes[i][1] = std::min(result->boxes[i][1], ipt_h); + result->boxes[i][2] = std::min(result->boxes[i][2], ipt_w); + result->boxes[i][3] = std::min(result->boxes[i][3], ipt_h); + } + return true; +} + +bool YOLOv7::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold, + float nms_iou_threshold) { +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_START(0) +#endif + + Mat mat(*im); + std::vector input_tensors(1); + + std::map> im_info; + + // Record the shape of image and the shape of preprocessed image + im_info["input_shape"] = {static_cast(mat.Height()), + static_cast(mat.Width())}; + im_info["output_shape"] = {static_cast(mat.Height()), + static_cast(mat.Width())}; + + if (!Preprocess(&mat, &input_tensors[0], &im_info)) { + FDERROR << "Failed to preprocess input image." << std::endl; + return false; + } + +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(0, "Preprocess") + TIMERECORD_START(1) +#endif + + input_tensors[0].name = InputInfoOfRuntime(0).name; + std::vector output_tensors; + if (!Infer(input_tensors, &output_tensors)) { + FDERROR << "Failed to inference." << std::endl; + return false; + } +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(1, "Inference") + TIMERECORD_START(2) +#endif + + if (!Postprocess(output_tensors[0], result, im_info, conf_threshold, + nms_iou_threshold)) { + FDERROR << "Failed to post process." 
<< std::endl; + return false; + } + +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(2, "Postprocess") +#endif + return true; +} + +} // namespace wongkinyiu +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h new file mode 100644 index 0000000000..b21c04936a --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -0,0 +1,87 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +namespace fastdeploy { +namespace vision { +namespace wongkinyiu { + +class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { + public: + // 当model_format为ONNX时,无需指定params_file + // 当model_format为Paddle时,则需同时指定model_file & params_file + YOLOv7(const std::string& model_file, const std::string& params_file = "", + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX); + + // 定义模型的名称 + virtual std::string ModelName() const { return "WongKinYiu/yolov7"; } + + // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 + virtual bool Initialize(); + + // 输入图像预处理操作 + // Mat为FastDeploy定义的数据结构 + // FDTensor为预处理后的Tensor数据,传给后端进行推理 + // im_info为预处理过程保存的数据,在后处理中需要用到 + virtual bool Preprocess(Mat* mat, FDTensor* outputs, + std::map>* im_info); + + // 后端推理结果后处理,输出给用户 + // infer_result 为后端推理后的输出Tensor + // result 为模型预测的结果 + // im_info 为预处理记录的信息,后处理用于还原box + // conf_threshold 后处理时过滤box的置信度阈值 + // nms_iou_threshold 后处理时NMS设定的iou阈值 + virtual bool Postprocess( + FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); + + // 模型预测接口,即用户调用的接口 + // im 为用户的输入数据,目前对于CV均定义为cv::Mat + // result 为模型预测的输出结构体 + // conf_threshold 为后处理的参数 + // nms_iou_threshold 为后处理的参数 + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.25, + float nms_iou_threshold = 0.5); + + // 以下为模型在预测时的一些参数,基本是前后处理所需 + // 用户在创建模型后,可根据模型的要求,以及自己的需求 + // 对参数进行修改 + // tuple of (width, height) + std::vector size; + // padding value, size should be same with Channels + std::vector padding_value; + // only pad to the minimum rectange which height and width is times of stride + bool is_mini_pad; + // while is_mini_pad = false and is_no_pad = true, will resize the image to + // the set size + bool is_no_pad; + // if is_scale_up is false, the 
input image only can be zoom out, the maximum + // resize scale cannot exceed 1.0 + bool is_scale_up; + // padding stride, for is_mini_pad + int stride; + // for offseting the boxes by classes when using NMS + float max_wh; +}; +} // namespace wongkinyiu +} // namespace vision +} // namespace fastdeploy diff --git a/model_zoo/vision/yolov7/cpp/CMakeLists.txt b/model_zoo/vision/yolov7/cpp/CMakeLists.txt new file mode 100644 index 0000000000..b3b790698c --- /dev/null +++ b/model_zoo/vision/yolov7/cpp/CMakeLists.txt @@ -0,0 +1,18 @@ +PROJECT(yolov7_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.16) + +# 在低版本ABI环境中,通过如下代码进行兼容性编译 +# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) + +# 指定下载解压后的fastdeploy库路径 +set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3/) + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(yolov7_demo ${PROJECT_SOURCE_DIR}/yolov7.cc) +# 添加FastDeploy库依赖 +target_link_libraries(yolov7_demo ${FASTDEPLOY_LIBS}) + diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md new file mode 100644 index 0000000000..dd740ff58a --- /dev/null +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -0,0 +1,30 @@ +# 编译YOLOv5示例 + + +``` +# 下载和解压预测库 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz +tar xvf fastdeploy-linux-x64-0.0.3.tgz + +# 编译示例代码 +mkdir build & cd build +cmake .. 
+make -j + +# 下载模型和图片 +wget https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx +wget https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg + +# 执行 +./yolov5_demo +``` + +执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 +668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 +50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 +23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 +0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 +``` diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc new file mode 100644 index 0000000000..4b89972859 --- /dev/null +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -0,0 +1,40 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + auto model = vis::wongkinyiu::YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx"); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + cv::Mat im = cv::imread("bus.jpg"); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." 
<< std::endl; + return -1; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite("vis_result.jpg", vis_im); + return 0; +} diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py new file mode 100644 index 0000000000..c502c66366 --- /dev/null +++ b/model_zoo/vision/yolov7/yolov7.py @@ -0,0 +1,23 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +model_url = "https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx" +test_jpg_url = "https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg" +fd.download(model_url, ".", show_progress=True) +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.ultralytics.YOLOv5("yolov5s.onnx") + +# 预测图片 +im = cv2.imread("bus.jpg") +result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) +print(model.runtime_option) From 71c00d94e12c6a52afc1342de893bec2f7850ae2 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 07:02:55 +0000 Subject: [PATCH 02/94] pybind for yolov7 --- fastdeploy/vision/__init__.py | 1 + fastdeploy/vision/vision_pybind.cc | 2 ++ fastdeploy/vision/wongkinyiu/__init__.py | 2 +- .../vision/wongkinyiu/wongkinyiu_pybind.cc | 24 +++++++++---------- model_zoo/vision/yolov7/yolov7.py | 8 +++---- 5 files changed, 20 insertions(+), 17 deletions(-) diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 810b23cd3d..1ea30c35ae 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -17,3 +17,4 @@ from . import ppcls from . import ultralytics from . import visualize +from . 
import wongkinyiu diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index f3c3f0052d..5d79ffb2a6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -17,6 +17,7 @@ namespace fastdeploy { void BindPpClsModel(pybind11::module& m); +void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); #ifdef ENABLE_VISION_VISUALIZE void BindVisualize(pybind11::module& m); @@ -40,6 +41,7 @@ void BindVision(pybind11::module& m) { BindPpClsModel(m); BindUltralytics(m); + BindWongkinyiu(m); BindVisualize(m); } } // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index e3ed7730e6..0ce06209fc 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -28,7 +28,7 @@ def __init__(self, # 初始化后的option保存在self._runtime_option super(YOLOv7, self).__init__(runtime_option) - self._model = C.vision.yongkinyiu.YOLOv7( + self._model = C.vision.wongkinyiu.YOLOv7( model_file, params_file, self._runtime_option, model_format) # 通过self.initialized判断整个模型的初始化是否成功 assert self.initialized, "YOLOv7 initialize failed." 
diff --git a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc index 99f0aab628..4a10f47a76 100644 --- a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc +++ b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc @@ -16,26 +16,26 @@ namespace fastdeploy { void BindWongkinyiu(pybind11::module& m) { - auto yongkinyiu_module = - m.def_submodule("WongKinYiu", "https://github.com/WongKinYiu/yolov7"); - pybind11::class_( - yongkinyiu_module, "YOLOv7") + auto wongkinyiu_module = + m.def_submodule("wongkinyiu", "https://github.com/WongKinYiu/yolov7"); + pybind11::class_( + wongkinyiu_module, "YOLOv7") .def(pybind11::init()) .def("predict", - [](vision::yongkinyiu::YOLOv7& self, pybind11::array& data, + [](vision::wongkinyiu::YOLOv7& self, pybind11::array& data, float conf_threshold, float nms_iou_threshold) { auto mat = PyArrayToCvMat(data); vision::DetectionResult res; self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); return res; }) - .def_readwrite("size", &vision::yongkinyiu::YOLOv7::size) + .def_readwrite("size", &vision::wongkinyiu::YOLOv7::size) .def_readwrite("padding_value", - &vision::yongkinyiu::YOLOv7::padding_value) - .def_readwrite("is_mini_pad", &vision::yongkinyiu::YOLOv7::is_mini_pad) - .def_readwrite("is_no_pad", &vision::yongkinyiu::YOLOv7::is_no_pad) - .def_readwrite("is_scale_up", &vision::yongkinyiu::YOLOv7::is_scale_up) - .def_readwrite("stride", &vision::yongkinyiu::YOLOv7::stride) - .def_readwrite("max_wh", &vision::yongkinyiu::YOLOv7::max_wh); + &vision::wongkinyiu::YOLOv7::padding_value) + .def_readwrite("is_mini_pad", &vision::wongkinyiu::YOLOv7::is_mini_pad) + .def_readwrite("is_no_pad", &vision::wongkinyiu::YOLOv7::is_no_pad) + .def_readwrite("is_scale_up", &vision::wongkinyiu::YOLOv7::is_scale_up) + .def_readwrite("stride", &vision::wongkinyiu::YOLOv7::stride) + .def_readwrite("max_wh", &vision::wongkinyiu::YOLOv7::max_wh); } } // namespace fastdeploy diff --git 
a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py index c502c66366..81c529b15b 100644 --- a/model_zoo/vision/yolov7/yolov7.py +++ b/model_zoo/vision/yolov7/yolov7.py @@ -2,16 +2,16 @@ import cv2 # 下载模型和测试图片 -model_url = "https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx" -test_jpg_url = "https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg" +model_url = "TODO " +test_jpg_url = "https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg" fd.download(model_url, ".", show_progress=True) fd.download(test_jpg_url, ".", show_progress=True) # 加载模型 -model = fd.vision.ultralytics.YOLOv5("yolov5s.onnx") +model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx") # 预测图片 -im = cv2.imread("bus.jpg") +im = cv2.imread("horses.jpg") result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5) # 可视化结果 From 21ab2f939c8e1469f320826808c5d430234e25fd Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 07:14:03 +0000 Subject: [PATCH 03/94] CPP README.md --- model_zoo/vision/yolov7/cpp/README.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index dd740ff58a..f19c0625d1 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -12,11 +12,11 @@ cmake .. 
make -j # 下载模型和图片 -wget https://github.com/ultralytics/yolov5/releases/download/v6.0/yolov5s.onnx -wget https://raw.githubusercontent.com/ultralytics/yolov5/master/data/images/bus.jpg +wget "TODO" +wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg # 执行 -./yolov5_demo +./yolov7_demo ``` 执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 From d63e862f919d0ce9025f78271a03e9a122d2ccdd Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 07:14:30 +0000 Subject: [PATCH 04/94] CPP README.md --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index f19c0625d1..b43d4381e5 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,4 +1,4 @@ -# 编译YOLOv5示例 +# 编译YOLOv7示例 ``` From 7b3b0e271072987f11fb8ffabdc8d276cf878fa0 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 14 Jul 2022 09:54:30 +0000 Subject: [PATCH 05/94] modified yolov7.cc --- fastdeploy/vision/wongkinyiu/yolov7.cc | 2 +- model_zoo/vision/yolov7/cpp/CMakeLists.txt | 5 ++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index 09004b5c3c..6baf4c336b 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -1,4 +1,4 @@ -#include "fastdeploy/vision/WongKinYiu/yolov7.h" +#include "fastdeploy/vision/wongkinyiu/yolov7.h" #include "fastdeploy/utils/perf.h" #include "fastdeploy/vision/utils/utils.h" diff --git a/model_zoo/vision/yolov7/cpp/CMakeLists.txt b/model_zoo/vision/yolov7/cpp/CMakeLists.txt index b3b790698c..09f07b1748 100644 --- a/model_zoo/vision/yolov7/cpp/CMakeLists.txt +++ b/model_zoo/vision/yolov7/cpp/CMakeLists.txt @@ -5,7 +5,7 @@ CMAKE_MINIMUM_REQUIRED (VERSION 3.16) # add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) # 指定下载解压后的fastdeploy库路径 
-set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3/) +set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3) include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) @@ -14,5 +14,4 @@ include_directories(${FASTDEPLOY_INCS}) add_executable(yolov7_demo ${PROJECT_SOURCE_DIR}/yolov7.cc) # 添加FastDeploy库依赖 -target_link_libraries(yolov7_demo ${FASTDEPLOY_LIBS}) - +target_link_libraries(yolov7_demo ${FASTDEPLOY_LIBS}) \ No newline at end of file From d039e800190e484c583509c3b0e97eb2222f32e9 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 15 Jul 2022 05:11:01 +0000 Subject: [PATCH 06/94] README.md --- model_zoo/vision/yolov7/README.md | 80 +++++++++++++++++++++++++++++++ model_zoo/vision/yolov7/api.md | 71 +++++++++++++++++++++++++++ 2 files changed, 151 insertions(+) create mode 100644 model_zoo/vision/yolov7/README.md create mode 100644 model_zoo/vision/yolov7/api.md diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md new file mode 100644 index 0000000000..80f9aa0fac --- /dev/null +++ b/model_zoo/vision/yolov7/README.md @@ -0,0 +1,80 @@ +# 编译YOLOv7示例 + +本文档说明如何进行[YOLOv7](https://github.com/WongKinYiu/yolov7)的快速部署推理。本目录结构如下 + +``` +. 
+├── cpp +│   ├── CMakeLists.txt +│   ├── README.md +│   └── yolov7.cc +├── README.md +└── yolov7.py +``` + +## 生成ONNX文件 + +- 手动获取 + + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + + + + ``` + #下载yolov7模型文件 + wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt + + # 导出onnx格式文件 + python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + ``` + + + +- 从PaddlePaddle获取 + +## Python部署 + +### 安装FastDeploy + +使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` + +``` +# 安装fastdeploy-python工具 +pip install fastdeploy-python + +# 安装vision-cpu模块 +fastdeploy install vision-cpu +``` + +### 运行demo + +``` +python yolov7.py +``` + + + +## C++部署 + +### 编译demo文件 + +``` +# 切换到./cpp/ 目录下 +cd cpp/ + +# 下载和解压预测库 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz +tar xvf fastdeploy-linux-x64-0.0.3.tgz + +# 编译示例代码 +mkdir build & cd build +cmake .. +make -j +``` + + + + + + + diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md new file mode 100644 index 0000000000..898a3f585f --- /dev/null +++ b/model_zoo/vision/yolov7/api.md @@ -0,0 +1,71 @@ +# YOLOv7 API说明 + +## Python API + +### YOLOv7类 +``` +fastdeploy.vision.ultralytics.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX) +``` +YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolov7s.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### predict函数 +> ``` +> YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,RGB格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): 
NMS处理过程中iou阈值 + +示例代码参考[yolov7.py](./yolov7.py) + + +## C++ API + +### YOLOv7类 +``` +fastdeploy::vision::ultralytics::YOLOv7( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` +YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolov7s.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### predict函数 +> ``` +> YOLOv7::predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,RGB格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度 +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +示例代码参考[cpp/yolov7.cc](cpp/yolov7.cc) + +## 其它API使用 + +- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md) From a34a815de844834bfcacc8154ab206587b9a7b0b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 03:14:38 +0000 Subject: [PATCH 07/94] python file modify --- fastdeploy/LICENSE | 201 +++++++ fastdeploy/ThirdPartyNotices.txt | 734 +++++++++++++++++++++++ fastdeploy/vision/wongkinyiu/__init__.py | 28 +- model_zoo/vision/yolov7/yolov7.py | 8 +- 4 files changed, 953 insertions(+), 18 deletions(-) create mode 100644 fastdeploy/LICENSE create mode 100644 fastdeploy/ThirdPartyNotices.txt diff --git a/fastdeploy/LICENSE b/fastdeploy/LICENSE new file mode 100644 index 0000000000..261eeb9e9f --- /dev/null +++ b/fastdeploy/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. 
+ + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/fastdeploy/ThirdPartyNotices.txt b/fastdeploy/ThirdPartyNotices.txt new file mode 100644 index 0000000000..5842b9a717 --- /dev/null +++ b/fastdeploy/ThirdPartyNotices.txt @@ -0,0 +1,734 @@ +This project depends on some open source projects, list as below + +-------- +1. https://github.com/protocolbuffers/protobuf + +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. 
+ * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. + +-------- +2. https://github.com/onnx/onnx + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------- +3. https://github.com/microsoft/onnxruntime + +MIT License + +Copyright (c) Microsoft Corporation + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +-------- +4. https://github.com/pybind/pybind11 + +Copyright (c) 2016 Wenzel Jakob , All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors + may be used to endorse or promote products derived from this software + without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Please also refer to the file .github/CONTRIBUTING.md, which clarifies licensing of +external contributions to this project including patches, pull requests, etc. + +-------- +4. https://github.com/onnx/onnx-tensorrt + + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright 2021 NVIDIA Corporation + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------- +5. https://github.com/opencv/opencv + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. + +-------- +6. https://github.com/jbeder/yaml-cpp + +Copyright (c) 2008-2015 Jesse Beder. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index 0ce06209fc..542389e208 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -41,31 +41,31 @@ def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5): # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持) @property def size(self): - return self.model.size + return self._model.size @property def padding_value(self): - return self.model.padding_value + return self._model.padding_value @property def is_no_pad(self): - return self.model.is_no_pad + return self._model.is_no_pad @property def is_mini_pad(self): - return self.model.is_mini_pad + return self._model.is_mini_pad @property def is_scale_up(self): - return self.model.is_scale_up + return self._model.is_scale_up @property def stride(self): - return self.model.stride + return self._model.stride @property def max_wh(self): - return self.model.max_wh + return self._model.max_wh @size.setter def size(self, wh): @@ -74,43 +74,43 @@ def size(self, wh): assert len(wh) == 2,\ "The value to set `size` must contatins 2 elements means [width, height], but now it contains {} elements.".format( len(wh)) - self.model.size = wh + self._model.size = wh @padding_value.setter def padding_value(self, value): assert isinstance( value, list), "The value to set `padding_value` must be type of list." - self.model.padding_value = value + self._model.padding_value = value @is_no_pad.setter def is_no_pad(self, value): assert isinstance( value, bool), "The value to set `is_no_pad` must be type of bool." - self.model.is_no_pad = value + self._model.is_no_pad = value @is_mini_pad.setter def is_mini_pad(self, value): assert isinstance( value, bool), "The value to set `is_mini_pad` must be type of bool." 
- self.model.is_mini_pad = value + self._model.is_mini_pad = value @is_scale_up.setter def is_scale_up(self, value): assert isinstance( value, bool), "The value to set `is_scale_up` must be type of bool." - self.model.is_scale_up = value + self._model.is_scale_up = value @stride.setter def stride(self, value): assert isinstance( value, int), "The value to set `stride` must be type of int." - self.model.stride = value + self._model.stride = value @max_wh.setter def max_wh(self, value): assert isinstance( value, float), "The value to set `max_wh` must be type of float." - self.model.max_wh = value + self._model.max_wh = value diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py index 81c529b15b..ca8aeeaf88 100644 --- a/model_zoo/vision/yolov7/yolov7.py +++ b/model_zoo/vision/yolov7/yolov7.py @@ -2,13 +2,13 @@ import cv2 # 下载模型和测试图片 -model_url = "TODO " -test_jpg_url = "https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg" -fd.download(model_url, ".", show_progress=True) +# model_url = "TODO " +test_jpg_url = "https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg" +# fd.download(model_url, ".", show_progress=True) fd.download(test_jpg_url, ".", show_progress=True) # 加载模型 -model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx") +model = fd.vision.wongkinyiu.YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx") # 预测图片 im = cv2.imread("horses.jpg") From 39f64f2f5c0c0c479fa7219b1b436f61d625a61f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:03:08 +0000 Subject: [PATCH 08/94] delete license in fastdeploy/ --- fastdeploy/LICENSE | 201 --------- fastdeploy/ThirdPartyNotices.txt | 734 ------------------------------- 2 files changed, 935 deletions(-) delete mode 100644 fastdeploy/LICENSE delete mode 100644 fastdeploy/ThirdPartyNotices.txt diff --git a/fastdeploy/LICENSE b/fastdeploy/LICENSE deleted file mode 100644 index 261eeb9e9f..0000000000 --- a/fastdeploy/LICENSE +++ 
/dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. 
For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/fastdeploy/ThirdPartyNotices.txt b/fastdeploy/ThirdPartyNotices.txt deleted file mode 100644 index 5842b9a717..0000000000 --- a/fastdeploy/ThirdPartyNotices.txt +++ /dev/null @@ -1,734 +0,0 @@ -This project depends on some open source projects, list as below - --------- -1. https://github.com/protocolbuffers/protobuf - -Copyright 2008 Google Inc. All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are -met: - - * Redistributions of source code must retain the above copyright -notice, this list of conditions and the following disclaimer. 
- * Redistributions in binary form must reproduce the above -copyright notice, this list of conditions and the following disclaimer -in the documentation and/or other materials provided with the -distribution. - * Neither the name of Google Inc. nor the names of its -contributors may be used to endorse or promote products derived from -this software without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Code generated by the Protocol Buffer compiler is owned by the owner -of the input file used when generating it. This code is not -standalone and requires a support library to be linked with it. This -support library is itself covered by the above license. - --------- -2. https://github.com/onnx/onnx - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. 
- - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. 
- - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of 
the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --------- -3. https://github.com/microsoft/onnxruntime - -MIT License - -Copyright (c) Microsoft Corporation - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. - --------- -4. https://github.com/pybind/pybind11 - -Copyright (c) 2016 Wenzel Jakob , All rights reserved. - -Redistribution and use in source and binary forms, with or without -modification, are permitted provided that the following conditions are met: - -1. Redistributions of source code must retain the above copyright notice, this - list of conditions and the following disclaimer. - -2. Redistributions in binary form must reproduce the above copyright notice, - this list of conditions and the following disclaimer in the documentation - and/or other materials provided with the distribution. - -3. Neither the name of the copyright holder nor the names of its contributors - may be used to endorse or promote products derived from this software - without specific prior written permission. - -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND -ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED -WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE -DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -Please also refer to the file .github/CONTRIBUTING.md, which clarifies licensing of -external contributions to this project including patches, pull requests, etc. - --------- -4. https://github.com/onnx/onnx-tensorrt - - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright 2021 NVIDIA Corporation - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --------- -5. https://github.com/opencv/opencv - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. 
- - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." 
- - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. - --------- -6. https://github.com/jbeder/yaml-cpp - -Copyright (c) 2008-2015 Jesse Beder. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. 
From d071b3702c39386dc3cc9a19af7e0ee56b36cdca Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:15:17 +0000 Subject: [PATCH 09/94] repush the conflict part --- fastdeploy/vision.h | 3 --- fastdeploy/vision/vision_pybind.cc | 3 --- 2 files changed, 6 deletions(-) diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index 4e83d2681c..5f948092d7 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -17,11 +17,8 @@ #ifdef ENABLE_VISION #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/ultralytics/yolov5.h" -<<<<<<< HEAD #include "fastdeploy/vision/wongkinyiu/yolov7.h" -======= #include "fastdeploy/vision/meituan/yolov6.h" ->>>>>>> PaddlePaddle-develop #endif #include "fastdeploy/vision/visualize/visualize.h" diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 256fb1e114..bc54e0d674 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -42,12 +42,9 @@ void BindVision(pybind11::module& m) { BindPpClsModel(m); BindUltralytics(m); -<<<<<<< HEAD BindWongkinyiu(m); -======= BindMeituan(m); #ifdef ENABLE_VISION_VISUALIZE ->>>>>>> PaddlePaddle-develop BindVisualize(m); #endif } From d5026ca1e47612b7ab85fb27a2730ea350dfc211 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:44:54 +0000 Subject: [PATCH 10/94] README.md modified --- model_zoo/vision/yolov7/README.md | 36 +++++++++------------- model_zoo/vision/yolov7/api.md | 8 ++--- model_zoo/vision/yolov7/cpp/CMakeLists.txt | 2 +- model_zoo/vision/yolov7/cpp/README.md | 3 +- model_zoo/vision/yolov7/cpp/yolov7.cc | 2 +- model_zoo/vision/yolov7/yolov7.py | 4 +-- 6 files changed, 24 insertions(+), 31 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 80f9aa0fac..93a6f81188 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -32,9 +32,9 @@ - 从PaddlePaddle获取 -## Python部署 -### 安装FastDeploy + +## 安装FastDeploy 
使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` @@ -45,33 +45,27 @@ pip install fastdeploy-python # 安装vision-cpu模块 fastdeploy install vision-cpu ``` +## Python部署 -### 运行demo - +执行如下代码即会自动下载测试图片 ``` python yolov7.py ``` - - -## C++部署 - -### 编译demo文件 - +执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 ``` -# 切换到./cpp/ 目录下 -cd cpp/ - -# 下载和解压预测库 -wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz -tar xvf fastdeploy-linux-x64-0.0.3.tgz - -# 编译示例代码 -mkdir build & cd build -cmake .. -make -j +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 +668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 +50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 +23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 +0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 ``` +## 其它文档 + +- [C++部署](./cpp/README.md) +- [YOLOv7 API文档](./api.md) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 898a3f585f..7c5fc30163 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -4,9 +4,9 @@ ### YOLOv7类 ``` -fastdeploy.vision.ultralytics.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX) +fastdeploy.vision.wongkinyiu.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX) ``` -YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolov7s.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。 +YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolov7.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。 **参数** @@ -34,13 +34,13 @@ YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只 ### YOLOv7类 ``` -fastdeploy::vision::ultralytics::YOLOv7( +fastdeploy::vision::wongkinyiu::YOLOv7( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = 
RuntimeOption(), const Frontend& model_format = Frontend::ONNX) ``` -YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolov7s.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。 +YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolov7.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。 **参数** diff --git a/model_zoo/vision/yolov7/cpp/CMakeLists.txt b/model_zoo/vision/yolov7/cpp/CMakeLists.txt index 09f07b1748..ec7c86d026 100644 --- a/model_zoo/vision/yolov7/cpp/CMakeLists.txt +++ b/model_zoo/vision/yolov7/cpp/CMakeLists.txt @@ -5,7 +5,7 @@ CMAKE_MINIMUM_REQUIRED (VERSION 3.16) # add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) # 指定下载解压后的fastdeploy库路径 -set(FASTDEPLOY_INSTALL_DIR /home/fastdeploy/FastDeploy/build/fastdeploy-0.0.3) +set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.3.0/) include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index b43d4381e5..fd46e210f8 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -26,5 +26,6 @@ DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] 668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 -0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 +0.737200,552.281006, 78.617218, 890.945007, 0.36341 ``` + diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 4b89972859..1607b2be09 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -16,7 +16,7 @@ int main() { namespace vis = fastdeploy::vision; - auto model = vis::wongkinyiu::YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx"); + auto model = vis::wongkinyiu::YOLOv7("yolov7.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." 
<< std::endl; return -1; diff --git a/model_zoo/vision/yolov7/yolov7.py b/model_zoo/vision/yolov7/yolov7.py index ca8aeeaf88..cef467622d 100644 --- a/model_zoo/vision/yolov7/yolov7.py +++ b/model_zoo/vision/yolov7/yolov7.py @@ -2,13 +2,11 @@ import cv2 # 下载模型和测试图片 -# model_url = "TODO " test_jpg_url = "https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg" -# fd.download(model_url, ".", show_progress=True) fd.download(test_jpg_url, ".", show_progress=True) # 加载模型 -model = fd.vision.wongkinyiu.YOLOv7("/home/fastdeploy/yolov7/onnxfiles/yolov7.onnx") +model = fd.vision.wongkinyiu.YOLOv7("yolov7.onnx") # 预测图片 im = cv2.imread("horses.jpg") From fb376adf9616b9b3aa3d515c739655567161722b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:46:19 +0000 Subject: [PATCH 11/94] README.md modified --- model_zoo/vision/yolov7/README.md | 10 +++++----- model_zoo/vision/yolov7/cpp/README.md | 10 +++++----- model_zoo/vision/yolov7/cpp/yolov7.cc | 2 +- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 93a6f81188..77e7a654d1 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -55,11 +55,11 @@ python yolov7.py 执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 ``` DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 -668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 -50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 -23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 -0.737200,552.281006, 78.617218, 890.945007, 0.363471, 0 +0.056616,191.221619, 314.871063, 409.948914, 0.955449, 17 +432.547852,211.914841, 594.904297, 346.708618, 0.942706, 17 +0.000000,185.456207, 153.967789, 286.157562, 0.860487, 17 +224.049210,195.147003, 419.658234, 364.004852, 0.798262, 17 +369.316986,209.055725, 456.373840, 321.627625, 0.687066, 17 ``` ## 其它文档 
diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index fd46e210f8..012d4c765b 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -22,10 +22,10 @@ wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg 执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 ``` DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -223.395142,403.948669, 345.337189, 867.339050, 0.856906, 0 -668.301758,400.781342, 808.441772, 882.534973, 0.829716, 0 -50.210720,398.571411, 243.123367, 905.016602, 0.805375, 0 -23.768242,214.979370, 802.627686, 778.840881, 0.756311, 5 -0.737200,552.281006, 78.617218, 890.945007, 0.36341 +0.056616,191.221619, 314.871063, 409.948914, 0.955449, 17 +432.547852,211.914841, 594.904297, 346.708618, 0.942706, 17 +0.000000,185.456207, 153.967789, 286.157562, 0.860487, 17 +224.049210,195.147003, 419.658234, 364.004852, 0.798262, 17 +369.316986,209.055725, 456.373840, 321.627625, 0.687066, 17 ``` diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 1607b2be09..8b41c0288b 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -21,7 +21,7 @@ int main() { std::cerr << "Init Failed." 
<< std::endl; return -1; } - cv::Mat im = cv::imread("bus.jpg"); + cv::Mat im = cv::imread("horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; From 4b8737c9c0577c1a6ba0132ad76b6e72aa9e8e20 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:54:11 +0000 Subject: [PATCH 12/94] file path modified --- model_zoo/vision/yolov7/README.md | 3 +++ model_zoo/vision/yolov7/cpp/yolov7.cc | 4 ++-- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 77e7a654d1..70841fa61e 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -26,6 +26,9 @@ # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + + # 移动onnx文件到demo目录 + cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 8b41c0288b..6d2a80a85c 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -16,12 +16,12 @@ int main() { namespace vis = fastdeploy::vision; - auto model = vis::wongkinyiu::YOLOv7("yolov7.onnx"); + auto model = vis::wongkinyiu::YOLOv7("../yolov7.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." 
<< std::endl; return -1; } - cv::Mat im = cv::imread("horses.jpg"); + cv::Mat im = cv::imread("../horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; From ce922a0326c8dc14964476be7501a896d9e39302 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:57:49 +0000 Subject: [PATCH 13/94] file path modified --- model_zoo/vision/yolov7/cpp/README.md | 27 +++++++++++++++++++++++++-- model_zoo/vision/yolov7/cpp/yolov7.cc | 4 ++-- 2 files changed, 27 insertions(+), 4 deletions(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 012d4c765b..bef869f881 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,5 +1,29 @@ # 编译YOLOv7示例 +## 生成ONNX文件 + +- 手动获取 + + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + + + + ``` + #下载yolov7模型文件 + wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt + + # 导出onnx格式文件 + python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + + # 移动onnx文件到demo目录 + cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ + ``` + + + +- 从PaddlePaddle获取 + + ``` # 下载和解压预测库 @@ -11,8 +35,7 @@ mkdir build & cd build cmake .. make -j -# 下载模型和图片 -wget "TODO" +# 下载图片 wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg # 执行 diff --git a/model_zoo/vision/yolov7/cpp/yolov7.cc b/model_zoo/vision/yolov7/cpp/yolov7.cc index 6d2a80a85c..8b41c0288b 100644 --- a/model_zoo/vision/yolov7/cpp/yolov7.cc +++ b/model_zoo/vision/yolov7/cpp/yolov7.cc @@ -16,12 +16,12 @@ int main() { namespace vis = fastdeploy::vision; - auto model = vis::wongkinyiu::YOLOv7("../yolov7.onnx"); + auto model = vis::wongkinyiu::YOLOv7("yolov7.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." 
<< std::endl; return -1; } - cv::Mat im = cv::imread("../horses.jpg"); + cv::Mat im = cv::imread("horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; From 6e00b82b40e3e8d19944408379ed11fb77a90073 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 06:59:58 +0000 Subject: [PATCH 14/94] file path modified --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index bef869f881..1b577a7a3a 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -23,7 +23,7 @@ - 从PaddlePaddle获取 - +## 运行demo ``` # 下载和解压预测库 From 8c359fb9defa42ccd404890d26bc55b8f063c176 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:02:31 +0000 Subject: [PATCH 15/94] file path modified --- model_zoo/vision/yolov7/cpp/README.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 1b577a7a3a..918625eea7 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -15,8 +15,6 @@ # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt - # 移动onnx文件到demo目录 - cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` @@ -35,6 +33,9 @@ mkdir build & cd build cmake .. 
make -j +# 移动onnx文件到demo目录 +cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ + # 下载图片 wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg From 906c730255d7e4e1198784f45918984dcfe9820f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:03:37 +0000 Subject: [PATCH 16/94] file path modified --- model_zoo/vision/yolov7/README.md | 2 +- model_zoo/vision/yolov7/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 70841fa61e..7246a4a7b7 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -12,7 +12,7 @@ └── yolov7.py ``` -## 生成ONNX文件 +## 获取ONNX文件 - 手动获取 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 918625eea7..ce6337962d 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,6 +1,6 @@ # 编译YOLOv7示例 -## 生成ONNX文件 +## 获取ONNX文件 - 手动获取 From 80c12230f5447966d363f34f57a15abeda1951ae Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:36:14 +0000 Subject: [PATCH 17/94] README modified --- model_zoo/vision/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index ce6337962d..0fcaf8ae11 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -37,7 +37,7 @@ make -j cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ # 下载图片 -wget https://github.com/WongKinYiu/yolov7/blob/main/inference/images/horses.jpg +wget hhttps://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg # 执行 ./yolov7_demo From 6072757fe8af3a7f2a666b638a379865d26e9e59 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 07:36:46 +0000 Subject: [PATCH 18/94] README modified --- model_zoo/vision/yolov7/cpp/README.md 
| 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 0fcaf8ae11..a1d146053a 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -37,7 +37,7 @@ make -j cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ # 下载图片 -wget hhttps://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg +wget https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg # 执行 ./yolov7_demo From 2c6e6a4836b6c20c4a3ebc562d9cf3722c414423 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 08:25:58 +0000 Subject: [PATCH 19/94] move some helpers to private --- fastdeploy/vision/wongkinyiu/yolov7.h | 43 ++++++++++++++------------- model_zoo/vision/yolov7/api.md | 2 +- 2 files changed, 23 insertions(+), 22 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index b21c04936a..29dffaf2f4 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -32,27 +32,6 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // 定义模型的名称 virtual std::string ModelName() const { return "WongKinYiu/yolov7"; } - // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 - virtual bool Initialize(); - - // 输入图像预处理操作 - // Mat为FastDeploy定义的数据结构 - // FDTensor为预处理后的Tensor数据,传给后端进行推理 - // im_info为预处理过程保存的数据,在后处理中需要用到 - virtual bool Preprocess(Mat* mat, FDTensor* outputs, - std::map>* im_info); - - // 后端推理结果后处理,输出给用户 - // infer_result 为后端推理后的输出Tensor - // result 为模型预测的结果 - // im_info 为预处理记录的信息,后处理用于还原box - // conf_threshold 后处理时过滤box的置信度阈值 - // nms_iou_threshold 后处理时NMS设定的iou阈值 - virtual bool Postprocess( - FDTensor& infer_result, DetectionResult* result, - const std::map>& im_info, - float conf_threshold, float nms_iou_threshold); - // 模型预测接口,即用户调用的接口 // im 为用户的输入数据,目前对于CV均定义为cv::Mat // result 为模型预测的输出结构体 @@ -81,6 +60,28 @@ class FASTDEPLOY_DECL 
YOLOv7 : public FastDeployModel { int stride; // for offseting the boxes by classes when using NMS float max_wh; + + private: + // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 + virtual bool Initialize(); + + // 输入图像预处理操作 + // Mat为FastDeploy定义的数据结构 + // FDTensor为预处理后的Tensor数据,传给后端进行推理 + // im_info为预处理过程保存的数据,在后处理中需要用到 + virtual bool Preprocess(Mat* mat, FDTensor* outputs, + std::map>* im_info); + + // 后端推理结果后处理,输出给用户 + // infer_result 为后端推理后的输出Tensor + // result 为模型预测的结果 + // im_info 为预处理记录的信息,后处理用于还原box + // conf_threshold 后处理时过滤box的置信度阈值 + // nms_iou_threshold 后处理时NMS设定的iou阈值 + virtual bool Postprocess( + FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); }; } // namespace wongkinyiu } // namespace vision diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 7c5fc30163..1f40ba645a 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -23,7 +23,7 @@ YOLOv7模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只 > > **参数** > -> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,RGB格式 +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 > > * **conf_threshold**(float): 检测框置信度过滤阈值 > > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 From 48136f0d152af4a1a658af71ddaacfe4498b9f2e Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 08:46:49 +0000 Subject: [PATCH 20/94] add examples for yolov7 --- examples/CMakeLists.txt | 1 + examples/vision/wongkinyiu_yolov7.cc | 52 ++++++++++++++++++++++++++++ 2 files changed, 53 insertions(+) create mode 100644 examples/vision/wongkinyiu_yolov7.cc diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 4228a3e01f..31cd1723b1 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -17,6 +17,7 @@ endfunction() if (WTIH_VISION_EXAMPLES) add_fastdeploy_executable(vision ultralytics yolov5) add_fastdeploy_executable(vision meituan yolov6) + add_fastdeploy_executable(vision wongkinyiu yolov7) endif() # 
other examples ... \ No newline at end of file diff --git a/examples/vision/wongkinyiu_yolov7.cc b/examples/vision/wongkinyiu_yolov7.cc new file mode 100644 index 0000000000..7de033cae8 --- /dev/null +++ b/examples/vision/wongkinyiu_yolov7.cc @@ -0,0 +1,52 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "../resources/models/yolov7.onnx"; + std::string img_path = "../resources/images/horses.jpg"; + std::string vis_path = "../resources/outputs/wongkinyiu_yolov7_vis_result.jpg"; + + auto model = vis::wongkinyiu::YOLOv7(model_file); + if (!model.Initialized()) { + std::cerr << "Init Failed! Model: " << model_file << std::endl; + return -1; + } else { + std::cout << "Init Done! Model:" << model_file << std::endl; + } + model.EnableDebug(); + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! 
Saved: " << vis_path << std::endl; + return 0; +} From 6feca9233a0503c3e2644b9fa2d1dd76ce5bdbb5 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:07:47 +0000 Subject: [PATCH 21/94] api.md modified --- model_zoo/vision/yolov7/api.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 1f40ba645a..92b16c4755 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -51,7 +51,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 #### predict函数 > ``` -> YOLOv7::predict(cv::Mat* im, DetectionResult* result, +> YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -59,7 +59,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > > **参数** > -> > * **im**: 输入图像,注意需为HWC,RGB格式 +> > * **im**: 输入图像,注意需为HWC,BGR格式 > > * **result**: 检测结果,包括检测框,各个框的置信度 > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 From ae70d4f50ec9981e97dd7b79f3e3265c2105ed0c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:11:01 +0000 Subject: [PATCH 22/94] api.md modified --- model_zoo/vision/yolov7/api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 92b16c4755..abd2abdcec 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -49,7 +49,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 -#### predict函数 +#### Predict函数 > ``` > YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, From f591b8567b08afbd1e3894100becaa2ce511424b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:31:25 +0000 Subject: [PATCH 23/94] api.md modified --- model_zoo/vision/yolov7/api.md | 2 +- 1 file changed, 1 insertion(+), 1 
deletion(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index abd2abdcec..02cf78121c 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -49,7 +49,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 -#### Predict函数 +#### redict函数 > ``` > YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, From f0def41c8b5e5e2b1d627ada84b2c4b17c84aeac Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 09:41:47 +0000 Subject: [PATCH 24/94] YOLOv7 --- model_zoo/vision/yolov7/api.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/model_zoo/vision/yolov7/api.md b/model_zoo/vision/yolov7/api.md index 02cf78121c..abd2abdcec 100644 --- a/model_zoo/vision/yolov7/api.md +++ b/model_zoo/vision/yolov7/api.md @@ -49,7 +49,7 @@ YOLOv7模型加载和初始化,当model_format为`Frontend::ONNX`时,只需 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 -#### redict函数 +#### Predict函数 > ``` > YOLOv7::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, From 15b91609aae1f81e3d5789d40c18f0aa16e37e86 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 10:50:08 +0000 Subject: [PATCH 25/94] yolov7 release link --- model_zoo/vision/yolov7/README.md | 2 +- model_zoo/vision/yolov7/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 7246a4a7b7..c81c75d8d2 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -16,7 +16,7 @@ - 手动获取 - 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 
得到`onnx`格式文件。 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index a1d146053a..c3d4e8bcb2 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -4,7 +4,7 @@ - 手动获取 - 访问[YOLOv7](https://github.com/WongKinYiu/yolov7)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 From 4706e8ca754735d318650c3f7a90b3e00f6ef16a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 11:01:53 +0000 Subject: [PATCH 26/94] yolov7 release link --- model_zoo/vision/yolov7/README.md | 2 ++ model_zoo/vision/yolov7/cpp/README.md | 2 ++ 2 files changed, 4 insertions(+) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index c81c75d8d2..e330a3055b 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -1,5 +1,7 @@ # 编译YOLOv7示例 +当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) + 本文档说明如何进行[YOLOv7](https://github.com/WongKinYiu/yolov7)的快速部署推理。本目录结构如下 ``` diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index c3d4e8bcb2..2e9570f224 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,5 +1,7 @@ # 编译YOLOv7示例 +当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) + ## 获取ONNX文件 - 手动获取 From dc8358461f384cc7ee0fcc592a68e5a917925bf6 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 11:05:01 +0000 Subject: [PATCH 27/94] yolov7 release link --- model_zoo/vision/yolov7/README.md | 2 +- model_zoo/vision/yolov7/cpp/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index e330a3055b..7eed2c0c43 100644 --- a/model_zoo/vision/yolov7/README.md +++ 
b/model_zoo/vision/yolov7/README.md @@ -1,6 +1,6 @@ # 编译YOLOv7示例 -当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) +当前支持模型版本为:[YOLOv7 v0.1](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) 本文档说明如何进行[YOLOv7](https://github.com/WongKinYiu/yolov7)的快速部署推理。本目录结构如下 diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 2e9570f224..13a5e8343e 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -1,6 +1,6 @@ # 编译YOLOv7示例 -当前支持模型版本为:[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) +当前支持模型版本为:[YOLOv7 v0.1](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) ## 获取ONNX文件 From 086debd8d3e040d37b0b8cbc006277d91e246baa Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 11:10:43 +0000 Subject: [PATCH 28/94] copyright --- fastdeploy/vision/wongkinyiu/yolov7.cc | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index 6baf4c336b..db470d327e 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -1,3 +1,17 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ #include "fastdeploy/vision/wongkinyiu/yolov7.h" #include "fastdeploy/utils/perf.h" #include "fastdeploy/vision/utils/utils.h" From 4f980b9ce8e2573d76385ca4f0b98febf66f57a4 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 18 Jul 2022 12:09:04 +0000 Subject: [PATCH 29/94] change some helpers to private --- fastdeploy/vision/wongkinyiu/yolov7.h | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index 29dffaf2f4..75cab34ded 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -63,13 +63,13 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { private: // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 - virtual bool Initialize(); + bool Initialize(); // 输入图像预处理操作 // Mat为FastDeploy定义的数据结构 // FDTensor为预处理后的Tensor数据,传给后端进行推理 // im_info为预处理过程保存的数据,在后处理中需要用到 - virtual bool Preprocess(Mat* mat, FDTensor* outputs, + bool Preprocess(Mat* mat, FDTensor* outputs, std::map>* im_info); // 后端推理结果后处理,输出给用户 @@ -78,7 +78,7 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // im_info 为预处理记录的信息,后处理用于还原box // conf_threshold 后处理时过滤box的置信度阈值 // nms_iou_threshold 后处理时NMS设定的iou阈值 - virtual bool Postprocess( + bool Postprocess( FDTensor& infer_result, DetectionResult* result, const std::map>& im_info, float conf_threshold, float nms_iou_threshold); From 80beadfa3ce7ebb7cc2d345d4154cd42f6dec785 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 02:57:08 +0000 Subject: [PATCH 30/94] change variables to const and fix documents. 
--- fastdeploy/vision/wongkinyiu/yolov7.cc | 6 +++--- model_zoo/vision/yolov7/README.md | 16 ++-------------- model_zoo/vision/yolov7/cpp/README.md | 8 +------- 3 files changed, 6 insertions(+), 24 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index db470d327e..248718a69a 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -20,9 +20,9 @@ namespace fastdeploy { namespace vision { namespace wongkinyiu { -void LetterBox(Mat* mat, std::vector size, std::vector color, - bool _auto, bool scale_fill = false, bool scale_up = true, - int stride = 32) { +void LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill = false, bool scale_up = true, int stride = 32) { float scale = std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); if (!scale_up) { diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 7eed2c0c43..2bb13ce459 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -20,12 +20,12 @@ 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 - + ``` #下载yolov7模型文件 wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt - + # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt @@ -33,12 +33,6 @@ cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` - - -- 从PaddlePaddle获取 - - - ## 安装FastDeploy 使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` @@ -71,9 +65,3 @@ DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] - [C++部署](./cpp/README.md) - [YOLOv7 API文档](./api.md) - - - - - - diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index 13a5e8343e..f216c1aecf 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ 
b/model_zoo/vision/yolov7/cpp/README.md @@ -8,20 +8,15 @@ 访问[YOLOv7](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)官方github库,按照指引下载安装,下载`yolov7.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 - - ``` #下载yolov7模型文件 wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt - + # 导出onnx格式文件 python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt ``` - - -- 从PaddlePaddle获取 ## 运行demo @@ -54,4 +49,3 @@ DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] 224.049210,195.147003, 419.658234, 364.004852, 0.798262, 17 369.316986,209.055725, 456.373840, 321.627625, 0.687066, 17 ``` - From f5f7a863e09490213c5ea51fd83c584ff10752df Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 05:16:07 +0000 Subject: [PATCH 31/94] gitignore --- .gitignore | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.gitignore b/.gitignore index 39783b8839..51f2f2ed80 100644 --- a/.gitignore +++ b/.gitignore @@ -11,4 +11,4 @@ fastdeploy.egg-info .setuptools-cmake-build fastdeploy/version.py fastdeploy/LICENSE* -fastdeploy/ThirdPartyNotices* \ No newline at end of file +fastdeploy/ThirdPartyNotices* From e6cec25cace95e029adc08412aa359486446ec6d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 2022 08:05:01 +0000 Subject: [PATCH 32/94] Transfer some funtions to private member of class --- fastdeploy/vision/wongkinyiu/yolov7.cc | 10 +++++----- fastdeploy/vision/wongkinyiu/yolov7.h | 17 ++++++++++++----- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.cc b/fastdeploy/vision/wongkinyiu/yolov7.cc index 248718a69a..532f552947 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -20,9 +20,9 @@ namespace fastdeploy { namespace vision { namespace wongkinyiu { -void LetterBox(Mat* mat, const std::vector& size, - const std::vector& color, bool _auto, - bool scale_fill = false, bool scale_up = true, int stride = 32) { +void 
YOLOv7::LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill, bool scale_up, int stride) { float scale = std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); if (!scale_up) { @@ -107,8 +107,8 @@ bool YOLOv7::Preprocess(Mat* mat, FDTensor* output, // 1. letterbox // 2. BGR->RGB // 3. HWC->CHW - LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, - stride); + YOLOv7::LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, + is_scale_up, stride); BGR2RGB::Run(mat); Normalize::Run(mat, std::vector(mat->Channels(), 0.0), std::vector(mat->Channels(), 1.0)); diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index 75cab34ded..90be9ea463 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -70,7 +70,7 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // FDTensor为预处理后的Tensor数据,传给后端进行推理 // im_info为预处理过程保存的数据,在后处理中需要用到 bool Preprocess(Mat* mat, FDTensor* outputs, - std::map>* im_info); + std::map>* im_info); // 后端推理结果后处理,输出给用户 // infer_result 为后端推理后的输出Tensor @@ -78,10 +78,17 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { // im_info 为预处理记录的信息,后处理用于还原box // conf_threshold 后处理时过滤box的置信度阈值 // nms_iou_threshold 后处理时NMS设定的iou阈值 - bool Postprocess( - FDTensor& infer_result, DetectionResult* result, - const std::map>& im_info, - float conf_threshold, float nms_iou_threshold); + bool Postprocess(FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); + + // 对图片进行LetterBox处理 + // mat 为输入图片 + // size 为输入图片的size + void LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill = false, bool scale_up = true, + int stride = 32); }; } // namespace wongkinyiu } // namespace vision From e25e4f2a5c18ffe45bd3b8574dbe7c612a528e72 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Tue, 19 Jul 
2022 08:07:49 +0000 Subject: [PATCH 33/94] Transfer some funtions to private member of class --- fastdeploy/vision/wongkinyiu/yolov7.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/yolov7.h b/fastdeploy/vision/wongkinyiu/yolov7.h index 90be9ea463..c494754f0e 100644 --- a/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/fastdeploy/vision/wongkinyiu/yolov7.h @@ -83,8 +83,8 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { float conf_threshold, float nms_iou_threshold); // 对图片进行LetterBox处理 - // mat 为输入图片 - // size 为输入图片的size + // mat 为读取到的原图 + // size 为输入模型的图像尺寸 void LetterBox(Mat* mat, const std::vector& size, const std::vector& color, bool _auto, bool scale_fill = false, bool scale_up = true, From e8a8439dd97e0a6d52f299bff2958290637687c8 Mon Sep 17 00:00:00 2001 From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com> Date: Wed, 20 Jul 2022 15:25:57 +0800 Subject: [PATCH 34/94] Merge from develop (#9) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 
(#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> --- examples/CMakeLists.txt | 26 +-- examples/vision/ppdet_ppyoloe.cc | 51 ++++++ fastdeploy/__init__.py | 2 +- fastdeploy/download.py | 2 +- fastdeploy/utils/utils.h | 4 + fastdeploy/vision.h | 1 + fastdeploy/vision/__init__.py | 1 + .../vision/common/processors/convert.cc | 62 +++++++ fastdeploy/vision/common/processors/convert.h | 42 +++++ .../vision/common/processors/transform.h | 1 + fastdeploy/vision/meituan/yolov6.cc | 28 +-- fastdeploy/vision/ppcls/model.cc | 19 +- fastdeploy/vision/ppcls/model.h | 16 +- fastdeploy/vision/ppcls/ppcls_pybind.cc | 2 +- fastdeploy/vision/ppdet/__init__.py | 39 ++++ fastdeploy/vision/ppdet/ppdet_pybind.cc | 32 ++++ fastdeploy/vision/ppdet/ppyoloe.cc | 170 ++++++++++++++++++ fastdeploy/vision/ppdet/ppyoloe.h | 44 +++++ fastdeploy/vision/ultralytics/yolov5.cc | 19 +- fastdeploy/vision/utils/sort_det_res.cc | 6 +- fastdeploy/vision/vision_pybind.cc | 10 +- fastdeploy/vision/visualize/detection.cc | 4 +- model_zoo/vision/ppyoloe/README.md | 52 ++++++ model_zoo/vision/ppyoloe/api.md | 74 ++++++++ model_zoo/vision/ppyoloe/cpp/CMakeLists.txt | 17 ++ model_zoo/vision/ppyoloe/cpp/README.md | 39 ++++ model_zoo/vision/ppyoloe/cpp/ppyoloe.cc | 51 ++++++ model_zoo/vision/ppyoloe/ppyoloe.py | 24 +++ setup.py | 30 +++- 29 files changed, 818 insertions(+), 50 deletions(-) create mode 100644 examples/vision/ppdet_ppyoloe.cc create mode 100644 fastdeploy/vision/common/processors/convert.cc create mode 100644 fastdeploy/vision/common/processors/convert.h create mode 100644 
fastdeploy/vision/ppdet/__init__.py create mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 model_zoo/vision/ppyoloe/README.md create mode 100644 model_zoo/vision/ppyoloe/api.md create mode 100644 model_zoo/vision/ppyoloe/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/ppyoloe/cpp/README.md create mode 100644 model_zoo/vision/ppyoloe/cpp/ppyoloe.cc create mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 1e2dc43bd4..112193c86a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,24 +1,26 @@ -function(add_fastdeploy_executable field url model) +function(add_fastdeploy_executable FIELD CC_FILE) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) - set(TEMP_TARGET_NAME ${field}_${url}_${model}) + set(TEMP_TARGET_FILE ${CC_FILE}) + string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) + string(REGEX REPLACE ".cc" "" FILE_PREFIX ${FILE_NAME}) + set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") - else () - message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! 
fastdeploy executable: [${TEMP_TARGET_NAME}] !") + message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if (WITH_VISION_EXAMPLES) - add_fastdeploy_executable(vision ultralytics yolov5) - add_fastdeploy_executable(vision meituan yolov6) - add_fastdeploy_executable(vision wongkinyiu yolov7) - add_fastdeploy_executable(vision megvii yolox) +if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) + message(STATUS "") + message(STATUS "*************FastDeploy Examples Summary**********") + file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) + foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) + add_fastdeploy_executable(vision ${_CC_FILE}) + endforeach() endif() -# other examples ... \ No newline at end of file +# other examples ... diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/ppdet_ppyoloe.cc new file mode 100644 index 0000000000..b234021c92 --- /dev/null +++ b/examples/vision/ppdet_ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "test.jpeg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! Saved: " << vis_path << std::endl; + return 0; +} diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py index 500e7cc42a..68006c1bed 100644 --- a/fastdeploy/__init__.py +++ b/fastdeploy/__init__.py @@ -17,7 +17,7 @@ from .fastdeploy_runtime import * from . import fastdeploy_main as C from . 
import vision -from .download import download +from .download import download, download_and_decompress def TensorInfoStr(tensor_info): diff --git a/fastdeploy/download.py b/fastdeploy/download.py index e00af098df..67f21d8e76 100644 --- a/fastdeploy/download.py +++ b/fastdeploy/download.py @@ -156,7 +156,7 @@ def decompress(fname): def url2dir(url, path, rename=None): full_name = download(url, path, rename, show_progress=True) - print("SDK is donwloaded, now extracting...") + print("File is donwloaded, now extracting...") if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0: return decompress(full_name) diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h index 1b9f625b5e..9312084265 100644 --- a/fastdeploy/utils/utils.h +++ b/fastdeploy/utils/utils.h @@ -64,6 +64,10 @@ class FASTDEPLOY_DECL FDLogger { bool verbose_ = true; }; +#ifndef __REL_FILE__ +#define __REL_FILE__ __FILE__ +#endif + #define FDERROR \ FDLogger(true, "[ERROR]") \ << __REL_FILE__ << "(" << __LINE__ << ")::" << __FUNCTION__ << "\t" diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index ac3f006c0a..cafe310c70 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -16,6 +16,7 @@ #include "fastdeploy/core/config.h" #ifdef ENABLE_VISION #include "fastdeploy/vision/ppcls/model.h" +#include "fastdeploy/vision/ppdet/ppyoloe.h" #include "fastdeploy/vision/ultralytics/yolov5.h" #include "fastdeploy/vision/wongkinyiu/yolov7.h" #include "fastdeploy/vision/meituan/yolov6.h" diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 7122bede0b..6acbf0c376 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -15,6 +15,7 @@ from . import evaluation from . import ppcls +from . import ppdet from . import ultralytics from . import meituan from . 
import megvii diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc new file mode 100644 index 0000000000..a7ca6de07a --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.cc @@ -0,0 +1,62 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision/common/processors/convert.h" + +namespace fastdeploy { + +namespace vision { + +Convert::Convert(const std::vector& alpha, + const std::vector& beta) { + FDASSERT(alpha.size() == beta.size(), + "Convert: requires the size of alpha equal to the size of beta."); + FDASSERT(alpha.size() != 0, + "Convert: requires the size of alpha and beta > 0."); + alpha_.assign(alpha.begin(), alpha.end()); + beta_.assign(beta.begin(), beta.end()); +} + +bool Convert::CpuRun(Mat* mat) { + cv::Mat* im = mat->GetCpuMat(); + std::vector split_im; + cv::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::merge(split_im, *im); + return true; +} + +#ifdef ENABLE_OPENCV_CUDA +bool Convert::GpuRun(Mat* mat) { + cv::cuda::GpuMat* im = mat->GetGpuMat(); + std::vector split_im; + cv::cuda::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::cuda::merge(split_im, *im); + return true; +} +#endif + +bool 
Convert::Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, ProcLib lib) { + auto c = Convert(alpha, beta); + return c(mat, lib); +} + +} // namespace vision +} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h new file mode 100644 index 0000000000..5d5a5276f5 --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.h @@ -0,0 +1,42 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "fastdeploy/vision/common/processors/base.h" + +namespace fastdeploy { +namespace vision { +class Convert : public Processor { + public: + Convert(const std::vector& alpha, const std::vector& beta); + + bool CpuRun(Mat* mat); +#ifdef ENABLE_OPENCV_CUDA + bool GpuRun(Mat* mat); +#endif + std::string Name() { return "Convert"; } + + // Compute `result = mat * alpha + beta` directly by channel. + // The default behavior is the same as OpenCV's convertTo method. 
+ static bool Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, + ProcLib lib = ProcLib::OPENCV_CPU); + + private: + std::vector alpha_; + std::vector beta_; +}; +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 12eec8d72d..08073b4e42 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,6 +17,7 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" +#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index 8f37bf89c6..8ac7377194 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -25,14 +25,14 @@ namespace meituan { void LetterBox(Mat* mat, std::vector size, std::vector color, bool _auto, bool scale_fill = false, bool scale_up = true, int stride = 32) { - float scale = std::min(size[1] * 1.0f / static_cast(mat->Height()), - size[0] * 1.0f / static_cast(mat->Width())); + float scale = std::min(size[1] * 1.0f / static_cast(mat->Height()), + size[0] * 1.0f / static_cast(mat->Width())); if (!scale_up) { scale = std::min(scale, 1.0f); } int resize_h = int(round(static_cast(mat->Height()) * scale)); - int resize_w = int(round(static_cast(mat->Width()) * scale)); + int resize_w = int(round(static_cast(mat->Width()) * scale)); int pad_w = size[0] - resize_w; int pad_h = size[1] - resize_h; @@ -85,13 +85,13 @@ bool YOLOv6::Initialize() { is_scale_up = false; stride = 32; max_wh = 4096.0f; - + if (!InitRuntime()) { FDERROR << "Failed to initialize 
fastdeploy backend." << std::endl; return false; } - // Check if the input shape is dynamic after Runtime already initialized, - // Note that, We need to force is_mini_pad 'false' to keep static + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. is_dynamic_input_ = false; auto shape = InputInfoOfRuntime(0).shape; @@ -102,7 +102,7 @@ bool YOLOv6::Initialize() { break; } } - if (!is_dynamic_input_) { + if (!is_dynamic_input_) { is_mini_pad = false; } return true; @@ -111,15 +111,15 @@ bool YOLOv6::Initialize() { bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, std::map>* im_info) { // process after image load - float ratio = std::min(size[1] * 1.0f / static_cast(mat->Height()), - size[0] * 1.0f / static_cast(mat->Width())); + float ratio = std::min(size[1] * 1.0f / static_cast(mat->Height()), + size[0] * 1.0f / static_cast(mat->Width())); if (ratio != 1.0) { int interp = cv::INTER_AREA; if (ratio > 1.0) { interp = cv::INTER_LINEAR; } int resize_h = int(round(static_cast(mat->Height()) * ratio)); - int resize_w = int(round(static_cast(mat->Width()) * ratio)); + int resize_w = int(round(static_cast(mat->Width()) * ratio)); Resize::Run(mat, resize_w, resize_h, -1, -1, interp); } // yolov6's preprocess steps @@ -129,8 +129,12 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of 
preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index 915cb97512..c4e5b767c7 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ -1,3 +1,16 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" @@ -135,6 +148,6 @@ bool Model::Predict(cv::Mat* im, ClassifyResult* result, int topk) { return true; } -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index 36841d74c6..265f92d32b 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,7 +1,21 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + #pragma once #include "fastdeploy/fastdeploy_model.h" -#include "fastdeploy/vision/common/result.h" #include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" namespace fastdeploy { namespace vision { diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc index ef3fffee8e..1abc0b2b7c 100644 --- a/fastdeploy/vision/ppcls/ppcls_pybind.cc +++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc @@ -14,7 +14,7 @@ #include "fastdeploy/pybind/main.h" namespace fastdeploy { -void BindPpClsModel(pybind11::module& m) { +void BindPPCls(pybind11::module& m) { auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas."); pybind11::class_(ppcls_module, "Model") .def(pybind11::init(ppdet_module, + "PPYOLOE") + .def(pybind11::init()) + .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data, + float conf_threshold, float nms_iou_threshold) { + auto mat = PyArrayToCvMat(data); + vision::DetectionResult res; + self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); + return res; + }); +} +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc new file mode 100644 index 0000000000..c215ecb0ca --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.cc @@ -0,0 +1,170 @@ +#include "fastdeploy/vision/ppdet/ppyoloe.h" +#include "fastdeploy/vision/utils/utils.h" +#include "yaml-cpp/yaml.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option, + const Frontend& model_format) { + config_file_ = config_file; + valid_cpu_backends = {Backend::ORT, Backend::PDINFER}; + valid_gpu_backends = {Backend::ORT, Backend::PDINFER}; + runtime_option = custom_option; + 
runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool PPYOLOE::Initialize() { + if (!BuildPreprocessPipelineFromConfig()) { + std::cout << "Failed to build preprocess pipeline from configuration file." + << std::endl; + return false; + } + if (!InitRuntime()) { + std::cout << "Failed to initialize fastdeploy backend." << std::endl; + return false; + } + return true; +} + +bool PPYOLOE::BuildPreprocessPipelineFromConfig() { + processors_.clear(); + YAML::Node cfg; + try { + cfg = YAML::LoadFile(config_file_); + } catch (YAML::BadFile& e) { + std::cout << "Failed to load yaml file " << config_file_ + << ", maybe you should check this file." << std::endl; + return false; + } + + if (cfg["arch"].as() != "YOLO") { + std::cout << "Require the arch of model is YOLO, but arch defined in " + "config file is " + << cfg["arch"].as() << "." << std::endl; + return false; + } + processors_.push_back(std::make_shared()); + + for (const auto& op : cfg["Preprocess"]) { + std::string op_name = op["type"].as(); + if (op_name == "NormalizeImage") { + auto mean = op["mean"].as>(); + auto std = op["std"].as>(); + bool is_scale = op["is_scale"].as(); + processors_.push_back(std::make_shared(mean, std, is_scale)); + } else if (op_name == "Resize") { + bool keep_ratio = op["keep_ratio"].as(); + auto target_size = op["target_size"].as>(); + int interp = op["interp"].as(); + FDASSERT(target_size.size(), + "Require size of target_size be 2, but now it's " + + std::to_string(target_size.size()) + "."); + FDASSERT(!keep_ratio, + "Only support keep_ratio is false while deploy " + "PaddleDetection model."); + int width = target_size[1]; + int height = target_size[0]; + processors_.push_back( + std::make_shared(width, height, -1.0, -1.0, interp, false)); + } else if (op_name == "Permute") { + processors_.push_back(std::make_shared()); + } else { + std::cout << "Unexcepted preprocess 
operator: " << op_name << "." + << std::endl; + return false; + } + } + return true; +} + +bool PPYOLOE::Preprocess(Mat* mat, std::vector* outputs) { + int origin_w = mat->Width(); + int origin_h = mat->Height(); + for (size_t i = 0; i < processors_.size(); ++i) { + if (!(*(processors_[i].get()))(mat)) { + std::cout << "Failed to process image data in " << processors_[i]->Name() + << "." << std::endl; + return false; + } + } + + outputs->resize(2); + (*outputs)[0].name = InputInfoOfRuntime(0).name; + mat->ShareWithTensor(&((*outputs)[0])); + + // reshape to [1, c, h, w] + (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1); + + (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name); + float* ptr = static_cast((*outputs)[1].MutableData()); + ptr[0] = mat->Height() * 1.0 / mat->Height(); + ptr[1] = mat->Width() * 1.0 / mat->Width(); + return true; +} + +bool PPYOLOE::Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold) { + FDASSERT(infer_result[1].shape[0] == 1, + "Only support batch = 1 in FastDeploy now."); + int box_num = 0; + if (infer_result[1].dtype == FDDataType::INT32) { + box_num = *(static_cast(infer_result[1].Data())); + } else if (infer_result[1].dtype == FDDataType::INT64) { + box_num = *(static_cast(infer_result[1].Data())); + } else { + FDASSERT( + false, + "The output box_num of PPYOLOE model should be type of int32/int64."); + } + result->Reserve(box_num); + float* box_data = static_cast(infer_result[0].Data()); + for (size_t i = 0; i < box_num; ++i) { + if (box_data[i * 6 + 1] < conf_threshold) { + continue; + } + result->label_ids.push_back(box_data[i * 6]); + result->scores.push_back(box_data[i * 6 + 1]); + result->boxes.emplace_back( + std::array{box_data[i * 6 + 2], box_data[i * 6 + 3], + box_data[i * 6 + 4] - box_data[i * 6 + 2], + box_data[i * 6 + 5] - box_data[i * 6 + 3]}); + } + return true; +} + +bool PPYOLOE::Predict(cv::Mat* im, DetectionResult* result, 
+ float conf_threshold, float iou_threshold) { + Mat mat(*im); + std::vector processed_data; + if (!Preprocess(&mat, &processed_data)) { + FDERROR << "Failed to preprocess input data while using model:" + << ModelName() << "." << std::endl; + return false; + } + + std::vector infer_result; + if (!Infer(processed_data, &infer_result)) { + FDERROR << "Failed to inference while using model:" << ModelName() << "." + << std::endl; + return false; + } + + if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) { + FDERROR << "Failed to postprocess while using model:" << ModelName() << "." + << std::endl; + return false; + } + return true; +} + +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h new file mode 100644 index 0000000000..a3db268ca4 --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.h @@ -0,0 +1,44 @@ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel { + public: + PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::PADDLE); + + std::string ModelName() const { return "PaddleDetection/PPYOLOE"; } + + virtual bool Initialize(); + + virtual bool BuildPreprocessPipelineFromConfig(); + + virtual bool Preprocess(Mat* mat, std::vector* outputs); + + virtual bool Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold); + + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.5, float nms_threshold = 0.7); + + private: + std::vector> 
processors_; + std::string config_file_; + // PaddleDetection can export model without nms + // This flag will help us to handle the different + // situation + bool has_nms_; +}; +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc index 193cfe9794..0b7e50e735 100644 --- a/fastdeploy/vision/ultralytics/yolov5.cc +++ b/fastdeploy/vision/ultralytics/yolov5.cc @@ -87,8 +87,8 @@ bool YOLOv5::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } - // Check if the input shape is dynamic after Runtime already initialized, - // Note that, We need to force is_mini_pad 'false' to keep static + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. is_dynamic_input_ = false; auto shape = InputInfoOfRuntime(0).shape; @@ -99,7 +99,7 @@ bool YOLOv5::Initialize() { break; } } - if (!is_dynamic_input_) { + if (!is_dynamic_input_) { is_mini_pad = false; } return true; @@ -126,8 +126,12 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), @@ -198,6 +202,11 @@ bool YOLOv5::Postprocess( result->scores.push_back(confidence); } } + + if (result->boxes.size() == 0) { + return 
true; + } + utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index e4a0db9761..93dbb69694 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -68,7 +68,11 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) { void SortDetectionResult(DetectionResult* result) { size_t low = 0; - size_t high = result->scores.size() - 1; + size_t high = result->scores.size(); + if (high == 0) { + return; + } + high = high - 1; MergeSort(result, low, high); } diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 41ada5541a..0334303ce6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -16,7 +16,8 @@ namespace fastdeploy { -void BindPpClsModel(pybind11::module& m); +void BindPPCls(pybind11::module& m); +void BindPPDet(pybind11::module& m); void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); void BindMeituan(pybind11::module& m); @@ -41,13 +42,14 @@ void BindVision(pybind11::module& m) { .def("__repr__", &vision::DetectionResult::Str) .def("__str__", &vision::DetectionResult::Str); - BindPpClsModel(m); + BindPPCls(m); + BindPPDet(m); BindUltralytics(m); BindWongkinyiu(m); BindMeituan(m); BindMegvii(m); #ifdef ENABLE_VISION_VISUALIZE BindVisualize(m); -#endif +#endif } -} // namespace fastdeploy +} // namespace fastdeploy diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc index d0c4116148..5b5538bff7 100644 --- a/fastdeploy/vision/visualize/detection.cc +++ b/fastdeploy/vision/visualize/detection.cc @@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, } std::string text = id + "," + score; int font = cv::FONT_HERSHEY_SIMPLEX; - cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr); + 
cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr); cv::Point origin; origin.x = rect.x; origin.y = rect.y; @@ -52,7 +52,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, text_size.width, text_size.height); cv::rectangle(*im, rect, rect_color, line_size); cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255), - 0.5); + 1); } } diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md new file mode 100644 index 0000000000..42d18104ad --- /dev/null +++ b/model_zoo/vision/ppyoloe/README.md @@ -0,0 +1,52 @@ +# PaddleDetection/PPYOLOE部署示例 + +- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) + +本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下 +``` +. +├── cpp # C++ 代码目录 +│   ├── CMakeLists.txt # C++ 代码编译CMakeLists文件 +│   ├── README.md # C++ 代码编译部署文档 +│   └── ppyoloe.cc # C++ 示例代码 +├── README.md # PPYOLOE 部署文档 +└── ppyoloe.py # Python示例代码 +``` + +## 安装FastDeploy + +使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` +``` +# 安装fastdeploy-python工具 +pip install fastdeploy-python +``` + +## Python部署 + +执行如下代码即会自动下载PPYOLOE模型和测试图片 +``` +python ppyoloe.py +``` + +执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 +414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 +163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 +267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 +581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 +104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 +348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 +364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 +75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 +328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 
+504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 +379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 +25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 +``` + +## 其它文档 + +- [C++部署](./cpp/README.md) +- [PPYOLOE API文档](./api.md) diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/ppyoloe/api.md new file mode 100644 index 0000000000..1c5cbcaadb --- /dev/null +++ b/model_zoo/vision/ppyoloe/api.md @@ -0,0 +1,74 @@ +# PPYOLOE API说明 + +## Python API + +### PPYOLOE类 +``` +fastdeploy.vision.ppdet.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE) +``` +PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **config_file**(str): 模型推理配置文件 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### predict函数 +> ``` +> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) + +示例代码参考[ppyoloe.py](./ppyoloe.py) + + +## C++ API + +### PPYOLOE类 +``` +fastdeploy::vision::ppdet::PPYOLOE( + const string& model_file, + const string& params_file, + const string& config_file, + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::PADDLE) +``` +PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **config_file**(str): 模型推理配置文件 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### Predict函数 +> ``` +> PPYOLOE::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.5, +> float nms_threshold = 0.7) +> 
``` +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度 +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) + +示例代码参考[cpp/ppyoloe.cc](cpp/ppyoloe.cc) + +## 其它API使用 + +- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md) diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt new file mode 100644 index 0000000000..e681566517 --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt @@ -0,0 +1,17 @@ +PROJECT(ppyoloe_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.16) + +# 在低版本ABI环境中,通过如下代码进行兼容性编译 +# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) + +# 指定下载解压后的fastdeploy库路径 +set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.3.0/) + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc) +# 添加FastDeploy库依赖 +target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS}) diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md new file mode 100644 index 0000000000..1027c2eeb2 --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/README.md @@ -0,0 +1,39 @@ +# 编译PPYOLOE示例 + + +``` +# 下载和解压预测库 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz +tar xvf fastdeploy-linux-x64-0.0.3.tgz + +# 编译示例代码 +mkdir build && cd build +cmake .. 
+make -j + +# 下载模型和图片 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz +tar xvf ppyoloe_crn_l_300e_coco.tgz +wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg + +# 执行 +./ppyoloe_demo +``` + +执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 +414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 +163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 +267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 +581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 +104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 +348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 +364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 +75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 +328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 +504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 +379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 +25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 +``` diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc new file mode 100644 index 0000000000..e63f29e62a --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "000000014439_640x640.jpg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! 
Saved: " << vis_path << std::endl; + return 0; +} diff --git a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py new file mode 100644 index 0000000000..7d79dfd8cf --- /dev/null +++ b/model_zoo/vision/ppyoloe/ppyoloe.py @@ -0,0 +1,24 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz" +test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg" +fd.download_and_decompress(model_url, ".") +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel", + "ppyoloe_crn_l_300e_coco/model.pdiparams", + "ppyoloe_crn_l_300e_coco/infer_cfg.yml") + +# 预测图片 +im = cv2.imread("000000014439_640x640.jpg") +result = model.predict(im, conf_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) diff --git a/setup.py b/setup.py index f0ff3f16de..e76f057b1c 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,8 @@ setup_configs["ENABLE_TRT_BACKEND"] = os.getenv("ENABLE_TRT_BACKEND", "OFF") setup_configs["WITH_GPU"] = os.getenv("WITH_GPU", "OFF") setup_configs["TRT_DIRECTORY"] = os.getenv("TRT_DIRECTORY", "UNDEFINED") -setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY", "/usr/local/cuda") +setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY", + "/usr/local/cuda") TOP_DIR = os.path.realpath(os.path.dirname(__file__)) SRC_DIR = os.path.join(TOP_DIR, "fastdeploy") @@ -325,17 +326,32 @@ def run(self): shutil.copy("LICENSE", "fastdeploy") depend_libs = list() - # modify the search path of libraries - command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-36m-x86_64-linux-gnu.so" - # The sw_64 not suppot patchelf, so we just disable that. 
- if platform.machine() != 'sw_64' and platform.machine() != 'mips64': - assert os.system(command) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format(command) + if platform.system().lower() == "linux": + for f in os.listdir(".setuptools-cmake-build"): + full_name = os.path.join(".setuptools-cmake-build", f) + if not os.path.isfile(full_name): + continue + if not full_name.count("fastdeploy_main.cpython-"): + continue + if not full_name.endswith(".so"): + continue + # modify the search path of libraries + command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format( + full_name) + # The sw_64 not suppot patchelf, so we just disable that. + if platform.machine() != 'sw_64' and platform.machine( + ) != 'mips64': + assert os.system( + command + ) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format( + command) for f in os.listdir(".setuptools-cmake-build"): if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)): continue if f.count("libfastdeploy") > 0: - shutil.copy(os.path.join(".setuptools-cmake-build", f), "fastdeploy/libs") + shutil.copy( + os.path.join(".setuptools-cmake-build", f), "fastdeploy/libs") for dirname in os.listdir(".setuptools-cmake-build/third_libs/install"): for lib in os.listdir( os.path.join(".setuptools-cmake-build/third_libs/install", From a182893d9232c3ff0ecda5d07ec6517ddca8f449 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 07:38:15 +0000 Subject: [PATCH 35/94] first commit for yolor --- examples/CMakeLists.txt | 25 +- .../{ppdet_ppyoloe.cc => wongkinyiu_yolor.cc} | 15 +- fastdeploy/__init__.py | 2 +- fastdeploy/download.py | 2 +- fastdeploy/utils/utils.h | 27 +- fastdeploy/vision.h | 6 +- fastdeploy/vision/__init__.py | 1 - .../vision/common/processors/convert.cc | 62 ----- fastdeploy/vision/common/processors/convert.h | 42 --- .../vision/common/processors/transform.h | 1 - fastdeploy/vision/meituan/yolov6.cc | 8 +- 
fastdeploy/vision/ppcls/model.cc | 19 +- fastdeploy/vision/ppcls/model.h | 20 +- fastdeploy/vision/ppcls/ppcls_pybind.cc | 4 +- fastdeploy/vision/ppdet/__init__.py | 39 --- fastdeploy/vision/ppdet/ppdet_pybind.cc | 32 --- fastdeploy/vision/ppdet/ppyoloe.cc | 170 ------------ fastdeploy/vision/ppdet/ppyoloe.h | 44 ---- fastdeploy/vision/ultralytics/yolov5.cc | 13 +- fastdeploy/vision/utils/sort_det_res.cc | 6 +- fastdeploy/vision/vision_pybind.cc | 6 +- fastdeploy/vision/visualize/detection.cc | 8 +- fastdeploy/vision/wongkinyiu/__init__.py | 98 +++++++ .../vision/wongkinyiu/wongkinyiu_pybind.cc | 21 +- fastdeploy/vision/wongkinyiu/yolor.cc | 243 ++++++++++++++++++ fastdeploy/vision/wongkinyiu/yolor.h | 95 +++++++ model_zoo/vision/ppyoloe/README.md | 52 ---- model_zoo/vision/ppyoloe/cpp/README.md | 39 --- model_zoo/vision/ppyoloe/ppyoloe.py | 24 -- model_zoo/vision/yolor/README.md | 67 +++++ model_zoo/vision/{ppyoloe => yolor}/api.md | 31 +-- .../{ppyoloe => yolor}/cpp/CMakeLists.txt | 6 +- model_zoo/vision/yolor/cpp/README.md | 51 ++++ .../cpp/ppyoloe.cc => yolor/cpp/yolor.cc} | 17 +- model_zoo/vision/yolor/yolor.py | 21 ++ setup.py | 27 +- 36 files changed, 679 insertions(+), 665 deletions(-) rename examples/vision/{ppdet_ppyoloe.cc => wongkinyiu_yolor.cc} (75%) delete mode 100644 fastdeploy/vision/common/processors/convert.cc delete mode 100644 fastdeploy/vision/common/processors/convert.h delete mode 100644 fastdeploy/vision/ppdet/__init__.py delete mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc delete mode 100644 fastdeploy/vision/ppdet/ppyoloe.cc delete mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 fastdeploy/vision/wongkinyiu/yolor.cc create mode 100644 fastdeploy/vision/wongkinyiu/yolor.h delete mode 100644 model_zoo/vision/ppyoloe/README.md delete mode 100644 model_zoo/vision/ppyoloe/cpp/README.md delete mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py create mode 100644 model_zoo/vision/yolor/README.md rename 
model_zoo/vision/{ppyoloe => yolor}/api.md (56%) rename model_zoo/vision/{ppyoloe => yolor}/cpp/CMakeLists.txt (75%) create mode 100644 model_zoo/vision/yolor/cpp/README.md rename model_zoo/vision/{ppyoloe/cpp/ppyoloe.cc => yolor/cpp/yolor.cc} (66%) create mode 100644 model_zoo/vision/yolor/yolor.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 112193c86a..67361223c6 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,26 +1,25 @@ -function(add_fastdeploy_executable FIELD CC_FILE) +function(add_fastdeploy_executable field url model) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${CC_FILE}) - string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) - string(REGEX REPLACE ".cc" "" FILE_PREFIX ${FILE_NAME}) - set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) + set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) + set(TEMP_TARGET_NAME ${field}_${url}_${model}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") + message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") + else () + message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! 
fastdeploy executable: [${TEMP_TARGET_NAME}] !") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) - message(STATUS "") - message(STATUS "*************FastDeploy Examples Summary**********") - file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) - foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) - add_fastdeploy_executable(vision ${_CC_FILE}) - endforeach() +if (WITH_VISION_EXAMPLES) + add_fastdeploy_executable(vision ultralytics yolov5) + add_fastdeploy_executable(vision meituan yolov6) + add_fastdeploy_executable(vision wongkinyiu yolov7) + add_fastdeploy_executable(vision megvii yolox) + add_fastdeploy_executable(vision wongkinyiu yolor) endif() # other examples ... diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/wongkinyiu_yolor.cc similarity index 75% rename from examples/vision/ppdet_ppyoloe.cc rename to examples/vision/wongkinyiu_yolor.cc index b234021c92..abdca2b7ff 100644 --- a/examples/vision/ppdet_ppyoloe.cc +++ b/examples/vision/wongkinyiu_yolor.cc @@ -17,17 +17,18 @@ int main() { namespace vis = fastdeploy::vision; - std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; - std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; - std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; - std::string img_path = "test.jpeg"; - std::string vis_path = "vis.jpeg"; + std::string model_file = "../resources/models/yolor.onnx"; + std::string img_path = "../resources/images/horses.jpg"; + std::string vis_path = "../resources/outputs/wongkinyiu_yolor_vis_result.jpg"; - auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + auto model = vis::wongkinyiu::YOLOR(model_file); if (!model.Initialized()) { - std::cerr << "Init Failed." << std::endl; + std::cerr << "Init Failed! Model: " << model_file << std::endl; return -1; + } else { + std::cout << "Init Done! 
Model:" << model_file << std::endl; } + model.EnableDebug(); cv::Mat im = cv::imread(img_path); cv::Mat vis_im = im.clone(); diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py index 68006c1bed..500e7cc42a 100644 --- a/fastdeploy/__init__.py +++ b/fastdeploy/__init__.py @@ -17,7 +17,7 @@ from .fastdeploy_runtime import * from . import fastdeploy_main as C from . import vision -from .download import download, download_and_decompress +from .download import download def TensorInfoStr(tensor_info): diff --git a/fastdeploy/download.py b/fastdeploy/download.py index 67f21d8e76..e00af098df 100644 --- a/fastdeploy/download.py +++ b/fastdeploy/download.py @@ -156,7 +156,7 @@ def decompress(fname): def url2dir(url, path, rename=None): full_name = download(url, path, rename, show_progress=True) - print("File is donwloaded, now extracting...") + print("SDK is donwloaded, now extracting...") if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0: return decompress(full_name) diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h index 9312084265..23ca6ee51a 100644 --- a/fastdeploy/utils/utils.h +++ b/fastdeploy/utils/utils.h @@ -26,10 +26,10 @@ #define FASTDEPLOY_DECL __declspec(dllexport) #else #define FASTDEPLOY_DECL __declspec(dllimport) -#endif // FASTDEPLOY_LIB +#endif // FASTDEPLOY_LIB #else #define FASTDEPLOY_DECL __attribute__((visibility("default"))) -#endif // _WIN32 +#endif // _WIN32 namespace fastdeploy { @@ -42,7 +42,8 @@ class FASTDEPLOY_DECL FDLogger { } explicit FDLogger(bool verbose, const std::string& prefix = "[FastDeploy]"); - template FDLogger& operator<<(const T& val) { + template + FDLogger& operator<<(const T& val) { if (!verbose_) { return *this; } @@ -64,18 +65,14 @@ class FASTDEPLOY_DECL FDLogger { bool verbose_ = true; }; -#ifndef __REL_FILE__ -#define __REL_FILE__ __FILE__ -#endif +#define FDERROR \ + FDLogger(true, "[ERROR]") << __REL_FILE__ << "(" << __LINE__ \ + << ")::" << __FUNCTION__ << "\t" -#define 
FDERROR \ - FDLogger(true, "[ERROR]") \ - << __REL_FILE__ << "(" << __LINE__ << ")::" << __FUNCTION__ << "\t" - -#define FDASSERT(condition, message) \ - if (!(condition)) { \ - FDERROR << message << std::endl; \ - std::abort(); \ +#define FDASSERT(condition, message) \ + if (!(condition)) { \ + FDERROR << message << std::endl; \ + std::abort(); \ } -} // namespace fastdeploy +} // namespace fastdeploy diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index cafe310c70..4398463251 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -15,12 +15,12 @@ #include "fastdeploy/core/config.h" #ifdef ENABLE_VISION +#include "fastdeploy/vision/megvii/yolox.h" +#include "fastdeploy/vision/meituan/yolov6.h" #include "fastdeploy/vision/ppcls/model.h" -#include "fastdeploy/vision/ppdet/ppyoloe.h" #include "fastdeploy/vision/ultralytics/yolov5.h" +#include "fastdeploy/vision/wongkinyiu/yolor.h" #include "fastdeploy/vision/wongkinyiu/yolov7.h" -#include "fastdeploy/vision/meituan/yolov6.h" -#include "fastdeploy/vision/megvii/yolox.h" #endif #include "fastdeploy/vision/visualize/visualize.h" diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 6acbf0c376..7122bede0b 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -15,7 +15,6 @@ from . import evaluation from . import ppcls -from . import ppdet from . import ultralytics from . import meituan from . import megvii diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc deleted file mode 100644 index a7ca6de07a..0000000000 --- a/fastdeploy/vision/common/processors/convert.cc +++ /dev/null @@ -1,62 +0,0 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. 
-// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#include "fastdeploy/vision/common/processors/convert.h" - -namespace fastdeploy { - -namespace vision { - -Convert::Convert(const std::vector& alpha, - const std::vector& beta) { - FDASSERT(alpha.size() == beta.size(), - "Convert: requires the size of alpha equal to the size of beta."); - FDASSERT(alpha.size() != 0, - "Convert: requires the size of alpha and beta > 0."); - alpha_.assign(alpha.begin(), alpha.end()); - beta_.assign(beta.begin(), beta.end()); -} - -bool Convert::CpuRun(Mat* mat) { - cv::Mat* im = mat->GetCpuMat(); - std::vector split_im; - cv::split(*im, split_im); - for (int c = 0; c < im->channels(); c++) { - split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); - } - cv::merge(split_im, *im); - return true; -} - -#ifdef ENABLE_OPENCV_CUDA -bool Convert::GpuRun(Mat* mat) { - cv::cuda::GpuMat* im = mat->GetGpuMat(); - std::vector split_im; - cv::cuda::split(*im, split_im); - for (int c = 0; c < im->channels(); c++) { - split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); - } - cv::cuda::merge(split_im, *im); - return true; -} -#endif - -bool Convert::Run(Mat* mat, const std::vector& alpha, - const std::vector& beta, ProcLib lib) { - auto c = Convert(alpha, beta); - return c(mat, lib); -} - -} // namespace vision -} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h deleted file mode 100644 index 5d5a5276f5..0000000000 --- a/fastdeploy/vision/common/processors/convert.h +++ /dev/null @@ -1,42 +0,0 
@@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -#pragma once - -#include "fastdeploy/vision/common/processors/base.h" - -namespace fastdeploy { -namespace vision { -class Convert : public Processor { - public: - Convert(const std::vector& alpha, const std::vector& beta); - - bool CpuRun(Mat* mat); -#ifdef ENABLE_OPENCV_CUDA - bool GpuRun(Mat* mat); -#endif - std::string Name() { return "Convert"; } - - // Compute `result = mat * alpha + beta` directly by channel. - // The default behavior is the same as OpenCV's convertTo method. 
- static bool Run(Mat* mat, const std::vector& alpha, - const std::vector& beta, - ProcLib lib = ProcLib::OPENCV_CPU); - - private: - std::vector alpha_; - std::vector beta_; -}; -} // namespace vision -} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 08073b4e42..12eec8d72d 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,7 +17,6 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" -#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index 8ac7377194..b75f2016ee 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -129,12 +129,8 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - // std::vector(mat->Channels(), 1.0)); - // Compute `result = mat * alpha + beta` directly by channel - std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; - std::vector beta = {0.0f, 0.0f, 0.0f}; - Convert::Run(mat, alpha, beta); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index c4e5b767c7..915cb97512 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ 
-1,16 +1,3 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" @@ -148,6 +135,6 @@ bool Model::Predict(cv::Mat* im, ClassifyResult* result, int topk) { return true; } -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index 265f92d32b..fae99d4f3c 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,17 +1,3 @@ -// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. -// -// Licensed under the Apache License, Version 2.0 (the "License"); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- #pragma once #include "fastdeploy/fastdeploy_model.h" #include "fastdeploy/vision/common/processors/transform.h" @@ -46,6 +32,6 @@ class FASTDEPLOY_DECL Model : public FastDeployModel { std::vector> processors_; std::string config_file_; }; -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc index 1abc0b2b7c..828bef3c7a 100644 --- a/fastdeploy/vision/ppcls/ppcls_pybind.cc +++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc @@ -14,7 +14,7 @@ #include "fastdeploy/pybind/main.h" namespace fastdeploy { -void BindPPCls(pybind11::module& m) { +void BindPpClsModel(pybind11::module& m) { auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas."); pybind11::class_(ppcls_module, "Model") .def(pybind11::init(ppdet_module, - "PPYOLOE") - .def(pybind11::init()) - .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data, - float conf_threshold, float nms_iou_threshold) { - auto mat = PyArrayToCvMat(data); - vision::DetectionResult res; - self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); - return res; - }); -} -} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc deleted file mode 100644 index c215ecb0ca..0000000000 --- a/fastdeploy/vision/ppdet/ppyoloe.cc +++ /dev/null @@ -1,170 +0,0 @@ -#include "fastdeploy/vision/ppdet/ppyoloe.h" -#include "fastdeploy/vision/utils/utils.h" -#include "yaml-cpp/yaml.h" - -namespace fastdeploy { -namespace vision { -namespace ppdet { - -PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file, - const std::string& config_file, - const RuntimeOption& custom_option, - const Frontend& model_format) { - config_file_ = config_file; - valid_cpu_backends = {Backend::ORT, Backend::PDINFER}; - valid_gpu_backends = {Backend::ORT, Backend::PDINFER}; - 
runtime_option = custom_option; - runtime_option.model_format = model_format; - runtime_option.model_file = model_file; - runtime_option.params_file = params_file; - initialized = Initialize(); -} - -bool PPYOLOE::Initialize() { - if (!BuildPreprocessPipelineFromConfig()) { - std::cout << "Failed to build preprocess pipeline from configuration file." - << std::endl; - return false; - } - if (!InitRuntime()) { - std::cout << "Failed to initialize fastdeploy backend." << std::endl; - return false; - } - return true; -} - -bool PPYOLOE::BuildPreprocessPipelineFromConfig() { - processors_.clear(); - YAML::Node cfg; - try { - cfg = YAML::LoadFile(config_file_); - } catch (YAML::BadFile& e) { - std::cout << "Failed to load yaml file " << config_file_ - << ", maybe you should check this file." << std::endl; - return false; - } - - if (cfg["arch"].as() != "YOLO") { - std::cout << "Require the arch of model is YOLO, but arch defined in " - "config file is " - << cfg["arch"].as() << "." << std::endl; - return false; - } - processors_.push_back(std::make_shared()); - - for (const auto& op : cfg["Preprocess"]) { - std::string op_name = op["type"].as(); - if (op_name == "NormalizeImage") { - auto mean = op["mean"].as>(); - auto std = op["std"].as>(); - bool is_scale = op["is_scale"].as(); - processors_.push_back(std::make_shared(mean, std, is_scale)); - } else if (op_name == "Resize") { - bool keep_ratio = op["keep_ratio"].as(); - auto target_size = op["target_size"].as>(); - int interp = op["interp"].as(); - FDASSERT(target_size.size(), - "Require size of target_size be 2, but now it's " + - std::to_string(target_size.size()) + "."); - FDASSERT(!keep_ratio, - "Only support keep_ratio is false while deploy " - "PaddleDetection model."); - int width = target_size[1]; - int height = target_size[0]; - processors_.push_back( - std::make_shared(width, height, -1.0, -1.0, interp, false)); - } else if (op_name == "Permute") { - processors_.push_back(std::make_shared()); - } else { - 
std::cout << "Unexcepted preprocess operator: " << op_name << "." - << std::endl; - return false; - } - } - return true; -} - -bool PPYOLOE::Preprocess(Mat* mat, std::vector* outputs) { - int origin_w = mat->Width(); - int origin_h = mat->Height(); - for (size_t i = 0; i < processors_.size(); ++i) { - if (!(*(processors_[i].get()))(mat)) { - std::cout << "Failed to process image data in " << processors_[i]->Name() - << "." << std::endl; - return false; - } - } - - outputs->resize(2); - (*outputs)[0].name = InputInfoOfRuntime(0).name; - mat->ShareWithTensor(&((*outputs)[0])); - - // reshape to [1, c, h, w] - (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1); - - (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name); - float* ptr = static_cast((*outputs)[1].MutableData()); - ptr[0] = mat->Height() * 1.0 / mat->Height(); - ptr[1] = mat->Width() * 1.0 / mat->Width(); - return true; -} - -bool PPYOLOE::Postprocess(std::vector& infer_result, - DetectionResult* result, float conf_threshold, - float nms_threshold) { - FDASSERT(infer_result[1].shape[0] == 1, - "Only support batch = 1 in FastDeploy now."); - int box_num = 0; - if (infer_result[1].dtype == FDDataType::INT32) { - box_num = *(static_cast(infer_result[1].Data())); - } else if (infer_result[1].dtype == FDDataType::INT64) { - box_num = *(static_cast(infer_result[1].Data())); - } else { - FDASSERT( - false, - "The output box_num of PPYOLOE model should be type of int32/int64."); - } - result->Reserve(box_num); - float* box_data = static_cast(infer_result[0].Data()); - for (size_t i = 0; i < box_num; ++i) { - if (box_data[i * 6 + 1] < conf_threshold) { - continue; - } - result->label_ids.push_back(box_data[i * 6]); - result->scores.push_back(box_data[i * 6 + 1]); - result->boxes.emplace_back( - std::array{box_data[i * 6 + 2], box_data[i * 6 + 3], - box_data[i * 6 + 4] - box_data[i * 6 + 2], - box_data[i * 6 + 5] - box_data[i * 6 + 3]}); - } - return true; -} - -bool 
PPYOLOE::Predict(cv::Mat* im, DetectionResult* result, - float conf_threshold, float iou_threshold) { - Mat mat(*im); - std::vector processed_data; - if (!Preprocess(&mat, &processed_data)) { - FDERROR << "Failed to preprocess input data while using model:" - << ModelName() << "." << std::endl; - return false; - } - - std::vector infer_result; - if (!Infer(processed_data, &infer_result)) { - FDERROR << "Failed to inference while using model:" << ModelName() << "." - << std::endl; - return false; - } - - if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) { - FDERROR << "Failed to postprocess while using model:" << ModelName() << "." - << std::endl; - return false; - } - return true; -} - -} // namespace ppdet -} // namespace vision -} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h deleted file mode 100644 index a3db268ca4..0000000000 --- a/fastdeploy/vision/ppdet/ppyoloe.h +++ /dev/null @@ -1,44 +0,0 @@ -#pragma once -#include "fastdeploy/fastdeploy_model.h" -#include "fastdeploy/vision/common/processors/transform.h" -#include "fastdeploy/vision/common/result.h" - -#include "fastdeploy/vision/utils/utils.h" - -namespace fastdeploy { -namespace vision { -namespace ppdet { - -class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel { - public: - PPYOLOE(const std::string& model_file, const std::string& params_file, - const std::string& config_file, - const RuntimeOption& custom_option = RuntimeOption(), - const Frontend& model_format = Frontend::PADDLE); - - std::string ModelName() const { return "PaddleDetection/PPYOLOE"; } - - virtual bool Initialize(); - - virtual bool BuildPreprocessPipelineFromConfig(); - - virtual bool Preprocess(Mat* mat, std::vector* outputs); - - virtual bool Postprocess(std::vector& infer_result, - DetectionResult* result, float conf_threshold, - float nms_threshold); - - virtual bool Predict(cv::Mat* im, DetectionResult* result, - float conf_threshold = 0.5, float 
nms_threshold = 0.7); - - private: - std::vector> processors_; - std::string config_file_; - // PaddleDetection can export model without nms - // This flag will help us to handle the different - // situation - bool has_nms_; -}; -} // namespace ppdet -} // namespace vision -} // namespace fastdeploy diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc index 0b7e50e735..c8c6e06a94 100644 --- a/fastdeploy/vision/ultralytics/yolov5.cc +++ b/fastdeploy/vision/ultralytics/yolov5.cc @@ -126,12 +126,8 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - // std::vector(mat->Channels(), 1.0)); - // Compute `result = mat * alpha + beta` directly by channel - std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; - std::vector beta = {0.0f, 0.0f, 0.0f}; - Convert::Run(mat, alpha, beta); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), @@ -202,11 +198,6 @@ bool YOLOv5::Postprocess( result->scores.push_back(confidence); } } - - if (result->boxes.size() == 0) { - return true; - } - utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index 93dbb69694..e4a0db9761 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -68,11 +68,7 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) { void SortDetectionResult(DetectionResult* result) { size_t low = 0; - size_t high = result->scores.size(); - if (high == 0) { - return; - } - high = high - 1; + size_t high = result->scores.size() - 1; MergeSort(result, low, high); } diff --git 
a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 0334303ce6..e4ba05b893 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -16,8 +16,7 @@ namespace fastdeploy { -void BindPPCls(pybind11::module& m); -void BindPPDet(pybind11::module& m); +void BindPpClsModel(pybind11::module& m); void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); void BindMeituan(pybind11::module& m); @@ -42,8 +41,7 @@ void BindVision(pybind11::module& m) { .def("__repr__", &vision::DetectionResult::Str) .def("__str__", &vision::DetectionResult::Str); - BindPPCls(m); - BindPPDet(m); + BindPpClsModel(m); BindUltralytics(m); BindWongkinyiu(m); BindMeituan(m); diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc index 5b5538bff7..e5f01bdd35 100644 --- a/fastdeploy/vision/visualize/detection.cc +++ b/fastdeploy/vision/visualize/detection.cc @@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, } std::string text = id + "," + score; int font = cv::FONT_HERSHEY_SIMPLEX; - cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr); + cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr); cv::Point origin; origin.x = rect.x; origin.y = rect.y; @@ -52,10 +52,10 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, text_size.width, text_size.height); cv::rectangle(*im, rect, rect_color, line_size); cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255), - 1); + 0.5); } } -} // namespace vision -} // namespace fastdeploy +} // namespace vision +} // namespace fastdeploy #endif diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index 542389e208..026d10062f 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -114,3 +114,101 @@ def max_wh(self, value): assert 
isinstance( value, float), "The value to set `max_wh` must be type of float." self._model.max_wh = value + + +class YOLOR(FastDeployModel): + def __init__(self, + model_file, + params_file="", + runtime_option=None, + model_format=Frontend.ONNX): + # 调用基函数进行backend_option的初始化 + # 初始化后的option保存在self._runtime_option + super(YOLOR, self).__init__(runtime_option) + + self._model = C.vision.wongkinyiu.YOLOR( + model_file, params_file, self._runtime_option, model_format) + # 通过self.initialized判断整个模型的初始化是否成功 + assert self.initialized, "YOLOR initialize failed." + + def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5): + return self._model.predict(input_image, conf_threshold, + nms_iou_threshold) + + # 一些跟YOLOv7模型有关的属性封装 + # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持) + @property + def size(self): + return self._model.size + + @property + def padding_value(self): + return self._model.padding_value + + @property + def is_no_pad(self): + return self._model.is_no_pad + + @property + def is_mini_pad(self): + return self._model.is_mini_pad + + @property + def is_scale_up(self): + return self._model.is_scale_up + + @property + def stride(self): + return self._model.stride + + @property + def max_wh(self): + return self._model.max_wh + + @size.setter + def size(self, wh): + assert isinstance(wh, [list, tuple]),\ + "The value to set `size` must be type of tuple or list." + assert len(wh) == 2,\ + "The value to set `size` must contatins 2 elements means [width, height], but now it contains {} elements.".format( + len(wh)) + self._model.size = wh + + @padding_value.setter + def padding_value(self, value): + assert isinstance( + value, + list), "The value to set `padding_value` must be type of list." + self._model.padding_value = value + + @is_no_pad.setter + def is_no_pad(self, value): + assert isinstance( + value, bool), "The value to set `is_no_pad` must be type of bool." 
+ self._model.is_no_pad = value + + @is_mini_pad.setter + def is_mini_pad(self, value): + assert isinstance( + value, + bool), "The value to set `is_mini_pad` must be type of bool." + self._model.is_mini_pad = value + + @is_scale_up.setter + def is_scale_up(self, value): + assert isinstance( + value, + bool), "The value to set `is_scale_up` must be type of bool." + self._model.is_scale_up = value + + @stride.setter + def stride(self, value): + assert isinstance( + value, int), "The value to set `stride` must be type of int." + self._model.stride = value + + @max_wh.setter + def max_wh(self, value): + assert isinstance( + value, float), "The value to set `max_wh` must be type of float." + self._model.max_wh = value diff --git a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc index 4a10f47a76..6bde2a1841 100644 --- a/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc +++ b/fastdeploy/vision/wongkinyiu/wongkinyiu_pybind.cc @@ -17,7 +17,7 @@ namespace fastdeploy { void BindWongkinyiu(pybind11::module& m) { auto wongkinyiu_module = - m.def_submodule("wongkinyiu", "https://github.com/WongKinYiu/yolov7"); + m.def_submodule("wongkinyiu", "https://github.com/WongKinYiu"); pybind11::class_( wongkinyiu_module, "YOLOv7") .def(pybind11::init()) @@ -37,5 +37,24 @@ void BindWongkinyiu(pybind11::module& m) { .def_readwrite("is_scale_up", &vision::wongkinyiu::YOLOv7::is_scale_up) .def_readwrite("stride", &vision::wongkinyiu::YOLOv7::stride) .def_readwrite("max_wh", &vision::wongkinyiu::YOLOv7::max_wh); + + pybind11::class_( + wongkinyiu_module, "YOLOR") + .def(pybind11::init()) + .def("predict", + [](vision::wongkinyiu::YOLOR& self, pybind11::array& data, + float conf_threshold, float nms_iou_threshold) { + auto mat = PyArrayToCvMat(data); + vision::DetectionResult res; + self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); + return res; + }) + .def_readwrite("size", &vision::wongkinyiu::YOLOR::size) + 
.def_readwrite("padding_value", &vision::wongkinyiu::YOLOR::padding_value) + .def_readwrite("is_mini_pad", &vision::wongkinyiu::YOLOR::is_mini_pad) + .def_readwrite("is_no_pad", &vision::wongkinyiu::YOLOR::is_no_pad) + .def_readwrite("is_scale_up", &vision::wongkinyiu::YOLOR::is_scale_up) + .def_readwrite("stride", &vision::wongkinyiu::YOLOR::stride) + .def_readwrite("max_wh", &vision::wongkinyiu::YOLOR::max_wh); } } // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/yolor.cc b/fastdeploy/vision/wongkinyiu/yolor.cc new file mode 100644 index 0000000000..5cf9d6cb83 --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/yolor.cc @@ -0,0 +1,243 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision/wongkinyiu/yolor.h" +#include "fastdeploy/utils/perf.h" +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace wongkinyiu { + +void YOLOR::LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill, bool scale_up, int stride) { + float scale = + std::min(size[1] * 1.0 / mat->Height(), size[0] * 1.0 / mat->Width()); + if (!scale_up) { + scale = std::min(scale, 1.0f); + } + + int resize_h = int(round(mat->Height() * scale)); + int resize_w = int(round(mat->Width() * scale)); + + int pad_w = size[0] - resize_w; + int pad_h = size[1] - resize_h; + if (_auto) { + pad_h = pad_h % stride; + pad_w = pad_w % stride; + } else if (scale_fill) { + pad_h = 0; + pad_w = 0; + resize_h = size[1]; + resize_w = size[0]; + } + Resize::Run(mat, resize_w, resize_h); + if (pad_h > 0 || pad_w > 0) { + float half_h = pad_h * 1.0 / 2; + int top = int(round(half_h - 0.1)); + int bottom = int(round(half_h + 0.1)); + float half_w = pad_w * 1.0 / 2; + int left = int(round(half_w - 0.1)); + int right = int(round(half_w + 0.1)); + Pad::Run(mat, top, bottom, left, right, color); + } +} + +YOLOR::YOLOR(const std::string& model_file, const std::string& params_file, + const RuntimeOption& custom_option, const Frontend& model_format) { + if (model_format == Frontend::ONNX) { + valid_cpu_backends = {Backend::ORT}; // 指定可用的CPU后端 + valid_gpu_backends = {Backend::ORT, Backend::TRT}; // 指定可用的GPU后端 + } else { + valid_cpu_backends = {Backend::PDINFER, Backend::ORT}; + valid_gpu_backends = {Backend::PDINFER, Backend::ORT, Backend::TRT}; + } + runtime_option = custom_option; + runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool YOLOR::Initialize() { + // parameters for preprocess + size = {640, 640}; + padding_value = {114.0, 114.0, 114.0}; + is_mini_pad = false; + 
is_no_pad = false; + is_scale_up = false; + stride = 32; + max_wh = 7680.0; + + if (!InitRuntime()) { + FDERROR << "Failed to initialize fastdeploy backend." << std::endl; + return false; + } + return true; +} + +bool YOLOR::Preprocess(Mat* mat, FDTensor* output, + std::map>* im_info) { + // process after image load + double ratio = (size[0] * 1.0) / std::max(static_cast(mat->Height()), + static_cast(mat->Width())); + if (ratio != 1.0) { + int interp = cv::INTER_AREA; + if (ratio > 1.0) { + interp = cv::INTER_LINEAR; + } + int resize_h = int(mat->Height() * ratio); + int resize_w = int(mat->Width() * ratio); + Resize::Run(mat, resize_w, resize_h, -1, -1, interp); + } + // yolor's preprocess steps + // 1. letterbox + // 2. BGR->RGB + // 3. HWC->CHW + YOLOR::LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, + is_scale_up, stride); + BGR2RGB::Run(mat); + Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + std::vector(mat->Channels(), 1.0)); + + // Record output shape of preprocessed image + (*im_info)["output_shape"] = {static_cast(mat->Height()), + static_cast(mat->Width())}; + + HWC2CHW::Run(mat); + Cast::Run(mat, "float"); + mat->ShareWithTensor(output); + output->shape.insert(output->shape.begin(), 1); // reshape to n, h, w, c + return true; +} + +bool YOLOR::Postprocess( + FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold) { + FDASSERT(infer_result.shape[0] == 1, "Only support batch =1 now."); + result->Clear(); + result->Reserve(infer_result.shape[1]); + if (infer_result.dtype != FDDataType::FP32) { + FDERROR << "Only support post process with float32 data." 
<< std::endl; + return false; + } + float* data = static_cast(infer_result.Data()); + for (size_t i = 0; i < infer_result.shape[1]; ++i) { + int s = i * infer_result.shape[2]; + float confidence = data[s + 4]; + float* max_class_score = + std::max_element(data + s + 5, data + s + infer_result.shape[2]); + confidence *= (*max_class_score); + // filter boxes by conf_threshold + if (confidence <= conf_threshold) { + continue; + } + int32_t label_id = std::distance(data + s + 5, max_class_score); + // convert from [x, y, w, h] to [x1, y1, x2, y2] + result->boxes.emplace_back(std::array{ + data[s] - data[s + 2] / 2.0f + label_id * max_wh, + data[s + 1] - data[s + 3] / 2.0f + label_id * max_wh, + data[s + 0] + data[s + 2] / 2.0f + label_id * max_wh, + data[s + 1] + data[s + 3] / 2.0f + label_id * max_wh}); + result->label_ids.push_back(label_id); + result->scores.push_back(confidence); + } + utils::NMS(result, nms_iou_threshold); + + // scale the boxes to the origin image shape + auto iter_out = im_info.find("output_shape"); + auto iter_ipt = im_info.find("input_shape"); + FDASSERT(iter_out != im_info.end() && iter_ipt != im_info.end(), + "Cannot find input_shape or output_shape from im_info."); + float out_h = iter_out->second[0]; + float out_w = iter_out->second[1]; + float ipt_h = iter_ipt->second[0]; + float ipt_w = iter_ipt->second[1]; + float scale = std::min(out_h / ipt_h, out_w / ipt_w); + for (size_t i = 0; i < result->boxes.size(); ++i) { + float pad_h = (out_h - ipt_h * scale) / 2; + float pad_w = (out_w - ipt_w * scale) / 2; + int32_t label_id = (result->label_ids)[i]; + // clip box + result->boxes[i][0] = result->boxes[i][0] - max_wh * label_id; + result->boxes[i][1] = result->boxes[i][1] - max_wh * label_id; + result->boxes[i][2] = result->boxes[i][2] - max_wh * label_id; + result->boxes[i][3] = result->boxes[i][3] - max_wh * label_id; + result->boxes[i][0] = std::max((result->boxes[i][0] - pad_w) / scale, 0.0f); + result->boxes[i][1] = 
std::max((result->boxes[i][1] - pad_h) / scale, 0.0f); + result->boxes[i][2] = std::max((result->boxes[i][2] - pad_w) / scale, 0.0f); + result->boxes[i][3] = std::max((result->boxes[i][3] - pad_h) / scale, 0.0f); + result->boxes[i][0] = std::min(result->boxes[i][0], ipt_w); + result->boxes[i][1] = std::min(result->boxes[i][1], ipt_h); + result->boxes[i][2] = std::min(result->boxes[i][2], ipt_w); + result->boxes[i][3] = std::min(result->boxes[i][3], ipt_h); + } + return true; +} + +bool YOLOR::Predict(cv::Mat* im, DetectionResult* result, float conf_threshold, + float nms_iou_threshold) { +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_START(0) +#endif + + Mat mat(*im); + std::vector input_tensors(1); + + std::map> im_info; + + // Record the shape of image and the shape of preprocessed image + im_info["input_shape"] = {static_cast(mat.Height()), + static_cast(mat.Width())}; + im_info["output_shape"] = {static_cast(mat.Height()), + static_cast(mat.Width())}; + + if (!Preprocess(&mat, &input_tensors[0], &im_info)) { + FDERROR << "Failed to preprocess input image." << std::endl; + return false; + } + +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(0, "Preprocess") + TIMERECORD_START(1) +#endif + + input_tensors[0].name = InputInfoOfRuntime(0).name; + std::vector output_tensors; + if (!Infer(input_tensors, &output_tensors)) { + FDERROR << "Failed to inference." << std::endl; + return false; + } +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(1, "Inference") + TIMERECORD_START(2) +#endif + + if (!Postprocess(output_tensors[0], result, im_info, conf_threshold, + nms_iou_threshold)) { + FDERROR << "Failed to post process." 
<< std::endl; + return false; + } + +#ifdef FASTDEPLOY_DEBUG + TIMERECORD_END(2, "Postprocess") +#endif + return true; +} + +} // namespace wongkinyiu +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/wongkinyiu/yolor.h b/fastdeploy/vision/wongkinyiu/yolor.h new file mode 100644 index 0000000000..69f5ea8760 --- /dev/null +++ b/fastdeploy/vision/wongkinyiu/yolor.h @@ -0,0 +1,95 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +namespace fastdeploy { +namespace vision { +namespace wongkinyiu { + +class FASTDEPLOY_DECL YOLOR : public FastDeployModel { + public: + // 当model_format为ONNX时,无需指定params_file + // 当model_format为Paddle时,则需同时指定model_file & params_file + YOLOR(const std::string& model_file, const std::string& params_file = "", + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX); + + // 定义模型的名称 + virtual std::string ModelName() const { return "WongKinYiu/yolor"; } + + // 模型预测接口,即用户调用的接口 + // im 为用户的输入数据,目前对于CV均定义为cv::Mat + // result 为模型预测的输出结构体 + // conf_threshold 为后处理的参数 + // nms_iou_threshold 为后处理的参数 + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.25, + float nms_iou_threshold = 0.5); + + // 以下为模型在预测时的一些参数,基本是前后处理所需 + // 用户在创建模型后,可根据模型的要求,以及自己的需求 + // 对参数进行修改 + // tuple of (width, height) + std::vector size; + // padding value, size should be same with Channels + std::vector padding_value; + // only pad to the minimum rectange which height and width is times of stride + bool is_mini_pad; + // while is_mini_pad = false and is_no_pad = true, will resize the image to + // the set size + bool is_no_pad; + // if is_scale_up is false, the input image only can be zoom out, the maximum + // resize scale cannot exceed 1.0 + bool is_scale_up; + // padding stride, for is_mini_pad + int stride; + // for offseting the boxes by classes when using NMS + float max_wh; + + private: + // 初始化函数,包括初始化后端,以及其它模型推理需要涉及的操作 + bool Initialize(); + + // 输入图像预处理操作 + // Mat为FastDeploy定义的数据结构 + // FDTensor为预处理后的Tensor数据,传给后端进行推理 + // im_info为预处理过程保存的数据,在后处理中需要用到 + bool Preprocess(Mat* mat, FDTensor* outputs, + std::map>* im_info); + + // 后端推理结果后处理,输出给用户 + // infer_result 为后端推理后的输出Tensor + // result 为模型预测的结果 + // im_info 为预处理记录的信息,后处理用于还原box + // 
conf_threshold 后处理时过滤box的置信度阈值 + // nms_iou_threshold 后处理时NMS设定的iou阈值 + bool Postprocess(FDTensor& infer_result, DetectionResult* result, + const std::map>& im_info, + float conf_threshold, float nms_iou_threshold); + + // 对图片进行LetterBox处理 + // mat 为读取到的原图 + // size 为输入模型的图像尺寸 + void LetterBox(Mat* mat, const std::vector& size, + const std::vector& color, bool _auto, + bool scale_fill = false, bool scale_up = true, + int stride = 32); +}; +} // namespace wongkinyiu +} // namespace vision +} // namespace fastdeploy diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md deleted file mode 100644 index 42d18104ad..0000000000 --- a/model_zoo/vision/ppyoloe/README.md +++ /dev/null @@ -1,52 +0,0 @@ -# PaddleDetection/PPYOLOE部署示例 - -- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) - -本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下 -``` -. -├── cpp # C++ 代码目录 -│   ├── CMakeLists.txt # C++ 代码编译CMakeLists文件 -│   ├── README.md # C++ 代码编译部署文档 -│   └── ppyoloe.cc # C++ 示例代码 -├── README.md # PPYOLOE 部署文档 -└── ppyoloe.py # Python示例代码 -``` - -## 安装FastDeploy - -使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` -``` -# 安装fastdeploy-python工具 -pip install fastdeploy-python -``` - -## Python部署 - -执行如下代码即会自动下载PPYOLOE模型和测试图片 -``` -python ppyoloe.py -``` - -执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 -``` -DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 -414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 -163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 -267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 -581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 -104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 -348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 -364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 
-75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 -328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 -504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 -379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 -25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 -``` - -## 其它文档 - -- [C++部署](./cpp/README.md) -- [PPYOLOE API文档](./api.md) diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md deleted file mode 100644 index 1027c2eeb2..0000000000 --- a/model_zoo/vision/ppyoloe/cpp/README.md +++ /dev/null @@ -1,39 +0,0 @@ -# 编译PPYOLOE示例 - - -``` -# 下载和解压预测库 -wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz -tar xvf fastdeploy-linux-x64-0.0.3.tgz - -# 编译示例代码 -mkdir build & cd build -cmake .. -make -j - -# 下载模型和图片 -wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz -tar xvf ppyoloe_crn_l_300e_coco.tgz -wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg - -# 执行 -./ppyoloe_demo -``` - -执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 -``` -DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] -162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 -414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 -163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 -267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 -581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 -104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 -348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 -364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 -75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 -328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 -504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 -379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 -25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 -``` diff --git 
a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py deleted file mode 100644 index 7d79dfd8cf..0000000000 --- a/model_zoo/vision/ppyoloe/ppyoloe.py +++ /dev/null @@ -1,24 +0,0 @@ -import fastdeploy as fd -import cv2 - -# 下载模型和测试图片 -model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz" -test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg" -fd.download_and_decompress(model_url, ".") -fd.download(test_jpg_url, ".", show_progress=True) - -# 加载模型 -model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel", - "ppyoloe_crn_l_300e_coco/model.pdiparams", - "ppyoloe_crn_l_300e_coco/infer_cfg.yml") - -# 预测图片 -im = cv2.imread("000000014439_640x640.jpg") -result = model.predict(im, conf_threshold=0.5) - -# 可视化结果 -fd.vision.visualize.vis_detection(im, result) -cv2.imwrite("vis_result.jpg", im) - -# 输出预测结果 -print(result) diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md new file mode 100644 index 0000000000..467023f169 --- /dev/null +++ b/model_zoo/vision/yolor/README.md @@ -0,0 +1,67 @@ +# 编译YOLOR示例 + +当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) + +本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 + +``` +. 
+├── cpp +│   ├── CMakeLists.txt +│   ├── README.md +│   └── yolor.cc +├── README.md +└── yolor.py +``` + +## 获取ONNX文件 + +- 手动获取 + + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + + + + ``` + #下载yolor模型文件 + wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt + + # 导出onnx格式文件 + python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + + # 移动onnx文件到demo目录 + cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolor/ + ``` + +## 安装FastDeploy + +使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` + +``` +# 安装fastdeploy-python工具 +pip install fastdeploy-python + +# 安装vision-cpu模块 +fastdeploy install vision-cpu +``` +## Python部署 + +执行如下代码即会自动下载测试图片 +``` +python yolor.py +``` + +执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +0.000000,185.201431, 315.673126, 410.071594, 0.959289, 17 +433.802826,211.603455, 595.489319, 346.425537, 0.952615, 17 +230.446854,195.618805, 418.365479, 362.712128, 0.884253, 17 +336.545624,208.555618, 457.704315, 323.543152, 0.788450, 17 +0.896423,183.936996, 154.788727, 304.916412, 0.672804, 17 +``` + +## 其它文档 + +- [C++部署](./cpp/README.md) +- [YOLOR API文档](./api.md) diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/yolor/api.md similarity index 56% rename from model_zoo/vision/ppyoloe/api.md rename to model_zoo/vision/yolor/api.md index 1c5cbcaadb..b1e5be889b 100644 --- a/model_zoo/vision/ppyoloe/api.md +++ b/model_zoo/vision/yolor/api.md @@ -1,24 +1,23 @@ -# PPYOLOE API说明 +# YOLOR API说明 ## Python API -### PPYOLOE类 +### YOLOR类 ``` -fastdeploy.vision.ultralytics.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE) +fastdeploy.vision.wongkinyiu.YOLOR(model_file, params_file=None, runtime_option=None, model_format=fd.Frontend.ONNX) ``` -PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 
+YOLOR模型加载和初始化,当model_format为`fd.Frontend.ONNX`时,只需提供model_file,如`yolor.onnx`;当model_format为`fd.Frontend.PADDLE`时,则需同时提供model_file和params_file。 **参数** > * **model_file**(str): 模型文件路径 > * **params_file**(str): 参数文件路径 -> * **config_file**(str): 模型推理配置文件 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 #### predict函数 > ``` -> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> YOLOR.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) > ``` > 模型预测结口,输入图像直接输出检测结果。 > @@ -26,35 +25,33 @@ PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当 > > > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 > > * **conf_threshold**(float): 检测框置信度过滤阈值 -> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 -示例代码参考[ppyoloe.py](./ppyoloe.py) +示例代码参考[yolor.py](./yolor.py) ## C++ API -### PPYOLOE类 +### YOLOR类 ``` -fastdeploy::vision::ultralytics::PPYOLOE( +fastdeploy::vision::wongkinyiu::YOLOR( const string& model_file, - const string& params_file, - const string& config_file, + const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), const Frontend& model_format = Frontend::ONNX) ``` -PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 +YOLOR模型加载和初始化,当model_format为`Frontend::ONNX`时,只需提供model_file,如`yolor.onnx`;当model_format为`Frontend::PADDLE`时,则需同时提供model_file和params_file。 **参数** > * **model_file**(str): 模型文件路径 > * **params_file**(str): 参数文件路径 -> * **config_file**(str): 模型推理配置文件 > * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 > * **model_format**(Frontend): 模型格式 #### Predict函数 > ``` -> YOLOv5::Predict(cv::Mat* im, DetectionResult* result, +> YOLOR::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -65,9 +62,9 @@ PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当 > > * **im**: 输入图像,注意需为HWC,BGR格式 > > * **result**: 
检测结果,包括检测框,各个框的置信度 > > * **conf_threshold**: 检测框置信度过滤阈值 -> > * **nms_iou_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 -示例代码参考[cpp/yolov5.cc](cpp/yolov5.cc) +示例代码参考[cpp/yolor.cc](cpp/yolor.cc) ## 其它API使用 diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/yolor/cpp/CMakeLists.txt similarity index 75% rename from model_zoo/vision/ppyoloe/cpp/CMakeLists.txt rename to model_zoo/vision/yolor/cpp/CMakeLists.txt index e681566517..18248b8452 100644 --- a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt +++ b/model_zoo/vision/yolor/cpp/CMakeLists.txt @@ -1,4 +1,4 @@ -PROJECT(ppyoloe_demo C CXX) +PROJECT(yolor_demo C CXX) CMAKE_MINIMUM_REQUIRED (VERSION 3.16) # 在低版本ABI环境中,通过如下代码进行兼容性编译 @@ -12,6 +12,6 @@ include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) # 添加FastDeploy依赖头文件 include_directories(${FASTDEPLOY_INCS}) -add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc) +add_executable(yolor_demo ${PROJECT_SOURCE_DIR}/yolor.cc) # 添加FastDeploy库依赖 -target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS}) +target_link_libraries(yolor_demo ${FASTDEPLOY_LIBS}) diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md new file mode 100644 index 0000000000..eddf5bc51b --- /dev/null +++ b/model_zoo/vision/yolor/cpp/README.md @@ -0,0 +1,51 @@ +# 编译YOLOR示例 + +当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) + +## 获取ONNX文件 + +- 手动获取 + + 访问[YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + + ``` + #下载yolor模型文件 + wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt + + # 导出onnx格式文件 + python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + + ``` + + +## 运行demo + +``` +# 下载和解压预测库 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz +tar xvf fastdeploy-linux-x64-0.0.3.tgz + +# 编译示例代码 +mkdir build & cd build 
+cmake .. +make -j + +# 移动onnx文件到demo目录 +cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolor/cpp/build/ + +# 下载图片 +wget https://raw.githubusercontent.com/WongKinYiu/yolor/paper/inference/images/horses.jpg + +# 执行 +./yolor_demo +``` + +执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +0.000000,185.201431, 315.673126, 410.071594, 0.959289, 17 +433.802826,211.603455, 595.489319, 346.425537, 0.952615, 17 +230.446854,195.618805, 418.365479, 362.712128, 0.884253, 17 +336.545624,208.555618, 457.704315, 323.543152, 0.788450, 17 +0.896423,183.936996, 154.788727, 304.916412, 0.672804, 17 +``` diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/yolor/cpp/yolor.cc similarity index 66% rename from model_zoo/vision/ppyoloe/cpp/ppyoloe.cc rename to model_zoo/vision/yolor/cpp/yolor.cc index e63f29e62a..db194583fc 100644 --- a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc +++ b/model_zoo/vision/yolor/cpp/yolor.cc @@ -16,28 +16,18 @@ int main() { namespace vis = fastdeploy::vision; - - std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; - std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; - std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; - std::string img_path = "000000014439_640x640.jpg"; - std::string vis_path = "vis.jpeg"; - - auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + auto model = vis::wongkinyiu::YOLOR("yolor.onnx"); if (!model.Initialized()) { std::cerr << "Init Failed." << std::endl; return -1; } - - cv::Mat im = cv::imread(img_path); + cv::Mat im = cv::imread("horses.jpg"); cv::Mat vis_im = im.clone(); vis::DetectionResult res; if (!model.Predict(&im, &res)) { std::cerr << "Prediction Failed." << std::endl; return -1; - } else { - std::cout << "Prediction Done!" 
<< std::endl; } // 输出预测框结果 @@ -45,7 +35,6 @@ int main() { // 可视化预测结果 vis::Visualize::VisDetection(&vis_im, res); - cv::imwrite(vis_path, vis_im); - std::cout << "Detect Done! Saved: " << vis_path << std::endl; + cv::imwrite("vis_result.jpg", vis_im); return 0; } diff --git a/model_zoo/vision/yolor/yolor.py b/model_zoo/vision/yolor/yolor.py new file mode 100644 index 0000000000..56d3f9689e --- /dev/null +++ b/model_zoo/vision/yolor/yolor.py @@ -0,0 +1,21 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +test_jpg_url = "https://raw.githubusercontent.com/WongKinYiu/yolor/paper/inference/images/horses.jpg" +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.wongkinyiu.YOLOR("yolor.onnx") + +# 预测图片 +im = cv2.imread("horses.jpg") +result = model.predict(im, conf_threshold=0.25, nms_iou_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) +print(model.runtime_option) diff --git a/setup.py b/setup.py index e76f057b1c..5147025b4e 100644 --- a/setup.py +++ b/setup.py @@ -326,25 +326,14 @@ def run(self): shutil.copy("LICENSE", "fastdeploy") depend_libs = list() - if platform.system().lower() == "linux": - for f in os.listdir(".setuptools-cmake-build"): - full_name = os.path.join(".setuptools-cmake-build", f) - if not os.path.isfile(full_name): - continue - if not full_name.count("fastdeploy_main.cpython-"): - continue - if not full_name.endswith(".so"): - continue - # modify the search path of libraries - command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format( - full_name) - # The sw_64 not suppot patchelf, so we just disable that. 
- if platform.machine() != 'sw_64' and platform.machine( - ) != 'mips64': - assert os.system( - command - ) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format( - command) + # modify the search path of libraries + command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-37m-x86_64-linux-gnu.so" + # The sw_64 not suppot patchelf, so we just disable that. + if platform.machine() != 'sw_64' and platform.machine() != 'mips64': + assert os.system( + command + ) == 0, "patch fastdeploy_main.cpython-37m-x86_64-linux-gnu.so failed, the command: {}".format( + command) for f in os.listdir(".setuptools-cmake-build"): if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)): From 3aa015fd722877e7c449a25a9ad0eedbc6fc099a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 20 Jul 2022 07:58:07 +0000 Subject: [PATCH 36/94] for merge --- examples/CMakeLists.txt | 25 +-- examples/vision/ppdet_ppyoloe.cc | 51 ++++++ fastdeploy/__init__.py | 2 +- fastdeploy/download.py | 2 +- fastdeploy/utils/utils.h | 4 + fastdeploy/vision.h | 1 + fastdeploy/vision/__init__.py | 1 + .../vision/common/processors/convert.cc | 62 +++++++ fastdeploy/vision/common/processors/convert.h | 42 +++++ .../vision/common/processors/transform.h | 1 + fastdeploy/vision/meituan/yolov6.cc | 8 +- fastdeploy/vision/ppcls/model.cc | 13 ++ fastdeploy/vision/ppcls/model.h | 14 ++ fastdeploy/vision/ppcls/ppcls_pybind.cc | 2 +- fastdeploy/vision/ppdet/__init__.py | 39 ++++ fastdeploy/vision/ppdet/ppdet_pybind.cc | 32 ++++ fastdeploy/vision/ppdet/ppyoloe.cc | 170 ++++++++++++++++++ fastdeploy/vision/ppdet/ppyoloe.h | 44 +++++ fastdeploy/vision/ultralytics/yolov5.cc | 13 +- fastdeploy/vision/utils/sort_det_res.cc | 6 +- fastdeploy/vision/vision_pybind.cc | 6 +- fastdeploy/vision/visualize/detection.cc | 4 +- model_zoo/vision/ppyoloe/README.md | 52 ++++++ model_zoo/vision/ppyoloe/api.md | 74 ++++++++ 
model_zoo/vision/ppyoloe/cpp/CMakeLists.txt | 17 ++ model_zoo/vision/ppyoloe/cpp/README.md | 39 ++++ model_zoo/vision/ppyoloe/cpp/ppyoloe.cc | 51 ++++++ model_zoo/vision/ppyoloe/ppyoloe.py | 24 +++ setup.py | 27 ++- 29 files changed, 794 insertions(+), 32 deletions(-) create mode 100644 examples/vision/ppdet_ppyoloe.cc create mode 100644 fastdeploy/vision/common/processors/convert.cc create mode 100644 fastdeploy/vision/common/processors/convert.h create mode 100644 fastdeploy/vision/ppdet/__init__.py create mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 model_zoo/vision/ppyoloe/README.md create mode 100644 model_zoo/vision/ppyoloe/api.md create mode 100644 model_zoo/vision/ppyoloe/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/ppyoloe/cpp/README.md create mode 100644 model_zoo/vision/ppyoloe/cpp/ppyoloe.cc create mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 67361223c6..112193c86a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,25 +1,26 @@ -function(add_fastdeploy_executable field url model) +function(add_fastdeploy_executable FIELD CC_FILE) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) - set(TEMP_TARGET_NAME ${field}_${url}_${model}) + set(TEMP_TARGET_FILE ${CC_FILE}) + string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) + string(REGEX REPLACE ".cc" "" FILE_PREFIX ${FILE_NAME}) + set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! 
fastdeploy executable: [${TEMP_TARGET_NAME}] !") - else () - message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") + message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if (WITH_VISION_EXAMPLES) - add_fastdeploy_executable(vision ultralytics yolov5) - add_fastdeploy_executable(vision meituan yolov6) - add_fastdeploy_executable(vision wongkinyiu yolov7) - add_fastdeploy_executable(vision megvii yolox) - add_fastdeploy_executable(vision wongkinyiu yolor) +if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) + message(STATUS "") + message(STATUS "*************FastDeploy Examples Summary**********") + file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) + foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) + add_fastdeploy_executable(vision ${_CC_FILE}) + endforeach() endif() # other examples ... diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/ppdet_ppyoloe.cc new file mode 100644 index 0000000000..b234021c92 --- /dev/null +++ b/examples/vision/ppdet_ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "test.jpeg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! Saved: " << vis_path << std::endl; + return 0; +} diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py index 500e7cc42a..68006c1bed 100644 --- a/fastdeploy/__init__.py +++ b/fastdeploy/__init__.py @@ -17,7 +17,7 @@ from .fastdeploy_runtime import * from . import fastdeploy_main as C from . 
import vision -from .download import download +from .download import download, download_and_decompress def TensorInfoStr(tensor_info): diff --git a/fastdeploy/download.py b/fastdeploy/download.py index e00af098df..67f21d8e76 100644 --- a/fastdeploy/download.py +++ b/fastdeploy/download.py @@ -156,7 +156,7 @@ def decompress(fname): def url2dir(url, path, rename=None): full_name = download(url, path, rename, show_progress=True) - print("SDK is donwloaded, now extracting...") + print("File is donwloaded, now extracting...") if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0: return decompress(full_name) diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h index 23ca6ee51a..e605ee5a75 100644 --- a/fastdeploy/utils/utils.h +++ b/fastdeploy/utils/utils.h @@ -65,6 +65,10 @@ class FASTDEPLOY_DECL FDLogger { bool verbose_ = true; }; +#ifndef __REL_FILE__ +#define __REL_FILE__ __FILE__ +#endif + #define FDERROR \ FDLogger(true, "[ERROR]") << __REL_FILE__ << "(" << __LINE__ \ << ")::" << __FUNCTION__ << "\t" diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index 4398463251..68c0881cac 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -18,6 +18,7 @@ #include "fastdeploy/vision/megvii/yolox.h" #include "fastdeploy/vision/meituan/yolov6.h" #include "fastdeploy/vision/ppcls/model.h" +#include "fastdeploy/vision/ppdet/ppyoloe.h" #include "fastdeploy/vision/ultralytics/yolov5.h" #include "fastdeploy/vision/wongkinyiu/yolor.h" #include "fastdeploy/vision/wongkinyiu/yolov7.h" diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 7122bede0b..6acbf0c376 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -15,6 +15,7 @@ from . import evaluation from . import ppcls +from . import ppdet from . import ultralytics from . import meituan from . 
import megvii diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc new file mode 100644 index 0000000000..a7ca6de07a --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.cc @@ -0,0 +1,62 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision/common/processors/convert.h" + +namespace fastdeploy { + +namespace vision { + +Convert::Convert(const std::vector& alpha, + const std::vector& beta) { + FDASSERT(alpha.size() == beta.size(), + "Convert: requires the size of alpha equal to the size of beta."); + FDASSERT(alpha.size() != 0, + "Convert: requires the size of alpha and beta > 0."); + alpha_.assign(alpha.begin(), alpha.end()); + beta_.assign(beta.begin(), beta.end()); +} + +bool Convert::CpuRun(Mat* mat) { + cv::Mat* im = mat->GetCpuMat(); + std::vector split_im; + cv::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::merge(split_im, *im); + return true; +} + +#ifdef ENABLE_OPENCV_CUDA +bool Convert::GpuRun(Mat* mat) { + cv::cuda::GpuMat* im = mat->GetGpuMat(); + std::vector split_im; + cv::cuda::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::cuda::merge(split_im, *im); + return true; +} +#endif + +bool 
Convert::Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, ProcLib lib) { + auto c = Convert(alpha, beta); + return c(mat, lib); +} + +} // namespace vision +} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h new file mode 100644 index 0000000000..5d5a5276f5 --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.h @@ -0,0 +1,42 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "fastdeploy/vision/common/processors/base.h" + +namespace fastdeploy { +namespace vision { +class Convert : public Processor { + public: + Convert(const std::vector& alpha, const std::vector& beta); + + bool CpuRun(Mat* mat); +#ifdef ENABLE_OPENCV_CUDA + bool GpuRun(Mat* mat); +#endif + std::string Name() { return "Convert"; } + + // Compute `result = mat * alpha + beta` directly by channel. + // The default behavior is the same as OpenCV's convertTo method. 
+ static bool Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, + ProcLib lib = ProcLib::OPENCV_CPU); + + private: + std::vector alpha_; + std::vector beta_; +}; +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 12eec8d72d..08073b4e42 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,6 +17,7 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" +#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index b75f2016ee..8ac7377194 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -129,8 +129,12 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index 915cb97512..a89a1e4731 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ 
-1,3 +1,16 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index fae99d4f3c..71800a7d76 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,3 +1,17 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ #pragma once #include "fastdeploy/fastdeploy_model.h" #include "fastdeploy/vision/common/processors/transform.h" diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc index 828bef3c7a..10ff5ee109 100644 --- a/fastdeploy/vision/ppcls/ppcls_pybind.cc +++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc @@ -14,7 +14,7 @@ #include "fastdeploy/pybind/main.h" namespace fastdeploy { -void BindPpClsModel(pybind11::module& m) { +void BindPPCls(pybind11::module& m) { auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas."); pybind11::class_(ppcls_module, "Model") .def(pybind11::init(ppdet_module, + "PPYOLOE") + .def(pybind11::init()) + .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data, + float conf_threshold, float nms_iou_threshold) { + auto mat = PyArrayToCvMat(data); + vision::DetectionResult res; + self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); + return res; + }); +} +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc new file mode 100644 index 0000000000..c215ecb0ca --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.cc @@ -0,0 +1,170 @@ +#include "fastdeploy/vision/ppdet/ppyoloe.h" +#include "fastdeploy/vision/utils/utils.h" +#include "yaml-cpp/yaml.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option, + const Frontend& model_format) { + config_file_ = config_file; + valid_cpu_backends = {Backend::ORT, Backend::PDINFER}; + valid_gpu_backends = {Backend::ORT, Backend::PDINFER}; + runtime_option = custom_option; + runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool PPYOLOE::Initialize() { + if (!BuildPreprocessPipelineFromConfig()) { + 
std::cout << "Failed to build preprocess pipeline from configuration file." + << std::endl; + return false; + } + if (!InitRuntime()) { + std::cout << "Failed to initialize fastdeploy backend." << std::endl; + return false; + } + return true; +} + +bool PPYOLOE::BuildPreprocessPipelineFromConfig() { + processors_.clear(); + YAML::Node cfg; + try { + cfg = YAML::LoadFile(config_file_); + } catch (YAML::BadFile& e) { + std::cout << "Failed to load yaml file " << config_file_ + << ", maybe you should check this file." << std::endl; + return false; + } + + if (cfg["arch"].as() != "YOLO") { + std::cout << "Require the arch of model is YOLO, but arch defined in " + "config file is " + << cfg["arch"].as() << "." << std::endl; + return false; + } + processors_.push_back(std::make_shared()); + + for (const auto& op : cfg["Preprocess"]) { + std::string op_name = op["type"].as(); + if (op_name == "NormalizeImage") { + auto mean = op["mean"].as>(); + auto std = op["std"].as>(); + bool is_scale = op["is_scale"].as(); + processors_.push_back(std::make_shared(mean, std, is_scale)); + } else if (op_name == "Resize") { + bool keep_ratio = op["keep_ratio"].as(); + auto target_size = op["target_size"].as>(); + int interp = op["interp"].as(); + FDASSERT(target_size.size(), + "Require size of target_size be 2, but now it's " + + std::to_string(target_size.size()) + "."); + FDASSERT(!keep_ratio, + "Only support keep_ratio is false while deploy " + "PaddleDetection model."); + int width = target_size[1]; + int height = target_size[0]; + processors_.push_back( + std::make_shared(width, height, -1.0, -1.0, interp, false)); + } else if (op_name == "Permute") { + processors_.push_back(std::make_shared()); + } else { + std::cout << "Unexcepted preprocess operator: " << op_name << "." 
+ << std::endl; + return false; + } + } + return true; +} + +bool PPYOLOE::Preprocess(Mat* mat, std::vector* outputs) { + int origin_w = mat->Width(); + int origin_h = mat->Height(); + for (size_t i = 0; i < processors_.size(); ++i) { + if (!(*(processors_[i].get()))(mat)) { + std::cout << "Failed to process image data in " << processors_[i]->Name() + << "." << std::endl; + return false; + } + } + + outputs->resize(2); + (*outputs)[0].name = InputInfoOfRuntime(0).name; + mat->ShareWithTensor(&((*outputs)[0])); + + // reshape to [1, c, h, w] + (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1); + + (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name); + float* ptr = static_cast((*outputs)[1].MutableData()); + ptr[0] = mat->Height() * 1.0 / mat->Height(); + ptr[1] = mat->Width() * 1.0 / mat->Width(); + return true; +} + +bool PPYOLOE::Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold) { + FDASSERT(infer_result[1].shape[0] == 1, + "Only support batch = 1 in FastDeploy now."); + int box_num = 0; + if (infer_result[1].dtype == FDDataType::INT32) { + box_num = *(static_cast(infer_result[1].Data())); + } else if (infer_result[1].dtype == FDDataType::INT64) { + box_num = *(static_cast(infer_result[1].Data())); + } else { + FDASSERT( + false, + "The output box_num of PPYOLOE model should be type of int32/int64."); + } + result->Reserve(box_num); + float* box_data = static_cast(infer_result[0].Data()); + for (size_t i = 0; i < box_num; ++i) { + if (box_data[i * 6 + 1] < conf_threshold) { + continue; + } + result->label_ids.push_back(box_data[i * 6]); + result->scores.push_back(box_data[i * 6 + 1]); + result->boxes.emplace_back( + std::array{box_data[i * 6 + 2], box_data[i * 6 + 3], + box_data[i * 6 + 4] - box_data[i * 6 + 2], + box_data[i * 6 + 5] - box_data[i * 6 + 3]}); + } + return true; +} + +bool PPYOLOE::Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold, float 
iou_threshold) { + Mat mat(*im); + std::vector processed_data; + if (!Preprocess(&mat, &processed_data)) { + FDERROR << "Failed to preprocess input data while using model:" + << ModelName() << "." << std::endl; + return false; + } + + std::vector infer_result; + if (!Infer(processed_data, &infer_result)) { + FDERROR << "Failed to inference while using model:" << ModelName() << "." + << std::endl; + return false; + } + + if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) { + FDERROR << "Failed to postprocess while using model:" << ModelName() << "." + << std::endl; + return false; + } + return true; +} + +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h new file mode 100644 index 0000000000..a3db268ca4 --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.h @@ -0,0 +1,44 @@ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel { + public: + PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::PADDLE); + + std::string ModelName() const { return "PaddleDetection/PPYOLOE"; } + + virtual bool Initialize(); + + virtual bool BuildPreprocessPipelineFromConfig(); + + virtual bool Preprocess(Mat* mat, std::vector* outputs); + + virtual bool Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold); + + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.5, float nms_threshold = 0.7); + + private: + std::vector> processors_; + std::string 
config_file_; + // PaddleDetection can export model without nms + // This flag will help us to handle the different + // situation + bool has_nms_; +}; +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc index c8c6e06a94..b2e6009b1c 100644 --- a/fastdeploy/vision/ultralytics/yolov5.cc +++ b/fastdeploy/vision/ultralytics/yolov5.cc @@ -126,8 +126,12 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), @@ -198,6 +202,11 @@ bool YOLOv5::Postprocess( result->scores.push_back(confidence); } } + + if (result->boxes.size() == 0) { + return true; + } + utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index e4a0db9761..790126a6ac 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -68,7 +68,11 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) { void SortDetectionResult(DetectionResult* result) { size_t low = 0; - size_t high = result->scores.size() - 1; + size_t high = result->scores.size(); + if (high == 0) { + return; + } + high = high - 1; MergeSort(result, low, high); } diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 
e4ba05b893..0334303ce6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -16,7 +16,8 @@ namespace fastdeploy { -void BindPpClsModel(pybind11::module& m); +void BindPPCls(pybind11::module& m); +void BindPPDet(pybind11::module& m); void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); void BindMeituan(pybind11::module& m); @@ -41,7 +42,8 @@ void BindVision(pybind11::module& m) { .def("__repr__", &vision::DetectionResult::Str) .def("__str__", &vision::DetectionResult::Str); - BindPpClsModel(m); + BindPPCls(m); + BindPPDet(m); BindUltralytics(m); BindWongkinyiu(m); BindMeituan(m); diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc index e5f01bdd35..6d60072447 100644 --- a/fastdeploy/vision/visualize/detection.cc +++ b/fastdeploy/vision/visualize/detection.cc @@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, } std::string text = id + "," + score; int font = cv::FONT_HERSHEY_SIMPLEX; - cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr); + cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr); cv::Point origin; origin.x = rect.x; origin.y = rect.y; @@ -52,7 +52,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, text_size.width, text_size.height); cv::rectangle(*im, rect, rect_color, line_size); cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255), - 0.5); + 1); } } diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md new file mode 100644 index 0000000000..42d18104ad --- /dev/null +++ b/model_zoo/vision/ppyoloe/README.md @@ -0,0 +1,52 @@ +# PaddleDetection/PPYOLOE部署示例 + +- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) + +本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下 +``` +. 
+├── cpp # C++ 代码目录 +│   ├── CMakeLists.txt # C++ 代码编译CMakeLists文件 +│   ├── README.md # C++ 代码编译部署文档 +│   └── ppyoloe.cc # C++ 示例代码 +├── README.md # PPYOLOE 部署文档 +└── ppyoloe.py # Python示例代码 +``` + +## 安装FastDeploy + +使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` +``` +# 安装fastdeploy-python工具 +pip install fastdeploy-python +``` + +## Python部署 + +执行如下代码即会自动下载PPYOLOE模型和测试图片 +``` +python ppyoloe.py +``` + +执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 +414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 +163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 +267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 +581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 +104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 +348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 +364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 +75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 +328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 +504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 +379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 +25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 +``` + +## 其它文档 + +- [C++部署](./cpp/README.md) +- [PPYOLOE API文档](./api.md) diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/ppyoloe/api.md new file mode 100644 index 0000000000..1c5cbcaadb --- /dev/null +++ b/model_zoo/vision/ppyoloe/api.md @@ -0,0 +1,74 @@ +# PPYOLOE API说明 + +## Python API + +### PPYOLOE类 +``` +fastdeploy.vision.ultralytics.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE) +``` +PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **config_file**(str): 模型推理配置文件 +> * **runtime_option**(RuntimeOption): 
后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### predict函数 +> ``` +> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) + +示例代码参考[ppyoloe.py](./ppyoloe.py) + + +## C++ API + +### PPYOLOE类 +``` +fastdeploy::vision::ultralytics::PPYOLOE( + const string& model_file, + const string& params_file, + const string& config_file, + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` +PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **config_file**(str): 模型推理配置文件 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### Predict函数 +> ``` +> YOLOv5::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度 +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) + +示例代码参考[cpp/yolov5.cc](cpp/yolov5.cc) + +## 其它API使用 + +- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md) diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt new file mode 100644 index 0000000000..e681566517 --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt @@ -0,0 +1,17 @@ +PROJECT(ppyoloe_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.16) + +# 在低版本ABI环境中,通过如下代码进行兼容性编译 +# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) + +# 指定下载解压后的fastdeploy库路径 +set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.3.0/) + 
+include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc) +# 添加FastDeploy库依赖 +target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS}) diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md new file mode 100644 index 0000000000..1027c2eeb2 --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/README.md @@ -0,0 +1,39 @@ +# 编译PPYOLOE示例 + + +``` +# 下载和解压预测库 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz +tar xvf fastdeploy-linux-x64-0.0.3.tgz + +# 编译示例代码 +mkdir build & cd build +cmake .. +make -j + +# 下载模型和图片 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz +tar xvf ppyoloe_crn_l_300e_coco.tgz +wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg + +# 执行 +./ppyoloe_demo +``` + +执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 +414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 +163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 +267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 +581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 +104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 +348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 +364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 +75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 +328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 +504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 +379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 +25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 +``` diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc new file mode 100644 index 0000000000..e63f29e62a --- 
/dev/null +++ b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "000000014439_640x640.jpg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! 
Saved: " << vis_path << std::endl; + return 0; +} diff --git a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py new file mode 100644 index 0000000000..7d79dfd8cf --- /dev/null +++ b/model_zoo/vision/ppyoloe/ppyoloe.py @@ -0,0 +1,24 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz" +test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg" +fd.download_and_decompress(model_url, ".") +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel", + "ppyoloe_crn_l_300e_coco/model.pdiparams", + "ppyoloe_crn_l_300e_coco/infer_cfg.yml") + +# 预测图片 +im = cv2.imread("000000014439_640x640.jpg") +result = model.predict(im, conf_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) diff --git a/setup.py b/setup.py index 5147025b4e..e76f057b1c 100644 --- a/setup.py +++ b/setup.py @@ -326,14 +326,25 @@ def run(self): shutil.copy("LICENSE", "fastdeploy") depend_libs = list() - # modify the search path of libraries - command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-37m-x86_64-linux-gnu.so" - # The sw_64 not suppot patchelf, so we just disable that. 
- if platform.machine() != 'sw_64' and platform.machine() != 'mips64': - assert os.system( - command - ) == 0, "patch fastdeploy_main.cpython-37m-x86_64-linux-gnu.so failed, the command: {}".format( - command) + if platform.system().lower() == "linux": + for f in os.listdir(".setuptools-cmake-build"): + full_name = os.path.join(".setuptools-cmake-build", f) + if not os.path.isfile(full_name): + continue + if not full_name.count("fastdeploy_main.cpython-"): + continue + if not full_name.endswith(".so"): + continue + # modify the search path of libraries + command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format( + full_name) + # The sw_64 not suppot patchelf, so we just disable that. + if platform.machine() != 'sw_64' and platform.machine( + ) != 'mips64': + assert os.system( + command + ) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format( + command) for f in os.listdir(".setuptools-cmake-build"): if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)): From d6b98aa507ac785796541dfe18822204879376bf Mon Sep 17 00:00:00 2001 From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com> Date: Wed, 20 Jul 2022 15:59:53 +0800 Subject: [PATCH 37/94] Develop (#11) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * 
add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> --- examples/CMakeLists.txt | 26 +-- examples/vision/ppdet_ppyoloe.cc | 51 ++++++ fastdeploy/__init__.py | 2 +- fastdeploy/download.py | 2 +- fastdeploy/utils/utils.h | 4 + fastdeploy/vision.h | 1 + fastdeploy/vision/__init__.py | 1 + .../vision/common/processors/convert.cc | 62 +++++++ fastdeploy/vision/common/processors/convert.h | 42 +++++ .../vision/common/processors/transform.h | 1 + fastdeploy/vision/meituan/yolov6.cc | 28 +-- fastdeploy/vision/ppcls/model.cc | 19 +- fastdeploy/vision/ppcls/model.h | 16 +- fastdeploy/vision/ppcls/ppcls_pybind.cc | 2 +- fastdeploy/vision/ppdet/__init__.py | 39 ++++ fastdeploy/vision/ppdet/ppdet_pybind.cc | 32 ++++ fastdeploy/vision/ppdet/ppyoloe.cc | 170 ++++++++++++++++++ fastdeploy/vision/ppdet/ppyoloe.h | 44 +++++ fastdeploy/vision/ultralytics/yolov5.cc | 19 +- fastdeploy/vision/utils/sort_det_res.cc | 6 +- fastdeploy/vision/vision_pybind.cc | 10 +- fastdeploy/vision/visualize/detection.cc | 4 +- model_zoo/vision/ppyoloe/README.md | 52 ++++++ model_zoo/vision/ppyoloe/api.md | 74 ++++++++ model_zoo/vision/ppyoloe/cpp/CMakeLists.txt | 17 ++ model_zoo/vision/ppyoloe/cpp/README.md | 39 ++++ model_zoo/vision/ppyoloe/cpp/ppyoloe.cc | 51 ++++++ model_zoo/vision/ppyoloe/ppyoloe.py | 24 +++ setup.py | 30 +++- 29 files 
changed, 818 insertions(+), 50 deletions(-) create mode 100644 examples/vision/ppdet_ppyoloe.cc create mode 100644 fastdeploy/vision/common/processors/convert.cc create mode 100644 fastdeploy/vision/common/processors/convert.h create mode 100644 fastdeploy/vision/ppdet/__init__.py create mode 100644 fastdeploy/vision/ppdet/ppdet_pybind.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.cc create mode 100644 fastdeploy/vision/ppdet/ppyoloe.h create mode 100644 model_zoo/vision/ppyoloe/README.md create mode 100644 model_zoo/vision/ppyoloe/api.md create mode 100644 model_zoo/vision/ppyoloe/cpp/CMakeLists.txt create mode 100644 model_zoo/vision/ppyoloe/cpp/README.md create mode 100644 model_zoo/vision/ppyoloe/cpp/ppyoloe.cc create mode 100644 model_zoo/vision/ppyoloe/ppyoloe.py diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 1e2dc43bd4..112193c86a 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,24 +1,26 @@ -function(add_fastdeploy_executable field url model) +function(add_fastdeploy_executable FIELD CC_FILE) # temp target name/file var in function scope - set(TEMP_TARGET_FILE ${PROJECT_SOURCE_DIR}/examples/${field}/${url}_${model}.cc) - set(TEMP_TARGET_NAME ${field}_${url}_${model}) + set(TEMP_TARGET_FILE ${CC_FILE}) + string(REGEX MATCHALL "[0-9A-Za-z_]*.cc" FILE_NAME ${CC_FILE}) + string(REGEX REPLACE ".cc" "" FILE_PREFIX ${FILE_NAME}) + set(TEMP_TARGET_NAME ${FIELD}_${FILE_PREFIX}) if (EXISTS ${TEMP_TARGET_FILE} AND TARGET fastdeploy) add_executable(${TEMP_TARGET_NAME} ${TEMP_TARGET_FILE}) target_link_libraries(${TEMP_TARGET_NAME} PUBLIC fastdeploy) - message(STATUS "Found source file: [${field}/${url}_${model}.cc], ADD!!! fastdeploy executable: [${TEMP_TARGET_NAME}] !") - else () - message(WARNING "Can not found source file: [${field}/${url}_${model}.cc], SKIP!!! 
fastdeploy executable: [${TEMP_TARGET_NAME}] !") + message(STATUS " Added FastDeploy Executable : ${TEMP_TARGET_NAME}") endif() unset(TEMP_TARGET_FILE) unset(TEMP_TARGET_NAME) endfunction() # vision examples -if (WITH_VISION_EXAMPLES) - add_fastdeploy_executable(vision ultralytics yolov5) - add_fastdeploy_executable(vision meituan yolov6) - add_fastdeploy_executable(vision wongkinyiu yolov7) - add_fastdeploy_executable(vision megvii yolox) +if(WITH_VISION_EXAMPLES AND EXISTS ${PROJECT_SOURCE_DIR}/examples/vision) + message(STATUS "") + message(STATUS "*************FastDeploy Examples Summary**********") + file(GLOB ALL_VISION_EXAMPLE_SRCS ${PROJECT_SOURCE_DIR}/examples/vision/*.cc) + foreach(_CC_FILE ${ALL_VISION_EXAMPLE_SRCS}) + add_fastdeploy_executable(vision ${_CC_FILE}) + endforeach() endif() -# other examples ... \ No newline at end of file +# other examples ... diff --git a/examples/vision/ppdet_ppyoloe.cc b/examples/vision/ppdet_ppyoloe.cc new file mode 100644 index 0000000000..b234021c92 --- /dev/null +++ b/examples/vision/ppdet_ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "test.jpeg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! Saved: " << vis_path << std::endl; + return 0; +} diff --git a/fastdeploy/__init__.py b/fastdeploy/__init__.py index 500e7cc42a..68006c1bed 100644 --- a/fastdeploy/__init__.py +++ b/fastdeploy/__init__.py @@ -17,7 +17,7 @@ from .fastdeploy_runtime import * from . import fastdeploy_main as C from . 
import vision -from .download import download +from .download import download, download_and_decompress def TensorInfoStr(tensor_info): diff --git a/fastdeploy/download.py b/fastdeploy/download.py index e00af098df..67f21d8e76 100644 --- a/fastdeploy/download.py +++ b/fastdeploy/download.py @@ -156,7 +156,7 @@ def decompress(fname): def url2dir(url, path, rename=None): full_name = download(url, path, rename, show_progress=True) - print("SDK is donwloaded, now extracting...") + print("File is donwloaded, now extracting...") if url.count(".tgz") > 0 or url.count(".tar") > 0 or url.count("zip") > 0: return decompress(full_name) diff --git a/fastdeploy/utils/utils.h b/fastdeploy/utils/utils.h index 1b9f625b5e..9312084265 100644 --- a/fastdeploy/utils/utils.h +++ b/fastdeploy/utils/utils.h @@ -64,6 +64,10 @@ class FASTDEPLOY_DECL FDLogger { bool verbose_ = true; }; +#ifndef __REL_FILE__ +#define __REL_FILE__ __FILE__ +#endif + #define FDERROR \ FDLogger(true, "[ERROR]") \ << __REL_FILE__ << "(" << __LINE__ << ")::" << __FUNCTION__ << "\t" diff --git a/fastdeploy/vision.h b/fastdeploy/vision.h index ac3f006c0a..cafe310c70 100644 --- a/fastdeploy/vision.h +++ b/fastdeploy/vision.h @@ -16,6 +16,7 @@ #include "fastdeploy/core/config.h" #ifdef ENABLE_VISION #include "fastdeploy/vision/ppcls/model.h" +#include "fastdeploy/vision/ppdet/ppyoloe.h" #include "fastdeploy/vision/ultralytics/yolov5.h" #include "fastdeploy/vision/wongkinyiu/yolov7.h" #include "fastdeploy/vision/meituan/yolov6.h" diff --git a/fastdeploy/vision/__init__.py b/fastdeploy/vision/__init__.py index 7122bede0b..6acbf0c376 100644 --- a/fastdeploy/vision/__init__.py +++ b/fastdeploy/vision/__init__.py @@ -15,6 +15,7 @@ from . import evaluation from . import ppcls +from . import ppdet from . import ultralytics from . import meituan from . 
import megvii diff --git a/fastdeploy/vision/common/processors/convert.cc b/fastdeploy/vision/common/processors/convert.cc new file mode 100644 index 0000000000..a7ca6de07a --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.cc @@ -0,0 +1,62 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision/common/processors/convert.h" + +namespace fastdeploy { + +namespace vision { + +Convert::Convert(const std::vector& alpha, + const std::vector& beta) { + FDASSERT(alpha.size() == beta.size(), + "Convert: requires the size of alpha equal to the size of beta."); + FDASSERT(alpha.size() != 0, + "Convert: requires the size of alpha and beta > 0."); + alpha_.assign(alpha.begin(), alpha.end()); + beta_.assign(beta.begin(), beta.end()); +} + +bool Convert::CpuRun(Mat* mat) { + cv::Mat* im = mat->GetCpuMat(); + std::vector split_im; + cv::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::merge(split_im, *im); + return true; +} + +#ifdef ENABLE_OPENCV_CUDA +bool Convert::GpuRun(Mat* mat) { + cv::cuda::GpuMat* im = mat->GetGpuMat(); + std::vector split_im; + cv::cuda::split(*im, split_im); + for (int c = 0; c < im->channels(); c++) { + split_im[c].convertTo(split_im[c], CV_32FC1, alpha_[c], beta_[c]); + } + cv::cuda::merge(split_im, *im); + return true; +} +#endif + +bool 
Convert::Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, ProcLib lib) { + auto c = Convert(alpha, beta); + return c(mat, lib); +} + +} // namespace vision +} // namespace fastdeploy \ No newline at end of file diff --git a/fastdeploy/vision/common/processors/convert.h b/fastdeploy/vision/common/processors/convert.h new file mode 100644 index 0000000000..5d5a5276f5 --- /dev/null +++ b/fastdeploy/vision/common/processors/convert.h @@ -0,0 +1,42 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#pragma once + +#include "fastdeploy/vision/common/processors/base.h" + +namespace fastdeploy { +namespace vision { +class Convert : public Processor { + public: + Convert(const std::vector& alpha, const std::vector& beta); + + bool CpuRun(Mat* mat); +#ifdef ENABLE_OPENCV_CUDA + bool GpuRun(Mat* mat); +#endif + std::string Name() { return "Convert"; } + + // Compute `result = mat * alpha + beta` directly by channel. + // The default behavior is the same as OpenCV's convertTo method. 
+ static bool Run(Mat* mat, const std::vector& alpha, + const std::vector& beta, + ProcLib lib = ProcLib::OPENCV_CPU); + + private: + std::vector alpha_; + std::vector beta_; +}; +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/common/processors/transform.h b/fastdeploy/vision/common/processors/transform.h index 12eec8d72d..08073b4e42 100644 --- a/fastdeploy/vision/common/processors/transform.h +++ b/fastdeploy/vision/common/processors/transform.h @@ -17,6 +17,7 @@ #include "fastdeploy/vision/common/processors/cast.h" #include "fastdeploy/vision/common/processors/center_crop.h" #include "fastdeploy/vision/common/processors/color_space_convert.h" +#include "fastdeploy/vision/common/processors/convert.h" #include "fastdeploy/vision/common/processors/hwc2chw.h" #include "fastdeploy/vision/common/processors/normalize.h" #include "fastdeploy/vision/common/processors/pad.h" diff --git a/fastdeploy/vision/meituan/yolov6.cc b/fastdeploy/vision/meituan/yolov6.cc index 8f37bf89c6..8ac7377194 100644 --- a/fastdeploy/vision/meituan/yolov6.cc +++ b/fastdeploy/vision/meituan/yolov6.cc @@ -25,14 +25,14 @@ namespace meituan { void LetterBox(Mat* mat, std::vector size, std::vector color, bool _auto, bool scale_fill = false, bool scale_up = true, int stride = 32) { - float scale = std::min(size[1] * 1.0f / static_cast(mat->Height()), - size[0] * 1.0f / static_cast(mat->Width())); + float scale = std::min(size[1] * 1.0f / static_cast(mat->Height()), + size[0] * 1.0f / static_cast(mat->Width())); if (!scale_up) { scale = std::min(scale, 1.0f); } int resize_h = int(round(static_cast(mat->Height()) * scale)); - int resize_w = int(round(static_cast(mat->Width()) * scale)); + int resize_w = int(round(static_cast(mat->Width()) * scale)); int pad_w = size[0] - resize_w; int pad_h = size[1] - resize_h; @@ -85,13 +85,13 @@ bool YOLOv6::Initialize() { is_scale_up = false; stride = 32; max_wh = 4096.0f; - + if (!InitRuntime()) { FDERROR << "Failed to initialize 
fastdeploy backend." << std::endl; return false; } - // Check if the input shape is dynamic after Runtime already initialized, - // Note that, We need to force is_mini_pad 'false' to keep static + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. is_dynamic_input_ = false; auto shape = InputInfoOfRuntime(0).shape; @@ -102,7 +102,7 @@ bool YOLOv6::Initialize() { break; } } - if (!is_dynamic_input_) { + if (!is_dynamic_input_) { is_mini_pad = false; } return true; @@ -111,15 +111,15 @@ bool YOLOv6::Initialize() { bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, std::map>* im_info) { // process after image load - float ratio = std::min(size[1] * 1.0f / static_cast(mat->Height()), - size[0] * 1.0f / static_cast(mat->Width())); + float ratio = std::min(size[1] * 1.0f / static_cast(mat->Height()), + size[0] * 1.0f / static_cast(mat->Width())); if (ratio != 1.0) { int interp = cv::INTER_AREA; if (ratio > 1.0) { interp = cv::INTER_LINEAR; } int resize_h = int(round(static_cast(mat->Height()) * ratio)); - int resize_w = int(round(static_cast(mat->Width()) * ratio)); + int resize_w = int(round(static_cast(mat->Width()) * ratio)); Resize::Run(mat, resize_w, resize_h, -1, -1, interp); } // yolov6's preprocess steps @@ -129,8 +129,12 @@ bool YOLOv6::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of 
preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), diff --git a/fastdeploy/vision/ppcls/model.cc b/fastdeploy/vision/ppcls/model.cc index 915cb97512..c4e5b767c7 100644 --- a/fastdeploy/vision/ppcls/model.cc +++ b/fastdeploy/vision/ppcls/model.cc @@ -1,3 +1,16 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. #include "fastdeploy/vision/ppcls/model.h" #include "fastdeploy/vision/utils/utils.h" @@ -135,6 +148,6 @@ bool Model::Predict(cv::Mat* im, ClassifyResult* result, int topk) { return true; } -} // namespace ppcls -} // namespace vision -} // namespace fastdeploy +} // namespace ppcls +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppcls/model.h b/fastdeploy/vision/ppcls/model.h index 36841d74c6..265f92d32b 100644 --- a/fastdeploy/vision/ppcls/model.h +++ b/fastdeploy/vision/ppcls/model.h @@ -1,7 +1,21 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + #pragma once #include "fastdeploy/fastdeploy_model.h" -#include "fastdeploy/vision/common/result.h" #include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" namespace fastdeploy { namespace vision { diff --git a/fastdeploy/vision/ppcls/ppcls_pybind.cc b/fastdeploy/vision/ppcls/ppcls_pybind.cc index ef3fffee8e..1abc0b2b7c 100644 --- a/fastdeploy/vision/ppcls/ppcls_pybind.cc +++ b/fastdeploy/vision/ppcls/ppcls_pybind.cc @@ -14,7 +14,7 @@ #include "fastdeploy/pybind/main.h" namespace fastdeploy { -void BindPpClsModel(pybind11::module& m) { +void BindPPCls(pybind11::module& m) { auto ppcls_module = m.def_submodule("ppcls", "Module to deploy PaddleClas."); pybind11::class_(ppcls_module, "Model") .def(pybind11::init(ppdet_module, + "PPYOLOE") + .def(pybind11::init()) + .def("predict", [](vision::ppdet::PPYOLOE& self, pybind11::array& data, + float conf_threshold, float nms_iou_threshold) { + auto mat = PyArrayToCvMat(data); + vision::DetectionResult res; + self.Predict(&mat, &res, conf_threshold, nms_iou_threshold); + return res; + }); +} +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.cc b/fastdeploy/vision/ppdet/ppyoloe.cc new file mode 100644 index 0000000000..c215ecb0ca --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.cc @@ -0,0 +1,170 @@ +#include "fastdeploy/vision/ppdet/ppyoloe.h" +#include "fastdeploy/vision/utils/utils.h" +#include "yaml-cpp/yaml.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +PPYOLOE::PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option, + const Frontend& model_format) { + config_file_ = config_file; + valid_cpu_backends = {Backend::ORT, Backend::PDINFER}; + valid_gpu_backends = {Backend::ORT, Backend::PDINFER}; + runtime_option = custom_option; + 
runtime_option.model_format = model_format; + runtime_option.model_file = model_file; + runtime_option.params_file = params_file; + initialized = Initialize(); +} + +bool PPYOLOE::Initialize() { + if (!BuildPreprocessPipelineFromConfig()) { + std::cout << "Failed to build preprocess pipeline from configuration file." + << std::endl; + return false; + } + if (!InitRuntime()) { + std::cout << "Failed to initialize fastdeploy backend." << std::endl; + return false; + } + return true; +} + +bool PPYOLOE::BuildPreprocessPipelineFromConfig() { + processors_.clear(); + YAML::Node cfg; + try { + cfg = YAML::LoadFile(config_file_); + } catch (YAML::BadFile& e) { + std::cout << "Failed to load yaml file " << config_file_ + << ", maybe you should check this file." << std::endl; + return false; + } + + if (cfg["arch"].as() != "YOLO") { + std::cout << "Require the arch of model is YOLO, but arch defined in " + "config file is " + << cfg["arch"].as() << "." << std::endl; + return false; + } + processors_.push_back(std::make_shared()); + + for (const auto& op : cfg["Preprocess"]) { + std::string op_name = op["type"].as(); + if (op_name == "NormalizeImage") { + auto mean = op["mean"].as>(); + auto std = op["std"].as>(); + bool is_scale = op["is_scale"].as(); + processors_.push_back(std::make_shared(mean, std, is_scale)); + } else if (op_name == "Resize") { + bool keep_ratio = op["keep_ratio"].as(); + auto target_size = op["target_size"].as>(); + int interp = op["interp"].as(); + FDASSERT(target_size.size(), + "Require size of target_size be 2, but now it's " + + std::to_string(target_size.size()) + "."); + FDASSERT(!keep_ratio, + "Only support keep_ratio is false while deploy " + "PaddleDetection model."); + int width = target_size[1]; + int height = target_size[0]; + processors_.push_back( + std::make_shared(width, height, -1.0, -1.0, interp, false)); + } else if (op_name == "Permute") { + processors_.push_back(std::make_shared()); + } else { + std::cout << "Unexcepted preprocess 
operator: " << op_name << "." + << std::endl; + return false; + } + } + return true; +} + +bool PPYOLOE::Preprocess(Mat* mat, std::vector* outputs) { + int origin_w = mat->Width(); + int origin_h = mat->Height(); + for (size_t i = 0; i < processors_.size(); ++i) { + if (!(*(processors_[i].get()))(mat)) { + std::cout << "Failed to process image data in " << processors_[i]->Name() + << "." << std::endl; + return false; + } + } + + outputs->resize(2); + (*outputs)[0].name = InputInfoOfRuntime(0).name; + mat->ShareWithTensor(&((*outputs)[0])); + + // reshape to [1, c, h, w] + (*outputs)[0].shape.insert((*outputs)[0].shape.begin(), 1); + + (*outputs)[1].Allocate({1, 2}, FDDataType::FP32, InputInfoOfRuntime(1).name); + float* ptr = static_cast((*outputs)[1].MutableData()); + ptr[0] = mat->Height() * 1.0 / mat->Height(); + ptr[1] = mat->Width() * 1.0 / mat->Width(); + return true; +} + +bool PPYOLOE::Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold) { + FDASSERT(infer_result[1].shape[0] == 1, + "Only support batch = 1 in FastDeploy now."); + int box_num = 0; + if (infer_result[1].dtype == FDDataType::INT32) { + box_num = *(static_cast(infer_result[1].Data())); + } else if (infer_result[1].dtype == FDDataType::INT64) { + box_num = *(static_cast(infer_result[1].Data())); + } else { + FDASSERT( + false, + "The output box_num of PPYOLOE model should be type of int32/int64."); + } + result->Reserve(box_num); + float* box_data = static_cast(infer_result[0].Data()); + for (size_t i = 0; i < box_num; ++i) { + if (box_data[i * 6 + 1] < conf_threshold) { + continue; + } + result->label_ids.push_back(box_data[i * 6]); + result->scores.push_back(box_data[i * 6 + 1]); + result->boxes.emplace_back( + std::array{box_data[i * 6 + 2], box_data[i * 6 + 3], + box_data[i * 6 + 4] - box_data[i * 6 + 2], + box_data[i * 6 + 5] - box_data[i * 6 + 3]}); + } + return true; +} + +bool PPYOLOE::Predict(cv::Mat* im, DetectionResult* result, 
+ float conf_threshold, float iou_threshold) { + Mat mat(*im); + std::vector processed_data; + if (!Preprocess(&mat, &processed_data)) { + FDERROR << "Failed to preprocess input data while using model:" + << ModelName() << "." << std::endl; + return false; + } + + std::vector infer_result; + if (!Infer(processed_data, &infer_result)) { + FDERROR << "Failed to inference while using model:" << ModelName() << "." + << std::endl; + return false; + } + + if (!Postprocess(infer_result, result, conf_threshold, iou_threshold)) { + FDERROR << "Failed to postprocess while using model:" << ModelName() << "." + << std::endl; + return false; + } + return true; +} + +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ppdet/ppyoloe.h b/fastdeploy/vision/ppdet/ppyoloe.h new file mode 100644 index 0000000000..a3db268ca4 --- /dev/null +++ b/fastdeploy/vision/ppdet/ppyoloe.h @@ -0,0 +1,44 @@ +#pragma once +#include "fastdeploy/fastdeploy_model.h" +#include "fastdeploy/vision/common/processors/transform.h" +#include "fastdeploy/vision/common/result.h" + +#include "fastdeploy/vision/utils/utils.h" + +namespace fastdeploy { +namespace vision { +namespace ppdet { + +class FASTDEPLOY_DECL PPYOLOE : public FastDeployModel { + public: + PPYOLOE(const std::string& model_file, const std::string& params_file, + const std::string& config_file, + const RuntimeOption& custom_option = RuntimeOption(), + const Frontend& model_format = Frontend::PADDLE); + + std::string ModelName() const { return "PaddleDetection/PPYOLOE"; } + + virtual bool Initialize(); + + virtual bool BuildPreprocessPipelineFromConfig(); + + virtual bool Preprocess(Mat* mat, std::vector* outputs); + + virtual bool Postprocess(std::vector& infer_result, + DetectionResult* result, float conf_threshold, + float nms_threshold); + + virtual bool Predict(cv::Mat* im, DetectionResult* result, + float conf_threshold = 0.5, float nms_threshold = 0.7); + + private: + std::vector> 
processors_; + std::string config_file_; + // PaddleDetection can export model without nms + // This flag will help us to handle the different + // situation + bool has_nms_; +}; +} // namespace ppdet +} // namespace vision +} // namespace fastdeploy diff --git a/fastdeploy/vision/ultralytics/yolov5.cc b/fastdeploy/vision/ultralytics/yolov5.cc index 193cfe9794..0b7e50e735 100644 --- a/fastdeploy/vision/ultralytics/yolov5.cc +++ b/fastdeploy/vision/ultralytics/yolov5.cc @@ -87,8 +87,8 @@ bool YOLOv5::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } - // Check if the input shape is dynamic after Runtime already initialized, - // Note that, We need to force is_mini_pad 'false' to keep static + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. is_dynamic_input_ = false; auto shape = InputInfoOfRuntime(0).shape; @@ -99,7 +99,7 @@ bool YOLOv5::Initialize() { break; } } - if (!is_dynamic_input_) { + if (!is_dynamic_input_) { is_mini_pad = false; } return true; @@ -126,8 +126,12 @@ bool YOLOv5::Preprocess(Mat* mat, FDTensor* output, LetterBox(mat, size, padding_value, is_mini_pad, is_no_pad, is_scale_up, stride); BGR2RGB::Run(mat); - Normalize::Run(mat, std::vector(mat->Channels(), 0.0), - std::vector(mat->Channels(), 1.0)); + // Normalize::Run(mat, std::vector(mat->Channels(), 0.0), + // std::vector(mat->Channels(), 1.0)); + // Compute `result = mat * alpha + beta` directly by channel + std::vector alpha = {1.0f / 255.0f, 1.0f / 255.0f, 1.0f / 255.0f}; + std::vector beta = {0.0f, 0.0f, 0.0f}; + Convert::Run(mat, alpha, beta); // Record output shape of preprocessed image (*im_info)["output_shape"] = {static_cast(mat->Height()), @@ -198,6 +202,11 @@ bool YOLOv5::Postprocess( result->scores.push_back(confidence); } } + + if (result->boxes.size() == 0) { + return 
true; + } + utils::NMS(result, nms_iou_threshold); // scale the boxes to the origin image shape diff --git a/fastdeploy/vision/utils/sort_det_res.cc b/fastdeploy/vision/utils/sort_det_res.cc index e4a0db9761..93dbb69694 100644 --- a/fastdeploy/vision/utils/sort_det_res.cc +++ b/fastdeploy/vision/utils/sort_det_res.cc @@ -68,7 +68,11 @@ void MergeSort(DetectionResult* result, size_t low, size_t high) { void SortDetectionResult(DetectionResult* result) { size_t low = 0; - size_t high = result->scores.size() - 1; + size_t high = result->scores.size(); + if (high == 0) { + return; + } + high = high - 1; MergeSort(result, low, high); } diff --git a/fastdeploy/vision/vision_pybind.cc b/fastdeploy/vision/vision_pybind.cc index 41ada5541a..0334303ce6 100644 --- a/fastdeploy/vision/vision_pybind.cc +++ b/fastdeploy/vision/vision_pybind.cc @@ -16,7 +16,8 @@ namespace fastdeploy { -void BindPpClsModel(pybind11::module& m); +void BindPPCls(pybind11::module& m); +void BindPPDet(pybind11::module& m); void BindWongkinyiu(pybind11::module& m); void BindUltralytics(pybind11::module& m); void BindMeituan(pybind11::module& m); @@ -41,13 +42,14 @@ void BindVision(pybind11::module& m) { .def("__repr__", &vision::DetectionResult::Str) .def("__str__", &vision::DetectionResult::Str); - BindPpClsModel(m); + BindPPCls(m); + BindPPDet(m); BindUltralytics(m); BindWongkinyiu(m); BindMeituan(m); BindMegvii(m); #ifdef ENABLE_VISION_VISUALIZE BindVisualize(m); -#endif +#endif } -} // namespace fastdeploy +} // namespace fastdeploy diff --git a/fastdeploy/vision/visualize/detection.cc b/fastdeploy/vision/visualize/detection.cc index d0c4116148..5b5538bff7 100644 --- a/fastdeploy/vision/visualize/detection.cc +++ b/fastdeploy/vision/visualize/detection.cc @@ -43,7 +43,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, } std::string text = id + "," + score; int font = cv::FONT_HERSHEY_SIMPLEX; - cv::Size text_size = cv::getTextSize(text, font, font_size, 0.5, nullptr); + 
cv::Size text_size = cv::getTextSize(text, font, font_size, 1, nullptr); cv::Point origin; origin.x = rect.x; origin.y = rect.y; @@ -52,7 +52,7 @@ void Visualize::VisDetection(cv::Mat* im, const DetectionResult& result, text_size.width, text_size.height); cv::rectangle(*im, rect, rect_color, line_size); cv::putText(*im, text, origin, font, font_size, cv::Scalar(255, 255, 255), - 0.5); + 1); } } diff --git a/model_zoo/vision/ppyoloe/README.md b/model_zoo/vision/ppyoloe/README.md new file mode 100644 index 0000000000..42d18104ad --- /dev/null +++ b/model_zoo/vision/ppyoloe/README.md @@ -0,0 +1,52 @@ +# PaddleDetection/PPYOLOE部署示例 + +- 当前支持PaddleDetection版本为[release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) + +本文档说明如何进行[PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe)的快速部署推理。本目录结构如下 +``` +. +├── cpp # C++ 代码目录 +│   ├── CMakeLists.txt # C++ 代码编译CMakeLists文件 +│   ├── README.md # C++ 代码编译部署文档 +│   └── ppyoloe.cc # C++ 示例代码 +├── README.md # PPYOLOE 部署文档 +└── ppyoloe.py # Python示例代码 +``` + +## 安装FastDeploy + +使用如下命令安装FastDeploy,注意到此处安装的是`vision-cpu`,也可根据需求安装`vision-gpu` +``` +# 安装fastdeploy-python工具 +pip install fastdeploy-python +``` + +## Python部署 + +执行如下代码即会自动下载PPYOLOE模型和测试图片 +``` +python ppyoloe.py +``` + +执行完成后会将可视化结果保存在本地`vis_result.jpg`,同时输出检测结果如下 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 +414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 +163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 +267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 +581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 +104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 +348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 +364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 +75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 +328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 
+504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 +379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 +25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 +``` + +## 其它文档 + +- [C++部署](./cpp/README.md) +- [PPYOLOE API文档](./api.md) diff --git a/model_zoo/vision/ppyoloe/api.md b/model_zoo/vision/ppyoloe/api.md new file mode 100644 index 0000000000..1c5cbcaadb --- /dev/null +++ b/model_zoo/vision/ppyoloe/api.md @@ -0,0 +1,74 @@ +# PPYOLOE API说明 + +## Python API + +### PPYOLOE类 +``` +fastdeploy.vision.ultralytics.PPYOLOE(model_file, params_file, config_file, runtime_option=None, model_format=fd.Frontend.PADDLE) +``` +PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **config_file**(str): 模型推理配置文件 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### predict函数 +> ``` +> PPYOLOE.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) + +示例代码参考[ppyoloe.py](./ppyoloe.py) + + +## C++ API + +### PPYOLOE类 +``` +fastdeploy::vision::ultralytics::PPYOLOE( + const string& model_file, + const string& params_file, + const string& config_file, + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` +PPYOLOE模型加载和初始化,需同时提供model_file和params_file, 当前仅支持model_format为Paddle格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径 +> * **config_file**(str): 模型推理配置文件 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式 + +#### Predict函数 +> ``` +> YOLOv5::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> 
``` +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度 +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值(当模型中包含nms处理时,此参数自动无效) + +示例代码参考[cpp/yolov5.cc](cpp/yolov5.cc) + +## 其它API使用 + +- [模型部署RuntimeOption配置](../../../docs/api/runtime_option.md) diff --git a/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt new file mode 100644 index 0000000000..e681566517 --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/CMakeLists.txt @@ -0,0 +1,17 @@ +PROJECT(ppyoloe_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.16) + +# 在低版本ABI环境中,通过如下代码进行兼容性编译 +# add_definitions(-D_GLIBCXX_USE_CXX11_ABI=0) + +# 指定下载解压后的fastdeploy库路径 +set(FASTDEPLOY_INSTALL_DIR ${PROJECT_SOURCE_DIR}/fastdeploy-linux-x64-0.3.0/) + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(ppyoloe_demo ${PROJECT_SOURCE_DIR}/ppyoloe.cc) +# 添加FastDeploy库依赖 +target_link_libraries(ppyoloe_demo ${FASTDEPLOY_LIBS}) diff --git a/model_zoo/vision/ppyoloe/cpp/README.md b/model_zoo/vision/ppyoloe/cpp/README.md new file mode 100644 index 0000000000..1027c2eeb2 --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/README.md @@ -0,0 +1,39 @@ +# 编译PPYOLOE示例 + + +``` +# 下载和解压预测库 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/fastdeploy-linux-x64-0.0.3.tgz +tar xvf fastdeploy-linux-x64-0.0.3.tgz + +# 编译示例代码 +mkdir build & cd build +cmake .. 
+make -j + +# 下载模型和图片 +wget https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz +tar xvf ppyoloe_crn_l_300e_coco.tgz +wget https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg + +# 执行 +./ppyoloe_demo +``` + +执行完后可视化的结果保存在本地`vis_result.jpg`,同时会将检测框输出在终端,如下所示 +``` +DetectionResult: [xmin, ymin, xmax, ymax, score, label_id] +162.380249,132.057449, 463.178345, 413.167114, 0.962918, 33 +414.914642,141.148666, 91.275269, 308.688293, 0.951003, 0 +163.449234,129.669067, 35.253891, 135.111786, 0.900734, 0 +267.232239,142.290436, 31.578918, 126.329773, 0.848709, 0 +581.790833,179.027115, 30.893127, 135.484940, 0.837986, 0 +104.407021,72.602615, 22.900627, 75.469055, 0.796468, 0 +348.795380,70.122147, 18.806061, 85.829330, 0.785557, 0 +364.118683,92.457428, 17.437622, 89.212891, 0.774282, 0 +75.180283,192.470490, 41.898407, 55.552414, 0.712569, 56 +328.133759,61.894299, 19.100616, 65.633575, 0.710519, 0 +504.797760,181.732574, 107.740814, 248.115082, 0.708902, 0 +379.063080,64.762360, 15.956146, 68.312546, 0.680725, 0 +25.858747,186.564178, 34.958130, 56.007080, 0.580415, 0 +``` diff --git a/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc new file mode 100644 index 0000000000..e63f29e62a --- /dev/null +++ b/model_zoo/vision/ppyoloe/cpp/ppyoloe.cc @@ -0,0 +1,51 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +int main() { + namespace vis = fastdeploy::vision; + + std::string model_file = "ppyoloe_crn_l_300e_coco/model.pdmodel"; + std::string params_file = "ppyoloe_crn_l_300e_coco/model.pdiparams"; + std::string config_file = "ppyoloe_crn_l_300e_coco/infer_cfg.yml"; + std::string img_path = "000000014439_640x640.jpg"; + std::string vis_path = "vis.jpeg"; + + auto model = vis::ppdet::PPYOLOE(model_file, params_file, config_file); + if (!model.Initialized()) { + std::cerr << "Init Failed." << std::endl; + return -1; + } + + cv::Mat im = cv::imread(img_path); + cv::Mat vis_im = im.clone(); + + vis::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Prediction Failed." << std::endl; + return -1; + } else { + std::cout << "Prediction Done!" << std::endl; + } + + // 输出预测框结果 + std::cout << res.Str() << std::endl; + + // 可视化预测结果 + vis::Visualize::VisDetection(&vis_im, res); + cv::imwrite(vis_path, vis_im); + std::cout << "Detect Done! 
Saved: " << vis_path << std::endl; + return 0; +} diff --git a/model_zoo/vision/ppyoloe/ppyoloe.py b/model_zoo/vision/ppyoloe/ppyoloe.py new file mode 100644 index 0000000000..7d79dfd8cf --- /dev/null +++ b/model_zoo/vision/ppyoloe/ppyoloe.py @@ -0,0 +1,24 @@ +import fastdeploy as fd +import cv2 + +# 下载模型和测试图片 +model_url = "https://bj.bcebos.com/paddle2onnx/fastdeploy/models/ppdet/ppyoloe_crn_l_300e_coco.tgz" +test_jpg_url = "https://raw.githubusercontent.com/PaddlePaddle/PaddleDetection/release/2.4/demo/000000014439_640x640.jpg" +fd.download_and_decompress(model_url, ".") +fd.download(test_jpg_url, ".", show_progress=True) + +# 加载模型 +model = fd.vision.ppdet.PPYOLOE("ppyoloe_crn_l_300e_coco/model.pdmodel", + "ppyoloe_crn_l_300e_coco/model.pdiparams", + "ppyoloe_crn_l_300e_coco/infer_cfg.yml") + +# 预测图片 +im = cv2.imread("000000014439_640x640.jpg") +result = model.predict(im, conf_threshold=0.5) + +# 可视化结果 +fd.vision.visualize.vis_detection(im, result) +cv2.imwrite("vis_result.jpg", im) + +# 输出预测结果 +print(result) diff --git a/setup.py b/setup.py index f0ff3f16de..e76f057b1c 100644 --- a/setup.py +++ b/setup.py @@ -49,7 +49,8 @@ setup_configs["ENABLE_TRT_BACKEND"] = os.getenv("ENABLE_TRT_BACKEND", "OFF") setup_configs["WITH_GPU"] = os.getenv("WITH_GPU", "OFF") setup_configs["TRT_DIRECTORY"] = os.getenv("TRT_DIRECTORY", "UNDEFINED") -setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY", "/usr/local/cuda") +setup_configs["CUDA_DIRECTORY"] = os.getenv("CUDA_DIRECTORY", + "/usr/local/cuda") TOP_DIR = os.path.realpath(os.path.dirname(__file__)) SRC_DIR = os.path.join(TOP_DIR, "fastdeploy") @@ -325,17 +326,32 @@ def run(self): shutil.copy("LICENSE", "fastdeploy") depend_libs = list() - # modify the search path of libraries - command = "patchelf --set-rpath '$ORIGIN/libs/' .setuptools-cmake-build/fastdeploy_main.cpython-36m-x86_64-linux-gnu.so" - # The sw_64 not suppot patchelf, so we just disable that. 
- if platform.machine() != 'sw_64' and platform.machine() != 'mips64': - assert os.system(command) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format(command) + if platform.system().lower() == "linux": + for f in os.listdir(".setuptools-cmake-build"): + full_name = os.path.join(".setuptools-cmake-build", f) + if not os.path.isfile(full_name): + continue + if not full_name.count("fastdeploy_main.cpython-"): + continue + if not full_name.endswith(".so"): + continue + # modify the search path of libraries + command = "patchelf --set-rpath '$ORIGIN/libs/' {}".format( + full_name) + # The sw_64 not suppot patchelf, so we just disable that. + if platform.machine() != 'sw_64' and platform.machine( + ) != 'mips64': + assert os.system( + command + ) == 0, "patch fastdeploy_main.cpython-36m-x86_64-linux-gnu.so failed, the command: {}".format( + command) for f in os.listdir(".setuptools-cmake-build"): if not os.path.isfile(os.path.join(".setuptools-cmake-build", f)): continue if f.count("libfastdeploy") > 0: - shutil.copy(os.path.join(".setuptools-cmake-build", f), "fastdeploy/libs") + shutil.copy( + os.path.join(".setuptools-cmake-build", f), "fastdeploy/libs") for dirname in os.listdir(".setuptools-cmake-build/third_libs/install"): for lib in os.listdir( os.path.join(".setuptools-cmake-build/third_libs/install", From 013921ac21f7a77aa9a7f6ca98bb25990b4d9c19 Mon Sep 17 00:00:00 2001 From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com> Date: Thu, 21 Jul 2022 10:40:44 +0800 Subject: [PATCH 38/94] Yolor (#16) * Develop (#11) (#12) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need 
data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> * Develop (#13) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add 
version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> * documents * documents * documents * documents * documents * documents * documents * documents * documents * documents * documents * documents * Develop (#14) * Fix compile problem in different python version (#26) * fix some usage problem in linux * Fix compile problem Co-authored-by: root * Add PaddleDetetion/PPYOLOE model support (#22) * add ppdet/ppyoloe * Add demo code and documents * add convert processor to vision (#27) * update .gitignore * Added checking for cmake include dir * fixed missing trt_backend option bug when init from trt * remove un-need data layout and add pre-check for dtype * changed RGB2BRG to BGR2RGB in ppcls model * add model_zoo yolov6 c++/python demo * fixed CMakeLists.txt typos * update yolov6 cpp/README.md * add yolox c++/pybind and model_zoo demo * move some helpers to private * fixed CMakeLists.txt typos * add normalize with alpha and beta * add version notes for yolov5/yolov6/yolox * add copyright to yolov5.cc * revert normalize * fixed some bugs in yolox * fixed examples/CMakeLists.txt to avoid conflicts * add convert processor to vision * format examples/CMakeLists summary * Fix bug while the inference result is empty with YOLOv5 (#29) * Add multi-label function for yolov5 * 
Update README.md Update doc * Update fastdeploy_runtime.cc fix variable option.trt_max_shape wrong name * Update runtime_option.md Update resnet model dynamic shape setting name from images to x * Fix bug when inference result boxes are empty * Delete detection.py Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> Co-authored-by: Jason Co-authored-by: root Co-authored-by: DefTruth <31974251+DefTruth@users.noreply.github.com> Co-authored-by: huangjianhui <852142024@qq.com> Co-authored-by: Jason <928090362@qq.com> --- fastdeploy/vision/wongkinyiu/__init__.py | 2 +- model_zoo/vision/yolor/README.md | 13 ++++++------- model_zoo/vision/yolor/cpp/README.md | 14 ++++++++------ model_zoo/vision/yolov7/README.md | 4 ++-- model_zoo/vision/yolov7/cpp/README.md | 4 ++-- 5 files changed, 19 insertions(+), 18 deletions(-) diff --git a/fastdeploy/vision/wongkinyiu/__init__.py b/fastdeploy/vision/wongkinyiu/__init__.py index 026d10062f..3c77e85896 100644 --- a/fastdeploy/vision/wongkinyiu/__init__.py +++ b/fastdeploy/vision/wongkinyiu/__init__.py @@ -135,7 +135,7 @@ def predict(self, input_image, conf_threshold=0.25, nms_iou_threshold=0.5): return self._model.predict(input_image, conf_threshold, nms_iou_threshold) - # 一些跟YOLOv7模型有关的属性封装 + # 一些跟YOLOR模型有关的属性封装 # 多数是预处理相关,可通过修改如model.size = [1280, 1280]改变预处理时resize的大小(前提是模型支持) @property def size(self): diff --git a/model_zoo/vision/yolor/README.md b/model_zoo/vision/yolor/README.md index 467023f169..358e62bbe1 100644 --- a/model_zoo/vision/yolor/README.md +++ b/model_zoo/vision/yolor/README.md @@ -1,6 +1,7 @@ # 编译YOLOR示例 -当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) +当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). 
本文档说明如何进行[YOLOR](https://github.com/WongKinYiu/yolor)的快速部署推理。本目录结构如下 @@ -18,19 +19,17 @@ - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 - - + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[yolor#32](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 ``` #下载yolor模型文件 - wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt + wget https://github.com/WongKinYiu/yolor/releases/download/weights/yolor-d6-paper-570.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 # 移动onnx文件到demo目录 - cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolor/ + cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ ``` ## 安装FastDeploy diff --git a/model_zoo/vision/yolor/cpp/README.md b/model_zoo/vision/yolor/cpp/README.md index eddf5bc51b..d06bbe3005 100644 --- a/model_zoo/vision/yolor/cpp/README.md +++ b/model_zoo/vision/yolor/cpp/README.md @@ -1,20 +1,22 @@ # 编译YOLOR示例 -当前支持模型版本为:[YOLOR v0.1](https://github.com/WongKinYiu/yolor/releases/tag/weights) - +当前支持模型版本为:[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) +(tips: 如果使用 `git clone` 的方式下载仓库代码,请将分支切换(checkout)到 `paper` 分支). 
## 获取ONNX文件 - 手动获取 - 访问[YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。 + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[yolor#32](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 ``` #下载yolor模型文件 - wget https://github.com/WongKinYiu/yolor/releases/download/v0.1/yolor.pt + wget https://github.com/WongKinYiu/yolor/releases/download/weights/yolor-d6-paper-570.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 + # 移动onnx文件到demo目录 + cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/ ``` @@ -31,7 +33,7 @@ cmake .. make -j # 移动onnx文件到demo目录 -cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolor/cpp/build/ +cp PATH/TO/yolor.onnx PATH/TO/model_zoo/vision/yolor/cpp/build/ # 下载图片 wget https://raw.githubusercontent.com/WongKinYiu/yolor/paper/inference/images/horses.jpg diff --git a/model_zoo/vision/yolov7/README.md b/model_zoo/vision/yolov7/README.md index 2bb13ce459..8b2f06d761 100644 --- a/model_zoo/vision/yolov7/README.md +++ b/model_zoo/vision/yolov7/README.md @@ -27,10 +27,10 @@ wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt # 导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt # 移动onnx文件到demo目录 - cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/ + cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` ## 安装FastDeploy diff --git a/model_zoo/vision/yolov7/cpp/README.md b/model_zoo/vision/yolov7/cpp/README.md index f216c1aecf..655e98678c 100644 --- a/model_zoo/vision/yolov7/cpp/README.md +++ b/model_zoo/vision/yolov7/cpp/README.md @@ -13,7 +13,7 @@ wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt # 
导出onnx格式文件 - python models/export.py --grid --dynamic --weights PATH/TO/yolo7.pt + python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt ``` @@ -31,7 +31,7 @@ cmake .. make -j # 移动onnx文件到demo目录 -cp PATH/TO/yolo7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ +cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/cpp/build/ # 下载图片 wget https://raw.githubusercontent.com/WongKinYiu/yolov7/main/inference/images/horses.jpg From 90ca4cb0cd2c29a657dbe544d570b4498e4e35d7 Mon Sep 17 00:00:00 2001 From: ziqi-jin <67993288+ziqi-jin@users.noreply.github.com> Date: Fri, 29 Jul 2022 14:49:38 +0800 Subject: [PATCH 39/94] add is_dynamic for YOLO series (#22) --- csrcs/fastdeploy/vision/ppogg/yolov5lite.cc | 15 +++++++++++++++ csrcs/fastdeploy/vision/ppogg/yolov5lite.h | 10 ++++++++++ .../vision/wongkinyiu/scaledyolov4.cc | 15 +++++++++++++++ .../fastdeploy/vision/wongkinyiu/scaledyolov4.h | 10 ++++++++++ csrcs/fastdeploy/vision/wongkinyiu/yolor.cc | 17 ++++++++++++++++- csrcs/fastdeploy/vision/wongkinyiu/yolor.h | 10 ++++++++++ csrcs/fastdeploy/vision/wongkinyiu/yolov7.cc | 17 ++++++++++++++++- csrcs/fastdeploy/vision/wongkinyiu/yolov7.h | 10 ++++++++++ 8 files changed, 102 insertions(+), 2 deletions(-) diff --git a/csrcs/fastdeploy/vision/ppogg/yolov5lite.cc b/csrcs/fastdeploy/vision/ppogg/yolov5lite.cc index 320867f581..a84ead937a 100644 --- a/csrcs/fastdeploy/vision/ppogg/yolov5lite.cc +++ b/csrcs/fastdeploy/vision/ppogg/yolov5lite.cc @@ -118,6 +118,21 @@ bool YOLOv5Lite::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static + // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. 
+ is_dynamic_input_ = false; + auto shape = InputInfoOfRuntime(0).shape; + for (int i = 0; i < shape.size(); ++i) { + // if height or width is dynamic + if (i >= 2 && shape[i] <= 0) { + is_dynamic_input_ = true; + break; + } + } + if (!is_dynamic_input_) { + is_mini_pad = false; + } return true; } diff --git a/csrcs/fastdeploy/vision/ppogg/yolov5lite.h b/csrcs/fastdeploy/vision/ppogg/yolov5lite.h index 3eb556cfa3..669240e211 100644 --- a/csrcs/fastdeploy/vision/ppogg/yolov5lite.h +++ b/csrcs/fastdeploy/vision/ppogg/yolov5lite.h @@ -126,6 +126,16 @@ class FASTDEPLOY_DECL YOLOv5Lite : public FastDeployModel { void GenerateAnchors(const std::vector& size, const std::vector& downsample_strides, std::vector* anchors, const int num_anchors = 3); + + // 查看输入是否为动态维度的 不建议直接使用 不同模型的逻辑可能不一致 + bool IsDynamicInput() const { return is_dynamic_input_; } + + // whether to inference with dynamic shape (e.g ONNX export with dynamic shape + // or not.) + // while is_dynamic_shape if 'false', is_mini_pad will force 'false'. This + // value will + // auto check by fastdeploy after the internal Runtime already initialized. + bool is_dynamic_input_; }; } // namespace ppogg } // namespace vision diff --git a/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.cc b/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.cc index 7321fc01bb..a562c9b275 100644 --- a/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.cc +++ b/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.cc @@ -89,6 +89,21 @@ bool ScaledYOLOv4::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static + // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. 
+ is_dynamic_input_ = false; + auto shape = InputInfoOfRuntime(0).shape; + for (int i = 0; i < shape.size(); ++i) { + // if height or width is dynamic + if (i >= 2 && shape[i] <= 0) { + is_dynamic_input_ = true; + break; + } + } + if (!is_dynamic_input_) { + is_mini_pad = false; + } return true; } diff --git a/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.h b/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.h index 39066a29ec..247d5221e1 100644 --- a/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.h +++ b/csrcs/fastdeploy/vision/wongkinyiu/scaledyolov4.h @@ -90,6 +90,16 @@ class FASTDEPLOY_DECL ScaledYOLOv4 : public FastDeployModel { const std::vector& color, bool _auto, bool scale_fill = false, bool scale_up = true, int stride = 32); + + // 查看输入是否为动态维度的 不建议直接使用 不同模型的逻辑可能不一致 + bool IsDynamicInput() const { return is_dynamic_input_; } + + // whether to inference with dynamic shape (e.g ONNX export with dynamic shape + // or not.) + // while is_dynamic_shape if 'false', is_mini_pad will force 'false'. This + // value will + // auto check by fastdeploy after the internal Runtime already initialized. + bool is_dynamic_input_; }; } // namespace wongkinyiu } // namespace vision diff --git a/csrcs/fastdeploy/vision/wongkinyiu/yolor.cc b/csrcs/fastdeploy/vision/wongkinyiu/yolor.cc index 070ea72e60..7de994f2a4 100644 --- a/csrcs/fastdeploy/vision/wongkinyiu/yolor.cc +++ b/csrcs/fastdeploy/vision/wongkinyiu/yolor.cc @@ -87,6 +87,21 @@ bool YOLOR::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." << std::endl; return false; } + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static + // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. 
+ is_dynamic_input_ = false; + auto shape = InputInfoOfRuntime(0).shape; + for (int i = 0; i < shape.size(); ++i) { + // if height or width is dynamic + if (i >= 2 && shape[i] <= 0) { + is_dynamic_input_ = true; + break; + } + } + if (!is_dynamic_input_) { + is_mini_pad = false; + } return true; } @@ -176,7 +191,7 @@ bool YOLOR::Postprocess( float pad_h = (out_h - ipt_h * scale) / 2.0f; float pad_w = (out_w - ipt_w * scale) / 2.0f; if (is_mini_pad) { - // 和 LetterBox中_auto=true的处理逻辑对应 + // 和 LetterBox中_auto=true的处理逻辑对应 pad_h = static_cast(static_cast(pad_h) % stride); pad_w = static_cast(static_cast(pad_w) % stride); } diff --git a/csrcs/fastdeploy/vision/wongkinyiu/yolor.h b/csrcs/fastdeploy/vision/wongkinyiu/yolor.h index 7597f42d32..b3a00663c1 100644 --- a/csrcs/fastdeploy/vision/wongkinyiu/yolor.h +++ b/csrcs/fastdeploy/vision/wongkinyiu/yolor.h @@ -89,6 +89,16 @@ class FASTDEPLOY_DECL YOLOR : public FastDeployModel { const std::vector& color, bool _auto, bool scale_fill = false, bool scale_up = true, int stride = 32); + + // 查看输入是否为动态维度的 不建议直接使用 不同模型的逻辑可能不一致 + bool IsDynamicInput() const { return is_dynamic_input_; } + + // whether to inference with dynamic shape (e.g ONNX export with dynamic shape + // or not.) + // while is_dynamic_shape if 'false', is_mini_pad will force 'false'. This + // value will + // auto check by fastdeploy after the internal Runtime already initialized. + bool is_dynamic_input_; }; } // namespace wongkinyiu } // namespace vision diff --git a/csrcs/fastdeploy/vision/wongkinyiu/yolov7.cc b/csrcs/fastdeploy/vision/wongkinyiu/yolov7.cc index 457f8800cf..6f603c87fc 100644 --- a/csrcs/fastdeploy/vision/wongkinyiu/yolov7.cc +++ b/csrcs/fastdeploy/vision/wongkinyiu/yolov7.cc @@ -88,6 +88,21 @@ bool YOLOv7::Initialize() { FDERROR << "Failed to initialize fastdeploy backend." 
<< std::endl; return false; } + // Check if the input shape is dynamic after Runtime already initialized, + // Note that, We need to force is_mini_pad 'false' to keep static + // shape after padding (LetterBox) when the is_dynamic_shape is 'false'. + is_dynamic_input_ = false; + auto shape = InputInfoOfRuntime(0).shape; + for (int i = 0; i < shape.size(); ++i) { + // if height or width is dynamic + if (i >= 2 && shape[i] <= 0) { + is_dynamic_input_ = true; + break; + } + } + if (!is_dynamic_input_) { + is_mini_pad = false; + } return true; } @@ -177,7 +192,7 @@ bool YOLOv7::Postprocess( float pad_h = (out_h - ipt_h * scale) / 2.0f; float pad_w = (out_w - ipt_w * scale) / 2.0f; if (is_mini_pad) { - // 和 LetterBox中_auto=true的处理逻辑对应 + // 和 LetterBox中_auto=true的处理逻辑对应 pad_h = static_cast(static_cast(pad_h) % stride); pad_w = static_cast(static_cast(pad_w) % stride); } diff --git a/csrcs/fastdeploy/vision/wongkinyiu/yolov7.h b/csrcs/fastdeploy/vision/wongkinyiu/yolov7.h index 64e18ad47b..5dbdfb8f4a 100644 --- a/csrcs/fastdeploy/vision/wongkinyiu/yolov7.h +++ b/csrcs/fastdeploy/vision/wongkinyiu/yolov7.h @@ -89,6 +89,16 @@ class FASTDEPLOY_DECL YOLOv7 : public FastDeployModel { const std::vector& color, bool _auto, bool scale_fill = false, bool scale_up = true, int stride = 32); + + // 查看输入是否为动态维度的 不建议直接使用 不同模型的逻辑可能不一致 + bool IsDynamicInput() const { return is_dynamic_input_; } + + // whether to inference with dynamic shape (e.g ONNX export with dynamic shape + // or not.) + // while is_dynamic_shape if 'false', is_mini_pad will force 'false'. This + // value will + // auto check by fastdeploy after the internal Runtime already initialized. 
+ bool is_dynamic_input_; }; } // namespace wongkinyiu } // namespace vision From 3590990bc2acb86fd473f4c34716323c14d12c35 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 08:06:17 +0000 Subject: [PATCH 40/94] first commit test photo --- .../detection/yolov5/cpp/CMakeLists.txt | 14 +++ .../vision/detection/yolov5/cpp/README.md | 77 +++++++++++++ examples/vision/detection/yolov5/cpp/infer.cc | 105 ++++++++++++++++++ .../vision/detection/yolov5/python/README.md | 71 ++++++++++++ .../vision/detection/yolov5/python/infer.py | 51 +++++++++ examples/vision/detection/yolov7/README.md | 11 +- .../vision/detection/yolov7/cpp/README.md | 20 ++-- .../vision/detection/yolov7/python/README.md | 26 +++-- 8 files changed, 353 insertions(+), 22 deletions(-) create mode 100644 examples/vision/detection/yolov5/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/yolov5/cpp/README.md create mode 100644 examples/vision/detection/yolov5/cpp/infer.cc create mode 100644 examples/vision/detection/yolov5/python/README.md create mode 100644 examples/vision/detection/yolov5/python/infer.py diff --git a/examples/vision/detection/yolov5/cpp/CMakeLists.txt b/examples/vision/detection/yolov5/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/detection/yolov5/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/detection/yolov5/cpp/README.md b/examples/vision/detection/yolov5/cpp/README.md new file mode 100644 index 0000000000..6d4c7fe7fd --- /dev/null +++ 
b/examples/vision/detection/yolov5/cpp/README.md @@ -0,0 +1,77 @@ +# YOLOv7 C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的yolov7模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov7.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000087038.jpg + + +# CPU推理 +./infer_demo yolov7.onnx 000000087038.jpg 0 +# GPU推理 +./infer_demo yolov7.onnx 000000087038.jpg 1 +# GPU上TensorRT推理 +./infer_demo yolov7.onnx 000000087038.jpg 2 +``` + +## YOLOv7 C++接口 + +### YOLOv7类 + +``` +fastdeploy::vision::detection::YOLOv7( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOv7::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] + +- [模型介绍](../../) +- 
[Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolov5/cpp/infer.cc b/examples/vision/detection/yolov5/cpp/infer.cc new file mode 100644 index 0000000000..1ddca8f1c8 --- /dev/null +++ b/examples/vision/detection/yolov5/cpp/infer.cc @@ -0,0 +1,105 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::detection::YOLOv7(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::detection::YOLOv7(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::detection::YOLOv7(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model ./yolov7.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/detection/yolov5/python/README.md b/examples/vision/detection/yolov5/python/README.md new file mode 100644 index 0000000000..74078e2add --- /dev/null +++ b/examples/vision/detection/yolov5/python/README.md @@ -0,0 +1,71 @@ +# YOLOv7 Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载yolov7模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov7.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolov7/python/ + +# CPU推理 +python infer.py --model yolov7.onnx --image 000000087038.jpg --device cpu +# GPU推理 +python infer.py --model yolov7.onnx --image 000000087038.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model yolov7.onnx --image 000000087038.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + +## YOLOv7 Python接口 + +``` +fastdeploy.vision.detection.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 
输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list | tuple): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] + +## 其它文档 + +- [YOLOv7 模型介绍](..) +- [YOLOv7 C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolov5/python/infer.py b/examples/vision/detection/yolov5/python/infer.py new file mode 100644 index 0000000000..574755c3a3 --- /dev/null +++ b/examples/vision/detection/yolov5/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolov7 onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.detection.YOLOv7(args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/detection/yolov7/README.md 
b/examples/vision/detection/yolov7/README.md index 5f4848075d..995d278b11 100644 --- a/examples/vision/detection/yolov7/README.md +++ b/examples/vision/detection/yolov7/README.md @@ -3,13 +3,14 @@ ## 模型版本说明 - [YOLOv7 0.1](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1) - - (1)[YOLOv7 0.1](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)链接中.pt后缀模型通过[导出ONNX模型](#导出ONNX模型)操作后,可直接部署;.onnx、.trt和 .pose后缀模型暂不支持部署; - - (2)开发者基于自己数据训练的YOLOv7 0.1模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + - (1)[链接中](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)[链接中](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)的*.onnx、*.trt和 *.pose模型不支持部署; + - (3)开发者基于自己数据训练的YOLOv7 0.1模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 ``` -# 下载yolov7模型文件,或准备训练好的YOLOv7模型文件 +# 下载yolov7模型文件 wget https://github.com/WongKinYiu/yolov7/releases/download/v0.1/yolov7.pt # 导出onnx格式文件 (Tips: 对应 YOLOv7 release v0.1 代码) @@ -18,8 +19,8 @@ python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt # 如果您的代码版本中有支持NMS的ONNX文件导出,请使用如下命令导出ONNX文件(请暂时不要使用 "--end2end",我们后续将支持带有NMS的ONNX模型的部署) python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt -# 移动onnx文件到examples目录 -cp PATH/TO/yolov7.onnx PATH/TO/FastDeploy/examples/vision/detextion/yolov7/ +# 移动onnx文件到demo目录 +cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` ## 下载预训练模型 diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index 2dab72beb8..9e28ffcb5f 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -5,7 +5,7 @@ 在部署前,需确认以下两个步骤 - 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuilt_libraries.md) +- 2. 
根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) 以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 @@ -19,15 +19,15 @@ make -j #下载官方转换好的yolov7模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov7.onnx -wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000087038.jpg +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg # CPU推理 -./infer_demo yolov7.onnx 000000087038.jpg 0 +./infer_demo yolov7.onnx 000000014439.jpg 0 # GPU推理 -./infer_demo yolov7.onnx 000000087038.jpg 1 +./infer_demo yolov7.onnx 000000014439.jpg 1 # GPU上TensorRT推理 -./infer_demo yolov7.onnx 000000087038.jpg 2 +./infer_demo yolov7.onnx 000000014439.jpg 2 ``` ## YOLOv7 C++接口 @@ -58,11 +58,11 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` -> +> > 模型预测接口,输入图像直接输出检测结果。 -> +> > **参数** -> +> > > * **im**: 输入图像,注意需为HWC,BGR格式 > > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 @@ -71,6 +71,10 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 > > * **size**(vector): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/detection/yolov7/python/README.md b/examples/vision/detection/yolov7/python/README.md index c45d8a416c..b3a4f12a1b 100644 --- a/examples/vision/detection/yolov7/python/README.md +++ b/examples/vision/detection/yolov7/python/README.md @@ -18,15 +18,17 
@@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/yolov7/python/ # CPU推理 -python infer.py --model yolov7.onnx --image 000000087038.jpg --device cpu +python infer.py --model yolov7.onnx --image 000000014439.jpg --device cpu # GPU推理 -python infer.py --model yolov7.onnx --image 000000087038.jpg --device gpu -# GPU上使用TensorRT推理 (注意:TensorRT推理第一次运行,有序列化模型的操作,有一定耗时,需要耐心等待) -python infer.py --model yolov7.onnx --image 000000087038.jpg --device gpu --use_trt True +python infer.py --model yolov7.onnx --image 000000014439.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model yolov7.onnx --image 000000014439.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 + + ## YOLOv7 Python接口 ``` @@ -47,22 +49,28 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 > ``` > YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) > ``` -> +> > 模型预测结口,输入图像直接输出检测结果。 -> +> > **参数** -> +> > > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 > > * **conf_threshold**(float): 检测框置信度过滤阈值 > > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 > **返回** -> +> > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 -> > * **size**(list | tuple): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + ## 其它文档 From 09c64ef6d992f29e2ac445578e2e7a629c3640c0 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 08:21:59 +0000 Subject: [PATCH 41/94] yolov7 doc --- 
examples/vision/{detection => }/README.md | 0 examples/vision/detection/yolov7/README.md | 8 +++++++- examples/vision/detection/yolov7/cpp/README.md | 4 ++++ 3 files changed, 11 insertions(+), 1 deletion(-) rename examples/vision/{detection => }/README.md (100%) diff --git a/examples/vision/detection/README.md b/examples/vision/README.md similarity index 100% rename from examples/vision/detection/README.md rename to examples/vision/README.md diff --git a/examples/vision/detection/yolov7/README.md b/examples/vision/detection/yolov7/README.md index 995d278b11..a661d9bd9f 100644 --- a/examples/vision/detection/yolov7/README.md +++ b/examples/vision/detection/yolov7/README.md @@ -30,7 +30,13 @@ cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | | [YOLOv7](https://bj.bcebos.com/paddlehub/fastdeploy/yolov7.onnx) | 141MB | 51.4% | -| [YOLOv7-x] | 10MB | 51.4% | +| [YOLOv7x](https://bj.bcebos.com/paddlehub/fastdeploy/yolov7x.onnx) | 273MB | 53.1% | +| [YOLOv7-w6](https://bj.bcebos.com/paddlehub/fastdeploy/yolov7-w6.onnx) | 269MB | 54.9% | +| [YOLOv7-e6](https://bj.bcebos.com/paddlehub/fastdeploy/yolov7-e6.onnx) | 372MB | 56.0% | +| [YOLOv7-d6](https://bj.bcebos.com/paddlehub/fastdeploy/yolov7-d6.onnx) | 511MB | 56.6% | +| [YOLOv7-e6e](https://bj.bcebos.com/paddlehub/fastdeploy/yolov7-e6e.onnx) | 579MB | 56.8% | + + ## 详细部署文档 diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index 9e28ffcb5f..e308d35f35 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -30,6 +30,10 @@ wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 ./infer_demo yolov7.onnx 000000014439.jpg 2 ``` +运行完成可视化结果如下图所示 + + + ## YOLOv7 C++接口 ### YOLOv7类 From 19154992bfc67f15630194383b3b50bed2d1c4a5 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 
2022 08:26:00 +0000 Subject: [PATCH 42/94] yolov7 doc --- examples/vision/detection/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index e308d35f35..5e4ee4eeac 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -74,7 +74,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 -> > * **size**(vector): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **size**(vector< int>>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` From c3cd45573b53f67de29992692baf683f65c2e51f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 08:26:46 +0000 Subject: [PATCH 43/94] yolov7 doc --- examples/vision/detection/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index 5e4ee4eeac..5c27318028 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -74,7 +74,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 -> > * **size**(vector< int>>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **size**(vector<int>>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 
表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` From 4c05253c0ccad7ba464cf67bc3882619d0e05bfd Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 08:30:12 +0000 Subject: [PATCH 44/94] yolov7 doc --- examples/vision/detection/yolov7/cpp/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index 5c27318028..f6e23e3f1c 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -74,7 +74,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 -> > * **size**(vector<int>>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` From 49486ce41b0abafa6c85411f5138d78860929587 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 09:27:37 +0000 Subject: [PATCH 45/94] add yolov5 docs --- examples/vision/detection/yolov5/README.md | 28 ++++++++++++++ .../vision/detection/yolov5/cpp/README.md | 36 +++++++++++------- examples/vision/detection/yolov5/cpp/infer.cc | 8 ++-- .../vision/detection/yolov5/python/README.md | 38 +++++++++++-------- .../vision/detection/yolov5/python/infer.py | 4 +- examples/vision/detection/yolov7/README.md | 2 +- .../vision/detection/yolov7/cpp/README.md | 2 +- 7 files changed, 81 insertions(+), 37 deletions(-) create mode 100644 examples/vision/detection/yolov5/README.md diff --git 
a/examples/vision/detection/yolov5/README.md b/examples/vision/detection/yolov5/README.md new file mode 100644 index 0000000000..30e638944c --- /dev/null +++ b/examples/vision/detection/yolov5/README.md @@ -0,0 +1,28 @@ +# YOLOv5准备部署模型 + +## 模型版本说明 + +- [YOLOv5 v6.0](https://github.com/ultralytics/yolov5/releases/tag/v6.0) + - (1)[链接中](https://github.com/ultralytics/yolov5/releases/tag/v6.0)的*.onnx可直接进行部署; + - (2)开发者基于自己数据训练的YOLOv5 v6.0模型,可使用[YOLOv5](https://github.com/ultralytics/yolov5)中的`export.py`导出ONNX文件后,完成部署。 + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了YOLOv5导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [YOLOv5n](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5n.onnx) | 1.9MB | 28.4% | +| [YOLOv5s](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s.onnx) | 7.2MB | 37.2% | +| [YOLOv5m](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5m.onnx) | 21.2MB | 45.2% | +| [YOLOv5l](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5l.onnx) | 46.5MB | 48.8% | +| [YOLOv5x](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5x.onnx) | 86.7MB | 50.7% | + + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/detection/yolov5/cpp/README.md b/examples/vision/detection/yolov5/cpp/README.md index 6d4c7fe7fd..ceba2d0c38 100644 --- a/examples/vision/detection/yolov5/cpp/README.md +++ b/examples/vision/detection/yolov5/cpp/README.md @@ -1,6 +1,6 @@ -# YOLOv7 C++部署示例 +# YOLOv5 C++部署示例 -本目录下提供`infer.cc`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 +本目录下提供`infer.cc`快速完成YOLOv5在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 在部署前,需确认以下两个步骤 @@ -17,32 +17,36 @@ tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake ..
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j -#下载官方转换好的yolov7模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov7.onnx -wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000087038.jpg +#下载官方转换好的yolov5模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg # CPU推理 -./infer_demo yolov7.onnx 000000087038.jpg 0 +./infer_demo yolov5.onnx 000000014439.jpg 0 # GPU推理 -./infer_demo yolov7.onnx 000000087038.jpg 1 +./infer_demo yolov5.onnx 000000014439.jpg 1 # GPU上TensorRT推理 -./infer_demo yolov7.onnx 000000087038.jpg 2 +./infer_demo yolov5.onnx 000000014439.jpg 2 ``` -## YOLOv7 C++接口 +运行完成可视化结果如下图所示 -### YOLOv7类 + + +## YOLOv5 C++接口 + +### YOLOv5类 ``` -fastdeploy::vision::detection::YOLOv7( +fastdeploy::vision::detection::YOLOv5( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), const Frontend& model_format = Frontend::ONNX) ``` -YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 +YOLOv5模型加载和初始化,其中model_file为导出的ONNX模型格式。 **参数** @@ -54,7 +58,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 #### Predict函数 > ``` -> YOLOv7::Predict(cv::Mat* im, DetectionResult* result, +> YOLOv5::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -70,7 +74,11 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 -> > * **size**(vector): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 
并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/detection/yolov5/cpp/infer.cc b/examples/vision/detection/yolov5/cpp/infer.cc index 1ddca8f1c8..ef3e47ea1f 100644 --- a/examples/vision/detection/yolov5/cpp/infer.cc +++ b/examples/vision/detection/yolov5/cpp/infer.cc @@ -15,7 +15,7 @@ #include "fastdeploy/vision.h" void CpuInfer(const std::string& model_file, const std::string& image_file) { - auto model = fastdeploy::vision::detection::YOLOv7(model_file); + auto model = fastdeploy::vision::detection::YOLOv5(model_file); if (!model.Initialized()) { std::cerr << "Failed to initialize." << std::endl; return; @@ -38,7 +38,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { void GpuInfer(const std::string& model_file, const std::string& image_file) { auto option = fastdeploy::RuntimeOption(); option.UseGpu(); - auto model = fastdeploy::vision::detection::YOLOv7(model_file, "", option); + auto model = fastdeploy::vision::detection::YOLOv5(model_file, "", option); if (!model.Initialized()) { std::cerr << "Failed to initialize." << std::endl; return; @@ -63,7 +63,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { option.UseGpu(); option.UseTrtBackend(); option.SetTrtInputShape("images", {1, 3, 640, 640}); - auto model = fastdeploy::vision::detection::YOLOv7(model_file, "", option); + auto model = fastdeploy::vision::detection::YOLOv5(model_file, "", option); if (!model.Initialized()) { std::cerr << "Failed to initialize." 
<< std::endl; return; @@ -86,7 +86,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { int main(int argc, char* argv[]) { if (argc < 4) { std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " - "e.g ./infer_model ./yolov7.onnx ./test.jpeg 0" + "e.g ./infer_model ./yolov5.onnx ./test.jpeg 0" << std::endl; std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " "with gpu; 2: run with gpu and use tensorrt backend." diff --git a/examples/vision/detection/yolov5/python/README.md b/examples/vision/detection/yolov5/python/README.md index 74078e2add..6d099e7d7b 100644 --- a/examples/vision/detection/yolov5/python/README.md +++ b/examples/vision/detection/yolov5/python/README.md @@ -1,39 +1,41 @@ -# YOLOv7 Python部署示例 +# YOLOv5 Python部署示例 在部署前,需确认以下两个步骤 - 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) - 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) -本目录下提供`infer.py`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 +本目录下提供`infer.py`快速完成YOLOv5在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 ``` -#下载yolov7模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov7.onnx +#下载yolov5模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5.onnx wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/yolov7/python/ +cd examples/vison/detection/yolov5/python/ # CPU推理 -python infer.py --model yolov7.onnx --image 000000087038.jpg --device cpu +python infer.py --model yolov5.onnx --image 000000014439.jpg --device cpu # GPU推理 -python infer.py --model yolov7.onnx --image 000000087038.jpg --device gpu +python infer.py --model yolov5.onnx --image 000000014439.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model yolov7.onnx --image 000000087038.jpg --device gpu --use_trt True +python 
infer.py --model yolov5.onnx --image 000000014439.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 -## YOLOv7 Python接口 + + +## YOLOv5 Python接口 ``` -fastdeploy.vision.detection.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.detection.YOLOv5(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) ``` -YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 +YOLOv5模型加载和初始化,其中model_file为导出的ONNX模型格式 **参数** @@ -45,7 +47,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 ### predict函数 > ``` -> YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> YOLOv5.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) > ``` > > 模型预测结口,输入图像直接输出检测结果。 @@ -62,10 +64,16 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员属性 -> > * **size**(list | tuple): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + ## 其它文档 -- [YOLOv7 模型介绍](..) -- [YOLOv7 C++部署](../cpp) +- [YOLOv5 模型介绍](..) 
+- [YOLOv5 C++部署](../cpp) - [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolov5/python/infer.py b/examples/vision/detection/yolov5/python/infer.py index 574755c3a3..3f7a91f99d 100644 --- a/examples/vision/detection/yolov5/python/infer.py +++ b/examples/vision/detection/yolov5/python/infer.py @@ -7,7 +7,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of yolov7 onnx model.") + "--model", required=True, help="Path of yolov5 onnx model.") parser.add_argument( "--image", required=True, help="Path of test image file.") parser.add_argument( @@ -39,7 +39,7 @@ def build_option(args): # 配置runtime,加载模型 runtime_option = build_option(args) -model = fd.vision.detection.YOLOv7(args.model, runtime_option=runtime_option) +model = fd.vision.detection.YOLOv5(args.model, runtime_option=runtime_option) # 预测图片检测结果 im = cv2.imread(args.image) diff --git a/examples/vision/detection/yolov7/README.md b/examples/vision/detection/yolov7/README.md index a661d9bd9f..857bdda31d 100644 --- a/examples/vision/detection/yolov7/README.md +++ b/examples/vision/detection/yolov7/README.md @@ -23,7 +23,7 @@ python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ ``` -## 下载预训练模型 +## 下载预训练ONNX模型 为了方便开发者的测试,下面提供了YOLOv7导出的各系列模型,开发者可直接下载使用。 diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index f6e23e3f1c..c67689570d 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -75,7 +75,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **padding_value**(vector<float>): 
通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` > > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` From 6034490ab0d2dc5f98f1de0c13404f5b114d6f95 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 12:47:15 +0000 Subject: [PATCH 46/94] modify yolov5 doc --- examples/vision/detection/yolov5/cpp/README.md | 8 ++++---- examples/vision/detection/yolov5/python/README.md | 8 ++++---- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/examples/vision/detection/yolov5/cpp/README.md b/examples/vision/detection/yolov5/cpp/README.md index ceba2d0c38..feb44d13df 100644 --- a/examples/vision/detection/yolov5/cpp/README.md +++ b/examples/vision/detection/yolov5/cpp/README.md @@ -18,16 +18,16 @@ cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j #下载官方转换好的yolov5模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5.onnx +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s.onnx wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg # CPU推理 -./infer_demo yolov5.onnx 000000014439.jpg 0 +./infer_demo yolov5s.onnx 000000014439.jpg 0 # GPU推理 -./infer_demo yolov5.onnx 000000014439.jpg 1 +./infer_demo yolov5s.onnx 000000014439.jpg 1 # GPU上TensorRT推理 -./infer_demo yolov5.onnx 000000014439.jpg 2 +./infer_demo yolov5s.onnx 000000014439.jpg 2 ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/detection/yolov5/python/README.md b/examples/vision/detection/yolov5/python/README.md index 6d099e7d7b..57cdba44cb 100644 --- a/examples/vision/detection/yolov5/python/README.md +++ b/examples/vision/detection/yolov5/python/README.md @@ -9,7 +9,7 @@ ``` #下载yolov5模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5.onnx +wget 
https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s.onnx wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg @@ -18,11 +18,11 @@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/yolov5/python/ # CPU推理 -python infer.py --model yolov5.onnx --image 000000014439.jpg --device cpu +python infer.py --model yolov5s.onnx --image 000000014439.jpg --device cpu # GPU推理 -python infer.py --model yolov5.onnx --image 000000014439.jpg --device gpu +python infer.py --model yolov5s.onnx --image 000000014439.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model yolov5.onnx --image 000000014439.jpg --device gpu --use_trt True +python infer.py --model yolov5s.onnx --image 000000014439.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 From 08bf982f103cf24ad9080fb4fb8f43e2bc228de4 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 13:51:46 +0000 Subject: [PATCH 47/94] first commit for retinaface --- examples/vision/facedet/retinaface/README.md | 54 ++++++++++++ .../facedet/retinaface/cpp/CMakeLists.txt | 14 +++ .../vision/facedet/retinaface/cpp/README.md | 85 +++++++++++++++++++ .../facedet/retinaface/python/README.md | 79 +++++++++++++++++ 4 files changed, 232 insertions(+) create mode 100644 examples/vision/facedet/retinaface/README.md create mode 100644 examples/vision/facedet/retinaface/cpp/CMakeLists.txt create mode 100644 examples/vision/facedet/retinaface/cpp/README.md create mode 100644 examples/vision/facedet/retinaface/python/README.md diff --git a/examples/vision/facedet/retinaface/README.md b/examples/vision/facedet/retinaface/README.md new file mode 100644 index 0000000000..b545b98d21 --- /dev/null +++ b/examples/vision/facedet/retinaface/README.md @@ -0,0 +1,54 @@ +# RetinaFace准备部署模型 + +## 模型版本说明 + +- [RetinaFace CommitID:b984b4b](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) + - 
(1)[链接中](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)开发者基于自己数据训练的RetinaFace CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + +## 导出ONNX模型 + +自动下载的模型文件是我们事先转换好的,如果您需要从RetinaFace官方repo导出ONNX,请参考以下步骤。 + +* 下载官方仓库 +```bash +git clone https://github.com/biubug6/Pytorch_Retinaface.git +``` +* 下载预训练权重并放在weights文件夹 +```text +./weights/ + mobilenet0.25_Final.pth + mobilenetV1X0.25_pretrain.tar + Resnet50_Final.pth +``` +* 运行convert_to_onnx.py导出ONNX模型文件 +```bash +PYTHONPATH=. python convert_to_onnx.py --trained_model ./weights/mobilenet0.25_Final.pth --network mobile0.25 --long_side 640 --cpu +PYTHONPATH=. python convert_to_onnx.py --trained_model ./weights/Resnet50_Final.pth --network resnet50 --long_side 640 --cpu +``` +注意:需要先对convert_to_onnx.py脚本中的--long_side参数增加类型约束,type=int. +* 使用onnxsim对模型进行简化 +```bash +onnxsim FaceDetector.onnx Pytorch_RetinaFace_mobile0.25-640-640.onnx # mobilenet +onnxsim FaceDetector.onnx Pytorch_RetinaFace_resnet50-640-640.onnx # resnet50 +``` + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了RetinaFace导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [RetinaFace_mobile0.25-640](https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx) | 1.7MB | - | +| [RetinaFace_mobile0.25-720](https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-720-1080.onnx) | 1.7MB | -| +| [RetinaFace_resnet50-640](https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_resnet50-640-640.onnx) | 105MB | - | +| [RetinaFace_resnet50-720](https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_resnet50-720-1080.onnx) | 105MB | - | + + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/facedet/retinaface/cpp/CMakeLists.txt b/examples/vision/facedet/retinaface/cpp/CMakeLists.txt new file mode 100644 index
0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/facedet/retinaface/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/facedet/retinaface/cpp/README.md b/examples/vision/facedet/retinaface/cpp/README.md new file mode 100644 index 0000000000..b14b92bbba --- /dev/null +++ b/examples/vision/facedet/retinaface/cpp/README.md @@ -0,0 +1,85 @@ +# YOLOv7 C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的yolov7模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx +wget todo + + +# CPU推理 +./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx todo 0 +# GPU推理 +./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx todo 1 +# GPU上TensorRT推理 +./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx todo 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv7 C++接口 + +### YOLOv7类 + +``` +fastdeploy::vision::detection::YOLOv7( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOv7::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- 
[视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md new file mode 100644 index 0000000000..2ef4bfd132 --- /dev/null +++ b/examples/vision/facedet/retinaface/python/README.md @@ -0,0 +1,79 @@ +# YOLOv7 Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载yolov7模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/todo + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolov7/python/ + +# CPU推理 +python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device cpu +# GPU推理 +python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device gpu +# GPU上使用TensorRT推理 +python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv7 Python接口 + +``` +fastdeploy.vision.detection.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** 
+> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOv7 模型介绍](..) +- [YOLOv7 C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) From 5e9518063302ee9b0add91cf2427ecbf03691172 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 13:54:17 +0000 Subject: [PATCH 48/94] first commit for retinaface --- examples/vision/facedet/retinaface/cpp/README.md | 16 ++++++++-------- .../vision/facedet/retinaface/python/README.md | 16 ++++++++-------- 2 files changed, 16 insertions(+), 16 deletions(-) diff --git a/examples/vision/facedet/retinaface/cpp/README.md b/examples/vision/facedet/retinaface/cpp/README.md index b14b92bbba..dc36657076 100644 --- a/examples/vision/facedet/retinaface/cpp/README.md +++ b/examples/vision/facedet/retinaface/cpp/README.md @@ -1,6 +1,6 @@ -# YOLOv7 C++部署示例 +# RetinaFace C++部署示例 -本目录下提供`infer.cc`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 +本目录下提供`infer.cc`快速完成RetinaFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 在部署前,需确认以下两个步骤 @@ -17,7 +17,7 @@ tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j -#下载官方转换好的yolov7模型文件和测试图片 +#下载官方转换好的RetinaFace模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx wget todo @@ -34,19 +34,19 @@ wget todo -## YOLOv7 C++接口 +## RetinaFace C++接口 -### YOLOv7类 +### RetinaFace类 ``` -fastdeploy::vision::detection::YOLOv7( +fastdeploy::vision::facedet::RetinaFace( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), const Frontend& model_format = Frontend::ONNX) ``` -YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 +RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格式。 **参数** @@ -58,7 +58,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 #### Predict函数 > ``` -> YOLOv7::Predict(cv::Mat* im, DetectionResult* result, +> RetinaFace::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index 2ef4bfd132..d7c295c375 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -1,11 +1,11 @@ -# YOLOv7 Python部署示例 +# RetinaFace Python部署示例 在部署前,需确认以下两个步骤 - 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) - 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) -本目录下提供`infer.py`快速完成YOLOv7在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 +本目录下提供`infer.py`快速完成RetinaFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 ``` #下载yolov7模型文件和测试图片 @@ -29,13 +29,13 @@ python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo -## YOLOv7 Python接口 +## RetinaFace Python接口 ``` -fastdeploy.vision.detection.YOLOv7(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.facedet.RetinaFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) ``` -YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 +RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格式 **参数** @@ -47,7 +47,7 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 ### predict函数 > ``` -> YOLOv7.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> RetinaFace.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) > ``` > > 模型预测结口,输入图像直接输出检测结果。 @@ -74,6 +74,6 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 ## 其它文档 -- [YOLOv7 模型介绍](..) -- [YOLOv7 C++部署](../cpp) +- [RetinaFace 模型介绍](..) 
+- [RetinaFace C++部署](../cpp) - [模型预测结果说明](../../../../../docs/api/vision_results/) From 80d1ca64899d229d9d63dcf282f207db1d38bb7a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:08:33 +0000 Subject: [PATCH 49/94] firt commit for ultraface --- .../facedet/retinaface/python/README.md | 2 +- examples/vision/facedet/ultraface/README.md | 23 +++++ .../facedet/ultraface/cpp/CMakeLists.txt | 14 +++ .../vision/facedet/ultraface/cpp/README.md | 85 +++++++++++++++++++ .../vision/facedet/ultraface/python/README.md | 79 +++++++++++++++++ 5 files changed, 202 insertions(+), 1 deletion(-) create mode 100644 examples/vision/facedet/ultraface/README.md create mode 100644 examples/vision/facedet/ultraface/cpp/CMakeLists.txt create mode 100644 examples/vision/facedet/ultraface/cpp/README.md create mode 100644 examples/vision/facedet/ultraface/python/README.md diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index d7c295c375..c063bedc1c 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -10,7 +10,7 @@ ``` #下载yolov7模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx -wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/todo +wget todo #下载部署示例代码 diff --git a/examples/vision/facedet/ultraface/README.md b/examples/vision/facedet/ultraface/README.md new file mode 100644 index 0000000000..f1dcca0b98 --- /dev/null +++ b/examples/vision/facedet/ultraface/README.md @@ -0,0 +1,23 @@ +# UltraFace准备部署模型 + +## 模型版本说明 + +- [UltraFace CommitID:dffdddd](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) + - (1)[链接中](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd)的*.onnx可下载, 也可以通过下面模型链接下载并进行部署 + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了UltraFace导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | 
+|:---------------------------------------------------------------- |:----- |:----- | +| [RFB-320](https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320.onnx) | 1.3MB | - | +| [RFB-320-sim](https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320-sim.onnx) | 1.2MB | -| + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/facedet/ultraface/cpp/CMakeLists.txt b/examples/vision/facedet/ultraface/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/facedet/ultraface/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/facedet/ultraface/cpp/README.md b/examples/vision/facedet/ultraface/cpp/README.md new file mode 100644 index 0000000000..1eae69c0fb --- /dev/null +++ b/examples/vision/facedet/ultraface/cpp/README.md @@ -0,0 +1,85 @@ +# UltraFace C++部署示例 + +本目录下提供`infer.cc`快速完成UltraFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的UltraFace模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320.onnx +wget todo + + +# CPU推理 +./infer_demo version-RFB-320.onnx todo 0 +# GPU推理 +./infer_demo version-RFB-320.onnx todo 1 +# GPU上TensorRT推理 +./infer_demo version-RFB-320.onnx todo 2 +``` + +运行完成可视化结果如下图所示 + + + +## UltraFace C++接口 + +### UltraFace类 + +``` +fastdeploy::vision::facedet::UltraFace( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> UltraFace::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git 
a/examples/vision/facedet/ultraface/python/README.md b/examples/vision/facedet/ultraface/python/README.md new file mode 100644 index 0000000000..df7545547a --- /dev/null +++ b/examples/vision/facedet/ultraface/python/README.md @@ -0,0 +1,79 @@ +# UltraFace Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成UltraFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载yolov7模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320.onnx +wget todo + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolov7/python/ + +# CPU推理 +python infer.py --model version-RFB-320.onnx --image todo --device cpu +# GPU推理 +python infer.py --model version-RFB-320.onnx --image todo --device gpu +# GPU上使用TensorRT推理 +python infer.py --model version-RFB-320.onnx --image todo --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## UltraFace Python接口 + +``` +fastdeploy.vision.facedet.UltraFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> UltraFace.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 
默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [UltraFace 模型介绍](..) +- [UltraFace C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) From 4b97d57802569719895a365e1d91a846d759d5a8 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:11:04 +0000 Subject: [PATCH 50/94] firt commit for ultraface --- examples/vision/facedet/retinaface/python/README.md | 4 ++-- examples/vision/facedet/ultraface/python/README.md | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index c063bedc1c..b8c3251359 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -8,14 +8,14 @@ 本目录下提供`infer.py`快速完成RetinaFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 ``` -#下载yolov7模型文件和测试图片 +#下载retinaface模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx wget todo #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/yolov7/python/ +cd examples/vison/detection/retinaface/python/ # CPU推理 python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device cpu diff --git a/examples/vision/facedet/ultraface/python/README.md b/examples/vision/facedet/ultraface/python/README.md index df7545547a..88026ecff3 100644 --- a/examples/vision/facedet/ultraface/python/README.md +++ b/examples/vision/facedet/ultraface/python/README.md @@ -8,14 +8,14 @@ 
本目录下提供`infer.py`快速完成UltraFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 ``` -#下载yolov7模型文件和测试图片 +#下载ultraface模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320.onnx wget todo #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/yolov7/python/ +cd examples/vison/detection/ultraface/python/ # CPU推理 python infer.py --model version-RFB-320.onnx --image todo --device cpu From 7924e7ef06552e66037db00bbe7a2d9a3b2e895d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:18:55 +0000 Subject: [PATCH 51/94] firt commit for yolov5face --- examples/vision/facedet/yolov5face/README.md | 46 ++++++++++ .../facedet/yolov5face/cpp/CMakeLists.txt | 14 +++ .../vision/facedet/yolov5face/cpp/README.md | 85 +++++++++++++++++++ .../facedet/yolov5face/python/README.md | 79 +++++++++++++++++ 4 files changed, 224 insertions(+) create mode 100644 examples/vision/facedet/yolov5face/README.md create mode 100644 examples/vision/facedet/yolov5face/cpp/CMakeLists.txt create mode 100644 examples/vision/facedet/yolov5face/cpp/README.md create mode 100644 examples/vision/facedet/yolov5face/python/README.md diff --git a/examples/vision/facedet/yolov5face/README.md b/examples/vision/facedet/yolov5face/README.md new file mode 100644 index 0000000000..22b9868e1b --- /dev/null +++ b/examples/vision/facedet/yolov5face/README.md @@ -0,0 +1,46 @@ +# YOLOv5Face准备部署模型 + +## 模型版本说明 + +- [YOLOv5Face CommitID:4fd1ead](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) + - (1)[链接中](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)开发者基于自己数据训练的YOLOv5Face CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + +## 导出ONNX模型 + +访问[YOLOv5Face](https://github.com/deepcam-cn/yolov5-face)官方github库,按照指引下载安装,下载`yolov5s-face.pt` 模型,利用 `export.py` 得到`onnx`格式文件。 + +* 下载yolov5face模型文件 + ``` + Link: https://pan.baidu.com/s/1fyzLxZYx7Ja1_PCIWRhxbw Link: eq0q + 
https://drive.google.com/file/d/1zxaHeLDyID9YU4-hqK7KNepXIwbTkRIO/view?usp=sharing + ``` + +* 导出onnx格式文件 + ```bash + PYTHONPATH=. python export.py --weights weights/yolov5s-face.pt --img_size 640 640 --batch_size 1 + ``` +* onnx模型简化(可选) + ```bash + onnxsim yolov5s-face.onnx yolov5s-face.onnx + ``` +* 移动onnx文件到model_zoo/yolov5face的目录 + ```bash + cp PATH/TO/yolov5s-face.onnx PATH/TO/model_zoo/vision/yolov5face/ + ``` + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了YOLOv5Face导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [YOLOv5s-Face](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx) | 30MB | - | +| [YOLOv5s-Face-bak](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5face-s-640x640.bak.onnx) | 30MB | -| +| [YOLOv5l-Face](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5face-l-640x640.onnx ) | 181MB | - | + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/facedet/yolov5face/cpp/CMakeLists.txt b/examples/vision/facedet/yolov5face/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/facedet/yolov5face/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/facedet/yolov5face/cpp/README.md b/examples/vision/facedet/yolov5face/cpp/README.md new file mode 100644 index 0000000000..ec0b48ad0a --- /dev/null +++ b/examples/vision/facedet/yolov5face/cpp/README.md @@ -0,0 +1,85 @@ +# YOLOv5Face C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOv5Face在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 
软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的YOLOv5Face模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx +wget todo + + +# CPU推理 +./infer_demo yolov5s-face.onnx todo 0 +# GPU推理 +./infer_demo yolov5s-face.onnx todo 1 +# GPU上TensorRT推理 +./infer_demo yolov5s-face.onnx todo 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv5Face C++接口 + +### YOLOv5Face类 + +``` +fastdeploy::vision::facedet::YOLOv5Face( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOv5Face::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * 
**is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/facedet/yolov5face/python/README.md b/examples/vision/facedet/yolov5face/python/README.md new file mode 100644 index 0000000000..2fc847f008 --- /dev/null +++ b/examples/vision/facedet/yolov5face/python/README.md @@ -0,0 +1,79 @@ +# YOLOv5Face Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOv5Face在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载YOLOv5Face模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx +wget todo + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolov5face/python/ + +# CPU推理 +python infer.py --model yolov5s-face.onnx --image todo --device cpu +# GPU推理 +python infer.py --model yolov5s-face.onnx --image todo --device gpu +# GPU上使用TensorRT推理 +python infer.py --model yolov5s-face.onnx --image todo --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv5Face Python接口 + +``` +fastdeploy.vision.facedet.YOLOv5Face(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOv5Face.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * 
**conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOv5Face 模型介绍](..) +- [YOLOv5Face C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) From 17288568c1f17baa36ff5938b919ae5e267177ec Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:41:01 +0000 Subject: [PATCH 52/94] firt commit for modnet and arcface --- examples/vision/facedet/yolov5face/README.md | 4 - examples/vision/faceid/arcface/README.md | 40 +++++++++ .../vision/faceid/arcface/cpp/CMakeLists.txt | 14 +++ examples/vision/faceid/arcface/cpp/README.md | 85 +++++++++++++++++++ .../vision/faceid/arcface/python/README.md | 79 +++++++++++++++++ examples/vision/matting/modnet/README.md | 42 +++++++++ .../vision/matting/modnet/cpp/CMakeLists.txt | 14 +++ examples/vision/matting/modnet/cpp/README.md | 85 +++++++++++++++++++ .../vision/matting/modnet/python/README.md | 79 +++++++++++++++++ 9 files changed, 438 insertions(+), 4 deletions(-) create mode 100644 examples/vision/faceid/arcface/README.md create mode 100644 examples/vision/faceid/arcface/cpp/CMakeLists.txt create mode 100644 examples/vision/faceid/arcface/cpp/README.md create mode 100644 examples/vision/faceid/arcface/python/README.md create mode 100644 examples/vision/matting/modnet/README.md create mode 100644 
examples/vision/matting/modnet/cpp/CMakeLists.txt
 create mode 100644 examples/vision/matting/modnet/cpp/README.md
 create mode 100644 examples/vision/matting/modnet/python/README.md

diff --git a/examples/vision/facedet/yolov5face/README.md b/examples/vision/facedet/yolov5face/README.md
index 22b9868e1b..34828b1938 100644
--- a/examples/vision/facedet/yolov5face/README.md
+++ b/examples/vision/facedet/yolov5face/README.md
@@ -24,10 +24,6 @@
   ```bash
   onnxsim yolov5s-face.onnx yolov5s-face.onnx
   ```
-* 移动onnx文件到model_zoo/yolov5face的目录
-  ```bash
-  cp PATH/TO/yolov5s-face.onnx PATH/TO/model_zoo/vision/yolov5face/
-  ```
 
 ## 下载预训练ONNX模型
 
diff --git a/examples/vision/faceid/arcface/README.md b/examples/vision/faceid/arcface/README.md
new file mode 100644
index 0000000000..6a122ec400
--- /dev/null
+++ b/examples/vision/faceid/arcface/README.md
@@ -0,0 +1,40 @@
+# ArcFace准备部署模型
+
+## 模型版本说明
+
+- [ArcFace CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5)
+  - (1)[链接中](https://github.com/deepinsight/insightface/commit/babb9a5)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;
+  - (2)开发者基于自己数据训练的ArcFace CommitID:babb9a5模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。
+
+## 导出ONNX模型
+
+访问[ArcFace](https://github.com/deepinsight/insightface/tree/master/recognition/arcface_torch)官方github库,按照指引下载安装,下载pt模型文件,利用 `torch2onnx.py` 得到`onnx`格式文件。
+
+* 下载ArcFace模型文件
+  ```
+  Link: https://pan.baidu.com/share/init?surl=CL-l4zWqsI1oDuEEYVhj-g code: e8pw
+  ```
+
+* 导出onnx格式文件
+  ```bash
+  PYTHONPATH=. 
python ./torch2onnx.py ms1mv3_arcface_r100_fp16/backbone.pth --output ms1mv3_arcface_r100.onnx --network r100 --simplify 1 + ``` + +## 下载预训练ONNX模型 + + + +todo + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/faceid/arcface/cpp/CMakeLists.txt b/examples/vision/faceid/arcface/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/faceid/arcface/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/faceid/arcface/cpp/README.md b/examples/vision/faceid/arcface/cpp/README.md new file mode 100644 index 0000000000..505d144bbb --- /dev/null +++ b/examples/vision/faceid/arcface/cpp/README.md @@ -0,0 +1,85 @@ +# ArcFace C++部署示例 + +本目录下提供`infer.cc`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的ArcFace模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r34.onnx +wget todo + + +# CPU推理 +./infer_demo ms1mv3_arcface_r34.onnx todo 0 +# GPU推理 +./infer_demo ms1mv3_arcface_r34.onnx todo 1 +# GPU上TensorRT推理 +./infer_demo ms1mv3_arcface_r34.onnx todo 2 +``` + +运行完成可视化结果如下图所示 + + + +## ArcFace C++接口 + +### ArcFace类 + +``` +fastdeploy::vision::faceid::ArcFace( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> ArcFace::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git 
a/examples/vision/faceid/arcface/python/README.md b/examples/vision/faceid/arcface/python/README.md new file mode 100644 index 0000000000..034b93049e --- /dev/null +++ b/examples/vision/faceid/arcface/python/README.md @@ -0,0 +1,79 @@ +# ArcFace Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载arcface模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r34.onnx +wget todo + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/arcface/python/ + +# CPU推理 +python infer.py --model ms1mv3_arcface_r34.onnx --image todo --device cpu +# GPU推理 +python infer.py --model ms1mv3_arcface_r34.onnx --image todo --device gpu +# GPU上使用TensorRT推理 +python infer.py --model ms1mv3_arcface_r34.onnx --image todo --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## ArcFace Python接口 + +``` +fastdeploy.vision.faceid.ArcFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> ArcFace.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 
640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [ArcFace 模型介绍](..) +- [ArcFace C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/matting/modnet/README.md b/examples/vision/matting/modnet/README.md new file mode 100644 index 0000000000..fc3f7c0080 --- /dev/null +++ b/examples/vision/matting/modnet/README.md @@ -0,0 +1,42 @@ +# MODNet准备部署模型 + +## 模型版本说明 + +- [MODNet CommitID:28165a4](https://github.com/ZHKKKe/MODNet/commit/28165a4) + - (1)[链接中](https://github.com/ZHKKKe/MODNet/commit/28165a4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)开发者基于自己数据训练的MODNet CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + +## 导出ONNX模型 + + +访问[MODNet](https://github.com/ZHKKKe/MODNet)官方github库,按照指引下载安装,下载模型文件,利用 `onnx/export_onnx.py` 得到`onnx`格式文件。 + +* 导出onnx格式文件 + ```bash + python -m onnx.export_onnx \ + --ckpt-path=pretrained/modnet_photographic_portrait_matting.ckpt \ + --output-path=pretrained/modnet_photographic_portrait_matting.onnx + ``` + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了MODNet导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [modnet_photographic](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic__portrait_matting.onnx) | 25MB | - | +| [modnet_webcam](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_webcam_portrait_matting.onnx) | 25MB | -| +| [modnet_photographic_256](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic_portrait_matting-256x256.onnx) | 25MB | - | +| 
[modnet_webcam_256](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_webcam_portrait_matting-256x256.onnx) | 25MB | - | +| [modnet_photographic_512](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic_portrait_matting-512x512.onnx) | 25MB | - | +| [modnet_webcam_512](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_webcam_portrait_matting-512x512.onnx) | 25MB | - | +| [modnet_photographic_1024](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic_portrait_matting-1024x1024.onnx) | 25MB | - | +| [modnet_webcam_1024](https://bj.bcebos.com/paddlehub/fastdeploy/modnet_webcam_portrait_matting-1024x1024.onnx) | 25MB | -| + + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/matting/modnet/cpp/CMakeLists.txt b/examples/vision/matting/modnet/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/matting/modnet/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/matting/modnet/cpp/README.md b/examples/vision/matting/modnet/cpp/README.md new file mode 100644 index 0000000000..82226ae4c8 --- /dev/null +++ b/examples/vision/matting/modnet/cpp/README.md @@ -0,0 +1,85 @@ +# MODNet C++部署示例 + +本目录下提供`infer.cc`快速完成MODNet在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 
根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的MODNet模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic__portrait_matting.onnx +wget todo + + +# CPU推理 +./infer_demo modnet_photographic__portrait_matting.onnx todo 0 +# GPU推理 +./infer_demo modnet_photographic__portrait_matting.onnx todo 1 +# GPU上TensorRT推理 +./infer_demo modnet_photographic__portrait_matting.onnx todo 2 +``` + +运行完成可视化结果如下图所示 + + + +## MODNet C++接口 + +### MODNet类 + +``` +fastdeploy::vision::matting::MODNet( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> MODNet::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 
通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/matting/modnet/python/README.md b/examples/vision/matting/modnet/python/README.md new file mode 100644 index 0000000000..1ae86020f4 --- /dev/null +++ b/examples/vision/matting/modnet/python/README.md @@ -0,0 +1,79 @@ +# MODNet Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成MODNet在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载modnet模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic__portrait_matting.onnx +wget todo + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/modnet/python/ + +# CPU推理 +python infer.py --model modnet_photographic__portrait_matting.onnx --image todo --device cpu +# GPU推理 +python infer.py --model modnet_photographic__portrait_matting.onnx --image todo --device gpu +# GPU上使用TensorRT推理 +python infer.py --model modnet_photographic__portrait_matting.onnx --image todo --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## MODNet Python接口 + +``` +fastdeploy.vision.facedet.MODNet(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> MODNet.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * 
**image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [MODNet 模型介绍](..) +- [MODNet C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) From 3d83654c9980e0f7be11206814dd7056aba4c4d4 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:42:09 +0000 Subject: [PATCH 53/94] firt commit for modnet and arcface --- examples/vision/matting/modnet/python/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/matting/modnet/python/README.md b/examples/vision/matting/modnet/python/README.md index 1ae86020f4..d7b1149f8e 100644 --- a/examples/vision/matting/modnet/python/README.md +++ b/examples/vision/matting/modnet/python/README.md @@ -32,7 +32,7 @@ python infer.py --model modnet_photographic__portrait_matting.onnx --image todo ## MODNet Python接口 ``` -fastdeploy.vision.facedet.MODNet(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.matting.MODNet(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) ``` MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式 From b53179d29ac3d98c75b553f987296b4aac0eb47c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:55:22 +0000 Subject: [PATCH 54/94] first 
commit for partial_fc --- examples/vision/faceid/partial_fc/README.md | 37 ++++++++ .../faceid/partial_fc/cpp/CMakeLists.txt | 14 +++ .../vision/faceid/partial_fc/cpp/README.md | 85 +++++++++++++++++++ .../vision/faceid/partial_fc/python/README.md | 79 +++++++++++++++++ 4 files changed, 215 insertions(+) create mode 100644 examples/vision/faceid/partial_fc/README.md create mode 100644 examples/vision/faceid/partial_fc/cpp/CMakeLists.txt create mode 100644 examples/vision/faceid/partial_fc/cpp/README.md create mode 100644 examples/vision/faceid/partial_fc/python/README.md diff --git a/examples/vision/faceid/partial_fc/README.md b/examples/vision/faceid/partial_fc/README.md new file mode 100644 index 0000000000..ca03ba2e79 --- /dev/null +++ b/examples/vision/faceid/partial_fc/README.md @@ -0,0 +1,37 @@ + + + + + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了RetinaFace导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [partial_fc_glint360k_r50](https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx) | 167MB | - | +| [partial_fc_glint360k_r100](https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r100.onnx) | 249MB | -| + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/faceid/partial_fc/cpp/CMakeLists.txt b/examples/vision/faceid/partial_fc/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/faceid/partial_fc/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git 
a/examples/vision/faceid/partial_fc/cpp/README.md b/examples/vision/faceid/partial_fc/cpp/README.md new file mode 100644 index 0000000000..20a2f0eb6e --- /dev/null +++ b/examples/vision/faceid/partial_fc/cpp/README.md @@ -0,0 +1,85 @@ +# PartialFC C++部署示例 + +本目录下提供`infer.cc`快速完成PartialFC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的PartialFC模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx +wget todo + + +# CPU推理 +./infer_demo partial_fc_glint360k_r50.onnx todo 0 +# GPU推理 +./infer_demo partial_fc_glint360k_r50.onnx todo 1 +# GPU上TensorRT推理 +./infer_demo partial_fc_glint360k_r50.onnx todo 2 +``` + +运行完成可视化结果如下图所示 + + + +## PartialFC C++接口 + +### PartialFC类 + +``` +fastdeploy::vision::faceid::PartialFC( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +PartialFC模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> PartialFC::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * 
**nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/faceid/partial_fc/python/README.md b/examples/vision/faceid/partial_fc/python/README.md new file mode 100644 index 0000000000..6189e99c47 --- /dev/null +++ b/examples/vision/faceid/partial_fc/python/README.md @@ -0,0 +1,79 @@ +# PartialFC Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成PartialFC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载partial_fc模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx +wget todo + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/partial_fc/python/ + +# CPU推理 +python infer.py --model partial_fc_glint360k_r50.onnx --image todo --device cpu +# GPU推理 +python infer.py --model partial_fc_glint360k_r50.onnx --image todo --device gpu +# GPU上使用TensorRT推理 +python infer.py --model partial_fc_glint360k_r50.onnx --image todo --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## PartialFC Python接口 + +``` +fastdeploy.vision.faceid.PartialFC(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +PartialFC模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> PartialFC.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 
并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [PartialFC 模型介绍](..) +- [PartialFC C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) From d2a12d18ccc7b2053448ffe1460677b7687e2344 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 14:58:08 +0000 Subject: [PATCH 55/94] first commit for partial_fc --- examples/vision/faceid/arcface/README.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/vision/faceid/arcface/README.md b/examples/vision/faceid/arcface/README.md index 6a122ec400..cb93054029 100644 --- a/examples/vision/faceid/arcface/README.md +++ b/examples/vision/faceid/arcface/README.md @@ -1,10 +1,10 @@ -# RetinaFace准备部署模型 +# ArcFace准备部署模型 ## 模型版本说明 - [ArcFace CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) - (1)[链接中](https://github.com/deepinsight/insightface/commit/babb9a5)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - - (2)开发者基于自己数据训练的RetinaFace CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + - (2)开发者基于自己数据训练的ArcFace CommitID:babb9a5模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 From 2aecb9665f061cab924e23cf97aea4f5595e63e9 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 15:10:49 +0000 Subject: [PATCH 56/94] first commit for yolox --- examples/vision/detection/yolox/README.md | 23 +++++ .../vision/detection/yolox/cpp/CMakeLists.txt | 14 +++ examples/vision/detection/yolox/cpp/README.md | 85 +++++++++++++++++++ .../vision/detection/yolox/python/README.md | 79 +++++++++++++++++ 4 files changed, 201 insertions(+) create mode 100644 examples/vision/detection/yolox/README.md create mode 100644 examples/vision/detection/yolox/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/yolox/cpp/README.md create mode 100644 examples/vision/detection/yolox/python/README.md diff --git 
a/examples/vision/detection/yolox/README.md b/examples/vision/detection/yolox/README.md new file mode 100644 index 0000000000..2a0d10d8ac --- /dev/null +++ b/examples/vision/detection/yolox/README.md @@ -0,0 +1,23 @@ +# YOLOv7准备部署模型 + +## 模型版本说明 + +- [YOLOX v0.1.1](https://github.com/Megvii-BaseDetection/YOLOX/releases/download/0.1.1rc0) + - (1)[链接中](https://github.com/Megvii-BaseDetection/YOLOX/releases/download/0.1.1rc0)的*.onnx可直接进行部署; + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了YOLOv7导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [YOLOX-s](https://bj.bcebos.com/paddlehub/fastdeploy/yolox_s.onnx) | 35MB | 40.5% | + + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/detection/yolox/cpp/CMakeLists.txt b/examples/vision/detection/yolox/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/detection/yolox/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/detection/yolox/cpp/README.md b/examples/vision/detection/yolox/cpp/README.md new file mode 100644 index 0000000000..abe7611266 --- /dev/null +++ b/examples/vision/detection/yolox/cpp/README.md @@ -0,0 +1,85 @@ +# YOLOX C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOX在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 
根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的YOLOX模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolox_s.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +# CPU推理 +./infer_demo yolox_s.onnx 000000014439.jpg 0 +# GPU推理 +./infer_demo yolox_s.onnx 000000014439.jpg 1 +# GPU上TensorRT推理 +./infer_demo yolox_s.onnx 000000014439.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOX C++接口 + +### YOLOX类 + +``` +fastdeploy::vision::detection::YOLOX( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOX::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 
通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolox/python/README.md b/examples/vision/detection/yolox/python/README.md new file mode 100644 index 0000000000..7a73132a26 --- /dev/null +++ b/examples/vision/detection/yolox/python/README.md @@ -0,0 +1,79 @@ +# YOLOX Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOX在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载YOLOX模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolox_s.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolox/python/ + +# CPU推理 +python infer.py --model yolox_s.onnx --image 000000014439.jpg --device cpu +# GPU推理 +python infer.py --model yolox_s.onnx --image 000000014439.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model yolox_s.onnx --image 000000014439.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## YOLOX Python接口 + +``` +fastdeploy.vision.detection.YOLOX(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOX.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 
输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOX 模型介绍](..) +- [YOLOX C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) From 7165e0e668b7cbbcb557afb37092fae6de9a4282 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 15:18:19 +0000 Subject: [PATCH 57/94] first commit for yolov6 --- .../vision/detection/nanodet_plus/README.md | 23 +++++ .../detection/nanodet_plus/cpp/CMakeLists.txt | 14 +++ .../detection/nanodet_plus/cpp/README.md | 85 +++++++++++++++++++ .../detection/nanodet_plus/python/README.md | 79 +++++++++++++++++ examples/vision/detection/yolov6/README.md | 23 +++++ .../detection/yolov6/cpp/CMakeLists.txt | 14 +++ .../vision/detection/yolov6/cpp/README.md | 85 +++++++++++++++++++ .../vision/detection/yolov6/python/README.md | 79 +++++++++++++++++ examples/vision/detection/yolox/README.md | 4 +- 9 files changed, 404 insertions(+), 2 deletions(-) create mode 100644 examples/vision/detection/nanodet_plus/README.md create mode 100644 examples/vision/detection/nanodet_plus/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/nanodet_plus/cpp/README.md create mode 100644 examples/vision/detection/nanodet_plus/python/README.md create mode 100644 examples/vision/detection/yolov6/README.md create 
mode 100644 examples/vision/detection/yolov6/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/yolov6/cpp/README.md create mode 100644 examples/vision/detection/yolov6/python/README.md diff --git a/examples/vision/detection/nanodet_plus/README.md b/examples/vision/detection/nanodet_plus/README.md new file mode 100644 index 0000000000..7f52f30310 --- /dev/null +++ b/examples/vision/detection/nanodet_plus/README.md @@ -0,0 +1,23 @@ +# YOLOv6准备部署模型 + +## 模型版本说明 + +- [YOLOv6 v0.1.0](https://github.com/meituan/YOLOv6/releases/download/0.1.0) + - (1)[链接中](https://github.com/meituan/YOLOv6/releases/download/0.1.0)的*.onnx可直接进行部署; + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了YOLOv6导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [YOLOv6s](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx) | 66MB | 43.1% | +| [YOLOv6s_640](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s-640x640.onnx) | 66MB | 43.1% | + +nanodet-plus-m_320.onnx nanodet-plus-m_320-sim.onnx + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/detection/nanodet_plus/cpp/CMakeLists.txt b/examples/vision/detection/nanodet_plus/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/detection/nanodet_plus/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/detection/nanodet_plus/cpp/README.md b/examples/vision/detection/nanodet_plus/cpp/README.md new file mode 100644 index 0000000000..5a73f8b55e --- /dev/null +++ 
b/examples/vision/detection/nanodet_plus/cpp/README.md @@ -0,0 +1,85 @@ +# YOLOv6 C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOv6在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的YOLOv6模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +# CPU推理 +./infer_demo yolov6s.onnx 000000014439.jpg 0 +# GPU推理 +./infer_demo yolov6s.onnx 000000014439.jpg 1 +# GPU上TensorRT推理 +./infer_demo yolov6s.onnx 000000014439.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv6 C++接口 + +### YOLOv6类 + +``` +fastdeploy::vision::detection::YOLOv6( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOv6::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 
640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/nanodet_plus/python/README.md b/examples/vision/detection/nanodet_plus/python/README.md new file mode 100644 index 0000000000..35c35b2084 --- /dev/null +++ b/examples/vision/detection/nanodet_plus/python/README.md @@ -0,0 +1,79 @@ +# YOLOv6 Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOv6在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载YOLOv6模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolov6/python/ + +# CPU推理 +python infer.py --model yolov6s.onnx --image 000000014439.jpg --device cpu +# GPU推理 +python infer.py --model yolov6s.onnx --image 000000014439.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model yolov6s.onnx --image 000000014439.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv6 Python接口 + +``` +fastdeploy.vision.detection.YOLOv6(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * 
**runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOv6.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOv6 模型介绍](..) 
+- [YOLOv6 C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolov6/README.md b/examples/vision/detection/yolov6/README.md new file mode 100644 index 0000000000..878e530bda --- /dev/null +++ b/examples/vision/detection/yolov6/README.md @@ -0,0 +1,23 @@ +# YOLOv6准备部署模型 + +## 模型版本说明 + +- [YOLOv6 v0.1.0](https://github.com/meituan/YOLOv6/releases/download/0.1.0) + - (1)[链接中](https://github.com/meituan/YOLOv6/releases/download/0.1.0)的*.onnx可直接进行部署; + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了YOLOv6导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [YOLOv6s](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx) | 66MB | 43.1% | +| [YOLOv6s_640](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s-640x640.onnx) | 66MB | 43.1% | + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/detection/yolov6/cpp/CMakeLists.txt b/examples/vision/detection/yolov6/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/detection/yolov6/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/detection/yolov6/cpp/README.md b/examples/vision/detection/yolov6/cpp/README.md new file mode 100644 index 0000000000..5a73f8b55e --- /dev/null +++ b/examples/vision/detection/yolov6/cpp/README.md @@ -0,0 +1,85 @@ +# YOLOv6 C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOv6在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 
软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的YOLOv6模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +# CPU推理 +./infer_demo yolov6s.onnx 000000014439.jpg 0 +# GPU推理 +./infer_demo yolov6s.onnx 000000014439.jpg 1 +# GPU上TensorRT推理 +./infer_demo yolov6s.onnx 000000014439.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv6 C++接口 + +### YOLOv6类 + +``` +fastdeploy::vision::detection::YOLOv6( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOv6::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, 
`is_no_pad=true` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高调整为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32`
+> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高设置为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOv6 模型介绍](..) +- [YOLOv6 C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolox/README.md b/examples/vision/detection/yolox/README.md index 2a0d10d8ac..72dc51be1d 100644 --- a/examples/vision/detection/yolox/README.md +++ b/examples/vision/detection/yolox/README.md @@ -1,4 +1,4 @@ -# YOLOv7准备部署模型 +# YOLOX准备部署模型 ## 模型版本说明 @@ -8,7 +8,7 @@ ## 下载预训练ONNX模型 -为了方便开发者的测试,下面提供了YOLOv7导出的各系列模型,开发者可直接下载使用。 +为了方便开发者的测试,下面提供了YOLOX导出的各系列模型,开发者可直接下载使用。 | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | From 1c7a5786c1e6cfeffabea97222cbb4b99735f65b Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Wed, 10 Aug 2022 15:27:23 +0000 Subject: [PATCH 58/94] first commit for nano_det --- .../vision/detection/nanodet_plus/README.md | 13 ++++----- .../detection/nanodet_plus/cpp/README.md | 24 ++++++++-------- .../detection/nanodet_plus/python/README.md | 28 +++++++++---------- 3 files changed, 32 insertions(+), 33 deletions(-) diff --git a/examples/vision/detection/nanodet_plus/README.md
b/examples/vision/detection/nanodet_plus/README.md index 7f52f30310..b3fd574631 100644 --- a/examples/vision/detection/nanodet_plus/README.md +++ b/examples/vision/detection/nanodet_plus/README.md @@ -1,21 +1,20 @@ -# YOLOv6准备部署模型 +# NanoDetPlus准备部署模型 ## 模型版本说明 -- [YOLOv6 v0.1.0](https://github.com/meituan/YOLOv6/releases/download/0.1.0) - - (1)[链接中](https://github.com/meituan/YOLOv6/releases/download/0.1.0)的*.onnx可直接进行部署; +- [NanoDetPlus v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1) + - (1)[链接中](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)的*.onnx可直接进行部署 ## 下载预训练ONNX模型 -为了方便开发者的测试,下面提供了YOLOv6导出的各系列模型,开发者可直接下载使用。 +为了方便开发者的测试,下面提供了NanoDetPlus导出的各系列模型,开发者可直接下载使用。 | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | -| [YOLOv6s](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx) | 66MB | 43.1% | -| [YOLOv6s_640](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s-640x640.onnx) | 66MB | 43.1% | +| [NanoDetPlus_320](https://bj.bcebos.com/paddlehub/fastdeploy/nanodet-plus-m_320.onnx ) | 4.6MB | 27.0% | +| [NanoDetPlus_320_sim](https://bj.bcebos.com/paddlehub/fastdeploy/nanodet-plus-m_320-sim.onnx) | 4.6MB | 27.0% | -nanodet-plus-m_320.onnx nanodet-plus-m_320-sim.onnx ## 详细部署文档 diff --git a/examples/vision/detection/nanodet_plus/cpp/README.md b/examples/vision/detection/nanodet_plus/cpp/README.md index 5a73f8b55e..2dbee5e31d 100644 --- a/examples/vision/detection/nanodet_plus/cpp/README.md +++ b/examples/vision/detection/nanodet_plus/cpp/README.md @@ -1,6 +1,6 @@ -# YOLOv6 C++部署示例 +# NanoDetPlus C++部署示例 -本目录下提供`infer.cc`快速完成YOLOv6在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 +本目录下提供`infer.cc`快速完成NanoDetPlus在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 在部署前,需确认以下两个步骤 @@ -17,36 +17,36 @@ tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j -#下载官方转换好的YOLOv6模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx +#下载官方转换好的NanoDetPlus模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/nanodet-plus-m_320.onnx wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg # CPU推理 -./infer_demo yolov6s.onnx 000000014439.jpg 0 +./infer_demo nanodet-plus-m_320.onnx 000000014439.jpg 0 # GPU推理 -./infer_demo yolov6s.onnx 000000014439.jpg 1 +./infer_demo nanodet-plus-m_320.onnx 000000014439.jpg 1 # GPU上TensorRT推理 -./infer_demo yolov6s.onnx 000000014439.jpg 2 +./infer_demo nanodet-plus-m_320.onnx 000000014439.jpg 2 ``` 运行完成可视化结果如下图所示 -## YOLOv6 C++接口 +## NanoDetPlus C++接口 -### YOLOv6类 +### NanoDetPlus类 ``` -fastdeploy::vision::detection::YOLOv6( +fastdeploy::vision::detection::NanoDetPlus( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), const Frontend& model_format = Frontend::ONNX) ``` -YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式。 +NanoDetPlus模型加载和初始化,其中model_file为导出的ONNX模型格式。 **参数** @@ -58,7 +58,7 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式。 #### Predict函数 > ``` -> YOLOv6::Predict(cv::Mat* im, DetectionResult* result, +> NanoDetPlus::Predict(cv::Mat* im, DetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` diff --git a/examples/vision/detection/nanodet_plus/python/README.md b/examples/vision/detection/nanodet_plus/python/README.md index 35c35b2084..7a60a31c8f 100644 --- a/examples/vision/detection/nanodet_plus/python/README.md +++ b/examples/vision/detection/nanodet_plus/python/README.md @@ -1,41 +1,41 @@ -# YOLOv6 Python部署示例 +# NanoDetPlus Python部署示例 在部署前,需确认以下两个步骤 - 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) - 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) -本目录下提供`infer.py`快速完成YOLOv6在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 +本目录下提供`infer.py`快速完成NanoDetPlus在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 ``` -#下载YOLOv6模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx +#下载NanoDetPlus模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/nanodet-plus-m_320.onnx wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/yolov6/python/ +cd examples/vison/detection/nanodet_plus/python/ # CPU推理 -python infer.py --model yolov6s.onnx --image 000000014439.jpg --device cpu +python infer.py --model nanodet-plus-m_320.onnx --image 000000014439.jpg --device cpu # GPU推理 -python infer.py --model yolov6s.onnx --image 000000014439.jpg --device gpu +python infer.py --model nanodet-plus-m_320.onnx --image 000000014439.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model yolov6s.onnx --image 000000014439.jpg --device gpu --use_trt True +python infer.py --model nanodet-plus-m_320.onnx --image 000000014439.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 -## YOLOv6 Python接口 +## NanoDetPlus Python接口 ``` -fastdeploy.vision.detection.YOLOv6(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.detection.NanoDetPlus(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) ``` -YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式 +NanoDetPlus模型加载和初始化,其中model_file为导出的ONNX模型格式 **参数** @@ -47,7 +47,7 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式 ### predict函数 > ``` -> YOLOv6.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> NanoDetPlus.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) > ``` > > 模型预测结口,输入图像直接输出检测结果。 @@ -74,6 +74,6 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式 ## 其它文档 -- [YOLOv6 模型介绍](..) 
-- [YOLOv6 C++部署](../cpp) +- [NanoDetPlus 模型介绍](..) +- [NanoDetPlus C++部署](../cpp) - [模型预测结果说明](../../../../../docs/api/vision_results/) From d6a4289db37f5e7d5d1a0a7cd9207c434b7ed7cc Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 03:46:04 +0000 Subject: [PATCH 59/94] first commit for scrfd --- examples/vision/facedet/scrfd/README.md | 69 ++++++++++++ .../vision/facedet/scrfd/cpp/CMakeLists.txt | 14 +++ examples/vision/facedet/scrfd/cpp/README.md | 89 +++++++++++++++ examples/vision/facedet/scrfd/cpp/infer.cc | 106 ++++++++++++++++++ .../vision/facedet/scrfd/python/README.md | 82 ++++++++++++++ examples/vision/facedet/scrfd/python/infer.py | 51 +++++++++ 6 files changed, 411 insertions(+) create mode 100644 examples/vision/facedet/scrfd/README.md create mode 100644 examples/vision/facedet/scrfd/cpp/CMakeLists.txt create mode 100644 examples/vision/facedet/scrfd/cpp/README.md create mode 100644 examples/vision/facedet/scrfd/cpp/infer.cc create mode 100644 examples/vision/facedet/scrfd/python/README.md create mode 100644 examples/vision/facedet/scrfd/python/infer.py diff --git a/examples/vision/facedet/scrfd/README.md b/examples/vision/facedet/scrfd/README.md new file mode 100644 index 0000000000..a0e7a51513 --- /dev/null +++ b/examples/vision/facedet/scrfd/README.md @@ -0,0 +1,69 @@ +# SCRFD准备部署模型 + +## 模型版本说明 + +- [SCRFD CID:17cdeab](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) + - (1)[链接中](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)开发者基于自己数据训练的SCRFD CID:17cdeab模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + +## 导出ONNX模型 + + ``` + #下载scrfd模型文件 + e.g. 
download from https://onedrive.live.com/?authkey=%21ABbFJx2JMhNjhNA&id=4A83B6B633B029CC%215542&cid=4A83B6B633B029CC + + # 安装官方库配置环境,此版本导出环境为: + - 手动配置环境 + torch==1.8.0 + mmcv==1.3.5 + mmdet==2.7.0 + + - 通过docker配置 + docker pull qyjdefdocker/onnx-scrfd-converter:v0.3 + + # 导出onnx格式文件 + - 手动生成 + python tools/scrfd2onnx.py configs/scrfd/scrfd_500m.py weights/scrfd_500m.pth --shape 640 --input-img face-xxx.jpg + + - docker + docker的onnx目录中已有生成好的onnx文件 + + + # 移动onnx文件到demo目录 + cp PATH/TO/SCRFD.onnx PATH/TO/model_zoo/vision/scrfd/ + ``` + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了SCRFD导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [SCRFD-500M-kps-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_bnkps_shape160x160.onnx) | 2.5MB | - | +| [SCRFD-500M-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_shape160x160.onnx) | 2.2MB | - | +| [SCRFD-500M-kps-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_bnkps_shape320x320.onnx) | 2.5MB | - | +| [SCRFD-500M-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_shape320x320.onnx) | 2.2MB | - | +| [SCRFD-500M-kps-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_bnkps_shape640x640.onnx) | 2.5MB | 90.97% | +| [SCRFD-500M-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_shape640x640.onnx) | 2.2MB | 90.57% | +| [SCRFD-1G-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_1g_shape160x160.onnx ) | 2.5MB | - | +| [SCRFD-1G-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_1g_shape320x320.onnx) | 2.5MB | - | +| [SCRFD-1G-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_1g_shape640x640.onnx) | 2.5MB | 92.38% | +| [SCRFD-2.5G-kps-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_2.5g_bnkps_shape160x160.onnx) | 3.2MB | - | +| [SCRFD-2.5G-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_2.5g_shape160x160.onnx) | 2.6MB | - | +| 
[SCRFD-2.5G-kps-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_2.5g_bnkps_shape320x320.onnx) | 3.2MB | - | +| [SCRFD-2.5G-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_2.5g_shape320x320.onnx) | 2.6MB | - | +| [SCRFD-2.5G-kps-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_2.5g_bnkps_shape640x640.onnx) | 3.2MB | 93.8% | +| [SCRFD-2.5G-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_2.5g_shape640x640.onnx) | 2.6MB | 93.78% | +| [SCRFD-10G-kps-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_10g_bnkps_shape160x160.onnx) | 17MB | - | +| [SCRFD-10G-160](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_10g_shape160x160.onnx) | 15MB | - | +| [SCRFD-10G-kps-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_10g_bnkps_shape320x320.onnx) | 17MB | - | +| [SCRFD-10G-320](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_10g_shape320x320.onnx) | 15MB | - | +| [SCRFD-10G-kps-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_10g_bnkps_shape640x640.onnx) | 17MB | 95.4% | +| [SCRFD-10G-640](https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_10g_shape640x640.onnx) | 15MB | 95.16% | + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/facedet/scrfd/cpp/CMakeLists.txt b/examples/vision/facedet/scrfd/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/facedet/scrfd/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/facedet/scrfd/cpp/README.md b/examples/vision/facedet/scrfd/cpp/README.md new file mode 100644 index 0000000000..cc1a1a71b7 --- 
/dev/null +++ b/examples/vision/facedet/scrfd/cpp/README.md @@ -0,0 +1,89 @@ +# SCRFD C++部署示例 + +本目录下提供`infer.cc`快速完成SCRFD在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的SCRFD模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_bnkps_shape640x640.onnx +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg + + +# CPU推理 +./infer_demo scrfd_500m_bnkps_shape640x640.onnx test_lite_face_detector_3.jpg 0 +# GPU推理 +./infer_demo scrfd_500m_bnkps_shape640x640.onnx test_lite_face_detector_3.jpg 1 +# GPU上TensorRT推理 +./infer_demo scrfd_500m_bnkps_shape640x640.onnx test_lite_face_detector_3.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## SCRFD C++接口 + +### SCRFD类 + +``` +fastdeploy::vision::detection::SCRFD( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +SCRFD模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> SCRFD::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 
检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector&lt;int&gt;): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=true` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高设置为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **downsample_strides**(vector<int>): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] +> > * **landmarks_per_face**(int): 如果使用具有人脸关键点的输出, 可以修改人脸关键点数量, 默认值为`landmarks_per_face=5` +> > * **use_kps**(bool): 通过此参数可以设置模型是否使用关键点,如果ONNX文件没有关键点输出则需要将`use_kps=false`, 并将`landmarks_per_face=0`, 默认值为`use_kps=true` +> > * **num_anchors**(int): 通过此参数可以设置每个锚点预测的anchor数量, 需要跟进训练模型的参数设定, 默认值为`num_anchors=2` + +- [模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/facedet/scrfd/cpp/infer.cc b/examples/vision/facedet/scrfd/cpp/infer.cc new file mode 100644 index 0000000000..ba68974d39 --- /dev/null +++ b/examples/vision/facedet/scrfd/cpp/infer.cc @@ -0,0 +1,106 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License.
+ +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::facedet::SCRFD(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::facedet::SCRFD(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::facedet::SCRFD(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout + << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model scrfd_500m_bnkps_shape640x640.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/facedet/scrfd/python/README.md b/examples/vision/facedet/scrfd/python/README.md new file mode 100644 index 0000000000..1b19f8670d --- /dev/null +++ b/examples/vision/facedet/scrfd/python/README.md @@ -0,0 +1,82 @@ +# SCRFD Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成SCRFD在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载SCRFD模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/scrfd_500m_bnkps_shape640x640.onnx +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vision/facedet/scrfd/python/ + +# CPU推理 +python infer.py --model scrfd_500m_bnkps_shape640x640.onnx --image test_lite_face_detector_3.jpg --device cpu +# GPU推理 +python infer.py --model scrfd_500m_bnkps_shape640x640.onnx --image test_lite_face_detector_3.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model scrfd_500m_bnkps_shape640x640.onnx --image test_lite_face_detector_3.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## SCRFD Python接口 + +``` +fastdeploy.vision.facedet.SCRFD(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +SCRFD模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> SCRFD.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * 
**is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高设置为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **downsample_strides**(list[int]): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] +> > * **landmarks_per_face**(int): 如果使用具有人脸关键点的输出, 可以修改人脸关键点数量, 默认值为`landmarks_per_face=5` +> > * **use_kps**(bool): 通过此参数可以设置模型是否使用关键点,如果ONNX文件没有关键点输出则需要将`use_kps=False`, 并将`landmarks_per_face=0`, 默认值为`use_kps=True` +> > * **num_anchors**(int): 通过此参数可以设置每个锚点预测的anchor数量, 需要跟进训练模型的参数设定, 默认值为`num_anchors=2` + + +## 其它文档 + +- [SCRFD 模型介绍](..) +- [SCRFD C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/facedet/scrfd/python/infer.py b/examples/vision/facedet/scrfd/python/infer.py new file mode 100644 index 0000000000..0bd79a3d37 --- /dev/null +++ b/examples/vision/facedet/scrfd/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of scrfd onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Whether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = 
fd.vision.facedet.SCRFD(args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_face_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") From dba0368c6bb8dd19a618ce381ee7bc978b715eb9 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 03:51:11 +0000 Subject: [PATCH 60/94] first commit for scrfd --- examples/vision/facedet/scrfd/cpp/README.md | 2 +- examples/vision/facedet/scrfd/python/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/vision/facedet/scrfd/cpp/README.md b/examples/vision/facedet/scrfd/cpp/README.md index cc1a1a71b7..d01ad619a6 100644 --- a/examples/vision/facedet/scrfd/cpp/README.md +++ b/examples/vision/facedet/scrfd/cpp/README.md @@ -32,7 +32,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li 运行完成可视化结果如下图所示 - + ## SCRFD C++接口 diff --git a/examples/vision/facedet/scrfd/python/README.md b/examples/vision/facedet/scrfd/python/README.md index 1b19f8670d..87e668b550 100644 --- a/examples/vision/facedet/scrfd/python/README.md +++ b/examples/vision/facedet/scrfd/python/README.md @@ -27,7 +27,7 @@ python infer.py --model scrfd_500m_bnkps_shape640x640.onnx --image test_lite_fac 运行完成可视化结果如下图所示 - + ## SCRFD Python接口 From ed5ea33441da6f3a07ae532ae0fa2b98044171e2 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 04:03:12 +0000 Subject: [PATCH 61/94] first commit for retinaface --- .../vision/facedet/retinaface/cpp/README.md | 8 +- .../vision/facedet/retinaface/cpp/infer.cc | 106 ++++++++++++++++++ .../facedet/retinaface/python/README.md | 8 +- .../vision/facedet/retinaface/python/infer.py | 51 +++++++++ 4 files changed, 165 insertions(+), 8 deletions(-) create mode 100644 examples/vision/facedet/retinaface/cpp/infer.cc create mode 100644 examples/vision/facedet/retinaface/python/infer.py diff --git 
a/examples/vision/facedet/retinaface/cpp/README.md b/examples/vision/facedet/retinaface/cpp/README.md index dc36657076..95a8d4a616 100644 --- a/examples/vision/facedet/retinaface/cpp/README.md +++ b/examples/vision/facedet/retinaface/cpp/README.md @@ -19,15 +19,15 @@ make -j #下载官方转换好的RetinaFace模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx -wget todo +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg # CPU推理 -./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx todo 0 +./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx test_lite_face_detector_3.jpg 0 # GPU推理 -./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx todo 1 +./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx test_lite_face_detector_3.jpg 1 # GPU上TensorRT推理 -./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx todo 2 +./infer_demo Pytorch_RetinaFace_mobile0.25-640-640.onnx test_lite_face_detector_3.jpg 2 ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/facedet/retinaface/cpp/infer.cc b/examples/vision/facedet/retinaface/cpp/infer.cc new file mode 100644 index 0000000000..ddda3d78a5 --- /dev/null +++ b/examples/vision/facedet/retinaface/cpp/infer.cc @@ -0,0 +1,106 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::facedet::RetinaFace(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::facedet::RetinaFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::facedet::RetinaFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model Pytorch_RetinaFace_mobile0.25-640-640.onnx " + "./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index b8c3251359..6964e9b12f 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -10,7 +10,7 @@ ``` #下载retinaface模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/Pytorch_RetinaFace_mobile0.25-640-640.onnx -wget todo +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg #下载部署示例代码 @@ -18,11 +18,11 @@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/retinaface/python/ # CPU推理 -python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device cpu +python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image test_lite_face_detector_3.jpg --device cpu # GPU推理 -python infer.py --model 
Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device gpu +python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image test_lite_face_detector_3.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image todo --device gpu --use_trt True +python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image test_lite_face_detector_3.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/facedet/retinaface/python/infer.py b/examples/vision/facedet/retinaface/python/infer.py new file mode 100644 index 0000000000..6ba6ad82ff --- /dev/null +++ b/examples/vision/facedet/retinaface/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolov7 onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.facedet.RetinaFace(args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_face_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") From 53a89a73948bd90b2b241f0a18c2c6a425bbe6ee Mon Sep 17 
00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 04:23:43 +0000 Subject: [PATCH 62/94] first commit for ultraface --- .../vision/facedet/ultraface/cpp/README.md | 8 +- .../vision/facedet/ultraface/cpp/infer.cc | 105 ++++++++++++++++++ .../vision/facedet/ultraface/python/README.md | 8 +- .../vision/facedet/ultraface/python/infer.py | 51 +++++++++ 4 files changed, 164 insertions(+), 8 deletions(-) create mode 100644 examples/vision/facedet/ultraface/cpp/infer.cc create mode 100644 examples/vision/facedet/ultraface/python/infer.py diff --git a/examples/vision/facedet/ultraface/cpp/README.md b/examples/vision/facedet/ultraface/cpp/README.md index 1eae69c0fb..768d459be9 100644 --- a/examples/vision/facedet/ultraface/cpp/README.md +++ b/examples/vision/facedet/ultraface/cpp/README.md @@ -19,15 +19,15 @@ make -j #下载官方转换好的UltraFace模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320.onnx -wget todo +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg # CPU推理 -./infer_demo version-RFB-320.onnx todo 0 +./infer_demo version-RFB-320.onnx test_lite_face_detector_3.jpg 0 # GPU推理 -./infer_demo version-RFB-320.onnx todo 1 +./infer_demo version-RFB-320.onnx test_lite_face_detector_3.jpg 1 # GPU上TensorRT推理 -./infer_demo version-RFB-320.onnx todo 2 +./infer_demo version-RFB-320.onnx test_lite_face_detector_3.jpg 2 ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/facedet/ultraface/cpp/infer.cc b/examples/vision/facedet/ultraface/cpp/infer.cc new file mode 100644 index 0000000000..2467b12c0b --- /dev/null +++ b/examples/vision/facedet/ultraface/cpp/infer.cc @@ -0,0 +1,105 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::facedet::UltraFace(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::facedet::UltraFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::facedet::UltraFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model version-RFB-320.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/facedet/ultraface/python/README.md b/examples/vision/facedet/ultraface/python/README.md index 88026ecff3..74ff4f707d 100644 --- a/examples/vision/facedet/ultraface/python/README.md +++ b/examples/vision/facedet/ultraface/python/README.md @@ -10,7 +10,7 @@ ``` #下载ultraface模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/version-RFB-320.onnx -wget todo +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg #下载部署示例代码 @@ -18,11 +18,11 @@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/ultraface/python/ # CPU推理 -python infer.py --model version-RFB-320.onnx --image todo --device cpu +python infer.py --model version-RFB-320.onnx --image test_lite_face_detector_3.jpg --device cpu # GPU推理 -python infer.py --model version-RFB-320.onnx --image todo --device gpu +python infer.py --model version-RFB-320.onnx --image test_lite_face_detector_3.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model version-RFB-320.onnx --image todo --device gpu --use_trt True +python infer.py --model version-RFB-320.onnx --image test_lite_face_detector_3.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/facedet/ultraface/python/infer.py b/examples/vision/facedet/ultraface/python/infer.py new file mode 100644 index 0000000000..9b2f759088 --- /dev/null +++ b/examples/vision/facedet/ultraface/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolov7 onnx model.") + parser.add_argument( + 
"--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.facedet.UltraFace(args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_face_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") From c335b6b53df3f37235de2a31f9b8853ea72a6b5f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 06:53:03 +0000 Subject: [PATCH 63/94] first commit for yolov5face --- .../vision/facedet/retinaface/python/infer.py | 2 +- examples/vision/facedet/scrfd/python/infer.py | 2 +- .../vision/facedet/ultraface/python/infer.py | 2 +- .../vision/facedet/yolov5face/cpp/README.md | 8 +- .../vision/facedet/yolov5face/cpp/infer.cc | 105 ++++++++++++++++++ .../facedet/yolov5face/python/README.md | 8 +- .../vision/facedet/yolov5face/python/infer.py | 51 +++++++++ 7 files changed, 167 insertions(+), 11 deletions(-) create mode 100644 examples/vision/facedet/yolov5face/cpp/infer.cc create mode 100644 examples/vision/facedet/yolov5face/python/infer.py diff --git a/examples/vision/facedet/retinaface/python/infer.py b/examples/vision/facedet/retinaface/python/infer.py index 6ba6ad82ff..16e38c7f6d 100644 --- a/examples/vision/facedet/retinaface/python/infer.py +++ b/examples/vision/facedet/retinaface/python/infer.py @@ -7,7 
+7,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of yolov7 onnx model.") + "--model", required=True, help="Path of retinaface onnx model.") parser.add_argument( "--image", required=True, help="Path of test image file.") parser.add_argument( diff --git a/examples/vision/facedet/scrfd/python/infer.py b/examples/vision/facedet/scrfd/python/infer.py index 0bd79a3d37..828877fbd8 100644 --- a/examples/vision/facedet/scrfd/python/infer.py +++ b/examples/vision/facedet/scrfd/python/infer.py @@ -7,7 +7,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of yolov7 onnx model.") + "--model", required=True, help="Path of scrfd onnx model.") parser.add_argument( "--image", required=True, help="Path of test image file.") parser.add_argument( diff --git a/examples/vision/facedet/ultraface/python/infer.py b/examples/vision/facedet/ultraface/python/infer.py index 9b2f759088..e8084333ca 100644 --- a/examples/vision/facedet/ultraface/python/infer.py +++ b/examples/vision/facedet/ultraface/python/infer.py @@ -7,7 +7,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of yolov7 onnx model.") + "--model", required=True, help="Path of ultraface onnx model.") parser.add_argument( "--image", required=True, help="Path of test image file.") parser.add_argument( diff --git a/examples/vision/facedet/yolov5face/cpp/README.md b/examples/vision/facedet/yolov5face/cpp/README.md index ec0b48ad0a..77ad27ea07 100644 --- a/examples/vision/facedet/yolov5face/cpp/README.md +++ b/examples/vision/facedet/yolov5face/cpp/README.md @@ -19,15 +19,15 @@ make -j #下载官方转换好的YOLOv5Face模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx -wget todo +wget 
https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg # CPU推理 -./infer_demo yolov5s-face.onnx todo 0 +./infer_demo yolov5s-face.onnx test_lite_face_detector_3.jpg 0 # GPU推理 -./infer_demo yolov5s-face.onnx todo 1 +./infer_demo yolov5s-face.onnx test_lite_face_detector_3.jpg 1 # GPU上TensorRT推理 -./infer_demo yolov5s-face.onnx todo 2 +./infer_demo yolov5s-face.onnx test_lite_face_detector_3.jpg 2 ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/facedet/yolov5face/cpp/infer.cc b/examples/vision/facedet/yolov5face/cpp/infer.cc new file mode 100644 index 0000000000..4cbf9d3de7 --- /dev/null +++ b/examples/vision/facedet/yolov5face/cpp/infer.cc @@ -0,0 +1,105 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::facedet::YOLOv5Face(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::facedet::YOLOv5Face(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::facedet::YOLOv5Face(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::FaceDetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model yolov5s-face.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/facedet/yolov5face/python/README.md b/examples/vision/facedet/yolov5face/python/README.md index 2fc847f008..ec3f42bde3 100644 --- a/examples/vision/facedet/yolov5face/python/README.md +++ b/examples/vision/facedet/yolov5face/python/README.md @@ -10,7 +10,7 @@ ``` #下载YOLOv5Face模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx -wget todo +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_face_detector_3.jpg #下载部署示例代码 @@ -18,11 +18,11 @@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/yolov5face/python/ # CPU推理 -python infer.py --model yolov5s-face.onnx --image todo --device cpu +python infer.py --model yolov5s-face.onnx --image test_lite_face_detector_3.jpg --device cpu # GPU推理 -python infer.py --model yolov5s-face.onnx --image todo --device gpu +python infer.py --model yolov5s-face.onnx --image test_lite_face_detector_3.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model yolov5s-face.onnx --image todo --device gpu --use_trt True +python infer.py --model yolov5s-face.onnx --image test_lite_face_detector_3.jpg 
--device gpu --use_trt True ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/facedet/yolov5face/python/infer.py b/examples/vision/facedet/yolov5face/python/infer.py new file mode 100644 index 0000000000..eae11254f1 --- /dev/null +++ b/examples/vision/facedet/yolov5face/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolov5face onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.facedet.YOLOv5Face(args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_face_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") From 6ce781e47d9242f7b474a67420345d4c5db636e5 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 08:45:44 +0000 Subject: [PATCH 64/94] first commit for yolox yolov6 nano --- .../detection/nanodet_plus/cpp/infer.cc | 107 ++++++++++++++++++ .../detection/nanodet_plus/python/infer.py | 52 +++++++++ examples/vision/detection/yolov6/cpp/infer.cc | 105 +++++++++++++++++ .../detection/yolov6/python/000000014439.jpg | Bin 0 -> 195229 bytes 
.../vision/detection/yolov6/python/infer.py | 51 +++++++++ .../yolov6/python/visualized_result.jpg | Bin 0 -> 205296 bytes examples/vision/detection/yolox/cpp/infer.cc | 105 +++++++++++++++++ .../vision/detection/yolox/python/infer.py | 51 +++++++++ 8 files changed, 471 insertions(+) create mode 100644 examples/vision/detection/nanodet_plus/cpp/infer.cc create mode 100644 examples/vision/detection/nanodet_plus/python/infer.py create mode 100644 examples/vision/detection/yolov6/cpp/infer.cc create mode 100644 examples/vision/detection/yolov6/python/000000014439.jpg create mode 100644 examples/vision/detection/yolov6/python/infer.py create mode 100644 examples/vision/detection/yolov6/python/visualized_result.jpg create mode 100644 examples/vision/detection/yolox/cpp/infer.cc create mode 100644 examples/vision/detection/yolox/python/infer.py diff --git a/examples/vision/detection/nanodet_plus/cpp/infer.cc b/examples/vision/detection/nanodet_plus/cpp/infer.cc new file mode 100644 index 0000000000..debc6db9b1 --- /dev/null +++ b/examples/vision/detection/nanodet_plus/cpp/infer.cc @@ -0,0 +1,107 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::detection::NanoDetPlus(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = + fastdeploy::vision::detection::NanoDetPlus(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = + fastdeploy::vision::detection::NanoDetPlus(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model ./nanodet-plus-m_320.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/detection/nanodet_plus/python/infer.py b/examples/vision/detection/nanodet_plus/python/infer.py new file mode 100644 index 0000000000..8ad585f3df --- /dev/null +++ b/examples/vision/detection/nanodet_plus/python/infer.py @@ -0,0 +1,52 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of nanodet_plus onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model 
= fd.vision.detection.NanoDetPlus( + args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/detection/yolov6/cpp/infer.cc b/examples/vision/detection/yolov6/cpp/infer.cc new file mode 100644 index 0000000000..affb655771 --- /dev/null +++ b/examples/vision/detection/yolov6/cpp/infer.cc @@ -0,0 +1,105 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::detection::YOLOv6(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::detection::YOLOv6(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::detection::YOLOv6(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model ./yolov6s.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/detection/yolov6/python/000000014439.jpg b/examples/vision/detection/yolov6/python/000000014439.jpg new file mode 100644 index 0000000000000000000000000000000000000000..0abbdab06eb5950b93908cc91adfa640e8a3ac78 GIT binary patch literal 195229 zcmbTd1ymf(_UPNg;O+r}Gq?nIcXtmyIKdr)ySuwPgkZrXI0Tmj3mTk2a0u}D{%5Uo z&bw>f`=n-8uixz2)q8hUb@lG5nwRe{n*gS~w45{m0)YV8*AMWrjjAgrAz`elt|Bd` zDD|2F0I0HxHV%$3m;hk!;O3$(BTlZXr%#SB1;7AE06f430H#n^CnXJOP2e@jNlB2q zzAF90|4BDX!0VX+V4hi4m7M%P^8Z(a1$A<90|1cft2T$Zg)8)xLtpuWr<>D1^7B{5 zHM9GNVPOAZmsba0ncyF``WG|(kIuhX^dC02cQAj|`NwA`b9?iDc<_}YJ=`r`8Larq z;T|>?p0B*{%8(E4_BOA4_sY2T7N)KM0E_yM+|2@N{mQJbjOwDPF7e8O0Dy>U^B-sn_;|@>Ej&M1xVbT_nnLYNUChZP9POP<9lQbH zU)TIkDFFMQx+Q=#U{ip1I zWRArEzzkBED>B(Ya z0cH88LjRrq9~J(s`M-z%vd8j|y?@soxuk`qsrv^v@_#B7>iEIY-G$uM$rNfq&iuc2 z;{V?T|I4la@`Fj;!qURU!r|4F_G>S*aj<%AZU=K4HycL>avO*LT?_yJEcRbM{Dc4M z*ROz6`V8P2vjCXWxB&Qc5`e`)0l)_NuQ{N9x0@o8Ht^4#r$e^?uYUi^ulfIT{J$Mw z5?+78xY}5e|09-A*CdC!yLkM=ulK}12RHy3zyNRoLVy&Y0_Xr{fF0lg1OQP$3Xlg> z08Ky-Fan@}HDC|80G_}{AP5KtVt_;-4af!xfKs3es0Tg+Z9o^$4-5knzznbmtOA?B z9&ikt12@3q>p+G8LIdG|2ti~ZY7irc9mER~21$VwLFynqkO{~N#t_B|#u>&3 
zCKM(fCKIL@rWU3ZrXOYkW)Wr+<{0K041iI<_+Sb!Gnf}F0agL)gDt@>V1IBlI2~LJ zt_OF3N5J#oP4Ef$9u^)J2bLU`8CC#R7FG)u3hNB(4;u@c4O4*Lv; z21g9X2*(d62d4{X1?LGD4wnvB4%Y%V1h)vc2X_Mx504K|3(o^D3$F)n3-1db3!ewy z0N)2c3%>(@jevkah`@*-grI_8ir|J2j*x{=i_nWOhp>lmhlqkmhRBX6ji`_4fEbLJ zj#!P@gE)t{kNAj$iA0UWkEDWRj^vG$fK-aqfi#V@hxCYyg-nYqgsh2diyVZUiCm97 zguI4)iGqSciNc4XhGK&fh?0r&31t*z3*{aa8#;66~+oJ2+q* zDjW$MC{74YDb5hi9xgmCEv_7{Ep7~M9qttFIUY71C!RK*7hVotH{K>b7@r1T4*vsw z0{&w7yrdyzgr{|@&qtBzCVSr)aVz6e&VVGuwVdQ4CVa#KkWrAbkV{%|B zW?E)OWfo)hV6I`_V!>xoWC>x>Vd8hv_>D|b?XI3uO53HrEt8Cb8@@&Cu9c+Kt z8Q7uhIqVA@XdKcU0UTdAE;t!EEjaTzzjNVmDRD(`4RAei^KiRx*K;58KzK}ea(R|{ zad}mFV|hpU;P}M(0{FW4?)ka--S|KA{}x~tuoI{jI1r>2v=A&6+!3M_G8HNk+7u=i zHW4lq-V~t_F%>Bm*%qY|H5V-x-4~-5vlXiqI}>LWcM)$9zm?#Z@RR711WU?DMoUgf zVM}RAWlL>HQ%YM&*GQksaLM?}^vS}>D##|uF3XX~LFKCC&gFUJ{p1G~P!%*3vK6)! z85CUd;!mI^FujM#3iB=E7Fiw!rq*PQ|Xm?&X8d zhX#8@dlUP12W$sBhe1bDM=!@&Cq}1mr%h*m=QQVY7X_DcSCFfr>lZiN*I;VWoz6YX zeal1GBgf;;Q`@u23(L#FYtoy+JIZ_CN7|>%7uFZ*JMfX>W6;MfKQX@|e~`bK|3Cm$ zKxn{TpiE$85OR=R(6?aL;N;+|5S@^YP?FHV(48=uu$pl6aF_7q2!V)#NZ3f5$f+of zsH~{xX!Gdt7}l8dn7^^m*zq{FxXiehc+2>y1nz|VMEFF9#HA#Wq{?KhWS``n6s45b zRPxm5)T=b(w6S!~^nwiJ4EK!9OvTJES&*#6tiRdT*^4>iISskQxly^ddFFX@`J(yt z1tbMA1rLQbh2M*0idu_liqlKrOFT+`m1>ucl<}5Tl@pc6ls{ECRBTqNR}NP3R8>_I zS0~iK)VS9i)*95#)JfKTsb{V)YQS%Z`2>7&|8&%7(zx_l;qyQfZ&Q6Ub#rbDPD^wv zsMWjmyv@38^NZe>*>?H%fewL=mQI$=iY|(-oNm1CgdXIc(4LoGpWf>}r@oVZtNxt< zi4)3XOJ+@sG8S^NzPoa8I;Oa!s~;-DTua%*y1V|#T6x^ukiy8Cx8^cUK%^nJ4Zssr|eu0!d= znWOhdd&f@4k0;@$Sf{yXbZ5=KMSoA8>z?mjxLmyaiM=GethnO58n{-y-ng;9dAyCj zBfP7;=e-|!(0SN@^!$tPH}i@9spDDUdF{pi<>h4+kO06iuMbuj@N0s7{lX!_!NJ19 zAtNBbBcdXsqM{(9prD~+W1^vBp`)N+;$vdr;Ns!op<=utz{e%P#>K<^=OiHTYZ+KL zBse%ETr?Ck-2cz%r4PVD1f{|RfFBC1$r3-NM2(axYzLEKYXd`0fY!3 zy=Kc{0x&S3e>{hQ1;fL^02m+`Ff0}vHU$9BhC?ZafUCxCiik($EFP4E&!Jw_cn&e^ zpWSk~K%$n=2!6xqnp{jk!^JK2k0sFmY6%8-y{K1Pi2q{=3jo2uz8Zmr`^Ov@gam@Y z0>Bh(u-Ia1aHh^UltD>F=kSgFv$*WyTNema>Ud@@9Kp%>kmAn+a~jZX#5dFuoFOSM zD*!4O^m>6{EIu#Krj@)(m9pISk2 
z^gFppY!zCt$}}n4B41(sEHxag;`>wdonB)u5>S-5gD-+hY)Vky*nW}ZCp0rI%Q?ow zD_?^%q=pfw)*R4Uz zCl9T+S+e1a>2TbeaWxo+2xNyNisqAV)IV;E@#~_JjbjvD8P&Z_o_xAo{YI}o;PtC1 zB_-&T4HO&P1-;y_o`+PKc1n6b&xpIgxzvomdyqMw4$z#ploN<9rVhuadbHKH82x}@ z$2}(l7Ac^uriCp_=fFAg52j%x_t)5a9O$e#M0lh+Xs!8)%jpd(O_OMIDQA<4?n+iI zEApg!u|OLd>^m>hu*BGaHF~Ex?AmZspWxZjnLZV@@y54AZ@xzSR6%*BT*|`(Ik1WE z55aTdNXQ6m2r;{2g!+D2n0|w@M|2Y#N#B@OUzCvixK#Q}sbbQfGk-GjgMKg&XTtcExN%uPAKY6pXNsn=q4tJA`EUWrL|jqQEai zzF@Z_pOgpbmQ`bi_|CfBFloWc@4I9ndEW^ymfR*4QzJ6Lk4uQ9D{ra~^So6 zm*}XZD>9GGX1`J2`0Wx5?R#p!JCfs_#GSL)!bNUfUOI<^pkImKhUZ(awpqy+OXvDw zzW|QrQ?-^ZjrHe+d}c@^o38oItd}r-ii@6wOg7ft9?HdSZY?v`Uj@Fu7cS+hmTa(E z*m3^4OKk5rf`AbJsqs&5`wG)jvBjIhsCA95h6T<_?mYdqqoHKqa9jq#0M+o7imTj< zVXvcdG9X!thYxJ6H_90u@%X+K5y+Mb5DMIMXUclH-byOZmWpjl2&PjWyT zWvK8zKRplih6JK{l`i}1wh+L2@cx_NbT^$qA_GB+K8uuc=D2#ES#a~yCyGtxisBYH z8+1T6VGKIw6Lju7_vzdIG0#TvoH+D>a_g3!>0;W{Gz|}5OVwMNB7{}s&&X*mdb@WF zlQeYGjIs#*n8whx?YF9~oeEmDhv~a7bYpRW!aMyhK-b-oAp5V0JZ5V>Cg}=^B{(}b zOpn;FcP=US1@o@j`f4wL_y(1>s=n;5RuZ2uyk}y-&2htBvh5zUX+p1(C`omzryP~3 z4AlO2hx?g+Q`R@5Z6~@zhgLa}ec+AhbK{Cr@Au2g=R9{2WJUe+OT~ewEtdx=gCPZB zGUF9$k+3*yW}{o@F=p*HA_*hx9H`r^K)wVxnR^#{JDuyUuswEZfkc-qOUqV}fLc3t+Sj^01=IpRI>nWRK4#HE1 zxiNcYd--5dWBp%%UZc)%eJyHOEK}l+5kDy%uPNcuqbr`zBl=-E?{Fv6f2|GSBK9MN z1Bsqc@_SlAlTByNNem&_{~qv( z?~l)(Gr<;%?TiEW1~iRvisdez+ZU<^aq;=p63*mh*UlBP1v3}HE!SsKz5@&B zuqq8hg#_q6ltkgPxsC2|VivOk+3Ht)MnqJVv%O3wUQn8{r)dy*th?4nb^TNv%DMpm zdojjNuR0qddqW8YuU+l%h4)FjHcw?;^cLDl?T4juv+DOAPvVmX;^2v8hKsJUVx;OZbMc zXKcE3Ijp__M$OdUrr+@eA#fGXSI^34Dv){NMVXs@UZG$|f8&f;8kFpPH_R$~OlOu_ zN50X3-CbA0QX+UU=7ankjjFp~ex)~kK*KIH9W*v2B1l>}HG%};m!lPfN3<@njdS6n z)Q`)gmKGfk9R5Z!Es_hb+=3+Q{} zrhSbd%~bya2?}KOBH_hl#u%PY4IAN1TM0e;rG!>T2wrq*qJ54 zO#`?f+3V;cGcmCd3ig$1XckVe?LNym>#v+mkgtK{SQVVT6+o^`42w)8*c0WYGKGMG zoi5f~k%EMB=SF@el&UZTvBS*k-~KY(vcy$E!WkRO3D;>3}PnN{T~4d0`BiH(hfFf%Np&ZyrK$5JN!{l-!qrDFuoZza}_rl&1sCw6ILHpzTmc96uH>969hFN3f+mTm4yOi47SHzbgiwA*a zO@1p37Zn>m1g#j4yN^HAHxwx}$e~rWv@YUKR8E#<(_2JNq)G}e*oZz!JX25kc*T?P 
zmlcUt+QWz~B<;L0WH7EuvAgiJ%&YR0ZmhKt@Kc!fqsc}*!HWu9lVf-R<`8U?2EwW0 ziwN$K)gY^>*kIju7RA#YjfHiL@z+PoYRAgURwd zg3ub{$h1_U4r}*AY?P4e2zMcz4*wEAmU+%&p^6$$Tz00mt0(pPRQAvEyOS3aMjctI z^0Gj3k=L-3gvY@ol8N7+5H3O@2EUqhl3QkGqSlO(zK0P9*+u_f!i(_%t!#sK+ZM8X ziNRBT66q!=uG1R+327esCa(~~>^1^BwK0R{!&``QAf07 z(n4qTf{js=4Bt;mhA~pYg}@&#KssL3gEQRJv$pgq|p_%i7QSgGx7_y6rXpDU`vOP!G5x#}M6>EQ{V z)u4ac+dLy+CyH(sZ>MSh-G+A3EH;|Wv=yRfrxF`gU#B8VR*4WfA zJwNZ=e9wCNQP-e&_gK~MOb+31Uy?#jwAFT)n;pP>M}+E^Zzdby{6`M4Co>083*8aU zsNR10WPRK4W1i+cWAFU6qy;EJq&60vmly-qYD8nN2Id0ih#bwj-&3@xYHq%?hy>SX ziCM3u%T}Az(!^9X#3Y`*aJSWC-)RjO5=>_jg?dqRBgKr*xQt#Vp}OoQdXSXOV^(8R zVEYh8qzj1&em`yn3w+EkVm8xK3&_e^98bc%EQzMd#0kbLkbLy{wjcajF+$(gQjVAMT^@w|xbzti zKVu=Kdn=pE0lb*WI$DdYEkJ&oFnkXh44r_Pj zRQI~?x@}M=xCJ_O7iqRlwp}PZHCzM+VrU-a7-`Z9K9iJwYgO&s91S9Ex1&rPO^jnp znhAb%8o4=7C95>@AO7n41L`M{fLh*8tuAuy!iAI8l)vLL^Se%F3vU&5Yn619`*#z( zP16icHsttjp_SP3vFw%fy2u)^>b4ma%SMEZx>z&SZev;}%K(mm{Ab*-K=6b8mq#xL zpN=fikS>lyScE#;8S8A8Jct(MBBVK|7Rhch2K8&wm(#0#SZq!3p7NROkty|=J^_&o zXJ|M-k$nuwZ0X@Nb25{*AM)HcH9Yp@P8q{=NlU{rtTu44*VK<-(CuY7h76mDP&xNeHz=bKHQ($ae;c@$3ewLtRBN90l-ropGf zA1*U-pVA$4dtue8;$$tY%8~2+N|O@BrV)g6Gnma63mV2&C>Z3>bKE$u!kG^C)y^hX z6p11}=)}xpl|8v?a9-6_;)VE}67qfkvcx(?NE{UTEO-_2DxaHUQ(?0PvA+hmUq!9! 
zG}{@S5^m3RJ|6mKR2k6dX#E~1MR);HM%lt{Utc}@!2QQu1XYDe-(RN{x;`Med`{2K zIB3w9?1RBwAtCc)d@8iH0Tl_so(|LIy*F-7{-g@Zk2Cp_Dag znqgz{m`-nUHb=ph+aAjJvN+}cO+tPge% z2JAG#1X08Z`RsyZmtXaHbN4V#QsVlEDs@90UZLK_x9fuL_6gbd_vu{ zxw0AAx{@&)2a55TQqK0F=80azK77WJ>GGDve_J9aTaYUg*sg*f5f0~WnV!3=>%^(E zCIlD@*%@1BR9}Z|(341f4I_bhDW2BmUIt;Tp|6@nXO)6DHlAIUK9}WYDj)BZH&wwy zem#q=#W)UHtezZ-vgsT7>vS7SJ+tK0#-z89*+y-?7a%eqV-xszE#}%+w=Ez!VSVZm ziXs#}Tgn3)!%^hM*yOh1$ouUZrrGO+F>rZ%8GwT*&o06jdJ*xtp4x zXt`DAM^ufeLBeFN5$?hH@q0;?1(jOE*$vEZMmZ!fT``{yulvU| zp5LSbx=^t73P8#`t(AZSGF+iu;z+GZc>#{ReXC+SznHXD%#{SF`KClt2U#XXhhd(V z!YQIl!+%W4y+2}$SNu~t!8Kp2Cx#)rz&utuw(2)TjP)bFmQB0?V$SByZlTX4EVi02 zH%dTawWatqWQbY5QelaHdx2C}WbY=rZ~n?&k;%OVg4b`rmB(8Y7ZF!qtIRem>^yDc zQ#B!G)qZRJ8GGxLaAb~Ud8YR1{ckkkD^(a{i0o7*|$vQE&kUgVh@2XRdZ8va&H2Ui^|w3=36zIiqT}`RH^)d;l=*x`AeuTW>V6bCEsho%ojkjAl5IKosqVc7q`2RIHaA!q>BA=ZG$AjcNc&M~J!7=`Smw7;PNu3kT&_N}Ab=)+f%TV&;ymrH6-_IIZl<{mw ze%;j2<^s=<@lAAiWfdXOT=Rt)`iBA5Y8&}jcq;~#R*DCrjw z%U?$$Q%%vT%%ERM<>W?8+CzV12lr!7my9+yzNv@yWPegL*>~X4ixs0?z-%g+_dP18!UYbj<9ip(|6v953*rC|Y#R++=BqodR zz=+hdm7~eZIMBe^bCLdO73+Fejo7PZI~(QUSDt~Kj*8sq={ed*?1CV&w`_MF+cOPY zCh$w`3XoDw)-IpzQMr!MIgdB<@~y_MfdpzLQmD!ired>;(B525>%9PPO4yRG|Nd0@ z#Zrczd8HtsO)!`N32k(EYYpi^$0a6pQX%o)%*gl?cpfb~$07W;g6kWTd`nTCn0=wN zo{#`bR~TcwhlL977}YuGxOLMnGgnoFTjQ}$F}X1CGo2#BHJK>CLliQK+)-Ek`1#Nw z-VHhi=}F4i{c+=+gKZ;Hb}=2|!x9Hu)VR2QW0IfR{I6%7;tj0cRq@Y*<`SoIG2l@$ zgg*fU$5mNuEGtVe@M)ot@mlC7?EFIKU|tqX&j#8V@^2v>^j<4mRUgi>NAm!k1@P0FL(iFf zk-thO6>)yGEFoZKMh%gsXNlr+_`7iRl_c8Iw$~?-Xj4{BikuSkM{a}+cz*Jr!o~|x zTx}7_3@pU~-;#YWm~-?vt*Y{2Gi<+U*)XW0bzy~U0&Hh+a+{=eCeFzfQIYbwfft0Z z9D`ET$wt3zQ%ye^?lqE_)t#(&D0+*Oy5`1ulh{>8x!`{0q@BYChvUaQR}?b(O>*Yy zdh3~FsK$4mp_agDqy+Mi!R}l23%OM0<7g3yaq799ee14}5YEyRyU~Gg5jGTQuC))w zvktbwb+hiTX=2Ka=ruVfFBxPHW|_5Jzis=_e{~;c$hTQ$WZ8mS-MeIAAgM#ORtdjG zvFSQSTZfJjEq`HFe-*B;Ao+VH{#)D6UIoioHDC9V7G{Na94A2hhU`CYW>o}wx`?$Du9KtieLq-=DX+0FXd*}|bs$47Rbj=ZaRn&!^dR@@1S zk-KQh49ET-1yptD)Ug@Csby=Wyd>6w-zeQCzDXyTeLuCam@O(QJ50aoNGu*gEPqa| 
zA{YgTV{=_x^W}E(yTk~>Ci7MmU6eNJ{qA)oW8~AMV>!B>Gmf7+wUuNP7dw*~iQ>Mv zdDeOCC^BWORU@o!+S9HKjqQgK9?$R^<9_|dM?0Ya!QNu}X8gesFN2mDR!cblyHWpd zL%U}G!hY`f?Ce@I1uPksk+Cb|fVfg@Z7H*&u3(P1dPwO7Dm$AOPA0AB@2Q+PvGWL7 zbpr;f-beytxrYW9CADO`HanfH)A`Cz2;&5ohr9JBMH3iyIElcq!q$D&potzRGjV$6 z$#jgo`CVJ5^zVhK$tA8w?Vzd^BEtFR$Wb-O4HX8zgIaE2?cn7ZQ;DFvLXikvJMt--e_og_`8 z4!jtHigN1I$yrg(3uyd7+6!($E*WSD9**{h^zRL4-I1Y0_CM?>1)5l^n9Ot+a?SHT zv;Y3^2~mVZB6eE?Q;kc~GvxPsvXbj?g&)NNjUTM@@>YFLHx7H%Cpla6+{&`Y6XVcu zuZ}9p+4aurzyDFdiFaMRKP(tzQ!m}huYzr~^i6(0{zQ%(jQ#7P;EDEkw~X!_WQLQ3 z-o{h&c5i{vAekPHnY|^j4ssHp0q5ZsFYJlkBYNx^Z8{s0nQKG|Ed=OnW#@-?4}HHj zX0ABCX(;VzExPJFn)bNKL*HI1t*VkhZ&Sa@_`#&OCFv>{CLeA;n6h(qSs-NQ%tFh9 z6_^(Lm8mogH@7sXL*)|tlO!TNr^4cQ=f+0D~PokooD%(1O@GKsnkh%*X6e=fuCy&q%^q2AEw30IJywOsn%l+66&}*jE5$T#HK;S7-x2yuZxuGjkfv` zH*?#;Y0xs8&i+d5=7q;vW8v4SwV&N}^@l$3jjCFYY)nC%3^YR)DDCEYcz83|p=So9 z=ollqcPf!=e#Hj7P9k(~ZGoS%Tu7mwsj~aJ7cyoyr^+wD@2Q`sHK^A%)9)HQy!ztB zqvNRTojVb%hn@F&oR`a;nN0gl7De8BCC`k3S>b7gYY$dfd#`EV&y`eEEL7Dl6_p>f zQ0dM!C&T-qChmWS`P#O^_9W4Fv8V{o{b8d|30GZ?Ha-7dls+bjH0xBPJX<>BM+bh2 zl~@@|k$SANioIJUY}hWS%+jB*6m`U|zzIx7+aNgfQ7$@*%Tgn>-PZacqj!AfUa7Ep zJ)v~ETtR)>*80@bL3ry*la7sMz5lE0K(Z(5{*zDWN3mGApa6`9(=zhaj#;(er?iKj z^M|H6s-i3TtpwDoI|=W5&zUB<)?%>lQIgAAuAuDf?Wf?L@qSP zACh@Z9{LI8Q9n2;sLpXIF^st0qK$@n?yOoeASV$-qO*O8`<4v4#dICfkJ+wL9c0l% zKAB@SvsVx@r6v@G=W2P-{B-29G|wEMC>n8k!W26)1}odMl`_@`B)bT@!}idWIrWaz z3NSLFtrrarhV5Ec-SSvzIo1gktkRW}2oL9RttRfUYW{#FN+^mlcfvMc|7HOK^;lrgjBFELKoxspQb7>X zB*U3h(hP5C)SRznLr`hhc>9oZ<=bR&BINI|KHst~pkJlCUc+%q(S*z?;p>oaI@2T0 zGuqEbR#B5IOsDu`Of=nAv_lCAY98_@a-tSfCP_rM;feo2V0C@$YQODW3(Kxu2IpQA z^SE|sA{Bc|Vu5uZ>SJ_<$TsYL4R5^=zv6Wv`DeLGbov<){Q=Rlzzwd1&X7Zos8xQ) zb?%Znq8Gpw21mF|U;dF)Qymer27W~sMj8wa=9o2krXmgG>GeVA~;t z;QnmwY4B6ck^X%gy<&m<8pI5htLU}J)+*^EQCjGsh*uj!8mR}p0BoOw*&#@|L3gnjdBN@0v1k*${SwJ)W1;}^-VtK_i#oieicWJTsp;WDgDAl}we^MTNozUh+ zq&2Dql7n26WVa#?N}9X42y;Y540`cRr&|~L=6D5-Gd7?mi&1@JedF^)-B#kqyF^>I 
zW?00XS3a;L#msX?nXF!$%wF7LF(#Z%%r??AdVn*++T=1JJ|{ex0Ory^55H8QrpZZ#ATWs4=3qRzGVk?-cH?za|6#nttN%>=d!ky=y?r`A&9J@y1xer$z@JU;0M;q+-%#dH?3zoUedAoPaRerqOi<0GPpwO z&MWc}+CPc@y`G)U`7X;*j(%Wg&|VRKZkJ76Nv-MYebpekNk zDKaEX6ce*#ryhYUE{EH$N)z7>G)_JWnzU7#Dn~ZNkK3k-D{?xRYpAp3e5>gjY2}12 z8~~jk?{lWFtzu6%){ayhQ^{cUFQ&Xc`(9ROy<91e$&{uK@v4(xmF{2hi}i+e;kZA< ziP}n;t5y7*ZZ@KZnidwUZh}#+qVc6CLyWO6*o3HV)Z>CJE)abXv_r*r8T;40Gnz)4 zEE2aWi~Nq^EoO#6#v2j3#H!K}X+k)8%ED2T{!#Ux2rAg*rFHUXM_A;EA3W0qg*j!0K0H8jFu~!#NVI0P`yVz z*f40cR*Zm`f%6NH1HKqZ$nW|#Q*4J6$*R_1`Wf25Q~>AN{Kq-?<7Ps9t_5ue`3}}I zo#RtaM4^p_^qdOwn0*Ev2r&{$5g^zjGSQ%L$1W|`RG|wX#662MeB=8??psH3>Ug;5 zzwiL9mXarYtM=$stKdKQzHA){o1rA+i4(!I*_$?BMP3Du_ryhUU<-l`8;HiF4eLG zq|~%PYuCdAc%D8@3#*JBk$;nrC4S(P*I24v&?BwIhsLs>#u4iB4X^(oc6b5u5rp}} zG|DP2>5D~At!-ZbZ)%EzL8ie_>_V1h+OaWp*L>cc{qhWUJr7OsGOSV^cY7D2o82$g z1{ILQLpe{^FKN=OdaTki1fx^JKHV$Li(w2-C~sJ(g%ccW(djf z^TQua0vdEmw`U=iYPVZ3d(y$2rzkopvf<2JS2?(54KPe*3l=(HLh3+1s$>~bpFaJ) zNi|)K&Y^)FS(;z%Ce>YtA0zoMiE@8IRC)F=$JuzJt19}B|(G-}p0@yrBy)$8BPYU$Pyhf+Oh}dQiX#~Cq zp&E*(CQJJ494zItJW7R4kG(o*Gj*~RWi*1GJ) zK$31b*M^2F=S%g;Pph5z@Tv^FvJ}zGb-Z&WRbd*N_UG3Ss73Dxj+u4)IpL|>F}kD| zVttRa6T7>we;)oS!t*FYZ+fdPu#HaXqz6+uh7hH55ziLiF^NyrxNS7F>+cI?7D`K*K|C`&N)j^UNChfhK1bg0EkxTSf_VU#_t_gsFJAz6H( zp()~wI>o+g`rFFbf%c>1n!-zppzLZpwowZKqu1MpyLg#-Q=!g?ce@&RlVIY+5 zM+rk>UXq3KRcf$TvL=FZ!fO1=s{0!+fb3QOQr%i6XH$v2K@~Uh{z%fW$n@oMc2;KD z8+sPLfl2hnbielt*S=&L-eM!gkKo$TGw`QHXQ5r0Njw9GR@UOh+1b)Vg&uG2Np zyf5rg#W9rIpR~i`DTQQRe3!N<$eOni%0OB*MxON46+lF$Gd|gU{Wzc z@RaEm@1zP+?je|7Xe(1E^ZSWOMp`v=R7C6A-gYpXCTVT4%-*pL>nh^YJOZ=c`;Ul& zPD4n3pTXfW#z`qBxikj3)j1JDLyN8aSBIsddNQ2W1p^j)1;%If>z0}IC1uU$%TZ~5 zIKyy=?T#@Jx?;sEi z;_i@Y@?>f=ANniMwH&x@Aa{M*c0JgLGH&*LY;Qg$8NzSjd=O5ua3?4Wke+dO?O^U>i}Pft{6NZ) zFsw3UW=VmairLSg%f#*=cWTUp_E zWAl}r=bcV??A~Ef$Q3CZyCq6zbb-uDm)Lu^5)^`!PZk>1V!5oXM+hc^bRL@GCYpD1 zt687E+>aP03?8X}%Jdd}d-_?F$TcKmy(%W*{IK}o!jEa@MDYm!I!A{@$b<;Oo;fSJ zdw~mG-!J&Rgz#2>F%A;44(OiFyO{2U?m=tSA2&{wkjcuqXwoU55@%7B`pNWQ4jIE8 
z$*e#@MoH-F8kY*eF%*vc-Gz)?^3SQd1+r4fLNPghf@V+GgKUr9Yp!hR7`+7n?`oU$ zC+j#Z^Ekw@00uQf={aSCb6FCfWyCv?2xt%KS2GJdOTu(lRbHCdC1)3V-EG>7`#XPX z4g@YLI(nq>Y3+8JlPl}O05r|3t)D(08hq$rNxE*zvWJ)0bn~6+u+;rlq30E3mHQ2X zcd1OKpMkSa^p((Swv}@@wdfVdpNvm(W+CP;X1B|nCSX?kzeMf3Kp#X zZT{ji{UFV65g>X=NhXjaJ6D|Dk(!prMaWjwl#V!L~s8^W*S}!j|b5|g^4+l z{k!|L^->4~&0msh)1Byc=4+Wom5^%9jkRBZD2MY0ts1>K&fhl(dNWXO#)Sm+IqRdg z4d!T-de!r2-D-}lNCNv+p9kNqg3!xN2qQ^|IKC)L(Z=qH818y=EYkNh2B>_dCgX>? zGUC!+y5Z)8?aQP3#3jnpv%5uKW94-1OdEb2W`4Xef>onWa1<(eQLj6-%&+I1QL*og z^q`DyEP>;1)0gNlOLCC=q(7#I*jfQGDm9ma{DSMNaM#`ogj|L=D%}*`h~!O5hB5YQ ztitf3m&=^(UN$zGdt+iuDto|cif@-vwh43Qede`t##-~$QgmLolmlil-$`G;SyIOL zxwId-#8%)Dk@z0V90B(9(!xd`jnigZ7%<|uN2qRe#=@!g?%Zc_>^>>O-9LSF1F^9Q ztHiF$Efj6 zSQ(s^spZ`=6MJ25VJ<818AsAds15~F=A#B%eL76CebgC*WIxG*eLiOqglL8w-bBWZ zdBn=7EkNddOOCYLJ!FRq*3d9kCva-Noi5bKcJ}ofA5z@y2VE`~mEf`Xlc9u3{1Ro^ zc`92SrX=T&zbNfp_=aqULF|)k-qb0>gh47Jh(8hw8r>N^=UB#O^=gLT`^Uj$2@I6{ z_CsgWov2c?2zS)Oo7H49(#$RQ5+S~S>PVv>vJUYJMX=7+rabkNn1RbfDp@*YjJXK6 zF<()+sxbWf*P`!!H~?J3&icLG%mz^;qrPx3*i2K(lw|)WKiAohj;xT(1nQMdVTCjsR^FUtav|iZ&RN7(L)3hb zgLw!iFucJ6;Z7n02qQ+N=8CG|W?@Sj_3$LkPHLaL=RcfWaWsz)M_vh7h}>ud7e%4N~8H{%x}s#F=20TLa@(QV>Ct<9V4e`qtL-L zUr#dSbiQ{sNt3(0KipB}%L0KO#5I*ZG6WJED%hZK&A3sX6~@%&zKRXv_@Vj@rIq}m z3jQBxXbdXu>U;#IdF$h7k!%yF`+fT)GE@=8>cMdxgTp3Lq+QR>8JWMOu8rtxZ>=1U z-njd5yYVCsWSY^fwh~t~(GMgfmReP@-H8m7!~HJ0?Sf5VVyY054X|WhQ4aEyZm^K? 
zzDXfuRQerGTsFW%B;ZFYln(-m%u(# zK*Fb;55k#cWM4OfF&Z(7I==`L;U&osGS6H%c6pvoQ)@if9f`|f5;ug z<2^7Fkc_YRaIj*OLbf1;Iagw6H+(6^n$9Ir;J?5pg6t&-&An!M7qZ3Xs>S8{wXz$% zny&!2V_ON2Hl$PL*Z%&j0+1z32P0WL5&P{^ z#VNYg7)9!{I3r2>cfXv%@I=oJ6()*CrWFnr+|AZS_P;yCcM$01Qqs1UcX;*vQ{jAc z3X|Gj2|v?3-%r8?N}kI(U66=HD#z8Ey$C6)*B6Z-zI|BpYr!5W`YaBFT9^x6PuHA& zkuF;KQ=@M)-P-fMa{_aGBgIb*g60Xi?TOIG*H9qdfdjp?Ybg8$=)fEvmpy~SneAyb zn|D;pQIFSNuF;zB>MBu6+9QCyAV{Ho+;xv`zoXR8H|9-LQu#%h-^-iduF~AXkTxq^ zB*-;bf11Dg$(+_Uq($+|-IE^S^ON0{^A`g|YHF_UShvwBS~NE9mZYF(Cj^g;d77E+ z1cfs~fnZ*Xs|_2@l!{Xd_&0WJ%t=#QujBpuE(!wYm2b)G+k5%XY|=i7^5Ud9fv8%K zHDa>@InovIO?5{;a^VYgMC$X;r|9D0mS0)sSP@co?~XR;<;2kSSxjBpCod!)k_yoR zjBA90gyJ-b>_}qpi}dSzG7&?AcZn75D9?+qeD`fjgU@hP>?QZkP;Xj3XCoXqeHqxL}?i%qv1ot zz|q|?fBXLazW2WFcJ4XPd7kGTrx)fF!L5F}n)yXa_5`e&RFRJtP74)We%|ln=O&l zUaI=4FXHFLWWIh&FW}Bk`YQg=t(dV;@(P&mGdZM{AO`C#ndqqR(HKl@Z~d6|ZMWcA zrN8n=Drp(4#cjQvfJBBEgr`wKPiIf zrXm!)3udJ@n4#{%`PGA6W%?1DEZ-T#yx=5Wbz2g|Ao0byvTiu!4AcX-4?R4K|8Vt(3t$D1okhe?kaQP@ken;Jo zux#-P2oCzwJ0v2{Lkn_BZ&au3EXd7FEcd(k`2)_-*Qo6v1#**kom8}E)Eh%VZMPXV zs+xjS=(8*Go^W_96=F33AGBs*WJn}=J;Pvrm)`oMQTx591jg&k535Rqp+{GX#-=`| zeABy6dGWqUbr9(B2BACkV(Zs$#`)00Z$UR5_Hh}Q_hRf6E0M{b>U3LuZDv}H5&i23 z&RD7`=KObk8oEpKoH;5u)Xm?n!ANmZ-PR~tQMG?BI*f){IKk>4j)Vy4vVK+|L%o)@ z57+^gj|yov*M=yM$rS&z3(OrcE~epAm-KE)XWETuvx&3meoOIw!v|@=QcRRa9@S`i zD`ZVr*cJB=P;$XidJb&eoU(5*k2($cRX(}*>6A@6L&l!BJ8)rCmf-%TB~j5q-5}Pb zStaU6R8Z)7XzxA~Z{XR!gmDUQH$O`2T@|&~P%N)TcixCsrW6+(#^k-F)iw#xH!?Tj zf5WPNFPwJ&@CH%jGJo+)_s7GZ9l&P;Sd#X@pr4?0}Zc6H7GVfkbS&xg|l+HXi2pLy#W{fj%GM%)cl(5Pi zWZyv7=sah)=Od9Z!V%7&mXFw`<38v%(&wJ5{cQJJ?WKU6pU!%2lxAR+%UJi*+S0H& zGP^y#NH|vY?TobI#}5%1V_%-OR&WZ+gsBBRwt8395F&UWSnls~0gkW5L2az7E}8th z!!{4Bxkd1!pFU{{9KtaJ`$>CQi7m4J45^(_vhtd2Dt+92c&`(=?Ot>C#Vku{pUGe8 z;EAQoOUJg_(zvzyMv##3K3>u|OV=qpJYQijGsi&TFm0st7eI*b!2mHqUg)v;4K0^V zZ_#_$a$cSPyYxT!Cbx1n8j8gC;_TwnQXIHX#)>UXWIBt;pq!_CIel27Rz5T+|N5^s zNFzjbD}%&bEi}tzhnTsTe7im$Z*-G@h|xP#7irj)PC&0IXN*vxd_WDke|r6u<)gt* 
zF#KCa#bVRh!AR=L)Sh_^{MYaILh@>qik>2yMi1X`aUAC-dfU@5uj5{u7Q)gvf)}Cs zx!-J2)Nl3!l=9nV;rvO`N?j?v{n)efc$@cZg|B43WW452?wIW2RvnezwYMo>yRxxo zPfj((FLFkYsCf2hSaN|CaxZ49b?-Ul>gGDNU&hJQZI{+)wsV&vwSyj82K_a3!p+N4 z)*@{{!^N427~T1&k*R&5 zXAYm_ezQG$8kB;W;)q~Ydp$r<)4{>1muyTBan?|~@sZNUE546|DmjK{R_Qzu(+>XS zr|Pu#DQof%>j#M_)H}`m`!2>y_pHI&-w0as<+A+Gw4 zOGO;4PYWSgKb0sR>y#byXq`bE!h+IhhY;rNS|#s&w(HzfB>_$U0Fplocu=w@4<8bv z(5KS`y;ng{P5|&0HS+)xM{paj28K9|d|1Ajc&S@=U%8|?*UatrH>*_pK^)b6<$JM4 zl26U4JR+=bYXN{z9pS{MYn~HFpR)u>>5{iTwn}u)Q>97z&p|#bAj~@|{hr?o9oUWC zK5FH0$)ujZ^^+^jQ%CssV^fss4WiCThyXBDQ=O`BuQga(*OkhA{j@la5r#JrOT*)e&OB~1OirG%46AP`aY%%C{e>`6vX*nscX`KFAgY zW;-{kILEW@t;TMx%4%PfkAs&EpyjCLXYC#Pq%J+pe*_Gukzm6hwJY9Hn!bD$T7)E% zgRSFnwC)$dN3j~I$p=QBe&Yw|1Gaj^3w?!LvuoArD*qur)O+NM(s_$W2PWR6?G`yr zrLSoglqw{wCCNcegH30(W^+dm)px_zzQaA9W!qZ|rFPM>KT`{FS$=H6W@JNF_CVo= zNPy09{P1gT@IQdT;qIV;s)4h>K+DTdTkn7GFxCsV&9{qo5iKN;oo~Z~G)j(QQuwuo z^hGv3SB{@MH`^D6q~Go6kqW;1D{*%7Blm1YXHxfb(|Xn7EJy0^d&oB@(;5}H&bSo3 zZJ9|;RHVY90*~KY9&;r7F!-9o6fRrB|B0*VdUp9nrJ|22*8Jcc;Qz*RMUu6n&_xdk zsar5NR~xTVav+~5${4;iSLQ#08VF0PiHjT3I6N@@&|9%S&S{ksrQ>M|b>n9^AXVR< z0qy@SRqE6wy?Mo4yzD*a%+^xAmBSKayAb5vM9~fOfIFgV-5sg=SvCfulA+x!cSY_q z&Xs21cTO(lJ_5wWw*hCp4ESNt01FnyxtCRSWGVzrHO1ZYlGAOx=F@(RD zW{r)VsI_d$d3B=86HCxY>~jz*sq`P=`VFOGA4R+FxtmOtFgP z5{p3A**es>CXb)Vu``cB*TG}|;UpV5r*XO!d8f55zhJemr(fDtmM_&@>=~DEONinZ zJdI;)dfYLYA1Xbww&)R$KITsOBE(tzgPYfP%(vRbvs=VLfN9ep!GHxz{xYU8kla{@U1B*xiA~`)Hu=>A3sy6JBs$kA>a07Q3Ek6DRsl zVdPRM2skM1nDD=@%OPIBaA^hT-n5rU&XDlJcC>lP0pVSOSJ&~BiWSE?NX(+hRfi}4R+GZ(+xk#Xr1&jGx;MC&Wr&B7zERY@1Y#LuhNJ0X*?`+;qsK0FDxqOlDBdd z$ER6D|D$RXFdjU+piX``j=9CvqO&hy2#F_<<}3xZM#JYk@z=MGx=gLr&V$B%_4B#( z_jo_s#RBL}`UART6z%rhRG%SQ=6QCu?qQVRYg1`0k#4+ zXUyxbesgDb{3YP#1gCAe?hWbt)~cgjZyRny5jW87@dPl>?VrOE{(8tiz+^+t;-9Zf zeODsTSs4B4@$I>*W*=S&f5egEKLB}!>iLJ54_%fdkQG_4aT3!+WHs0j`R>Z6XeXZY z>#f2)hv+_I!qvd>c$wkRJE0gJ^=Q>Qv8~TS8Rs(mq-bY6`MbO8Q`FrLk(`m=O2e1Y z84^z7f@|njko+H;wM8KddVitoQ^f7@7>UmdlWlVSr+gGjcVNE=9ga*sarT8~QOzXC 
zFetx-^ozo{fR{fE1()JTB<$=M+aYbu0SHwfd|Di^i+l8_TCmu;j-lS|kD}b~ zZz(qo2%>(ksZ14l9uoMB$Ui`hw6->w_leVuAGXu@Z5BH*`ZmQbFl{dz0Lce~j>uS-s{q})7It^TR?UeVq7zbK& zCIm0KJ=?&QkV(Jt76L9VmQ%mGV)j@FaH2H9*1I3*aMPx1#81+*ZbI7X(Mp|KVqX)j1N z?xq(LwxPD9Um9*Ll}Ybw3WvT55jZEOL}KVIvz#S-{)Qg4tAFYJJRj1@R^h%fu^1t~ z9tEO1hIyn+VsfXsM!NkbApV~+>1XP1f!Opw|v2> zP%eF07%kiwJHt4DMjj1xGke{hS^F?xMuGpkN_C&JkoIDB+(p`r`x-Xz4*r~&1(AwtDt0~JO@`P%)CNN7aO`Yc`!uuI%Rc30)0BA5`6Mr*b*ARDr{@Yip{)8U&uS7 zBa02w!u86=AcvT;eNNy^@Y={`o-O*UyqSZ%TroQGNWDR5>Hg?033|vR;VfjPin@rN zuihnS9nT+3QFC93M-$@hn8Cvy$`grLg(I-{v~Dl6`LDYigi3DY{;Oi{E>y8b-4@$e zbP&tBBVh~0`xsG2Fti!s@@w_w4bj;?O~geDb!!G|DIJm}0jwfC&S=b15-Duy*_53I z)NFTr&s`|^U0ii#4h14gz8)MrMqO@E$HDoV4tprRSsv3C-9OwTpKmpQF*P zj>%87o?BRQ6AwM-pA*B;-Vzod31hu`JL+x?X5RY$$!m_^Z7z|8>vsS zydXHO8d6md_&J^7?F7-URgJ+H1r$-5dN<1d%^M>uGeXG4DlNp7DRWhbMXv=?ePED= zogWw-^CkhiZ1A*JSF=Dzy5Gki8os2ZMN$SJNb}V{z~_WeX+_`h8x&l^<=0x~4dg!` z_T_R9lfDA}sGm7DqTSDb+ComZ+XMdr{)SvB(SM3I!M!fmXYeAIcZOUA=bhpU{}7FX*Nrp=Tl-Z#6I#15nXh6=c6Qy9;uSP~9(T8{BRc7K3LUvUigMISQavkf zUP~VmD_{o;`*kDtMD8dSitb*m-Hb@MGzE|f7RklGF$~dQ0w{Y;6{DC2yFWmVDGVN? zxj;Bil^|&Uy0>3Q;gDF&r~)s3d$-3~Rx_Pz2lhC(J)Ip#8D*4@FDDWoEybM7m!e}L>jIUY!#Vs(?VwqE;0^ssv=Wy^aGrq z1bp;M(a4MGk4TOi?f))8sU%{gO-k%;)T|CHg>=?89SWxyqDV{$*E(KFFXmS1;;TgX z!WPO#!)|3hH;^nY!GdJqbu>}FCkX*qHb9Pc-3y5Q+Ib>OZ)~k6TcF+%b}yBG^$UwM zxGmMM;{7AyO6ay4^u^E`hGY&t=M~P*vE_B(`39XQl5()7^16SqZ;Jg*t1dtT)H6wX zpEi(^LoDJss`|acHeVE#9}&vw&iT$!^7|Gzg-Rb+EzyKI-mv~>V47+?(L!NvY%{^x z>L;>r^hU;>eiQ(KQ*>>WpuOmzH=3Z~eTMAn;5+x%aZ+JT+XSs+g5@Z4ZX$R3_ey-) zjMOe!=Iklye3D9F0?N2YR#Z>zR`I4Ie+z$7SBHN+yHznyj;7^dzO%l!-HNrjeB8$G zvea(!x{38tSFVa|k%Hjnr)?5|S-LnDAwddne5Nl2rS$dZc|w7!fNq?Feke&e85tSj z*+op3SYvJ826khyK|hkX5|lsM2i~tXNxBvfk;iVSI*sjr$!mEm6WJc{b`d8d_L<^KpezGI9j9wK6Z5Pyv5J;XeOu(h@qJU0D z`|tP2$QkepMX#@<`Jzn2-4C@Rl~D7Bqz~*oulv{R@4VHB(Aa-~RDB}u! 
zzdSvk);S7<<=eFTrhAkUHYNI-t^w5pI=y{4+4Ayul7E0_Tvo)mv>vxfe7S#4;+czk z&IWuq30{~&o4Oh0+EJ0nakla3@505$<|I)`wF5mGy@JgdN*=zSsf4LLhfv%NTe#}I zUbp)Fg;foedHK^6WJ5mh8_imO!59Fgx)dGiaOSJ9hQ=V^&E=)j%x|v`>q7eV3EhNQ zZ519ibXj@_QtVbAyx0-|+VUmg$go8W7mkiGSrr@4KNi#J0VL> zYYtsLfG}dPf!qH8Q`6PtAE4>NEVFe#27SzGz?6OTg4@m=u>V2sQD&-?>i+QxR%4S}nc0jx-nD>!{0%4yH;SRj zecVctQRMT3A}a=>qlRY&UYF{h=2A__cUjIDW|6+cw#-WqHW< zt?xtcI2)^;uKI|P2TvtxH^^nvu!!9;2O?98;+}i)qY(uH*=x{kQl=Ar#g+UJ+}%pI zEsWUN;GHJz5 z_Gxph(u#alHmO{stjILCnKE_HxBS;D5_(NKx1y9^1(^hxBS@>B!vqbeh>Hb(4z$4- zc^itvlm`UdE>b556RpCb9yiz}A)$OQ3CJr$)?2+BvN@R{CKmi;F|5lK>@z7V7EHV7 zDWoQw(XWPsso&iqb=3mQyTz*4IRsg{*VI9gSHSRmX}_p_5~n384uyW z^s~k|J~KDN_9*1hvL?z;Qm+|*-=U2cvF|uC7>9wB1dUty)4N(pX2s50F>+nySXDz z>xp(XW)#UdZN~|}eHLE7qZ z)A^^$e*Q_1w^ie&b)<}22S4rVn zuoy1C^b1#gGvW9~+P+WZHzz5E%rJstz~0K|YUk$^{d;bsK9!R6XTk#c-hm5UgGWC9 z06K-9ESAzrqnm7r`6euE37G<`c-K-$CT1%)!em2vHE&zc_}`mIiDRinX>h?!qR!H! z)1L~cU@A+?ALA!Tg7Vf>d)G`Nzt`Q2loF2WYF+X1S+9+rRkXLCI!zfrbnVzy7@fKQ zq0*9o4&YEcDe-DeImWHhq_(~3gfvoK=&6_=-%o^BK3MT#@X6c1UeDX)kbQF`PxbRV)lt!a- zTVR#eqPB`%xxC);iF-Y}n7!1Z<6RoNkCgRoQc?}+TpLA&J@h(;-3JBtJY}y{+rVJYr`qSoA00xO$|5C;=%rp_@>>uTMg*Onla^h~*s(lG|2ZZ8k z1d+JTg-24X&Pykg8dZwhUNfnJK+bI)+w8Dy{PC{ zct->RVg`_MqP;WH(ICEVLjr;@BFNcxFLA(iJ_jHggOogl&7Cz7yQfUb+C@-0V(5Uk zXPb&ogV}kZOO-5FAmE}SF4&^VM#e6qNDTYsKBr-D+nOMZIpF?AL7`C9+)0x$nhQ+4 z0o}gFQ6d-0M+8aG9rZR>yu||xCs0P{c0S^zH9A5bx{B!t!jbf1pRNjsS+*a~GRlQ!B<;E{ zik?9jVb_@Q5jN9ZW>eUrJJBf&a(TVoORNEV5oj?)FhA_g}$UZ_@g=J93 znqP7?F-Z~x4;<(xKAjZ+YaE@@@H*K*QnsvaOQbd9PH}`2=W_{@J@DdW^B<=C^q|>A z)t6I74~p?r%VLM%!W=OaERHCP@?Obc>8Z9;g7hZ0H~H;yb@Zl9!amd-V%jL_Cf^l* zT%7m3!PNP+dT(1)^So#)-K9mx7uq~qw670Cf*1j!c{Sab6ikh~On9$$YPCj1N#wg? 
z^M)oq+12zk4-e)IKf|H;5cxKJy$uPl!ZlN1px)Dfq~q@Jv`qGtD0)H)6r)vW5M{yG zSe*$!-cW)S^*;dg5~(RBgEJ7sgGHg}?m{7QB!;rS^(OQKT*>4RT;l|xn@g=CaHpt- zt^E|CouiJoTpS}Jh&qH_Gyfu5Ymu$|^}n5UW}k8YJRVxm2yB?S^I5vDWBq~ zr5zO7Ics9<{nZFuau-Il;e!ngQqU`|z^>~Ff@GN=5~Ynh%n+6Mes`dEhw47YMLNtR z4Y+?8Yg}GZ~w;J=rUh@ zXSPy|ykU@AtpV{SxK!R4vq%ZdZ)rHisHNoauhmNZ&e`gz=8*JOuWKg=xQHEJ9&JDS z2iUH2-(sj~tJyK!O@Y*|B`9aN7L-GZWTOFT3F4ASOSkwwvR9khc<{5!&IBtP#aBH~ ztW{ThFx#Xzu=$2d@|?%h4H|T3N9L~sV~fVK6g_nTn}%2HXu9L+gXi&7e>YzPo@X_? z9+`=HQ5$jUGv=PT1e#1dy79|vE5Fc5DfjCr5Ohx8Q6U`>H;pmopQs*8i+y3ywAShf z!%yP$G~DDC6rJ_Zv22a*575Fgz5FX~WEz2GvKfHU+ond+_fv48i6By7=_)ui32 zbP|TCWzIaI!Bp7dhk;H}Ln>2Q&bKw)ZjzrE5b!)#vjW~QeneOrtfBJ}A8UNhCS2qH zV{Lyp=tI)fOv}(rrG@i*!)$xr0SN*&%HNM3_jyLL!Z20TTqI6vDK8Bn>2pk5W;eu_ zu2nf-zNx*S%R@>-s@+3U>gHx|l{DIY)xQ0$yLU)Yyh$jj^g1$XXa^+)6&e0c1a)7X zMOj}ZY!vl%BIf$GbBy|D-zLQL0(>SNF^qQ{e~lcvSi_bM=>7p@??RVIOq;o{Myl}$ zxebu*M2IfnO(>*QH~&dedp{kHP%p3YpaIj3U3%t4X8p+8r+f1|$n%l{7s?T8Bb4(! z`9oG8FUdBne_|nLTRw?vYYtt?P%rzd5yWM_&jg5CeQh>j&x6zf>XN1?`jz4;2{ z4mce^LsG7;uuJ^6N{TsITm({dEXOIw_nl14ZiVQ-nc^7{6|CPEh5IQ`cb$_Zh9pjk zlqY!6{ej2^ZxRglm#`jXHgdUQ&igL^`MfEvOVtr%^J`5^ys1D&%{B>I`9D-33v(|G z_{FN)-%~*3iT9Hh6n;ET(=7IaKC1Maw@>t(tSuV*OY%+X*ilrvdOI;OPjB!l;_wue z4;NUqMfeK&OOlZXj>_Lf=t^xn#zqSZS3LeM%`8-6dkZ_f-gbE3)8Y5jKuMjy!>0lEqDrT^l} zU5Bl#D8dgi!}=n5lRQ94Jvxa5PcJO={>Qr=YKzKE^0CRwj+Jj5i^0Ko+=yzPl2IWj~G*if#>9cKUA`y;jd2Z~_=a zXCh_)*EE*APV`U2t5 zGR|OJkg%17l7NUawetucGUes@*v0I7?y>Oihf0}r8wfvDl%m%pUXg0GWc3s5qFDkT zzuq9dbyLhE*aC#^dV3UX1xZ(Qa%*4E%b=YwlplGBZ%1*6!X+6yqOe@0FxJK%C3%}5BT*a+4z~&e5DjCJK>yuBgnAwi$}}!j>^*tuQm;goHf-~X7^G~ezPua? 
zxMrtFmTrsb*Fs1%4IeHQmA!DZ<*v?5Ky+3wbXFdwIy(vtGQ~V-GYfMYB_=>X2eC`8 z0xrYpZj6$z4cAL)KhN6x8%2$=#d~qM^pT}=q`dR7rKf)$8Ge5=k`uQlUrN*o|L|5$ ziPD;&xCob~=L@KHYG3fCH;V;Ebe9J+tiz!=R?)E@~D<#~%( zfus8&a)SMMDzU>r38!}{(&QMuR`0dZag@h%wchDb;!I__`n?ELxuKoL$$3|!u-khGih+R-@ z*KDi4IP~Z>-eY?Y^s#Y{0v9GUr#2;*&SfYYlZvQcV3)kF4mbp9;w)JrM4tz#q}oTZ za?@DTV~+9uy9{kG8mk>!FxHmUe)b=g$~nytf|PiG4~~_++BJ#tYPRyhk5&K6#xe9D zuuaSa92p{&wY}o~573-@!&%T`oheOejF?Q7(E*s{SkS43`zIDu4w6x{)_^&VA3kSd z8PYIZlCP*(>Rsj_4|`*JMs=@_Z33}mDug|DTAFnxSX84sMS!K(Z+)e1dxauVK}Mfd zs@%@aXE^H3-szSyqHOS=roI#Ydfjtt)@FUYny|sas;u}!E>wG$P(N8B7CL_;Ub;YD zPNT0h3m;B< z!rRLk#;8a0B7!wg179#^YS(z&-Qt%{(OT`>>Wmo^RQsgf{@%dAqJL=M?zp5){nb+* z;@5&E4s~kCT*I~`s$Lgh+U?J`;Ka9`8t|6lQXA)U+K1fh78|Qf#g9@+*l`%E4Wd!NdyeZG;>6o>BxU@ZgOP+0}ZkgUp`v{;|*nnX;E78tjtztek3? zOJ=FN{GvO12uBchEKp^9ZZ3os28^6XC0X7 zRH6(c@`-(f{Z0=KcD(!H^J&<(hlIk34UG-&_ zwazmQl? zt4&s?VRI-;+pBoj)!XAwId6aa?D+jWJexJJRvF2tl{V5;TbJw$%Ov8IpOIRXcNpiH2lzt#rq{)LCt_mct^yfwq= zbKNBqt&2~7J3LFYOU5&n{z8;W3VaWq|nvBp{x?xKhY?N zW@ypU0W)@j*F|`Cj@_(RF^V=-q%I*fz=~WRxKLXQ?LD#AH0EcrtGuVEtr;8wo>RUo*mDO_Du)SXN#tG3|T}nNTv9kjh zW=L|dfWPf?dLGgo^z2>{?!igG`rv}`4t)lL{{xJ8&6wr!=B>R`i)>Pxp|d;;6PL51 zOEwGPd=NM=Fo?ar!zq82UYMy83VsxNriNR(-<#jB5hO z61MW|>nr&{98^Hy`tpk>v$t0Y?DR^G^;7C7&H|t3X{^=3{G?WW$q2h2o@$8#*C>FC z3jsU5MylD^rFM0tC$f$&(d61RE~y!KNo5L2kcG)hogNPWndth|Ihu&gN*Nq{ zW>#8ds=yP?ycu7d@?Vi*;*iuiB^=Y$eHn~}-(fIiyjdpVF?*AQ-8ITDeFPT`UM*KB z&VTlHKhij04^L7fBo|F88H)Ln9hev#yxJ~yNJSI6K7Ux-FjV->ftn^7*x3R@1KW_8 z$`tn6%8Db1X^v#@Osw6fdHh$8Iq6fvl=iJel$ZghC~O{OZ^fKaDE+#gQ`4IW$8v|? 
z+pNlh_|D>#R+#5@Qbb(ylHDvkS`)Y3b~5;I@CRicf$7%Kz;t0GDpCNa^~ZwQfnLaQXfX@ke{h+2WlH2nMa-k zR?N6eDN9c8^#BQHW9qqSv6ASzfD&D8gl9|BnQlxib+RozK_(d$__Xt1zp`X9Y zCm^fvkb1iT4GpU`y|d-rO(>V?@ZG9#i$+w`QYT%E9oY;WqYS5P(?N za7>a;AVj^!eCq(x1aWB|A_|ER^uWNl@1<}P( zpf%{b*!04;i5KW-N!L^;Fb#vazTP6cRvf1M^T4p}ao38+uGw50rzEq=>l42Oc;KZi z!Rd64Gc|Vvwv(GB!Z!1qce+~1(t!OLJ4bQ&y1lb7hK}e0doL1WM3`n#;7uFT-&FRO z!gxq<(GGbp@%$Re=(FK?oS!qPSPD40W_T`r&J-zraftTh0zuA>g3%R26EEsn-oSnR zIkRK~v=xlCqWa9NAcTzh>c{<(vd0tDl!eMuA1_OunU4H^-!R}e8#v4UFkWp`jRlS- z0Io>N`Iu2`#||j34HkB25|*;2KeK<*ct8Yf#Q-6j69Y}qP`zuimtdH?T5*Cg`v62^ zfPfpxOaTt^;&D?0@#yW%+kcG)xrrG2>Ao*mWG4)kLP9c5lM)I(D{`}?oJkW-s^Smp z5SyXCj;nM2HRwKFP1eN#a4kYDd!9L939XG5LoQ=yKp(rl7z_T!NkU+ER~I4KY1;h{ zFYbu3btBvDW=-Y_QybVA-}mo=-!brvey{xhO<$CLJaFBbo;_Ddcj1!lt$@=}$U>&8 z9hQz+6)AhB`q<{^KB73W$h1MGYnOn(BvXWveEgOI1>B){CA7o2PShwKoisJodjHd_ z=ePVw;t#pVO}{kUwC@N4_HbWXwbMA0|Czs>F@bncZL0h=1CrzuUytfYViMq(B-jxz zOV1t+qx-^CNp?g^|I>%(#6#g|J{WK(_)2QWZZ$EVQB9DE;lXB-{I}R^~^3VOdCDu1eTwgIQH0=tY68%^1=TYsUTbAP>~}vgVmX2(R=cp&IS;5qm>q zCPvDb2!s=fdUiaI%85~7wG{@v@%8v=!xc4XKA6q;D?dM_?w$u`Feye+-mpW~PmZaP z)p5KO6>~RldU(`cFY-o~3>$;rTTPXw*E_`TM@o6FGhbu9k=ncQWQXM8{%uoQ-jF;2 zD#>T8m@HW%<8YKjB&ME24qpw|^6yuhWwCC^6)3<+k;#VrJl#kj&$pzfeM!k?O1Wt9 z<0zyF=`tx3N!@EHeXF=}dhkJ@dxJ6XYMV@2p5_ws_`lvJzp45@nn<{m(WQ!F1-+um z0SSolO4hOPPUayr9mL6~%_b)0;?rR7b`<#3HjK2(Ry))F z%(~>f#O#Zg-O}>}{E&r4q68q=C#kXqk2TDR3U{+5DAg#s?KVS2zXK}BX`dSZ2CA@RqlK%NpvF2sNPSHVP zNIAdUh#m;1Nr7@pE8sNoq~9qb?*Uouk;Nh7l{d+WN}29aWqngQ(KLNA%yjW zr{Hz@QjDbaW2z}Bd^SyFvy~eWBZYq&rlRZt<=B6Zkf356lE+mHt_hT}f`pt_xH6Qt z?*&-Xkr8vTMVU~VVSRWVMfGB=%@-Lla)Kl?Wr41t<%_%SV^rC_`k_)xy2+{Np(n3E z+Mi5I<8a*mREvg2*LjNP!`$G%7sV{PJMkB)T4-}=R*T2k`{-R>|VTgl196H-t z8W1d3yWy!h-hUtPy!fN!u~MSU9mUV1u>Pns^`NQ9tg}MXVfvI^I3|;jv^3r@i9OMX zZ4|Q9|NXhTRKLy+mmsYuuaY+zV2gYC$h?aUM20HlQ~v0{3Q=B7cDcj95(Cxd9MrF1!kTV={Zn|7u0*Kzs1@(rzz{sh&u(+d|pRat7lNOd);0U?WilG+7OXQW{A{?15y<4?Ue 
z^%kIcBAk%C0X}{cyA_PkXuoZ6{Ra>d-&$$R;Me4hI(3*e;WSL<22B|4m`u+Da=Z46=w)y*v!t_EH%wkEJ^K(TFR7Vv~EKKf5}r_>-1%?FRiMEZPS`4bj_jH8Yj~2j?o;l7|YC@(mHo1zn?t6hO(lOTrS%M-kCAW*lc$t)ig) zjCGhzpDx`nnh_eyNR|)SE?p}d;YigjV0POi$k-=`yx%owNDbj_WDWT$u!C!67<6C6 zWO3n6>cZYz;t;uf6?>e07}w6bAp)K-zS-IjB-U_tPFabwZw;=SI`jZCk&@6l7=Y}Y z)PA$A@bP!1BqzTtKf#J0_!bvYpW(BymAgvRNdJnKgv z;>)l`{`|=>O)0Q23Uk*VRN}*)br_IDL@o?*B)~E8*!I5q;=ds*AkN=xKM{1IC{+OPJsBger+!ZBXCl-d&t-&I1 zebLmP>H@x9lqW~*4Td?(-Sajl_y7Givy79w&@-z)+8|3oaz!5Jr&oTD*Io9YCZl}C zrta!O%e>g|rUkrVFY>ecwz~_^o?O6CiRYbdr~kdU&U{b4YNP;ubFfr|NXUxStFC7D3URzAu*?FD4gC4 z;r#9@RD}X?B8k*q9(tH;^TwD+n^Q_3^y#!P($nVa@2rG+UZLmg-1I0tKd2zI58_~* zovQXFbfA@QEtiA8&&n}hbi?OF_5;p;V*)ft#3wuui_ z80)`2oizA*TI~TW)07?3&t^_+HE_JcBElu>s(T!yGUbr1?F` zT4R;V4q4A!Ee0=u*(5RT{!n9Cmq}KkC!w|f7I5w0Q`+$5fgthT7Xp`>iX`3 z&D+ZNCK$Z?+mk4L2ClDqR1*<>_k#=5>LW0+*_wDm{JF<*bg4o@B0k9guA0DLIck8p z7mwzF=3Rmi37eo`K@rOWyFfEq2vYI=YLsXrVfA`D6i+bAl0H?6~vq^u0H{ zlWX^YdA-LN9M#r~rKyB27)Fw&no$&Q+*~Q{^J`U-5kuOBNjSd0JKO242w$pGvx#i> z(^zIt6>VNKdEz~HH7IL~fpL{_lWizEDFr>4b4~NuPRuTwA+$?@0hyt47R_5@{?B`)fD42fA41~rhT_d^}+E{?|vkgnwt~x z2RCHj^BaAmQ6u&4J4X0XT!Mqo*2sP*Fq@i`v-5p7GPXBb{2C_uB7lflCvXs@pLCEs z@`_cdfyAZWfaL+m)l2@EGO0aj;jzI{q&{o+qc)J+o zgF^ns5;gkP9;tWgjU_mf4Cq1`VJG)1*+7b``2EPmcLuka=TKuveN2tSf5diSyhSmK z!T2K&gW=b^W8W?Lj^@;@cHf!CS@n%f;=Opce+JVEk=*|=aWA)TU-zYjXM5~Mv=KzP z%Zx;Fy>|>+WlTt!eH16EwH+bb89( zpT*Jj3MeaOk?q%KrZ?~lW0ue$6z0~K!{s$6wLVi_X^9tKNeT_WA1*#K2X;Yj5OW`Z z4n>T>(DPW5C#TLLkCh|7#=tnjC0zsh12?d;nB%D){Rz{!;EY(%*b!a9z^Gp2Dgt$J zjKUFNRP4Ctl%Do-sph`?5WV@Cf8-AfS!giS8hIb|xj|pIi69eHxc|loM6>p-zH8#y zJYV6`0GHHJ0IrVHopUG%-y5O&Ay6rb<8J4bB>pNR9p~)txuQd-s(8VWT0*~#JEO#v z$Sn4%#6gCco8Qs-`Xfz7wYDDaLOwt)RFY6|@VVfcVe-bitbL0AWNFL&%8G5;t9JI* z9-4v2DeJqq*aa`|e7%oIvmXJ~gn5%yZy^=6cGkDHrqYbjAi+h ziiQM&PP=h2P)t+rTWcD&Qi90t4Khc9)8i>bB#(|21WFVp($>|Alf+XS3PGDiqcd^< zsb574y9V+6t_c$`FyLG#NyewJUj-kjoQ`Ha?!_$s%ivJw$VEH)PADodI;JG7u3inS zULTK*B)4!=fj!f>IEpNh?k+yxeSD3LFPa26l5M#N_~%%#E1=sC#9cHvBh<7;q3-?L z85Nt^8VOd<<|2Y{QswyZb-D2VTLtO{ 
ziddVBVIRAZ_w!m(YB;Bk&Ac&5eQCw^MK|Flx`X_$tRo!(aG^4ZGF)%Ts-60$jLA2S z*vY!fGf?tJzmR6Hr|44{+zJl?k+mk{sb`_HNLi-x6)FeO!oDzk;zNPSAK&xwO>z{H$z$vF zg2+Ta7K=16hq8!~vChyo;2sC>@%^*V3^KsXp%2-Z`fV0Bo6|I)u<{cTbG44ZcMw73 zjE+V;^U(}3IzwdM2~9FYn8?gyjFwQ&;SjOpk{Ev8e1CqNVbqfFf+17r>ndfSEH>aS z@cZ&|Kps5f$LIIzLn+0+u!s;gucT;thMQuZpwwd1?ALfM8SX^_7@bOx<**mef_NOB ztV)!w)g6fMKk@9qOiD^i*`o*k6MmM(+mt)9>~_6-zjGFvEuG5}MuDBe5DLlz>;^?U zTO4)MarG+_sdB{_A2Uy+MIjx>-|ZfO{z7Hk+r5ziy-IqDq?}}y0dw=5;B)(9lTHyt zxcNn)Dm>- z@(ye{BcsLv+0=WFx^(Sg3sg0oN9-&8^lRuaNPQN}u5uSFql^-GXW*W?CCb9#d-A|b z(uemkrkrLc5{#Iok_Bohwt#xW{pDx2wd(%>VPCmoiW#Iy9VxSSTW~5V^Buu7nD7*bA>}#68(w%omePX?B5qqOZc<%agOZPI$vqko*BL{4c z>?0>0GFQ(*E?lYjg-n>5_G;yA{OK99q?sv7kY1wIc=T7-{?MDczMb2z6~xhORli#7 zHg!vBS{lSmQIz7^u6-mS-d6VTq>SE%YngasMqiBq<@3nhUnF(b@ z(&9+>WBd+eBzg0Jjz>ZGJT(HARCy!;;7NEs%$OdJ+qT9^6IGu83U-3;Q=9@?HzSS@ z9x?s8uBlQ|d!x_xF!?C$5({!cFRN5(v%9i0OkhIU$;ZA?k&&De$DchxF-uUd)9BDSSTKNLs~iGAVMzUN z_l{H*317F3VHC&GRkpJnC6bK z+Eg@YmHeJh_B><%08XM&AW*b1Ob<4jF@NYt13H1Z)pAOafKEn*09%r4R~>o$`{$BioJKoSuKr z&*!H~K{ODW7Z#BnTQQO!crBkJc>e$iX4-%2kH=>` z)vS7A#DNf$=0-TiG5T@`gVY5oU<5Nm^XY08B6U*L*^~!TjIGWv3I6;HdHwnx#$cmc zuY@^FC2K)CDw7R}fDN=SsHB7P!-g68&jfyYKFw)0_xFZq=t!o7#L}3{57iNLl01$N z1CO6M0|UoMB}s*p!(y~WiaB3PAEVI;fDe;_@ISEQp(t4;?eCNz40qLmrHund_OM81 zj0RN59)D0AV?R7+$3_EC-`|M(!?Xp;BYgNe?L7k zkXeC*5YR(tu*WM%!c4L*==T%@8P3Jz0(kNGALhnV&ve7&2C>STO2nor=@!))2(P!( zBn*-J{*nG?{Dbi71hsyi?G6$(5KGgr!a@>jQfJjwX@CSA0uO*jJ_`PSeuTc zV`3`k94|Xx-Jii3Cy?skl~2tKtDGSc~oJMO6__wyq2lB zSwE&BwuGvZ8;@}~!SkG-#yW*Kd_q!6^73Ln{{Xx*Lgk7}W!$|qRbjbU6~^V{ z<32d*(Lr06PmezsVj(fbVyN{6`sG57e&hfL#t!Tg#y>tfLQ0W!k2pX_C4g!YERT63 z0m{1t^<-8DC;Wfw@G+cxbs>}&Qopa(45qh(6HF7SRKCn`8weW?_&jaT!6%R3Bcl8^ zVM(7m`N3rbQUy&0%f~E{`n!W|1cUAeIVu6GY{Ohn7mk$o0x&00Ob90CSXV001YU z#U@;}%8@Lvm*!6|deS1pVWtctEV7qggt@PYX4pi(@NuCwk=mW*+B)Q(mb2SE!6Y^z zCwAS)6yu-L&l{Zi&+boExQ-THBN5u@K#Le=UKu3*U`Rhdyl-=96!y%GGP=eKIrQ0f zFN~9n00{Xx#~otK;pi>_`N8B59GkYuEGr^hd^oPv#_eLxc!@yLdL$T~pPMbClZKilI=v 
z4Zsf{-}|1Y{1^e9$G&iRL@_UDIYa*djjbVa09fu3CoRZ3TjbiLkDy=a}WmkeoTi2Fgedaj4H^YetzBuEH?h3B^pSiGDt_LpTb@_FHy6JsyosU%0jY;D(B6_I*fWo&^}NvNIF3H?11( z!76>jmFnjX;b)0W62fPSkegWPRl_vjP_caLNJ6z`@2m;|H!tAqZAu^^TdU&|^@l%ejUM z{$&+l93_yW)KV0MLy|BLkAd^oIWq|^Qi$;D8$~_bz-<=SYO_pXuFWf`1*8t~Y+Mb* zvq%RVa7GVMoH(*a%za`sxrFL9rJTltQx4Zz+6v$AP6CBHPD#KS@^QfY^&;dIYkp74 z@L6ca2GhGPSk)~wx1w;n%QbKgJ=qy&0QmfIk@*Lz!vNyv%lCoE1&j$#N|yA_$54U6 z3aKZmCf*3i{W&d+d}sdv(j8S#56q&=`up(ph1mcc!BTxB()IYG{$rxlybv*2Y{yii zoQw{q7+QaD`PK+!NbZQ1si9L@B9XOORaKNU?4mRO0G2W~k~4wielgIe2!xQLzk5P+ zpo#~0E{kL8mt;#bB=zNREXMY68v#3zPp5`lgU;S^qmiDjOG*+6dTU>^5d5OdrwLU( zv^U+f57Qpv*SpV8s`vYH>^2_unGBLgEp5?mX%XDNr~?AQh;6>14ac|=OuoLTWUH<@|yiYPn zn2-&k>T1nR78RoufFM|978JFzhg0KIc59xhH64nNw_N`T(2Ail8p^#GC(j-0D-x&f$$DN zA01Lw9e=1Wa4C{|h8w?{YtwA!M1es%HT#xYbHy=JR`h4|c@MuZQhw~6w zS`#fOEJW57?8hW+#BJ%w<$*llC>}QOG2_QVaA;qA65~oAr@f)F#|DAE;%l!k6H7dagB;A6*+ z1FDm7i3(t!Uw)7Q4q=AVbjue>_D3Kab{n`PwlEZL&U|?UWM}b~I2mP5-oL-zA0UGt zOQi!F3?1iK)yC^_%CVed2iu?Rj&cumi7EPub_i;@ z5;6T=+%W`TU@mi(&yI(&4Cb3K_u&tfpnWq>X0nofz1V{Du#XLsoCZ!g&H(_Pc+VXO zl>Y#y(i|oh+IB29=$)`cgZho`7#RIN-#O%rjy^iBtU1qbe<%`%FxI6ls+OazB3+(c z%IFrmHsCy`Jb4+wANq8cD1((l-;4^Aq$V1+B0y}&M2WZ$QWlB-03#SAAIBdjsE!{s zThNFe&I*PlqZF}6HEF`g6bR-egJ2v5WdrBOCyqSy!agb5kun9USLd2Z61o7wm{{YBz z1up1#Y@$A{vPAP~Zwor5yl2{&fs|JBPj}g?fU$w4iZ`tbe0f@i`Hn+V7xJ9 z2^=>*06+B~Jy+}~;U$K`kOd6_{{S_fMOBVg1^_c8LB>hJ$R{5Iz$YFLS}MmX!%ndM zqNBF8d0Hh#4tFSP6&agb%+Q+a#PQY{^ZH7Npo7^zK@K*rm8UFxYi}8*FCb0R5taj$H zYS3hr+E~dU;*}ADXU1|e#{dp|VDw7HOgBF$cQ89mb>a;SLwNy5&IWn*@Cn<;?bK#^ z$nJ{3lBg;N74CYbqoT#ZBRBvYjEo%TgW&lcN|lV+vf2+ZFh-lGEs2^_UBu=_40i;{ z-Q>2=NA~{M>fE{DmumHISTPdD^^5MYnt_=H=**G_$;rk%5BPyO!O6}u)zr^a%4N3Y z;y`wV3mUBPkd>9jRKKGHe2w3$fJgZpc=+m6%$%xa@&Xk}tPN$Nk5LT5HI)G+VF3vn zkKph|ehL2o+?GNX`ohvBDUCj*IcJJSxu{1jMMxMVWaMN@_#t>ao=+d1r$LkkU|m}e ztVX2~YeJ{h^JCMJjbSN4n&!chG=D4O1;_W>`RSL5N>!Cf53~D!NKX33L-ohAyNABg zc%YD?N&y35Z=6$`AS~5K=@Wgw zuIs&@*;+n>pmwZ`x<;8^rDbag5_?g1_$=8can5teJZGxoiK8$p2f%O6EzX!oJ&3GV 
zKAv`;dv+((e&p|+Utgr@Ek*=@10#l03kGZ`C=@8#yns(6^*$quE@%ZP%G9pf zeGEq;SdfB}T$-M`{Nqyp0B9{(VB%X2(f+(3Dpa4-w4P3WPmFl#kKsz0h`Q@S-I1GH}EJ`d0Q`jAi0RRIPJ5PrfBw-up|WQBdnB!rv;!9q#h`Tqcy z$5w)+fHKx3a&HCEl_ZTwwvRdAv(rTba08r$;1iHP)brG+SR#h729fjz$dhYih$#G+KNO|mC`>{{Vph0IIg1bf-k4EN-i|ZBu(V zCmXVeMhEUk{QC57^hw18mh(PT205hh1ce5B`!|nqc9&!KC)3W;?fn;Di%1&E*Hn2b zMkIp7Nt9V*bqZMR3JBomcID3}&ao0EVb_vkSo_pRVw_4Qj_H4UK{|(UOQV=YsPxo0 z+xGj)08^D-)bUe$P_nj&SLmkiB;?kq?c0|Xoqk>KR?s6R7S9pD*t0JY0Q zt$o6>IiI&9DSy(ssu!2^oz);p*LWvfd8<6*0i(amuVAe;sp6^wA`Mo{#s zsA_3IRBB^K@BMOx&fnBKPq+0nSU;D(nuZcVmNc$ojR5(`RnMIG!2UILA$6Mu@<8 z1NNg$qAG5~ULHtkX8Is60Dx@`XdICD)Qsu~(4+EnSoa+LE+w8L)7 zp>-f~J)eN5h0Zh1I&7(M6ZZN*yOxg|{Yw5cwRk&@?w{J#-$&Cb%Jqe)+`Wkma*!YT zfx>tLZaicY;~gxF478vqmp+k%ach{OMH3ls)L!7ydxx`hJJdBA_HNlP>%k}sJ!goG z%;*LcBr*+{JnekrZ#E?1PiS7zJlslDfCb~zf51ky9;ezp*fo36!!>EEAc#cP9inBE zE>%Fe`*1BbJBBIqQyD*kZ5?DW`St4Rn zoUU9xILW|12hLeLclqLJ`g}VsSh)eq6C^6kN6rZ#k-I)Kuxcj#n+6fUyAZGoHF+r2hb=u6mN$jWsQ9wfRLv&dom}O@u@J zZ`W$+{{Tlj>rr`T`l!w@b7S*!b&~;&cB1x)7ZP+nCdmjIzNxpOkuT9eC7>Z>?;F-5SFUA5jeodo`2J$Gr=STx4nRdFL>k{rFVB!k``%WAf!>UsTs}* z3I~z%=Od^FAudNPzi0yrv8+iIoWV1qle}UV;~Dqh@y0Qek~99PScL=K`5%5dK<1=& z&ZTZz@=q-BM+`u-O(NnT?{Daw0b4iBc&VOh44a2J4i$7d^oO8_enOYA5IzEO`i9;5#NM0mzm zqJJGd-KXkH5?9eR9XL2ys^w_Rr-H0)FB?eUlB6qRJYe-C&KV>nrII{;@bdAqkLCdS zN2PlkzdLX0PKf%}gDr=FXE51;+)Xuk!iH$sloi{C_OkHcWDa=bv6yj%n~;OP?mnL0 zHYH3<#O6#{q`M}ISXq(fOiQ&k&{+s1kT>s7YT%5q;Ah4$)iS=zyj*|V^Ktw{goc7* zNUR}|wP@l;L@l}mReT-^P!xVg2b^aksQ^K#SOcf=4B$h->BTRnUgbtsCD_Xt*iYvk z;u~-IpPc7~+DS=ZvG3~(GgcQBj@wA1o?GmemB27TRbq#o*$PQ;&V9U)2LnHzjT5jf zolCDE-*8L;UVubD^GF_WVR@Y#5Wma{n6d~sR11UW#&P)bo~fKnuhUV^(BV|GLS&v> zD<`31wP^j!t6S5~zzOGY+qCdMZ20L3!9%*u$)RQ@@a>jKqlj2{Q!pPr@(NOItKpPd97 zyf)Fr1d_um!z7H$=t5wYQb`%Zdyemt2=IC580gcL@~DT>=ii4|RD?q$ptMT$VToLT z9gJBd?ZE{}B(WT4`<|r<{VoTm(iNe-PgR3Y(x-$H7BzciLO~+xe%g5q`Jz{klSTMa;3I_%1YpamN|czQi?fF#Z|tJ%D{pc zDG$%;##v4Ve;q=+Fj;Z){$NPn36eSs3Qlzfk#_Gf&4SH=f>$Hkl0JFB@y|qXs3Fg< 
zxF0RbNhQmc{@X~$9ytF1T=nNbW~5MI3H$gMKLSF|9t15pni?-IJtT9rj^RFsX&xZsXY#&h}S zgU?nE5uEvipgiH!wHOt9mc)1|A+D{JECVw$6O*`}e{7GBjyj0r<_i}0tO`j~3^glS zlay62A?2e`{{REdJmVkz_{UK8lcx{n7>yxSX0#=Z7J-q%Z)L_uOlO`LasFTB(o&}&RV4ZSp{ijpYtH);DQw7#AP!FVjx&>y z=j0Rr0KY-nPa=p^ET$P6>#}Z9B%}^xSc`5ygPibx;r{@yQX+0Ct1OQQY^2l}b#-eM z@SkX{BP%#YiMKEZB>a41{{W7hD-=Ke(eg0NsA@jtwMOxhSW=@EC1jP>0T?F(+Qb}Z z$DW-E$0=E*%RW$&GM9LAe(!q~#FE%VK&>DtI)Ff8rNLG?8Q^0$>gbom78yxC_k$=} ztYO%$TCk$3(6e$x8#duLfOF#~$M^pD>O{)LuufNl?3UFq=AR_D^;-IMWr|DEx}riY z2X<}P84(iS2lIfZ@x;l(Vy3Mq2&r${-r5^3M^Q8tiI1$mR5g#LJ*5KnOcCljWn=#U z5$2q8iejcMDv;fn za7Q{Z{oeH2FpUrD$J8IKI;qpB++7DvP14!BUOf6LP7c=p0E;IIpp0iZ9G<%bszjQE zx##ci9KvSe@v6#J!&cMl&{OLkuKIEH2enCm1mP?~mRM;+qwxu!`+|o@arW7uHs_aM$)tn~{oj!MDknMh+S2i5XK|2>NRHCYckDk*EzhBZs&_3TMr48I zj3^T3B!se@=O-B8_2hW3hw#&uxspIlOHsA=aUW*kIE?d$kZ}riEU@%h{JbGbB5 zJrMTM^>)?!$dP(}+DU^&Km>hQ8AN|q9{Sjm})OvWI1#4d;2R4r2) zWz7;c(}R;8h!3Y_AIWmc7_b2TpBxVv>rPQ)Q56B%Jv3=EDz+7L*VEKR!{uSELQ9YYM7mlI3eo{FdCr4CE+% z%AlWNz;Dz6z~icOrDGHM<+BBlb5yQu1#r`hUS-)~mbB8$|q_L4M^M7;3LfZTKKz&?L- z(VQ+KYJ;$STAn(<=c5uH9(_wr(~;Htii#-ylQec@D+^?hjsF1V&N;!s83X5-j}FBm zfR6`(GZLGaFGQU?-msfSv~JRc9p24&<((l^AzTu<+mc7Oh9ux*1J$8{#3^|V$0P2$ zZwH(88$?o1qYD*R`NZ^QvlCo`T{cS)Sa`t-9yvWo$t8v`8_yj00*n?aSqn+av1cQl z;y%#_x=d#?_Sa$P9o-(W+x@{VjocK@B-AygYq&NJGV1K%rG4ZBlHb3ctjtC{_g65h zs87-&0$M9CyAqm^7xcTDCb#t_)#x>f$b(AzSW+7aCJb!I3vNxyI0xN;GoCt)60s7Q zNqCfk4OH>xAF01Zd-Uo*b*OeM*Sm5EWmeU+PfA$PV{Pv|W)|G4s;44F;BUwUbJtOa zE9`+#W3dP9-cTx;nQU#=HDBSI>R(~#UrO#)i&4~)=+zo)XHkx80@Tin{aF1tf=3>~ zlCn8rv6!4=tPUZA#Lt{o8p4mW`qy|NH92oVyls{H^S9Cku+}2DmShpto)#oZ$MmWO zT(H3LkCJ-MMh<#Me0uw&N)-CUi28}!GtQJ^vqHuAwxsj~6Ev{0vB)fncHxI7;Nv;Z zR{sDDumK=_V8Vv*b)N6hYEKMSwRm*fCD{T)K^2et^|0F-Iz^Xp*DIBzs97ameH2#(KzHiTRl!#*Yz|J)jrvcmqpWH7a%8 z6tAgP!Q4(U{XE8ZH#p8e%j2x|;kbz`2mo{WQ2in@OIkeh{7QXUzooTa+o`C{3ekYP z_L)PqiP^uW31$O6m?X9r894Gf^k0W~bjdP*5j^*e2T$MS7}&YTIF>3kk12O|b=uZ# zlzClYh;2w!$r;XaLGng^dGr0c>n1FkMRXtv#6Fp)Thwe!X~R6?2wu#hviqZnN4?wd 
z25^5l!0M`&l-$Ln%Tm86w0j%)CWlne^xa-**1Vc-uLN^bg*VABs|<{WM%p%j1CfL7 zJC_^|xb8WKmn|tNkTIl81&p7(`!zU%-?45$CAFgOjZ&Yh9!nS9(I$QpPo56 zc=A=5hl|oA?M!`RR(tj2Wv5Q4V9cc@kgy<}w%|ORjtgTyj(V><7!;F~0DNE1_l2F{ zW2}%Rg>=g69D#(=p^^5C9tVsNNXQ>PPf#Zof`aJ-d%&5S!_s#RN_C`?YFcy_6LD5p zWKuymRVNGy;l4=z-1L=j~k zS{TeQsC9tgnIouSRaOZLOwGW+7{FbmkBoT9@&{8V7KH~Odj9|o4i-(|#?{qP3f2&? z7)OdmCmG6+PsbR?i~*bz)N91Cc5fek?**P1lFPoH%#uja`hmn=w8Trd^#$AQCz5$T z*nIUeF#{uJ^)>Q_%po>49q=q6H;9<2WHJmDB!li81D-}e=Z~JGUNJdxec=^7Ad32= zaqNa#@;(46=_25Ku>giYJ_+;CryLR8Ao_mrLSqPS-n?Y1_TO!~EzAe65-vIRg$ScL zJe-rBMgTlb{;KK92b+B$yI4hj;?yL8+1eb;yIwI`DN5uIa3xL;_c#Qvo}2wyV(sKU z&^@Vp!!Y+Cr)8Q*A(h>6B8aQRtOB2DD!d;#&+5)PkvvQ|&0i=&X7Kc#*3~0n62_pG zc13bq1Y|0n4g%wVe4mbp@nAcM9=|S7`ObnqwXNphmcwr<3}j~jH=Vd6AP>$n!Oxzd zR}fxzCIqaK@$}l&DMbGOaSSpZBX7EzHzB~w5J2z`0|$~h`5hnnf~!wacs|xRy3Igh ziUnw97{hPgpaMTg+<&+I4+E#RChv8 z0AOU`3}@pc`02+O0C(>N+8E@PsX3Z;v0w>ZnZ&ZleNe!L$=&`VkbXICsZ8SX%8MHK zz~)oMTTRrVSjVlU+d;!b@-j)hR|ZjLZMn(vNeF?2%!d4V~j8-K5_BS`E(^I6v}-e&mj!m zQY<(C)<%#Zjzw+691d`Ke~bnH06!fgQrS#1!k&0t0G=4~Tr(e7#M}&SB$7G6_#eMQ zScXy_2Ae=m$R(B)nH4fgC3LFuBJ_NXD&) zW*P*o0)W6euWZ3Yt zl=~vr({)WkNHw|w;Y%Kwzn7mlR%VT3A%XTDBP3(a2Ua^QV$+&@u;ZiI}$oB~@(XDE94abvk=;#$2?4u!z(~sxJU( zy_u-$38rhhcW&!F){il18kU+ZuBB!WH_Ko#$pv3-08}n8Fh@j$&nab~UzxU$NeLDY z?(k$9_K~PtTGq7aXh&V28Wy*4ATi@=#u+34lN&}bizek`gN%@6S~F281I&B#f=bc_ zRT3Ki0De@}Z`L$BW<4u?)_5*yP(v%q!AWI?Rxb*xj5i1mIL1#*rKS6*1K*TH4sBu- zYdTF`FHW(jX$EGf%^%E&h<+7J&oAaT%{smWk`V3HYBF*~OBy=wY( zc&>J5Z0h7XRJ;*f$y&j{d$zFhi)Qm>R#x(x`cZMWxe`z>AIyB=XWyrFTtq2hsl7dBQ zA_rfsrA4M)rKk3V9UD{rlF^ewuUtTqA=-t*%QA*Y_}rLj2m!+JmR}IUB=?pFi2K7K zee9_*YV0aKtJ@k)r`F7oz_H6+DC&X@Qk2oNHphv>C!=kM0fgVv9~k2VoMr_?=9r#@ zkD+&jyh2h7e2>|oKU90uvu5l|o#EQLg!&ex5{UHeBEN4}qcYuRiDb2Dkv1s`j3e|& z1S@VWla90D!G*`;fRa*j0-`n87TL&-Uyktug~G5=%`ku|L8)`0AQ&I9d#Ss-v%7mx zpJ4T)d90*k2wpd6&M;3N%;4ko0oS5T1v3WW2JgsmKeDTeKy_{{{X6xKVhD= zFpEMv$9CxB>uAB^X_+xym@G1q+mL>a=fkXB{)Tt|0BN74`a^doWT$vo)2;o2OtK(!0+k;x8w{u*^~3yXsTe6~#?K_AkQfb1^w_^qtaLvPW5P#b5#A416dgWI 
zs4mot?|FX{T3>4EX&-EM?uTZ+(^57PbUhY6-tCUl zGV40VpHvZE#-QYg+-{NBTqgP>F&CF zB0C0B%$uD-7!0JG3}?;Z^TTNP#i1+O`Dg5SG&vV zUYGS6#-C65PxE?OL?%DDvoVL)Q=doDjO6i#Ld*tNZUDzVY4K+kk%JP9nf{kx^9R5 z*J<$Y6q}C3sS^`G9mDVZ#&#lpa1zZPQPLWzonKL~h8a+E`p?g`bMi6&06x2*0e~c> zG=%6E>QTN#ROYbA?u54hjB%gz=_G`j2t+ADu|1>Sj^%4~Ra&&_tvz^|1d7w6dj<}C zrr<}90Oz40D`cp&I4VKRG>=(WZd?fZ%hc3u?zN+86Kzt>>Q6H`9H@Pn0I&=IILPA!W2RTcJV2_8Zvp2} zesDdlZ3fo4Pg$O5-j$}U2#!jzK~`leh{R>3Ktcy9pkyxMJd@|9S02YO37JvIeLisg zrebLef)3r%mKzc1F)ejXvCSTwk|yQOaK4}C9R942Jo6Can5DL5pEe$+>jowZkpkY; zbiGL^=&z);?H^Ai>LqBHCgKY_?An2FS&xu&`;M1aXhjaz^vVI3(lXeDzEHZgDw5KKylwyGN;rYA5OQNiF(x^pf+J z-DxS2Gb^DE;r4@)7=!0H8R?hBxP*z07HOgOS|KVzhKyq<{uZjz?4jS3q!usfm_{mY@rs3wk@q(6Ty*MT9fu7n-Uh}>{b2f@fbeD!)x zK6;awf=9m?F*!e$2(7N^Ix9kgUu#R59i~+oCoD^z0V8$*1d=)F*NdLAu)1P4y4EmhHy7w^yAOeZj`jBQk9{#y0H09Chm)K4D%SQjhqLlw~O{N!By#7Q;!j zd&ng6GyY_Cq@rFN2-5z7Yc({^=#stn3kIQI?aN5Jl(=P)0X*SJO}WP#y2{0*tsspf zUjn41L>*(=A4NN2%hQ)hp`=>9WWfIbZw(lO`UJ9^xf}@a0B_%JIT-7e#^H`NIR#bj z@uNKnM50XqjgCK~9r+Y(ejd}kIatJwqiW7b7-8uQyB-L~2Ok8Sb&9SDiviMxjEuv0 zhm*GZvb9l1AdFEt=3;O^~%O=ut;26pG9DoiE8C(|?EhM=c(h|>R9NHk| z?|nKQJvCc3C84Lt%!zctiqa5p$|ZH!NWkDR{>Q9|yeAbYK}Z>gm&|+C7%UL_LMr#h zhSw+EH=|)=-784r)KH)W^#S30fHwi~IL}nh3l@`{r_?{KJs^e583%Rfv$BMkJiq`5 zl?KqGaXf-EjoEMV_&q>n;!6Q9W{N$bi`Rbg+ zVu$9Je|F#9VXFqFr9yJ;B-Qt;Y)6ifCN%_mOAjl?bAiW?*cCcr04hy+kAHY(FK8`p zP>=v7hycFn3VKC2<8I<`JRIZy0Em@|2v%a6^!Y-oygBO8k{4*~$8?|`^RdGZ-1|Vt z`Qym^b0oZOV$&s&*Vu!G(^#hUoy2g*CPLuIp<@k?E?bkyBhNifnaL_7d-qsphQ_-} znnxQd)w0ZXpN$sz`Pi?VR-jP0z>KGyS!j1$0z>) zFu~;GIpe_U*ith0QqAw(;!s0F4{DrWaV)kgLslq*&d(u0xhu{GC;;5YBz)r}08lA@ zzNC;X@5UIY2z9Jgjb?g3ZGAmp$!0m<(V2V)S3Ka3KsW?^1IJA{X+o3(7ChRNhNOJj zooJznXPyd+Bj;l}MeV^O1e}q_&+qSV=*gUwWx)!=l0Z1~<0t1I z9VJi|1~8mQVMyf;CPi$3L}B+5yq;GeV2}oWGv~nS1sj7w@87n9ctYQSZy1%Uw45t0 z(||GxC_mySl1TA`oOJ?U^Ef`Ro)lygBM8ztMgyNs3hi7I&w@sLgP$koqCh6i0}df& zRvbtH$E9I%W~?&*v1(=yD$Q ztqpq+Y39sqM#NdM?ik>F9z5`Q$;VEfA&i$*OeA%9MsXq>rS%$(%KImpStQE;07|TR zicABy8}Z|j`+oj=rBg;Xyl`5)T6(p0x0b#$w4tl$8;) 
z4fkR0ks@Z5(c)J}?k?ZfqpN7!lF5QCkm@qAY_3l+>;VRO@#7?ZdZ&xUaSR;XgphWn zEALn(tP9d4>Gvj^r`e9{T$Zhh#5%hAj^mN;BoM541m`|Hbqua8#4`m2m+<|ddqjZ{ zdTq|$(=PheBlK)WWLV{=9H`C*p%uO ztkbdS)}k6{A*-xRs~oM2um_Qb3V894o}*V8#v~V-uRoL)Pz$6n5Jsinde)~_K>DIb zeOWi!nf~o$KneX08DwB^RIuRkG0~1YjdxVl9~Ro`hpD z65OnoI=-J+B~Byq!@5^{JW#tyNm@vX?%K?V>z@I6WApyK1j)r$zgPs$AXXl`-BQTW z%UX(hD7hOa+=rjiSAs|R@PE&$qIGLS&EeHCM8XqVy$oSZO6iihf>c+5kY$R2#v74? z{-dZ&@V6wtx<~fc(EYHIw zU_L_rVUL%}G+*JbO?iHp=?kS)8iko;u_V?Ddmv*o5Fg;M_BJ`jMtb@|OG#NE(lg|x zW=a_~Zwtn+W|#7+?-(s&(HNS>%!LH|loB(NJfHr0G_mg(B_yOU@iFKB0OEBXUeTn* zppmQJNsMgDWGKqXkh#tj?d19Y03+3uEFc*l$W=B$OY>0zaw zjAOnG*nfUH3)k$^FdPem?F++4)r<*`!BZBqXL zEq?GNWj*MC`ahu9i?`oc)Kk;dHQC!xvaCrctWT#OJmFLG{{Rr{?hX%v7(5)gWL1Od zqpD7{?uSO&)XrOhh zl_q{#Y0lGuj{qX^@yYAdeh!%^8OTxZ)5^bCr8z|;5gugG<)vQGmFEB;44^JZK5z$K zppu;;dbLDJ9Y(!ve#x&xd~z_AJys)w{{Y|qeKA*hA1jK%(03~7Qd+Sj20Jk{b5}3{ zfZ>BEC*XTe`ksMcEhQ?1mjpC>BENOi*3fED?bGR6j87C+hpmw!`iPrzle9L_JpTZ% zUpV6>b{Lh)6=Ho1O6)5iO{&|APbw$3S^|h)(!w@IGF%^U{>8p|uj&-2$^@Deu0Up*_F3962kZFZ zMmWG6V?RAhC>e=ls@i*`2`!{Ods(72SgcPb9IR$K?j?a-lDM9rd3 zEn-RN`g6#+jhV__ykU`4pU&m>@xj6Uk5Hr)8dPuZ)Op0YsSU-hT(JYRFQ!H7or!Nx zvMhR_Dfk{h+l{$7Cm%gbsePK8T|o`+#6xz96^M9ygSoD1_G=`%)rk=Vf)>+ZjZov3 z2Vha;57nNoE3p#7)Dk>j)9TPU)}qyjtp>MleLU8$La6X+wv-`?GEhAZHqPPtd%B0C6N&d8OJ{T`&C)LK78XG^ulcUZ7?mvu=`$u z3pfRUh3WIrhTXrg*X%{`}P;jC%@&Bhv%-=N)@b0-rw-gGBs6a;oX{ zh5!pjHne4xhzi@4aztyAG4gT99FDqtrJfdH8VAyT+Oes3PKwgif3~qV;Hah0IhLS)W%RJsXn9_lPDqHB_;kQJHt$~r^i+e zCjEv%09kA9vdRhE##dir;DP#Ci9G&#wI}0=im8y66d}u#%TL-MQf3ATReD1m-~2)M zKXX?UX3(hWRc9WKqXqb)wGD{3mRRIRi)w%ZfJewDBa9NhGhE;Q0I~+)mj}=h?8A_~ z3{~__@Uu@&ioI{A{jH@}npotT37|5-l12iFVyh!f9_^pgRz%#O9zfv4uZxNi{{TfT z)l7W(QR~U!(wTsUq+VP2neTlzS*F%w(685=g|aH?3+qY&J9Cmt0Dail$Id@JJfDsg zHjBcctnNVdw~abM8B=A~Kd0{!_5T2heL~sN?CM>qpx9?JO9VEMfZLUJvNq4@kW7ap z6Nez+jF3MYJjPZTIY&RM5qF8ROalJ^KfFilpTxGaZtYsh^w+kQgV<=Lib$H*s;!N_ zA=J$qJF$(rl0*z{HbHxddl8Ttg7Y1aeP3=lX6PNgzrIeSPT$R*=Eo zFSNd+{U+`$F)!&Fjr~5vg%;hrtl^+22FX1I8xlDPaLN827!Lr_$NW8q#L6KwR)<01 
z&sY;QC|5|EZ7Ws#Dw1isvfHm<`q5jUraxIE1eT55D(%NOK0xv0b>dS1!s1sVYFMPU ztork-YXae^18G`Ea7gjilUCJc+QFxj(b?*NPGfD)vB%E!Ae`rpnrNXrzVHh<$Rs>>MM!Egxk=Z^=#&p@0&(ygpXov#j5?YnjEPbG~hSq#Uf<>-D!^7(iC=P=a*1w7q47WUy24((5#*6>k`1XsbwuGSEoTDD=^fy;}p4 zpnx)Z^Y4oAf9aD_i2wvFIR5}Ab|LOY)wDp8>T?>!yQ=^y(b!QQLlirXovYTj z->fUJ+Yaw< z!7a}`=RXJItvqH66@kn6sd--Cv}UA*S`0NphhMd2DJO^avHWwN$60x`%?BNhwDxe& zBN>YknMqvnjyOFwWP-%gm0_z8z00nrX7>H9TU+~D6wyp*u{$@fAO!3RS8*qfcmQWT zZgAWR_@*H^d^3}&It|Tj^N1F%%v3qkoOxrvKCAx#`o`8_o?SkzIgk|F3KJIG=PCv^ zpM&v$I`@1J4U@y9nR3g$Nu~YTC6E^Rl^%uq8>A$EPv}cdc-+}}CIV*0*DsBs7bQoq z9&x)QkBs=|#ZzC1s4XN00@nauv^V?3sp(=Ey(2(XjKs@+KT1Vxq=Cst0Q2Mnk^B60 z$C7Ae)*yZ1$*Dy&fKszWA!YlQNdqt(6Tv@>^ZojC6(|0vJiYxP$as%fXQO0P`?~&{ zyDVSUW_+G!VM?vgaXFC8&Tjr`zWmxULVw*&)02oLef=_ zPOOSc7>q2+{C|kEXZxJ|^#V8s7g-b(}s>7pGh(|SO>BL!@G^`YnpYZ#* z#tF_2dKIlx)N=!v^M5)*cq+c3rs{O!pVu#4hE{Ea72xs*F>r9n5TVd?rVQbyKh6(?N+%8>Qe5jJ&KGVnl3{;WIILcR6BxwaN5x{-sG_z z%_c2QiCWVvYa&WU(9%NV)Ih8_APoFx=`0Mz%NggmNwMecuNWAlqEbH(Aa{YNH4Jvx z^Bys!&-I5-?kFUZ*0HDRaDnZPPT+zo?&9J6~1M?Pyh`)1;p1h5$6W^3GI;QVZIj7z%Q68j^fuWb_z}N+d~HNh(to z9Q;3?R)s4KGHR0P2?eb0;^qAS-j5nhZ39ozDh)qRj*`U~WlwCmy-PyvjXc?wt&3i@ z`DDNeS{`r!;|zXr@CRPU@OQ#)Q!MVpT@edOJ08HWYC+f_f`5Fk` zH2Pq(1Vu&oTz-%|0s8^h(%?tQ^QIOKVqIg}!LT?44F3Fo@6@rL^k^;&FOD$B{JJp} z*UkxaN#l5AjU|p%DCr`HS5h!lf#8GSgVT`Vj6#x7prn=}w4)KIIuX24^=%HH9?&w+%XzAeKL>AKR-m;K4*xYFW2 zjrcR;Rb%q-&LqF?kIJiOt@JIbR`ME0+ z)ajw??Ddaq%v2(U^&_;;aCUa7ruwg9%|Q3zkJ8;Fk}9{cn6KczG4qbPt^>q|3mS>4 z03}S5um@4nm(nZD4jgG7IHOJ~(%727F+jfPm&qU!7!Tj`{f}Qkq}H%uP9U96U5oFL z>%km&jX-IA&Bu=)<J$Z z5OYwV)#Q>ou^bQUNirDTKo1+kfw*un{{Sx?WOkE52`8f$ZjEnm0T0kgBdCETuXz}N z+hY+nGx`&2frZD~c))r<+jz98y*@j2P z2S1LDG_U~b{{X!NGsgy?@dYLubt95MjAZ+tT4A0@8!!jJ4Um65Nt74LK`o(6dB5=n z3A9M+I;2_^a}vvJRCI85g2M!ILXdZ0o;W!F0GCwcrBjsYXXf6=SAtP$KovW7174Ti zz1>ICKBa4Vot0;dW7-A+90Txww~jisI~cN~lmyX;0JqoQoJJBa6S2*H-`vM?7^RJy zY6;?vqjBUgQIc`^IOK3T&odC5szLI18Z*)-N{OL{L)oW6AWE>p3*n4$#S; 
zpOMZEdNDL9Ma%i0pT7uBVOC|7LbcxQ^HLH$^$NrUhwwIFj1Dq7kt#_pw1OZ>F93ZlR?q`%@5i@qfs|h367sqymER{*N!8Z2t0n43`3H%I>tt13k zKCXInuUG=7i1|B{Q)>woLr(oiMv4gj-wKKjFs+@$XP$5~&pk@1Qp$h0;EsOB%)uo_ zF1?G|+Q-zs;j5wPJ+NO;m2%G{?kmF>AiR>lAtP5f&$-C~S0|3H@ZJ-O;uE_pF>!KD z02cmR`H(0Y5d-~g{sg|BKY83*+MU@oxhk=}ZD)ug+-zc4>+n%f@v!G0id%7$Th6Y0nBliJ;hq?-DahgrEaZv~mkdQn-8&$%8vkXPp! zBomN+I_xmGX;^e+0p9PdXU?3t$_i0#p6eI7A3^?{)XK%6Rnpi?>;2Va+{-2iEJL#v z$UO7N10$@iCBZPb_JxY$m4*qE zN1e`iLbE7}RwVI>a(@60ykCp_0mUV}+&YZW&im+pXqg;FOvPo+m+Id-dc||NJ5N&6 zb-Ai)+H+Bwvjv$o8nRY+qruyR0FQ%%`d9)(5!aLAWX{1&RGGk~L+8(tt&!j|`1#`}@_!X_reFa{ufA}cq_~fz)a+ieVr@wv*(`EItq5kH z4ZF}JHsia1SZraFf-}*{M5$`dW#$!dN%}X;h%Ciacs6r$TGk${QBNJ4c2C(-rzfc* z%E02abjwJ@bAnm82Q7|8)M5Vs7e6i*T~Fx_Lq9jxm*)&ierVdeSLm*X*?LSJ#q~0^ zeNET{PeE%4waegdW@Tj)G2k<9^cReJ?~Hp_0Qfdsp`REtFKUpUyPjQKh^;<&mK^){ zy}Fohx<0XMboBY#w2q0|HRU3?T^mbS4Oo02NggfoIU!fnWk8^uCpqiXF#H>b;M3fY z18Ty(T7YtM%+fG182RNTNn3Q&qlYE+U^Wq!_h)`OUXN4Lsonmg18%j8k#6;gJKYJ5 zM$`KK3X_bGLF?uZh^27#aXG)~(|e?o`DvNSIa^1!jC6Q;t5l?^X=AZ9@#o4R^;Ola zn{%e5E*Vi3Xuzongn%$z*?`Ug;FFvjj=5Q8SqTaZcXE9IiPcbR9(w*LbvtsuXK9*t zojrBA6#oFyC0Na2-Zb`r2RPa?f8x&>=hJ=ubKm}Ftj(iOHJ$VPkk`XJ2=Rgji32+#jeHt@J$VBgMotFoM*}FhbfUX_ho_Rf04@2J)*|2dTVhD6I{JoyIP4=A%iOU#{qd6KmA0G zGsb*@m6!yekUV|(dclQ&T)>H{>d&cON0Kw8T~NN2r)Z*uNW*)8&nGzm=N#uCbs{9l z$bgzDI_hh;?Fg+zouWHUyK2Pq)>f}lSeIlA6^B-ty}#2gQO?{C@iMm@dFtwuDJd&a zEO`oZYE}FqEXCRn{&>(MSGQiw+v1WLB=v(bBMg5<*baAX_h8`uzCYnGXm zA}xPHw3@w`oyR?$JX5EtY65!c3zABD_87$e;m@~-H#-WB(;T1x1cYJ3)PI}_ehRmF%huus6 z01;5k4-9C%!KqC(@3+t@$6P*RXzU?oT>k)!Z2&L;9CCbgIE*Bq5KPiZ8(XDqeW4~} zn?;JBal95LSyGjF0wTaESr6)b?GK{2?tA_f-!fRd{9mz(SmN?CPHz(#2NiZ*K5CvPGlFzrrj_W4-k)J+0VF`h{eIg<6#) zwDzXn8*0XbJAhp0-~++WUqgILg_e~ zmqH^Mkr+!NC?tEAlfljoem}QfI5>VSODIgG6>I^Fw4A}jM_BF{wEM49P}j709u(YS zI+Q6Y4nZ#-{0|=|kDiTuBN%|=4zPdXTh9Gy5$8(3DF!^>{73hTUG93Ce0M8a6yjg^ zHWFCOkCrDZkZ_@XetPylgYahwf?UO=l(l%U4fA-UQWrR==o{)|5s%S!4Kp7^J8zLV z$@AyHK05B4I~dUdl#35!eN5GR8p)qexh6z`mb{ET89qO#0Ll2rKOA+FkKx#EAt=hk 
zrN^k~bb@&F-_swemHw_{O_E5oZAu6pc8%4fgpt89mpieNJ*51BlkwvGXMvs$uGJ?r zwHDZoShq?M1C*#nhoW|)w39~Ove-reY`>(8%l`n8LCEv_=dMRnia+qNWo2$~OAjZXoafI}6^_b>%Twd&5+xO^W*=59Xt8VhJOfm#J>*L( zEL)7bLo{b4z}t)~j03?3AQ9JX!SL9`;g>X^gsUqXXnx{7Yx;RN3Y=`@ViW_VgYF1E zf+;_1q)xAc3}R3>?T{ic1bG?3^PY3#t(48hOBqs4ddI&jhVc2#F-b|YR1ZgiwQuC> zQb;13PETJ6wG3d$H!epWe1FTSu;&q()b#5a{8=~&cy{grt2S5gXwWCuHCnnpfSOlp zS=S|!qE968$kHDnJ-(W=a>Kzq5<%-z;qC>(c#5$HiO(|GOpxI>a^=B#QN3#)7V)HG znQM8uhoPu%i0LPy_aT9T5lF6a^d*{+M01zboy{pzJkm(b{ zUU%X_B87)*5I5w8ImUR-bN%|TC&^IEseqBl)%K$es?fy>5JZs7(lgqBxL}JLgePlw z&I0)(1oAQe04}1=n+iafTpDckr)yDFc^yVQvF#D5@HXcpc=*WaiiE`FdTU>QI8_yd zXmmAs*d1y}OFWUUs}B95CTtdOQyIdX;Ga1Bb!jIIx#&coa+6nYr|kxZ1QQ$Q)-7vF z?|p8QYdUQ^_H^ezYAisq8e?(U8Uv6vuHOSYv-8*5m^>`G5`?r4?HFl5&+ka(u~R!~ z-J<^hHEXR)w?Xrd9{&Is8pKz1{@0C9{vJ9LfTNID%rZ~^0HPQChB`c7Aj4qmh{AGS=;td0VMo&%^9eauKA()#+j6*OU^DK zjX|CVW!M15cx?Xuf6J?c>S8%TuXy%|vevXir|mJf6Nx z#7Hvni9?FjtV>HEis{Cma@ClHJnvICx`!@yU@H`+t`kCB2$2k<)TM0niW z2rmSJI2k<~MRs)`=b1H7F{@>(;RqlFcn#;gQ`=JxE9h&775ZSn^x1 zjsF0sUk9IuvFXKOl0=Eb<)`IVsp(f7pJ@9V;Z7@x;=D04F_OwACWXNnfU<+=e1UPJ z!4+Xu&}YN%8<7{{WXgc|xV-{{X5}_4JO#t4Q&;@dVZ>eN*ll z%@RQyT6-{hN$JPwN}-nvfJr3sPm`1L*Uu@VO68<;LY`8@H}TtfQ>X8d}VxuK%k<~^Y_=KESbFi#R!De zsD&D_HOS|ddv5|`i4%Xga4 z74hRZ{EvgvfRK#9)O)yr));|oiv+%!8bwW|pu=Qi0g2t@W5CV_Jbapp3PUf?`1?bL zdq;IiY-!~rq<4Q%22Hb0Llem!m}CC{eh)(=B4{a@^dB}5EPY$+Ld-KS6;x&HtTazAo&kHP2?5W^{{ zg4KCyu02ijU^6Q}4 zgF;H|{{ZPjoRP;F81tTzfl$l|e|rA_Fu=8A)L%&Ziigrp&=sk+i&p)y728v4fyC3l zrWxF@W!nrQSfN~=Si66VC+YtHh4?hQFAbDZ-y#~nv9P*~XS5+f7Y3WpniA1KfUiwD*| z!g^XINHmRaOpM#oG}@3!Ot8*cx-_!3H?H6ig;gO)#v89R_`iUsiQ!3X(^L6WNG0~L zq4~4M-V%0ncLO$z-W5KP=c()Jvd?5!g5(gvJ6E|KkZGk^2L)zl+Nj)qlZMVuJazM0 zICdm0QkH^}C|<*ndU?i=Y!n3V{`XfZy(<@f)p7)j`7v4)8zhy=? 
z7<7oZ1@qvQ3%>_Gn&Eo)4~M=Fk;2btzs!944Y`~0ic_(vN9FG;(0%>dGJf*zi@n9E zNvT}2Zrz?=>sg(0`S3BffUBGZ7*XT%*QF&XKnX}-EIs`0V+L%)Fu5*bzk9i>X?N;H z>nq4&c5wTG%bpk>3Be3`AEb@?7o9K+wgNR)2p zUgCMR4Hm3hFGP~Gp(Ax=VVn}o2yF3;9Ao3Iw=qd6YL1c9g@PPw6kn-*$sApasp;=o zpx#$>ak4AOqD5kIIL-!m`55Y4I+VG9X%z7*0$CBD{R5>QoAma+rw*S3HoJCvam4Sx z?PFFjD*MO}feB)9l27A~ywl>Ty{`}`b_hrs)$6A2!a3!uGMq-VH6fUvddRMz7D2Rs ze5k=6xfta2z!ViYA)+eBJxbw0kt+h@0>nf+{@57F9)5X09WyN1`Wg@BAFMlL?bTx* z!z+?~>dm_&Iobx_kJ$WllbD0c-R}bRg+Q4MlDNS+2nNvWgYM(c;~se9sLdFr3DO|1q%!v0AMDj?krbXC2u874k(iSstA+u98*)E> zd}HWLKf`!+p(K)yL-%+!ae0zEi2#V4?O&%KPAhgSou{owZlvie*wXdDO!lfxC<088 zfMa5+#xgb@cn1szi+mx3$6@<9NM?HK`}66vXRCn$z^bhFUxVBEly>T*}LAEqK#BlzIL zg$kI7`e)r*2WI_A?cGUWt28vV7_7CuO1jG&AxXv}A(g;5Vp&h=;B`9C6+>uJmQY+! zwfXyK^wz!nPj~OvNoI)Yw)?BMBX2A79i6H>9+b4{sGq1%18YRp$- z?hR58{80fJM0MDIQbb@$Dxj`aNh2BP1{}=qI0JHE*NRNA5<{IBd;Ozb{uy=M7fu?K zdOdk&hFPGcs@T*B4hy@A))AKmIE;lNbZmgn8)xyt{9VFPi@_X3g%Y_{ZPHj0Em3}) zpykb+SrZdOwOHs5?{2Z_8LB~Ec_KTce+-U1u+JIi#y`+?^Ww7t+eD8UZ`L2L)PA1m z_jG!dXP>%9+Q|cODpps-h1j2bu2gMR7*V*APXn)0_)p=v7`bO3j2V*s(Pv`Bo?8yF zQf$R6_Gs}w59iNLvEK?n?b|;d>+cDRISthSWIaozqWOE~|Y7P_@B=Pck^|i^A z!~}*sqfZSncxK9?8(nE0jQTGgjr9YtW3#MnsV0>awXI!*ud6F$$g)ZsDnN~uGlkvB z=j+wka9ofnb7Q_nlVQQ6m2HrbdKgIk*3Uet!VE+Jce_O-)m($HMNuU-anjJRG zwd1LpfH^NJqZa^SS+^2^=Kx^xan*SoMPd_}XqIAWe{w(w;W2 z(ll?TkZM|gb6xDN&e5%R4y|e%I(_Goir-X(viepdKviLpV|Fb07?1!?ao2yqLa`hp z6^=4fFr2v(Xv(XeZJ^qq$Di?Eh%x*}7b;A(c5V)0k)Y=Ef16Oo%U}3P`jJiQ%Li%Q z^kITHVWw_VF+{31BmUA}0~1XZS%y^ByuBJWu+S8J5yC?4Vn1NZ9B8Q{3S6T)zFv0dEin2^Qn zK@}C;&_-4t5+-9ON@55JEN`z~TEPDRe=PMr+GKjNdU*g9lV{4PApB!L>(`w2VJps( z;rWN&HlITrr32}(Op6V#O_80I-};5e?0C*|o_Oe%p)`V`JS_+~h>a6ipt_Ip&syr* zo>B_gunQMCIf!8H8P5RY_xTs6Ntk8!qzs^FK6`l9B7Go+b<(zNLlTBEi`k&TGjUrwtKRx!WS^$EZM7m{!| zBhOkK6Am$${5+Xu$y$6X?D7?~GS!hKXG-}(dzW2ndYy@xi#deOcMZ*!!2|Q2NBsKx zR#_TZpx9up(D4qiU0O85n*dICb=c<^}Ot7wJ)DN^i6opM7>^%wl z!heN6*oRV|xV5`fp3D&>R99#P*<6z{u1+~ogPt*g{wyCE_{)gP!r{{^J1IrSgmpql zRHkPH{D+nTl*Cw`}Ny!+;94h4pt<7un*tW<2*_r 
zSI4YxTK@pmTY5*(e%qf(o*6YumDf$0%0NVw1V~Z+$AZqQjQ-~wgN`mw43eLS=VB%% z$qG)-K>qdV7S{8SB?Ci!+iw%-BLdLqY8~mXB`Wc>)^0xCCBYIXwtp0F6Xzv;MHn1u#P0VkL8SVy|MV?wW>=~T4(%A&nUmKDIpa0XBJJfHLH z)KHKB05p7okHt*IPF(qiyrN84pinc%{{U~tQ-K)EFApI1C;D`888k5TYH}4pEPMbt zKjqOeh>qp}lUYgRk}Wv~p(-7Y==*ELe-AbYBz&p|Z`gG@BrR-NqrKo?1#wI+t+OKZ4q$21Z+^lGx~s$1YBAhXzi#_jz70D*eYmmnlQ*E@A3 zo71U}d*NRSa6g7pF*68K!2bZLF}zAehp#q>?%ib(_zbuF1Tu47%}KOb;>PZ5!V zkg~kW%bmNm8=W1j`ZefpaA+x|_Y6sL+O?@Sg;F6h`$<-Kz&n{&Yjcgizh4aExK9wk zNX>g_syLVwV5>i4)y4nYYEnyA^wh>HDruwsV;nP2r9ppam1Fe_%0U=ipr|;< z$Q%RDKoD|fNKsFxkM9S1L$#(ql$^aZGb5X6Y3hwXMl34Uq?3xPEY_$t{D7@%^#UcJn_UqBGVq6r^QhQGbU*^oH- zeaz*6AA^oh`E-sTtenF!J5jkX4doAzX%My&-iRjj*)HkL7T5@%CD@N~I1R=J4ZFsA zzJo6i*{HBnY}P#kJgEuloV9n z92RA7g#>+R!B76mNpMa?vt65Ax$PN~hHb#~`FxCJ9q-vX9ZPzCpQU$Rj?&bx6gqyJ zsX%ihLj{PS!v`VpB*B3(=P~1P!tLnOEmSPt{lA1{nUa!ng*emHbm{YeJ4d(dc20)c zRMySqhRnt%b|HN;wPOK){FXwNWCt7B&V9$JWFbv* zGTF8jfvpr<#f!Ily4HyzSkdd#(*#+OZCjOW&bc@wG=N1J za{UFdXXq-u-llr(-24IS^mud4plElWiF$r>Z5Em2Ru))5Vb_c4Z5WPC~!zPI1Zp z{s&BQ2*Xs*^QT|DR`m@`Pdi4DnW6)Hg&S%}<12tj_JBY?K68&9ah!MJ947;l+DL!}1j(?08ixIB zMlu(`0f_(;)h-=@#BnKtUL6GPO@}woduWiT0@}qpwEndAmv`%9OVcmR_oplN&}ub) z+}Jo6MPlVk`QeBpfCh8c?la+TBZZgRr~;9ErQ4e`w?~Gki$1y9l>6^ZxuI)149}<7 zlx>zt)BgYuCxnzZIgRpI1_1H65W^#^Yy|v95_)D}<_z{`HRR1vXAjY$M5Rvc9(wMW zG%myK_!mO%OLwbSl4#N7jbX7PBPk_gAF494F4+pIwtczxW7%+$akz(GGEhM_Voe)e zk0{%dH!Ql|GV2=s%~toQ=hH-e@9H>}-MJZdBL{+b00I5_=~0J56P7B?nx)b`IrKy7 zziwFVIJ8TW=(;weNzx4_wTLHZZU$#}kJU1}EAMQEVYe&?8RO2l@582Qm5D-$Bv#s5 z&7~QuxQII>+pSh8f_4@z<9L3nZpk zdyq&SKUdN!7Su$WG_CgjjYDb@p4nlIOD=y&ZankxpC{*zvcf^k>SFecpd6U|z+vn{3Mr&%or9^?&^v!!)vd~cRh<&ee^dI<(@M_G zF+uuTNhOUCfN~MB^ViTn(>x+kho6W^{NyFJYKnNhV>1+>rDYcb#l6r(uC3e_Da!-J z>Rh8al%(-kg@#OpixxO7ki%?fGQ95R$GIlXId8YR#zfLmSp7HmO&}T?{fnC1(3fI8 zLiyv`j2CYsoac6P{{WPD>D;h*qnw$AKG}pseNlp8tyZ2U5yO6zP8dPzGP*NHz&0|t z*tzkI!_QwY{{T`X_8ef%DWt@t=~fN&>SL(E7nTFXWo*jt6ms8~vN>}M6;)NT2_WPX zw*$xbJ$Vp0K`BV_F`*IG#blaBl4XECV+MIYr;GwX2OCJo$3Ah?$SWa$cC7_}coGD5 
zmAAE41&-V@1mkJ~i?{*{5uE&S$?@~kEg4inNgZq9v@nkjYn`nQqpfP1FJw^HHOe}4 zCd9JY)4;bBcvCA(kh7{NZUM>2+u-rn))Mn6hNdp#dn`FoO1pzoPd%#@?%nK-ce^E$ zJqt*=tjw{QnkotR4u}W^m@gr6!;%M(Iw9UHFBKF@L+J(TKTo@dP2COd-PWr#O|`_7 zr1qb|!uP>!~y?&%9S!m`Gs3@OP2^nu`K9b$2bi#REhJ3^9> z?he&&8!(Bbpd{r>0(FU9W3)9N;p^%GN2u0aSGRQtq=lNRv5wU@;#jl64xMlub>^7wg>uiEGY+7%xhY5|GZRY&r(G)nI1~U=Q>$suUwg-c zKZu=9-2VVpHU9v*j8tz@HAr@qAp}aPxE}{^k=LVqHiXIHQW7>V*D-jhhaqmEx%z(5 znaIuhq$V_C*=7gEKl^{LT`>`*$~_DGImI@Yp=w%{o77-}7K+7c4Su>kmRS@sNgFo* z07N5h4pgS~UUIlRA0NO>;v^#sf~8az1duc#t4)8}{bNFQ6;iPO0Ek%uG`Ska+kd9^t!;1|x6;VB;CbURWh= zLup^O@zN6w$zr^6BZ~H29FS1PeL^$j4C9~9d;|ORvMW^&I(z!Uf?67uBxRbMvsNm^ z#B!yS^S6u@KiKseSCK(L>+gS9c6db{C2Ln$E6{3{p<+@Ml(Jyp_&kx5@<)%)MK)<# z%)+N9lrvHchTCbnHmD#g3(HZelvv7SyDFY>&QP|0zk}DL{{T+%ua6+Etd)_}53|N! z6H!JG<+&hvn5p*#oZ0RTp6J4ew}bxx3Bbrae1FTYtCM7d9J{$tyT%Lk?ETMc>9X#~ zhH#)~Bix6dKY%)|i~j(l?-c(4s=fH1 zQv1G5bZNSFl`N1&UF&I4v~jSEZ46Yi9mHcNJmbLYX>xG5aG6*%WlBRZ_wUv!NKDj% zW?Yx_7V>_i$2E@RtE_z})3jNIiyEa3MP8d5B*g`H5}8m%z{|+U!171CoE&wXh?GRZ ziFz{;&F>l*U^_}(qfP!7dm*f966>=o7~n#&6XAwRgZ^$EhyMV(>%HI@aW4+6NBNc? 
z@fY(`5Bp=yzoO`cdvSUMsaNJ)O#HR;>Ms=%A zhUC}*sW1%mMkgB6X9~Iei9hWIg}=#O5X4t)>ruwKENNd(+qE@IP>~r=1XqKNW()pu9-wx*c6E(_2$5okfP$)Gf*C zSYh}Lz(xN6%7It*>nd#3&HU%UTgpCp;{05ypm8}X2mZngsI}uQP4P(mx09iZU?u+$qzNf5fdd9un zx;>>=wSG{rT&S_1P6gTnBDGcu#BCed_&=!mKg2vbUMmYX0J6YPQ~uz7N34Boz<8p# zjvh}CBQcme-|a7yfx9Sc`dT9Y0E-P7biI4Fo*HpgsU;~=6CT~ND#^Gu6rYD4eEI$Q z^3Q48-Q(%gTX34zg)+}7*mYlljRHLBy|<1fM}nz2v{vce?MkD zYp810q1515@6x)%9;Iin(Par6Sz{}^ap%dv=gv9#m|^7vs)e*5J$8FjqsEz}Ia0kh z)Fhg;GE!?1uyY)&*b9;rDL7&=oD30+^b1ndGOC~#0lvRe4M-`p`%*vEyfVqu(iVBD`qr} zPdQ_i&Q5%j`RZAPSwK=l?^o$|ZwO!ki3Qdq0mDiY(qXg!a)*vs4>`c&`yQ)yLrs0s z4a-Aqd5}h?ShukPF*H7pB>t0;x%v71&jYE70)q<*manhU7!vh{tY3P)g`ysXR)KUpV25VXy%F{0>HX zkurkiw6qeS_Nek|Lvw}0e@D4Y^NJ59x=l6sd z7#hU{)j*f-Y>3y~vc||VG6IpDkT~Z!cdxIcJ3*Bj?%F$v49-wDqr5upR^#*@0SDlMK>T%G zeaDta=VMXM{v%j%c#&ust4Pt;a2-825k-X{j!O?DWPbU_A3XvTu&N(lNOA;RQ$?Y# z_WuCWwAA~K-k2cNWw}NVxD^ViJ8WqJFgze;)vMMlK3X>)^KRJRLY?HCdw;Co6F&IcTSZnGw-QciTK z#5!3+DHVcf_hy`SG}M+!RDZl!!C=6As}Kjr93S7FrkBNKf!_uj@a}9PeLLc7tFoDRnm=uBYzt7*4LM52RXz3Vhw5b3q zm)eqWa}RlLM;kUO;L>Cy|Uddzjdmvp-W!D}Y^lYQwyn(R z^XUlHr?E^zt6J^2CSsx*j6#GM%!)TDbM6@*C5b9lky zmWe3|mX;d`I*hG$B!<)vQYhH}08wmaM1vDO5$nFY^EdcUJ@6K2gw z$$yFg+nUd8_WfN>I@%VeS{n;i>O7LInM;Dw&g`MaL!#k9RSE}yG3>t#VkJ(L1{PjY zfC8=FcOcT6h|ESDj7glUkkqpRNN{xzLmofg_3RzbwUM_6icrJ$A%W-i!T$iKUXC}7 zoT4}W*8U+ov$}gT{hPO`(yw}<7Axx$q-kPpgY?TQDc)6b0^pO7aky{m9#;s#;ieg~ zWrULCw?C&VMVUo`OGaBN{pP7^UClg)FD-|_EPQ02@BW=>!gvyWIpU8p1;c)X1oA&8 zKc6`N0A7v&h+<`ED_E2M+2Pa z&m{FtQgas;9uUB^efJ=}P}TAZC=6E{kC5R0k+}E+KLh)bK7lUB3*wl z*X8519o)M6_C}!=q4ueDq(_9+GMJR$hTpqs%7R!N4B(HDdasSeCkY`_h<0tYq*uXU zm5RiKyExeC5Yqig)Vr@l((mfYr_rwsyCo8pM+&M+DN`%R#h3bO-T}}0b@MmIylH%A z5R5ls2WBZ!Zr*;8(<=xtQwl&v?| zp~8g$>EYDD0`PPijp)^65m=R<06dTf-snM58nZE045MiO04$D@RHP`WDsOwYnI9*I zv;(Ba8|habdNI#rIb?aOMxN4r$7=g`56NNp=RHcfp(?$9~&&xk~Wc+6r4!P03Z#-07>Kz(}T}nT;O~2Ens~}gW^vrNkiO^Kz{M9 zX}?xAF5lE2RntC-_THOtsaLf^wTRe;IUwzS&NKV`b+!0OdCI~|`}b(&yJZB4V%{gF zvCBWkk91d;QHI}ZXr{3UBCg)LHDUn|?%lcf9(~>panY!WW}vX=`$GerIZe+m-|Au~ 
zO4oJ&0IRwv)if^7sj1w9T!IZkrkzFXpR|$|eb2m{4a)grj2*w{jWsgoW++8z*I(K$ z%EK;ENm-?9&W*l~V$0jVOunCYhO4VbeyiE;3O1&%ZmbfrDM)2#<9POa8FnIPX*kA6 z`RmQTEtAK1QcgPrq4;x30O`}c$nU)=6JT+Y$Px+fUiazt@{6{g^pCc5n-Nb}Nsg6@ z@4hMGmBi?G3@~i&$OD7_0At5y@$7ye6wI7d4Z&u%){S>JF*iPir&?u>%u(HSR%H8x zX&OEV$S3=pA0w=^;wL54gO<+Jey}>jSFh;ZnL;Z%)#TFjvnMtl5II~H*uGiDLjm*g z^Vdnh_(m^|;&RD3K>W&k55B@8ojnqf{o1{QIX)5Rj`r`*q#B2I>N>&EZ1-F!>Jhb8 zv!}~*+CeO^xe=n_Kmm&$JagB&aLxz*o}S8PFqvr-0U#IVOT~0dniok_>+4UIVkf#j zxM+ILl|sIO*d49fi>J=B($sW{2DYgTs~@$V;>gB9y9W%u?Y{>(-BM%( z(y+2>9h!spi&l^It{O>l?{D`_+PbH5)hVx1imY|T>kg;Y4yZjgjz(evDuBLlIbqar zOji%VpsNspo!k}#xVJ&m#xOYCo+FJ~aQJnVOKTFqKa-xm5v6wT(66a|soHghpWQvJ z+d$VCp)6?C z5e_eg&yb|ekhfSrr&hUdn5e7L@n7zK>+OvmSflPM*CG)*dX%+o7FcABA%970N~*z@ zOhtwZ$PT=MbJvbwIDR_~i87@qCRz#ae~>pVdW%MmNplW)sC5>m#8tlLJEi{s8@e?l zt2U$b`?s~<)TE2;NF#S{9BBYQZM#0R-n0(l2yBS zG+!zA=Wcg-hgs0+SB8h&$rW*Thnpt1W94WQmHTO(1r4-NR>h{Jizz z(`AGlp+NIvPkvEk7(G--k|V;bgaFARg#;1)tbRYws=@|D}wg7g?@QM#UKj;2aB@&2rEWW^W-WPbBh5Ovp7D4V4vxNKtA?m01c;-N#JL%K=>O0jh)5iPQijQ7g5tGa7T!6 zH{{Vz9oZZ@`&&1zp0SH}WszC68hUNVy)@F6FJy*xUD-eG)C`lK9)3Fe6i6W;nDb=f zT$yPv?~wPTWIs`A_Xl!z{knDZOESY)8o*rIWd$KV?;6-? 
z!?a42Sa|fWoMH)9)9JUYSEVHJR4DUVSefI6S+*%+I2mFT7V>!Vdghd+AtgXZStS8x zzV(3LTeYc5klU2Jae@~iPy_k)fyu!A`l^|(SjDNREb$|xX|}bS2}XjMuFlGlMx+ma zoSg6#RZ9|afC(gHtCFQmoV3Z5mr{*7#B!0HZEkxA_b<{O*KH@XG{`GYr%hd;j>v+A zk!n?6O$1_RQGh@Il>^CP*QrSZB4s1;pahD&zU9x1e8KU*5=K9Y%*5qVMG`Ym0Gd#4 zeSsPR)p*Vs7=Llquo^tn%nGrA?p^ybkw+M@ZdE2I1MSEJ0`A}7jAxd|-~d>G)biz- zg@kl`2Y_QfA;o?h3K$BhNAjd@euCxzxpKCI$t{T`zpCBLP{Ra&*0F*KiWu|kjBrj) z1~*8=;B~DoT4g6Opcg*weTEKW>Rc`tgZvf4)By=d4y7N=v(LuBP*7B~CNA2wt4|%x zd(xan!RzU8KdDZ$4-Dkv*N}h2!)pHkg?@UeHxyKonREvGpDj5L7mot?m*c!w8d9+M zXO$;wDP2yVGssdW%Rb`7lIN01&hE40%qT=DU0h)MPmY=df+LvQ&N4sbdS+;Q$C^Ki z?#_ZvG4NchQ$On>?vay)K8Qjnu%Q1wsjW81y};wnxSei=WR z1;<6-{{WJGVuSTJQJcK}i}psgvC7}cr=LN$B%T^YNWwqhZdlhp+piAbauLIL&NB!s zwpw~QaqAxUG?|G+!>IMY-Z9xg`*7QdaK|VHeVcQf{=|Qw>(NaGBC>AL^$U+y+%_PP ztdW(GSw=Y+BOm5E1D0SYR=*<>5Z8~ulgN>rP{ zmL>#$R>~dMi~(06Yof`!@y0XGasGM7&qR=s`G=qL3LI*SV!L{=EZ`C(R#nRu&T>8m z2lgQT-2f-^B(yt1XQ?dp+7UePpbXK=P2*Qg08Vla7o2Aw9CakF%pB?u$iJ)!6k{B% zr`T%sqx-XSWQ{5-4_G%CE4VMf$2iV&_{UN$K$4V(>GpzcW0A=`uRL;Gh+#_uA~&c8 zd0%MxJ~DHR^$t|10tLD;^ViY|q1YJA{{V236C&;cG*Pe|9G$Jg{s|oM(0*R&9{ukO z+QNiv%OPh)$GSo3;F2-UbI<1^@Hz?)e*L^63`Wn}dVQM7Et=JCGE5z1ng?gxcnk;U z;N)}uc1(W&%iwA_UKAGMgxa?F(JOA9RUs z>6IZcqEw(3C|S`0Dn_a|JGz0D1Ro`iI_<=ePGc1gP?rUh?%R_RmVWnDk(gHz4aB8j zu*^44Hg^25z~$+m2g#1Y~$p(3!m}IK@y2XVx9G%GCI;mJ>8A8?yJ@d-iq zM#KjnxbSd#AYe;~dcSa6g@+MYi+Ey;pQw)kv5by?w;d*i3nB|FMM3r9jD5r7{6yp^{{X4~0NbcmSQdhCW86KDdQE$=v|UDd zq_Ou^1kV~*M`n2Pk}<%^8B@oR`RnIyErDjWSJc>^7>~ zpp|!(wF}h`Gm@urqlXF`pC8|nG1h8k%a#fjl>?!67o{1`@vJcfmk?zdG~ks@7pqmH zC1i@6Xr?z-#y4){;O@v>oMSm19K?eyE10rL>*>GF1*=F3eJUTnR<(9n3P`xBP|CRH z1R+)A5HZQY93F;Ribhm{S~wN2kEI`7;h1Y0htnu)!KQbRv8w6TVUFcpB`HH9*sxt; zGfg8#IcUL|S1P0f8=b@tchi3Z;8nx$Mp}pFIk`|Z{B`?4@kFLnxpMUv>T7QZ=An8i zW_r~$``(mn!6p=CSR)|gZs0St^M(pIAQ9K7Az|{9SmahoER>e5Sht_mqcHlN1h8oj zC6uQek0KQ?7I_$Q0o-s1+tMMd}rk2_xJ3N0Z&?tn8 z)u~bzu;~FH63oTWuF_lJWM@Agj=oLe$kV?mx+$(ad`FRrBa{_rS?&k|sj~1NW6Kn7Nn#Xxq~0(F zK0o5{$D@`q_p;P$ew1%07;eS4o+hbtO_tKiqhrO7Ptybd2ygKyA0z|8>LjHj`-xK@ 
zuhbnI${pjtIz83{n^@jdepB4y&0sK&lGMq!28T{vv_#-xCDX7z!}F-{87bl@;(-uCSztU zk#{+&!=pwtyfneY4AJ>W8#9gdbEJA+ool2jv_@LtLaRdg8;Kt7JP$bMJRZJFn5kl* z*Pkst(l?mFZxxwxpK~GfUm;2Mof)3&&m7`@7CyZc-mkEA=?qxR8dgK8L|hcLvjBABh<4Jk@;X!qD^3NWK_12E0!^k z+mQGmd>{1Z`}GBlAja{#eKY#SKCRoG#h~{EntFlMnzZR2p&zRtmH@^nB6jygQB0}q zWgBH9uA2dgO6Grf)LWm>#!fvGdm?3@zk#^gJqxe>SkeBreI%)%eNcF2)U;?@PLEfX z&8N8xr9r7>S9w_-hE$mW7>+<#2}TWgSwTevvR(9~Rhy495HSP{;5N5lZOyz3ml5Fw zFD1Dk^wwJmxfx(x-VO;E@;LthPPtjC5}X|)Zd!_x6{KE%iQCt7{{X2Syt-$v7X3yq zR7M!4)9rHD_}a(k#(zC>zYs4Dcx?XE{*u+#M&|a52oFOZ+=gnzlFr9=LXZa5#yojA z1OEU{zI7pb4}Osi8WOt9yQ7ssX$DZlkGmMo0VnVP=O5drG68g(Z||HiEof+3S!_w^ zmcy_L$W9x8LC!FE{PbA}NPQup*sj=`(6Q>7B$0_@#z`IyF^`YO&rYTn7K9!F)~xE< z&utA2wOJ@>@iCPW1!Nwh3>RVHhyVajjDFbbx#0f*7I6**C$PabQb{h5Po3j88^fgI zSWhH@pd=2htYxRNblpDe(dvEZ$v7Yt*kNdaU-1wKBb@&Lk^JMX%zse0?Bsp=kPCy4I49!-b+?50zlh>UPBj1| z-Kcz^PYshNy(9B;tX2DC>BU>2Ba3xy+OpMV?nFnbAGUGnv8ONTq zF~1PYN=U;bt9PUFr`j|m34tK4@augL?f59oHD2ud{X#elu+xsHi|${Lsy7^_I45^F z1dooe#VETm;g2msCxf;#pTsUUYnJfq^NlGIdlD1}(n!-{LSSRVIqQ9&Je6v?U151~?d zcBvV`l1Ve{jd{uBk^-nC@oriS)7|m+spaDv{{RnsF~mL~#r_^c1PZBbM2>vgz&uA? 
znCT?cr5ftm?ZjEw1GjpDK%QbUBCehk5B`+}i#J+94rHie^3v z1Ht1u`pMY~+uy@(l}|&b+i9fg_oA~#RlmDbvl~j!>Uv^k!Vm%6=nmd84gvYQ;nZV} zAzVX-X|>4|Gmt)$l6=<$Z# zA;8EuZiMmj~wC0&@!c(m1rQ`FJ#**9j< z>&TU@>V~}5E7NBxyJV0I2FVyqhCb2cjN!||PoFQgDLDZSrf>n*$|h2RTbxDedY9Qs zi#5s?8*x;!h~ZxzPSb`5IN<(9c9+vKN%nQ zj;pD~L=Z0y2-wFTse<5dbqsa}oY147u2qdzz>sw@&5p~ zMv+z?u$~=JBykB%f{J5eU7doi4oM0@{`_<-31B_*FyzAInm3Cbp`H`z%*h;~SgeQBXUGUYAoIb;?f4%(HdY6hoGj7VD$gJFF*pq0?-`IG z1bD#kF`j?1=>-OAdc)obp`+>srHe3BO7x@{ZAQeA!32XK@iUOSfEmIPPvgkzPEHPd z^yUGR&VYIU05Bwh1T7sOPmr53%d0agmV%;$jYd2frd`K8aItyt*(0d}Fmgl8m{vp4 zG#-CCL*`MmGU1+qc2VGSiRx>yz^|o1IDEVOf6zryNM2{{UeDBb7b=#uWi#s8o*(MDX50f6KHxKqw~(NIb47TL)kR84uRmJv??`;FXqWq4 zP9EL2sEBJC73EZ(X&NOV%AJ8!ux3&T&d@+O85!W^Ag)@LhG0g|SJL`jn|&U@!_DkW z@;k22&u+E$#k4IRKH~ae+v^SeI<>t+SefUcN)uLNE5w^7dlDnEz_5ml9mhGBXvrjX z^hd#%9Bu+c)Z)?tR6!N1k!Kn+X-L4u%}6OqDofIYQtotbtU_7si&ke%Pq?kw)b&#s z-(tK?6mae!m4q^=jxn4Ksu{eN&yP`-#xYE_E4?zOr?wsu9cx1S;HxbfvDQKv(qLv* z9Dn&W;n@5J-T2_=tq$l63_xX-T_T=7&sgQk8G%i;HL5~=kT~4f1IY{R1Y`7$0OOP4 zB_ZLI2p#GVeh>}DhDb(Zk!m9l4&jLi;~%)-`5*kclr0gE_0kK_^ueg>73x*2H3=PL zDNUtxS<1sCo|$xIa2TK&QV%{r=d28EWK2%rk1*d@(!xth@e+cn96w9@-XZC0N4)!M zP1b96{CY97{X2CYIaadC85kAikQq6R*K~6dRP7+LxW~)i5ODc89FrwFLWvD_BVu%I zFU~y+h+(DS<^-U-hiyilx<@I`4Q4RZb%m>qn?e~8oxHOK2P`<_aL?ziOe~}YtY6=k zhe*k24$hk^Fm-8F_l6#c^w*Mj36MeZbCdr7r$|go+uxKZVnupWrrQdOBa^mw+TL=$ z2e|(LkQ^L%<+BAj0MZ+*9}H6zmfOJ5G-SBk#fuU=gN^_s`+tAOOipGZu;5Z5`?Et{ z^X;uqL$uO*x|Lb0MJp{PHg?JJHSQ zZI(AzXxMlmxm511nyk703Y?~jUdHgixgyq)N)IV zhYOs7IOFrvg$Nw5gr?48YycE{K^f=&0AG%i!f2VH0jj_a``ip};{XnE{#^;INE05< z`Wtvd^sl!IQZm>?o;c+Qo75W;@))o_2p`+~_2AzVIpTAY2oII~gjP`R7}6xvAhgk< z%>=QZ_bWPX#z(u{Msf~EoRP=K>yrs0jN8HxB-bEID6vym(H)3b=I5R=IV5Kp03I`p zbY$l_zJ?rHI+}%zHe{X$Fd&%=thO8xx1Mw3Z}<86=;adNgXGYN);xXnSGg_rkJT(` zdX$y*4H9D->Q$Tx7RxHhJ1c-PrQOtg72uCOeLcaapTkO;JEVv6{j{_!Ss0Z$6!+|H z5PF~2pQu{xa@TvqQO(e!_~ zgqjwx+Kua4t%>cSF-h1fkzbX+o2xdp!6;>vyZC(KF(oeKJP||qZ0;Fw%T{ReRZz+m3C?^5z!=Bz0B(vnRd8{%_zR2=^Xq})-w_@smRom6tMWE& 
z-VBf|BSNL5T&N`yl&r6ZSt3W_w&Ac2103!E{ti6z*CR4?wXp&EwY2H)NNPjkeOcv= zVJgw=$Vj9J%*Qw$4hBEWW1t9;sIN!&^@Q<){cQS0uJ^{Z2CJrMu|aiH)3RRD?VVI% z9FNEz-QErdS)H#LtN#EFUGXc*^J&q-K*&~= zaVPAge}DP)*Z%H=4a=Vn znEu{aV~wPRmvXimGC2T%SDc1XkDjaXKaQnh@N1JUQqr|GTnaw!{*g#wfS8iOglKwK zY1gI_R)UOjL`aH6qLI9L9_0s&1Na&8NA~iG*wxJLf}#!k#DllL7|;eKH0?QSM(q`P zu(9Svg+hUxgWz-X1C_<2dM) zq5uij1_zKyS~%4J{k}drMvBNmcy|y?&`By|aRpZmh|1@1`3>hI=Z>NZ2{!gRL&9gD0Ztllc$M74H!gsr&ET^*z@z^cJ!L`)ld$cP zKm#1HA3jGuI*w-$#H#ZTpV&h%h!3a@qjaOJrE{T@3WoyK-1}$Pkwl6+;Iq}ZV z-yLf3bkE|PLRKA1QAs`O{*hDm>O@AQk}{^|8~QEuGey#L3Ffs$ENhyDV@7M%aWh9F zDOs8<@fh_TkTUW_SN5c8 zR2~GgC>VDu{_Y`xjCfJcLcEw%W;f7p`+Uu}9_hun&kXo00Gt*H01^39tN#F!w^6w! z-bpk4xu$=y@H%O;NxE0{Y*>(olFWc$Je**VI0G0^Z~z@*%v$EFGRv6xhluf9PZq=+ zMlDXGrh||k$<`%h)uyy_JWxt=%OqhOkH%LY^y)BLG8B+Q3Lk#=kCqs$k|2CHpH4yn z{{SQ4^&lok4c(e2Bl*1oK2>a^WBk;frRf6Rl>s#3smljRLFhGV4v0M)~+bxHe1XWjn*L@8^UwViLb z1T%|I>s-=nPaHy5JeP)CGNUo%#=~hOkW`+$&*CqI4-itZI28mD!Ch<@Je-6rt6r`po3Xzxjs%TznZVlcyLY2qU* z&PS*>D-R5M`FD*<;=CIX*2xv|#ukgVCoFlx1K8QK%es_961?8rOP#d+vVn?9FGj+-O=~ZCI$fd^#?h zP{h?kHtq2!U<(WaJ*OT=J$AExE}Xd^hLeX%QbSmmA0hk0Dn!Zd6kgOCF?}nvWYxP} zWQY@}8RV6TFDV6y#!0~*Khzu_!Rv@#EQ9$Ov>yKecp#Q761}hoZ+9IVnwloNqQu_) z7R-2xYz%vyNIuif54DdC?Id+R7Zrz>H8Up33S#D@IQMOvSa3pO)!)6(ruR;x6#8a| zL$#rTFGNSCSBe2MZYO+-p3M0k(cpaXi+pQ{;dq4PoGgHZfZWeA%qgeND@&A6<^BEQ zxec^Tvo0nJ>By!)3S@#a$p?eR56_e1u5h56TH-AN1bz1{LR!9tJw6n(6(@H*fuDo4 z@#6=h04Ne~@02`AcJ`G&bl;9RL?)*2E#BJm#Z?%b$m|GS0VfQ@IKd-5Ucz8zVkQ7{ z0QD_ltN1|X0`YI}T{E%tU2+8NUg6Yr8YxykUQrx!&G#A00VWa^Tm`^WCpo|%b+^SZ zTq6rP2Msu=02~UJ>usz{7l?sUlvq+AAilqQ6@4#EkMHD?I`K(0bAEY?94aR9uoh$f z7?*A<95>>c z0mxhp*udoA>W_m6mzh)DLqv!2cCWj}h8KZKROCv;%`$?&Qj*9AOw3e3CXc3}oWwqh zcj=X_Dz9(NXIIQ_*JqN&dQ_*9!eS#4Gd2?n*x0ScG2}N^q;NhQQ;^JAPNTY-^|p9bo|SWRfNT5B$4H&mD0Xf9lMsGmM;kuv=8UIjEAP^$Bw99pjBh z!79Wjw3f1z==FVo6h0?P#$ViBsjTQxJlFJz<)#k7YM?U5kBse70XX@_IsA3%oIB!< z8^xuGn0c4b(2u1#9rlh}AMoBk0-}YZFx@rrYmX7*1ARHx4wK!~b;|Lz=ya(C>1)Ef z6_Gh*Vo4Za2t4@D2gg%maWYdObII<2ON!73q+{XGxif(Rqfbu{bcz;_PPUsYOB+fH 
zst-zO%-)(IEU~LN4AI6}wga34kL~hr5#kg0g39KTlu&f&>ZmljpLW2rL|}zO9Or1|v5+=^Pv^!uY~ZLdZl~Y9VS-F| z`F#3?Xi`V8%vIt7MGCGkPTwRSCxMU8QfDBB`ual3^@WO;$XijFCfvy*k&Fh)e))rXtPuHKNS9=4zEy}S^CXcAiC5l$c5DxNLqhrVpj9Gm9mkc;3 z9_{e&7nQ`Y$pH7z08rL}fgFwb7X~H)gjFf_Ua_auf$V9sKyBBK+#*QXOt4A2(~t~! zY!Wl%VEDo7qZpZMBqczxBI4rX!X!dTtV>z#X;N18UPX;Us)U4+fB60J(FM|(+q*l37emF73D^XOrL@fDU8RuUA%kOrL~ zjCdnlHZ@H{P_|NY_f{mc?vI~ymKngtaKn!tdiq-qluANS8h{TV&>2c$y0OhdB#=jv zRkDrUCP1CmUDXZ*<> zaeo%gOs*F&8>oQ(E`0zWI*gVAfW?<|u`6yQ3={bP z4;kykyCqKU%Sj}AwLgo*qEyy$lHESAZQU1J(ftYaN_X_=XK4jz)al7tC1HTvvMXQ~ z2cL{`M^R3~&5%N8%PBnR_Xd}NRJDK>Y+kaXPopwV1%V0lHR`bimw2)i5kEjl(7dnakH5`<*iefN(2@52JGQRIKasB;BIhE0HNcA!^oM#2I zoDVJmI=WBd_Wlte%sp8Q4Op zCnJzK=g0jzS({73FvU{VS))r4h@-;@S`DtRoHp-wAmH!_`SH~2RzMthzwH9=cyTE_ z^(TT`Yh*-Es2J=}cMZAP+0H&V{{Wvu+A#`9XZNUM1o}hN>p*2w1V3<3vuJj}K6p7F zllu(g&p=eOLr;GC!s!b!Ix&lE8-!}gpzhvC&I2D9=lA0$9Yo;wt9Vs`EKOoJtn+P# zCdTO-XdM1Q@(<^M`}HzXgP01RAAep@&R~|AB81HRvVz{EQQ-LfSOcH^bNT8vDpNr` z4N?e=NnI94jy5NIl2OKP67)|5viv4DiT4hBAW z2S4-a`zee~gfoa@!w2`cb_s{x?Yn@gNX9&J2>JI9@6!j<+4tcI%=!92Fu& zMl7tEW*}hXjNy3CAD_=o5)Cva1>h*&No*lB=Ycko$uUx?{)f-{@Oqell%DWC9tFuU zll?)S-+6sa?cUL^7&Wxenzf}F0p@ZARddIJ%GexX$@tG+`~5HAsGo>9>4C*>{wcry zN@2_hqm9R>hG8f135UBWCBNi;)NWw!8^lsYqEe??tY*Z&?y5kowWZj+v2XCN_?^!K zcgY|hsZfKIh2BS-RPglLsYPw{SNkntjjhA4_8~m>+{UDp4r9z{^(5 zwInc6@L)6m63h%;v~4a88j!7DdwYzMO)<=q$bVFlAC^Dz@Hic0Oqi*PDp{|;bdQy> z*xCF-da)RVI*+Fz%jRlp1zOY~(%=>?Sc;q;Jz3-khxP=Xf3WAKNG5_Q%8cda|4{)^?8b1MmkQ##K^;wC=Lrl`++q zRn!3xeVrV62^^QC1N^oi{{TlFSQth%FX{LCN1e-`1=6W^REpvi<9`fX)=~OK1ooWzyAQ32LtDZfb)-!9YTIA!4J@{H2uUQG?P&N zKX@(=;zzRYIyR}6{oo~M=ZPYszur6;OM9$=V?ZXXRaJlSbb8>J`d z7M(Y`exCh8dc#ZpYc8=~HZ@aEuWl=~17H@6l3_$7e~kKP!vZ?b<9NS@arjd!iY{8^ znlS{czyKBx{6|>1!ag1t$;BKm3<8wSQZf!jNAitt;3DJn`?B?p<&qs9)o|;+G!%M}?(_fxNh4Cx>mJ7b6T4Hg^oefwlxwfda8{s^%n`F-Y#1r)mf(Ao z57>F@lbta#RA!J^umDuPfENx)r5J8lJaF^yxC!%wo*OMqy33QfAWZ9hgt7>c%=RyZStbDRQz5`Qi8@yA_G3l3x+)^B-ytesdJsP3~;%o!jE(ncCw zq;^upH9DUQ0kHF|W&OweJ?WbEt#49`UDPz`r;((ZDByw7w*$KchCyumkGq_lbJkjT 
zRH2yXCjPY*2TNPQnj^+=`8c_~p&4DBkC|pQH@tU8{uw(c){;B-yN2AZ<=RBITyD>h zN1@O8{ysX_m4V@Fs?xXQq3KWOXs0uT%0(rBf909|2p)zj-Rt}?b{>_d-L($2-Y+6l zl5e%I_Hww1HkM)Sj20e1&pSx@>PeVh5hQ>L!K$nkK+mY&CcykYoLs>yDldbg};c+FmBK&*A?7w|l**RkQVHQMlEZ z%LI3H5d>%lBM4X`Ch;bJ{{UvNkx%(Ho;JL5xUYrr*icrfPB|$m z`F*3hukcOW!K4~Tb7&g9cLldxscNIM5^zaVD(45!?sx;HPY=Q30agTBo0}R zUMcZk9TLj;W*oT%l%ZqDRElfNR-ggB!|`NY_Um8bIg2m6mPfW}@%1ei0L&t{xrhNyc z+r7VcThnBC9=pdI*0K^wFpi#b`xo10Odd^!O;BYXe9pJjahh^Y7XtzG1Q53fhk+0 z{{V>r_j`lTmIFwT?q97PhxIRQoo>Z_JGztzms+={xUYIJ2=p}&q@H*QVMDtA0E}a+ z%2Fnb%A&X5{$F~X;*^Xe{uxXeCQtZgst@@}BnB6B5)-OU^({Z@UX|D}rM(YFl9K95 z7iIZU3g|~0*VB>YnE@@fO{l@3ItWe2_RRux0J_HP}t;AZif8#EF zA4;=F`E**6Xf=}|3e`;>_?+X*eokXlCI0zP@c;v7O& zE?JVbQ9#q*)-)t=+yW#jUL5z8s);9%t-2=lk5^Erk6tyYgB0LQWyS*r0f!%q9tU46 zVhNVM$eVuN_xVShEcR+S?i@El$Jh3YCYPs42#!eVG{HaJKFqj3klsi6c*rN@hrh5QYNNkv?wy3sQ{L$l0yy4HEVdonSzywS9m2yP5pfuGtF!GThOme z(rH@U+H}X-65MHM0yDsWWAIPp^z%4Wq8Sr1@;2wUqD){-;c`@S1d#shU*{D0{{V$< z?~hW9RI#L6jh7#DCGSEtCxh*H%MAIz-PKR@d_t9zGFx|VHzM658gC9#KPi6$@(=Tg z3I70wYkCavN3QplnH19xQW!M4b}mgObC4S`DvWWH;Ai{wZg&L3;j*U@h#8b95R#|# zZl^FB#oQMLpNj=aNy;uuCA{4DF@kiTr9V$A$dT&Z!Q7S#{<$HF7%NP91RqUgkg4Eq z3_J{edao$(1gj*~smJPW>+)>HpH^b@Bvv{k{3D70Ehz_d<}TddC4 zE}5X{x}{i9scX8W`GSv~%F+R!*o@=j9(1Ow3<;ful_g2JIhRhaW6JulYU$zr8^eMV zh(gk@_@ciz>uzys><+}zHORjD&(r?dZ8B8?A45uwo`3jEYT=71CzoxZK+afk)`tv4 z$-vLV;nESK16x%fl1MtJfD#60A>zLk;oLI^{fR}EF6_hpMIbR6BsnIG3|Mu4q8+oY z>Egbl^w+Z`4(3;mO&+b(-Ok*G0B{aCU(Oq<)6Obfz^pW(DAuV~Z<56~0X6>sD8drB zc4EY+XyY%9kE!Y-JhSwd=$@&s%}2fdjA-#{(W=C^DAQ+#R8;{spk-tLU;)Sj5J{`CP*8i^y~{$K!HSY5;Z_Pxg}z`h^oeOdgrDKtRQJ{FF3l=@ zdbZ|`&+ng5em*nE^VQj$cMwnh(qxc7{iMl>J!(Mm&0S)T;1)mXAy3I5{JyOb*8c#5 zZ=i%_yC1Y!M*#l-Ik$j6Y){uei1?R|N+c$B5B~r)fBD48;pkP-Gyeb|+9EA~3LnGu zGw$rJ*bU?q*6w7V*nPk`>lSatKNBRJio+;!U*2Gef72xkg^r>w9g+MseKpZ*y6OFk zH7z-dI@YgiRFxf$Ga@Id$fWU?z$YI#>O3zIer1nS22}nSyDX3Y0MRts;EW%;)h$>*=o4 zmH_befe6B<^Adaq)WH@%g?_`Dm540%hLb!J#>pjUmSQAU7%F$ojzJ@l@_M=d0H*lF zzwaK*DK!FG3%-o0vsfPj-0cPxhIRn>pP&&#wfq%*EUJA9{{Ta~X8MtnGZ>XqAK@4% 
z#Qy+)_3Na=OyhD>mTZMhP`bNGZQub%kpM=twD9$zmpd;kh0ixRy*#3)ss8{3orfb9 zPvQGSt}<}ZYW4zwpZ@?i6yvHmjCaH+1fz#|_PL4hN}e?a6M@YkOU4p*$(PiI^o;rb zhIVh$p5eW3O8P5#Y^+{_i1bZW;f-=P{{UWT-WdVFAZ)N zB&oBE`-+?#dqBw;I9z(=tdS@sNfa!x$*86*H7Pz3m;R=?>+BQ?LC~=e&$~|oBQEdn zsvXVsHfWn!vMw|ZUaBN(S_qVbl^|sMu)MJ(;CywNjK|EzkgP$qwQbtZ%-)wV%wjQ9 zCwD~!+ueP4-YRXU{k?*PPVpL*1Z4aj#ODNHap&WoJ!fZhx%c*p6%>a;$f1B~1G<5Y zxFihZkbZDakBolZMh>t~NO>)+L0w&$r3FZ=%G*a6$T<1_6!845b^E zAoJ(W4o4%7I)tRSXedXmSiDUc*b*hlR;AJf$QCNKF2z zD)}JtGyY%S=bz6=ep(u|G&8E)XRjR9Buh}X*wx7|v?&7tSDXRh=Y!w@(aTGzrno-$ z_Rydt*DF%9MjMhsnxjdPg*a^F$B&c3pFCtAj;HJtq^WX&$~tm*7Z7bb)Pl`&vwE>h z9>XDBrd1~(F$czQe_}Y#P$@G~do@#+zh)kng8u-Sr6IY@K>$Snuw`;`N3+IB=f}V! zs06hr_itYh9#BBEe6qEcB0HsJQVVava@hSKc=-+a0pj;#7ttV2|+U&mZ*YLchpD zkE_#~D1AamY!~$k+x`08h_73X}?%d;Q_s5|KP5*vbH1sy(3ZBk}q8zKcJ&A}3fAYl zg{1^7Jkn(Cjv%{_`GKBCl1KLHc~X^$T2U&hQ2-4|C7@p|OlChqs>^I&?>$C2Ce*dd z4{|7ThQoT1R*gyUb}gqTz*Sc8I{U-n4h)VP!x1h}{L~-uMIXgPo%D}C_|{e{h)TkI zKZ6f4Y0k5tNUNym1qq&~pw5|Q21bwUeF>O4+2 z1;fTk%K=~FDz7a(Nb!$R!(s3oDp8k!l$4}@h+*QZp9hTRxIc+L=+z~Y`DH2^EN78E z4a-0O02G5@KiN9s@gElk%Y~NT@^k2t2dRw!sfsS(4_`AG?yKH?$=%YeU0Yw(HF+h@ zDK&>>AFzoqKmK01{6`()Se0d$FsG7xzx^N>r!suGAgXfd%+PfvgWVwgFX;aOb=U=~ z_T`!c$Lf#1+fRCD8-S3hJo&&l`Nl_3$l-iTh)iOV=T%eG)--+@!jFq4O-Bfrg_%v6 zlo&mop88R=i(ZTL6H(RTkz2Sln%CAa&qL9$&l}_P$aa9gjldQB5)OLbk?@WoY0W-h z9UT3=Bh>J})9I6ByvgYc7ZsuN57nZT-d|4jzRI8f0LMCPwIFkZy$~;2WB&k>QOEfl zH|^Isj^MbA0L?f!+*Ek%c^_EgIF|s$@N(F+u+aXWKcN7{f74*4aIy^G`?5#lJ#)bX z+AzYBfl?o+i?`Dsr1m@h)7Y9xG~Eib1+{#{%y;m8IERLObCc~+%X|Zg#5jU*iO81B zeIwC+7WhhUAD!B+l7;He$UmuK+5%}Fq0l>LPwibXG|ewhO**(6o-(DHbKpqwU>O_# z05Jy}fKUemhM6^UR97&U=O0jT{u_khazvaO0#Y{K<=m3}OVPyP0Zu+j}XQ|`+V z@0wR?6xsROibt&D;BI^;AO39qPIx{ki8Cgt2{cVSP5l^oymh{+=1K2wB(QS{{Z8yDRZRa#%wym{{Z4aZ!1QC1wWWa+EtTb zY&!)D8*SV^ap$MQPVGX?7*d?CPF~s+^aK=vtXK5SLJR%JtX;7*aaFEirx}sd2FB-c zhT!r>dd?f`Ae0ClO|XtO3b3b@&ewG=DXTFG;dp}6B~6Z zT%HI_YzSmYSRp4k{X2T$ao^jXu!G%l$9`d>(@rlFHd1EfIfw*~WC!(vsOwSFDd}l7 zq?5-1-#_031J*j^75-(G@DYtYTtaZ~IPi@xw%fkbZ1L2Ww(HG|*tK74xF7IDsv94S 
zu=Cb@%u;wy{WXwRUg;tK0PZhtLOA)$ku;Png|?3W09e;_jYClybSrY!p?i=20BN{? z-XER^ALKlB$MHOG6vUuR#7$Y2t2&lbN){y7oM0|gkh75~9Do%1#H}dp>QXYn`cP%$ zV}(!dddt9Ysr*GsOp!!~Q8Y?Mc-_H z{{V$hak)X{vA{VP&t9XzIDE_^%=xMBvjE}7@3G}mdqtUl5@C2+%)mQIf`9_Rbw6mz zbnO#P(k3?{k;Ji~CP|_Xmd86!;CR3Q>Kt+sUB9SjBF1T z#_;v$!p$k=r-+KR&!z37++JOl1KYUF|&O+RMN9n7M)w-Ne8CH^O28$d~G@9 ze|&YtaZ*7OWh=x$Vt8zXx8;H|PNLKG3Gk{zW~3@F?&@sA^~ zH^*Tm;uPz=a2UP~hnX=`DjdBF^`U>9SeNP+qCu%>Y{Wy@e6@`40d zJTne~QhZO|IZNvD>2BbP?n&BODU3~@^&>y!)}IVi?jA{&6S{NpPTh2hGg z@hM5zl%JbJ);_w~^h9(fTFk)z09I)lMsJ=lyH6iD{{SrY(2g;UN(v>=?rMB&9Bxxx zYgo%tcnp>Q0Pb-qS0K&jQ{-YMZ>=}h31Pk5H72e`=_=czl19VNAjEUyu3^@VvDJOIs*vlCXRj#ATnYJ-=eXhWBddcLZk)@WC#g@6HAq89)P% z_cc4{{RvbPM$G~GvbUKlBD7B6Zt+xkN#o&%n3Jr_7SxeiF;R3 zNdW>nf6p3vu0hER8ZT3B2Ou%?&mCD4IAYc&Ck&+JF2!RjbM8Npil^gA$ptyd2V98? z>KS=o!UpbtuN~#w_TFk;#dkoXu@W68K#CeLMprwxOsdl{Cjh8q9?|E(zcb=2VNO&8 z%iAbs9EC5bF8u%z%+BK&OpPK#tNf-M<@$bKN^^oV&*ERb^uYvC_jS8*DlfLoFi*L* zjEu;Iyywdk$55U%#HF%C%)oAteBIxyU<_(X%9Siv!{kI#b>HKa^~?INZ}&EvM_4*- znJY!Ay+5n+K}i$>S0^K_oM#Khacm+c%#$qV0D=L`o4BD*S2=}fHIBcA zV(^J&6`6avbe}?bY1RR$^-sGsD@`Vuq*RkgBPpp(T3bl0061r*EUHE^;1Ap{T+03% zz{o)D-Oo1i3IP2hy^_2X#JHO0ksnxIjr>bo(EE;Roy$s<{WK_1Hilg!jj^1lu*1|C z$S6WEE63o}#r!3Om>&$HgU`G@EPP1bEl=S80E?0i8e=PC?vd+0((XM;G}=~lSl+0! 
z2$dY^Nga}{;IZ1o=kgeX@JQn-ENRi$oKjnoV_Ij0g-ZP4R(-k#OR?McaD>s8)DKZ6s5eZ}J)S6p_ zx88LX7=#t&)M8M718i&?pTP6gxft#)k(D!4P&9Io-}aQ2=z7FUnJ6Cd58(oz;ufH@ z6>56bIqy7+3~=f7?AV@GVii#URwR>v0601E)|9+&7MirCLO@v{2F&M>B}9&Zw<2N_ zh>|E(@5(bzss8}}5PsYGeXC8|mvvozbr{eajiG6g%J7q)=?0oWt0o3Z57UqU&m+9x z*giRo8Rtyw(&SR^C|fg-f7}@82C;Vuh`@zsnlm+qo-L;>Vu;`V7OfVF?4#P9|4K`_E&;J12`o!3bW&uUz$^icW$NvD|`a(OO{{X`C zulClde@b7bx^c5kSs{P9*JY5SuNzL<<0Kvl&OaS#VHGxPp)uSVj-N9bQ&)r}Wq(g% zBmSHJ0Qg6Buc)2Ft0k|ey`4%%h)B~$)=aXyHq_nExOpF*N9U{#G9+MS3Bp2DN#;44 zjbK02=E?y%)ZKq*(XD-WtMvy*eLi^a=#4TcmVGl>vtpwuIB4aDQdlTlmQ`ZBVCTnP zUBoddohXTEQWQf`WkIzBDb2MC715~|k&cmz9h~(PzMu>9W<6=+8Sb?uEf(@bZ0VZf zh@>%Grz`Dv*!vm5+q8k;epfjlbAYKLRT9b-7BxK!9+Vn9U{6YAETt+6<{%h?PZ?O7(Ce}A+mh}OQ)SaT2b$X&kW3(j+p4}*^f8NmIzpR)=b?|$~Q3$>v!!kKh>_9m86 z2%DKVdrm`tu1ETO^&wYwga+{PhG7Fuqb8$bH8z=CG2~0L4f=3b$B)NECz(ksnsn0l zh6$`1DvF`ESZAGoPW3q8kH^XUkK3p-l^Q0V{!mRFxU8_tG%&4;L@pcxfDNZ`0Qn!M z&Rg4`C!#`it&Xv(@5U-1GwuN*hQ#~ykjpoI{?{h_Z| z8E73&8BoYsG6;&n07wtCl^OAXG6C_;fk6}8g*dtBBXmsjxEO- z9^JV7?gdH69P&p-xvQ9a-hW>z!WM?6)8n&i*NZ{3EPyCC4$bVj!sWBU2ikubl|_KSB4LA zfN%&a=Q$n+7|whikGN7))qVZj_Jje1So@}#0-dT2<6Sr!vHvkR}bNSCZzw6_u*zZq%+7vNc zQ1xD5(aRV+gFBotCn0gpNdR&@bMw(EUQij2y8Gn{9g1l_+$<%;E_S$L4nY|NF!>lf zf(YdB4<%H){Jj|a(iP({`r~5ep`_@&pQp|vzui{Wrq>&q8puOSA;7>~;C|pRCyu={ z;2sbDt;GRI{H#1xuYz4i(k8>o6K5qVJ<(Ej1K*etW9@(C+8&VF)Xp^hI_!q7mkCnN zwPTGSf+(W^w30fYa;OO;ug5)o^CJ$42@<7>AOYYtC*RT>%R(lv7NoM~H&6%VKDv^)b%SZdfacM-I)w)3T>Lrcu6H;UKt`|!H|ILSmW9hbsWikp&5+@w63K6ElNK8 zR}lDL{c{*6@oA4KKbBfa1FF?`NiQ{wh9vv#Uj6I<1XZJstbG3XbAkQ(c|Qu_5I?-NDDQ86 zFh>n7Nda!Je7^BvKR|m=?OkipwAKMuNi1KL(SZK|3n(k?ZwCh;9FG|4@BW_Qpf_jg zufNhOV(^B1h02n$g%Lw`Jqv^6evrH0RrEikvO%KhosV-&yyyw7P)7p4=WNTp=O6jG z&sZ4g_q+$$ojhxtbFf3;Ep%Mu?qZtBQXY#nP-s+qEh^+v@;L!2iyJK z7xLS5t>~JJlO%9hwIVciC1Md~X%wqQR1(K3H)Cnwo`;9=tR606M71$GxFEicDd!ts z4*V;}z8>RPY+nz>i9`_UOQhv@DJd=#4YHC)D7NeReATsMrCrjWxTB6UVkzs;j%lKZ zxJ|Q08Fn!sfMX;!(n78eUC^2kkY*lFYL5NfppKlG@yQ z2;_aULU~Dk)xT_frK*nTPCwza<#^zD@!<6W=g63Xm9ZSh+AHEet6$TpNI(1JuaHE= 
z`ekVPe#O&+<$Etef-g*$xpPjY=?)0z(uaGo=Re_IIXK|*d}kAe;rLZgk%H}2uuTUw zQSyyg6VmY)1pcM`0r3_${XY)0N>Q?=p)!(mWT|w`uu%l)=8^Sk@KdasCvMx5O@gdP z?rcEO%&>16#AGPM@)BJ5*tu+ef!B=uJMiWs;w)*z;z3NHT}86FzEp;%{{U!VtbG(~18!%Cadq&SN>U`LVxTi^ zs}P}m0UF*tk=`F!`!D!^?9B^G(^@@VHGo4_D%PW9jy3-P^44Ws`fAKiziG=8-~va? zI35R%;)t0)4J@rP!>Elb`lw5nC6eUUtR6gzhw%Jo#d!I+iOb66)TVF=ut=ts4?sL4 z9_}CFlYcp zW-Gj@X_!S_nA2L-{{Ze0&f`7-orXzY48)#_qyGThDdvzPWYDDTUghnFyP@Bb_L^0d ze%{NeMqeqnhgJlEwYeGb&s?r26*C8fRD{h~DHIvl(WuRs)U`=XTs`B?F$aiYab1=v z0V@Yok2kR##?g@X&tSboSVG4ng}(CE_(R)?T>k*N4duOyoDrUQ$vo!Zh9Yddn1qK6 zr7-V;UWa-Bq7Acd@!fFlB)Prp%s(j6CADh`mUzVUS>jj{R<~9;Ej!wEd4>k|J69i& zfdTP@pPcmgfJEQzg2`Yhx2EyYqluzh=TP_ZjSK1j0Cm;u{bBFTXGoGm9@P7Yn!`lF zf!>%t)4&2LClA4SfXP0LL)IwHOoB3{$mT&Kp}2~7)NkTQ=#a@@ zP_+VB_OR1yI!7*8fV;+Wqurlz$r&L|IS%dHx~v~lAHaRbR9j57n^;~e$S;CN(?9G60CVoN_x zoP%?W3|A4Ii0+cZVL|)&#a1fmI5MP?EJaBQtQ2Dz#(oGG{Q3R*)KZFjq*vJTjI6}R z*6OWxo7Rn_+sOmk0M32c91Q$>=w5FGoIs<0XO{mB{_99-NEB^a4hT1)=Nkq-S~lt~fd4(HMB_J48S({G68 zwFIF=u`U(VfK}A_+R8(I*~iCUOkiH-?KO=`6B!NdyDF*?$AI2{eu(hQEWlAVQb<4TA86%I5cdZjH_ zR1?^(W;kb&VO5sY42RP)k&W$?Y_Z06f_ce4aq-S87l6Z~2`B{0nC?ITl9C7oyzKFm ziH8$2G{q7|^%0^&Qh8-qwurqY3VmWD%I(Nrc=3UO`(rul#OO`xYwY}C zIc6-ek6|yCJ90oc3xEfbd~x&eXh>V7H}}u?gjB1%4$ntifVZbHSligL0)&tUy9b6I zF_Fi|QUNy(rg2{%2o)UEFzszRjcWU$4$Bx^L&G|gf#i?XoDV)T^Nyoj*{fH3U3C6F zuz&&#%GZRx;PGucz=Sa>zB7@5pKttu&pkv5Cok9EKUg?sx&?#Pouox&3tTWIqb5)B zD&uepe3Op=l1Ek@ET9;lr{x+bQi1_av7Jw< zK~CFG5AQwN|B)7{fSCBt9i1Apt*tv#mdv&>!hs|FmgK5>kEgVfV7Ntl9ykjIdUg<&Phk7!qtwe30Q zr!63LAT3uYpVMex`jMjWJ;TFo;{}wSybB*KI7#y)k@-t_a@&+b zVy9XdG`jRwyk$G4 zcvIU$g|?A+`gJ5dhgR$>rwJ@iXie)fZY382BzzH&8!dt4k_o}<7l`79F<^x_1HsMW z0!oXR)Vq7XBh)TW6^bt{YZpCrmT1Dv6}N?5%fO5mIZ=?sRDXnZ$zuXhElEffF8b3> zPW~`}Q@M-HeMKrgAnIk)Vlz4=H~Vegq1<+mr0qY`AP{f>1J7Kq;yXpkx5$NtCsx&nlo)*Kjh?FEiD^vP`DP@rN?Ke7G3F~^RDAcZ(E z&m1@7hDSy|p>}Z5oyR;d$G4CMaxssOJvd2G&O|%3Gd7=Ab0egW8%C+VS>+*=9I+|} zF_Jz!=lAMFOjrOGZa{SR(j2wmS~VbwX;dV$2@4(5ZZ3rX0EI`D@D9>WGI8grCajS{ 
zDYH~=clPii3b3Y9V{{}ER!pla5^xADoR1&Bj-W|y^ZdS0@$yOPM7zOMEOWg93C>18 zZ`&UO@H$3>feIS24LYuuEXu^TVOvrL=Wzg_0FlqNbB=O90}=xw=3uuBYZKX7^!q^#vh>+OUhW13_YWhylFR*}f|#hymogM-I8 z&-orYOYKxzXTKPFM`WD9k^-?aNMsc-o-m5G?|n}C zo7`Hp4^3%Ojy(dc>47!lfVSH%m?53tbE<{Mwo+KG0qgA#=~n_)2MQ;YcgT0xs6UuU z0k9M^n{wF2k()=CO6Y;z8&onL@RhhJW1GiAWKkP@~g` zDu4#!ah|>IeCb$OCS>IVp*!2^Nj@edTGKANpEL3e(S%{0_w}b*?>hD>z~{I7e4UL7 zOcqJk;7us_KmKiSzx`c#j}`c)Vw7dzd!to;s_GBv@N>D+DE5k-`KUJce{_o%(y!r* zxIUzI-lM9)+j`yFh6&@-Drx#`th3nJlzCl|V_3KC8$)IJ@S`V^z;R9pg&Dk08h}F# zpp_{2ASj<28o7%}kWu}>hc4h7Z(q04Jt^DYLcXDQE|)`8r`Y6f z0kx+D00;qb@HihGdj9~bp9;wt6^JeXe6rL-@}PeR)0~AaVJHNHtu4*V!h3`1k9PKT zX0xpJzOANIh-K|bSV;tb9_b=3sy}wX>uUk=@55YCK@u^k1y&1rXQQkHzbxG%iitt* z0wNL;2qj98M({J9Jn(#eIUIG}7uE`}WqXsNeS01z)%5%GY7oolO=H!DXWD=yDIhY3 zjG_&JT|whK?mc?O3SKT`po~IDP^EUe)7NudW6L<-^t0m+ig7>cSR|%owInuaiAsY| zhBkJ4^3pD&x+4rg(KY8I{{YSDq4DGJ{B?ZcrgHO+Nq^BVj?!4*)BgbdP=2uVy4=na z(XBNWI3inqn2|qqiCOaB{_|)bA^dbw@T*@>AAVku=wAwd{62AvW+5|vBU)t07eW=D zV!1dOP!6P$b`T76#S!$@MqIPV?K2q~XLGsTnB==L&gSDkumE+ei6UB92>auQ@a#Ve z!pp&6<%CL>0#cEn9=!E|b&lrKyK&YnPg9|#J(UWP>N2ceux>d{-0D)?VT6-Y*OxdBkl>H zksyx%hf2{YFr4im@4@mHBXIFQjrdOvlm;`g?3AL+k_K~k$_Wp;sm;qoM;pd7CSC?w zWuNydpjN%r7uks}C__A2^e*KZc9*9;o1Uhnb|~exl0)qifrljYTj@&T<<5vOcmVh72Q%$tJeLn5YSL%+m)iidR)p&}n%}U(M1qy?o z)~Zyy{D_8Kh~$ix89YOY$iXD4Vyf0E34fFOXS8yhYvK$i29!W!Ge85E=1Z{j7NsNE zovW#6A5J?`9S1=F0P&Ej37Mll`DG_BEb@VcW^9rMNEiTM4!oG+r|}Y_EiMhnaYxlb zh!18i)~!!~&y|Vd__bp3C5pUXS-QmUHs~VKid&L-$y;gQ98*c;wg#f-WoSfkN0zk)2+*n}1?*9JpM3@q$ ztn!VAPmoUG%#&^m7?JH!oO_h`KaxQF^&*mlrr`VWroT84Oc5){(aQ58He1!s04z_* zIQI=Hai3d>&2kY(7i0~Ctl9izu zciZkMdYiz;OZCUF=y&I1>bL&dvTM3+b+=aNL+ThK++=`YfB_x@ZrVr2bJtz)zZRQ; z$09kfi6SlX$GKBWC^+`<6O=;crNlS)$cwGOB^@GD1Yk z21w3OO27V>9dRER$W9}J%vrh$R1Vc+=zSspGR{dH$Di@*9+`9>Xv<=}mW-3e1H~&w z?GP*+1ptCi4Y=fupmW#HNK~OlVk-XtNYbn|LfrQx7~dFawyRGErV5458-@guQ=DM< z<2*8@Au8^X=JkKr{Nd{bLuTFfpCMkvj${%vh$JuMqZe_Jw6{Ooo(LnA1b4mtzHlx{ zfyJ$ru(SKwKc?)wCUfC_OMLu}I*6s!QBR-o4h$)*Ag=8r8!(VUB8C8*j43!cC-MIP 
zKA0rt4MXqc1QkeJ)5TpQf))0_>=ZBsGvH-O!5KUcA2{*SMq+cDpML)Um`Wjf?0(yd3RS}Wm|c+tjdv@wY{M8_w$e|6#F9zpKQnw$ggAaL zh)!ePAw!~oAb3RbCTEri_Qj%Xtjg@kwj|hhEUEf|#uT?6MnCV*DhL!YJT5ITNYO~} zqB3P!WKymMamV~X5Jwo~^edQ93SZyS2nbyCWSS&)tfE@;H}wcR(;x=_0ONt3SX}kap|9~;9oHfz|D85Srjx`KtIwIYU0I-CQvx{N3z$T=hA z^(tm0cS*b0a`N(H0LBchW=i!XcH3>r?GwrkfQ0Zc82fq7Fmk_+qR#7LE*Y&)+6+h1 zG%{Hbt=5>hW??L3Au2JugTMs&KatNpMx5a;Qo@$`b%hKY(8+n>`tPTM1IlNi!_X5Q$-pQMSa2prY{E zz+eDCFmaBhGbS#%oQ?kgaqHs@(e|PhqKGUqrIaBi5`tmNF)qPIF_E_*k;p%ufy4K| zcNWqWsRGHV(R*;ztzKo6Gd!~-rK2QxC^nC17zZajany>J{h+;hx3TNt4XZ&7K9#0Y zlVZ4(TyHEL$MsbXiiUCPUr}OoJXO7&8dQ-YhErTs` z!w5M?VUVwkU~}?!W6n-`46JgLw{l&pxkr-lN&Qo2Y6P9=&@O3s9($Meli~ z^3m<#cCPb}+zfNaS@7=+M-$>QE_i06FA-f=gZ>;m#E)GtHiY>QSi!@1k1efzicJ zN>$lQAC`k=6n>89pQv}}U+~NI2l%$`X?13>Dd`Y6uXDJsz)g5D=Dk={k5$*^H6d60 zZ@A$6^YKT^V}M^!6U6%k0msc$i5pWjcW(eLl4f5Yd{U*UkkX6>CG zUgv=`{NuWA!!^*^88JkCu=yNC8u=L~&t5M-8O6BN!z~F?%h4$W*GEcR+x&qqV;aFS zWQP=uHb3Go>Adz9A>Nv6GLUlbAXELAu0Q9|FX3N^P&-B7=P>^O_ESInAkm1Fx+LBl z`Z?TMF16dU>DoT8Lsc3*+d6p^Cs{&9lQ*O=2{`oHK74h<;<$DvhtE`uLJAZKU}#9Y zv0~MsBhImLCp5IEvoliEqdxSgi}>1&&$0gizB*)_v@}ejbp|C0L2H>{X4R| z`mLQVyj3XcH5$uRhXG@mCEFoYAEjS>X8>bxe0d|UvOlC;XBCaWPrzaerdCpeuu)Jc z9Dz%LVB`t_1{A~za|#n`db{{iolIm3*aV**IQjYcGMxD7lmcq;r-I@*o*gr_!!Ie< zW7KOED88iV@}%g}Ppw#m`IAhf%U;qI8)OY8#X@M%DGVtbGf> zxZ3!(6f8(rXed}w??o02b|t{`f#>>zdR@y)NvTOomJ(jD_8f!%0Fph5KYw{2w^Ed1 zW-|burTquqI*}(CGen>^sCJi1jtQRn^HGH06$x9G@T40f-qRmOgry z@QPpkNrd!a_dc;W;n}M?YL&<`N2P!HL~OmAq|nsd$$d}aDNg108uyx!}y=T;^%k4Ww3CA z{wHsMm9DG%S!KeEP zD^aPd4vt^z8%Jq>4Vp%WcI}Sk)o*tbCTDn}h|r?jxy+IwXEFW;=*N&j9ChSV{w~B$ zmcGVO008CORMNtar7x(wW@ElG;uxg%QW{t^-A&_9M~xdspnHy$STx-_4I4nFxPm%r zAQC1|`>=o*<3E)f-^n=YwgZJu;tKpuEjf+qbbVTExRd6^>46+)uq5&^+ux4_9$fyY#M!c>U>E{<(qI@fVv zDI0tMq^@aAmzSkSAG0^6Hd#6Q-5t~$5W zo2vko^!*8a@aa&$wq^vCmS^MCW!i9~C=ZOV4c4+|tw~Y^5?TJ&o2x;)6%fRJC1WHRoSQ?%H|4 zm?;H*2-t*{!8qJJk<7%=$nv{XmZ4Ppm#|nNc}S|^-Qq}F#P^o*`dW}DvQO&&V@97L}4Iq&%LeF-~ zGB!Pt0H@uL0B+6?Cy&?Hq1J6l1jIQk-Tul0<%g+>Ie+K)Z`_ 
zsv9}{{{ZXP)%6M@B9yGc1yEv>-l2vD(77ajfA9NrqH08qN5pnXW!xikl5xNo$Nc)4 z7BO9JNNBWu6V#>CKBsoHiRv?dGp`sCF6UtGBf^i8ck%tlS(yAk;_x%1GnFKP)QWn; ztvOC&9joOY>$z4c>oQ%5V6_}YRi<$p#>s%k!3BmGa7Q`$&tE12LW)2;c=W%Y(l3!# zgC)5Qv0Y$`cKM8Pr9xbVRa8aZOS1(z%8#Ev9Z4}}X#A{puc15X=?w7^X1>XZk=-Jc zsCRV?3uK)C0OdRZ`*HocjVDH^c#Ov)n&mr2^%s#Xv#+Azb1IXk<4PB#FxFQx*=1K)W;=Hi#yI}~&5kkW z9D&5bV%!5=sSFzN=4 z6oKgf09b(&Z>?iAwNX`_)fYR78^5;%4zx-{T1ui*>9=9o@6Pwq^h&ZU7A{K2Y{)kx z5rr($kU83*AMf$cSo~9oO5s>l;-^_YrPu+?m_)q@Z_> z%CPi+;tV2@9|_qyhnVsUPSa6agvVH;PGS(Q|Ve_dE)uwpU*ZoFsugTqtWNyj5l60cGZ zfTutrTon?eKBWA;^^1m)s_FWi@>q}hPhHU`tr7_RBm}-reDRKQd2k6n2anD~)Df6d zPI`Rz+B8x`7?&YdjLQ%bm~)vuMjMH2w%|`b2OqE?Zaq{}uELh~{{X}oG01LK*qf$c zBq2&jUkv1s3GvQBAe@Yh;De5sc8MyZ)8CXU-V-jPzNYHcT4#1soE!t5P7gbLWDNZL z_z@|T%TqVMJ)pBgGRNyQJ3$Oa@3xX9c!}KHvho`kCms(6sb#FoB<-%cd#^YG9u8VG z)-ABK=+Z)DmEu=1*CF2x?&XPHtCBEIFizq`>nJD$RO|Mqu?edPr%0fgkFOieA(wN7 zB0>lSRAlg_RPmBI1pX?iN&zcs3t!L0;lR9d?RAZYWs=nMpHpOkVvlw}z#N7jtAzt@ z1`h|RLXx4L*S4R`0R$LNr#v7^vz5d}go07-8-NTlLmVkm2aBPgvp!w+xz7AxdNf7fIM5(Wd8tjka!${@#m)nKoAXmKaQ~P1Vvp^D3T;f$e^fH zZKnhrFCIb9j|V@$KvrCmNk06cK~W9!&lxJy$ibs5Nn`QtKOPVI=cox}g;)r9=v#{v za>paQ<%HnOJ;bXJ11a(b);#=Y&qtjhRkxpyt3qdn(ww&D-yCUdGJvKsd>m(=j(lKo zjt+U~vpHcy??@Oxh9-PVA@$Zm2z(s!akwAP2f_H~o|`g&bc#2&_q{0y2ZdFuTnMFT z+2afpfS?yu&mJ+8hB(GQIqCC~Sx4D|Xk^3d1&T#fs^xaZzpYn3SPobbw4ca1*wR&>ir&4Gm6_Ue1(^m?Out)(hm)yj;tAnyI9Me~7Su4Y4I{3trSwOmoYqDoHvf zByEs_pudyM&E%d>Uia{B9TKo}3KGshr6>el11yxbr1}Jb^FDF1LvCMJB3T{F0Cux+ z`+og*Wg}>g@lCZm>Hh##bx%#&8yBh7)S{j_3d*Nvp!je>9KwY2^VLb%940sg$W<19#->-ND2~i2j^>D~Mld_)LvPLgpbq_=RO2Pz#)}pV~7`bL&=>tm>97+1E7=?A0YXmcH%vC~LsUIN!05Jv-b)*2&cTTKa z-R>(;qhY?^N()9Y$o~Kme^BT5bJK}SKb>Ady=r1W5`yA6r}k!*r5SI{6lzczCb>Rj zkwD~<=PZ2j&yoiu^yXZ&RfCO9esE}VLJ%rw!v6rgGbB#dM6<^#+qg%gMNr8&KmbGt zxg>?bV~hgC6UR;`OO%r`VQ2Ou-}rVThkOaaJ8nBYGGDx+qpAHV6(oQff~DfyrTsa* zav0^jYw2vjV!0l?zy6R{RA1i=-{sb1&M2onnrY5HbNG}00OCqisisfEcs|h}{&}RN 
zANZj??uA5jhc&$t-i+cpb}Cx4T|HY8ul^0H!;J9m?tz$$C?pbCpB(kg@gIwDydlsc-TkfxdcLJwy6i{X*m4)EQG|;y7SLq!u$0qm@ek02HM`$w;b*BmmAqXr;?) z8?eh}+~3J60HO%(#L>?rMZL_6!I81Mkl(0`o;udUaqI^akg*GI){u$udnW{zrue$8AT$8B@000TDq?(mI_$B-!hEi6vl9d}dbK}hG9?Je5 zJG(*ejTUWFNpf{K<56cznRi*Klfc-&??WGOboP)rAmnx6hl0=Hc!E|l9WJMP0FcE> zHy{=O5Tj83f5dohCx!sQKt8`5xRkIqR)@z=Z4>_hP&C58jQ;Oyg=YkwN2EC%H_7mN z#!}}@Kb(iVHF?kn9z3E|@s1gX?kZwwrT*~5UFo3LSnXcDscIF17LGdc&9Tb(Lm~YO zpC=p);~x_uM6z?vSvgy}*W>Q zS6!33?AE(#)7pCRvTSk`LmG1;#S$nG!k~LSq}EvakQllgsLyLi;)ofIzBZ`4$OJ<>0@*a8^+BPnB&yP zw{A%UDf#kG9SF(V9OFacbob+=D(M}ucF(A1`?X^~wmBMAUF=kxg##quV1;EUT|+vy3R(;Rf$GBAqTu~f+pS(hrn5#RzCdFLK*G1I9k za*{1^Z)P5lxsg2trC|%#ELg(vlvB9iFCg&Va(Ez&etG~_4q-wy@P9}q7KSt$)O}$j zRV$+tupPoibSwc;j!)`QwU2;*9ZNLKAcZeS)K}eJ&=lG~KRcp{(sgUqcLHnAEIYi9 zY>YTQK0Z47-v=LA&sL(*f~Aq8)2ue}xzy+J zlYlY)y+~+-6@_L9Z%m&S(nTd0GP;#G+C~m?IRt)siO2wmo?vj%?!Tv<`+vE+14+Gg z z-4Id{!miRh9-p)F*$Q$FTavgwKO?R<D8u~$siJt6&c1yhRb6a9$(Ll zYj8;?9mHf|h;URMnrq|%`ovRJ*`Tan?{8S}*1kWi?7*Ff2QBhA^Zvi{>*}=kfhHO^`&_)P2!$EkN+!}f~qC{&VV{+UHP?78G|!X^fJ z&t56<*AhV};quF$%%RT7uJypghzLZsS;jdP`@IKHH0Ait-XNFvMW^JpTa5{{YLcc|w2= zB9>h7ss8{EJ?mdd?nyg(-k}wJKT{w=g&mByfD2b@A~8G3G5jd?0b&C6$$U}56~?ex zC8?Hz-q8Rr`%imL4{r8?&5q(kXItDe3RS-Qa`>+I@=e1mdC` zGTKQaZQ#k}m4P|n`2z#w1LVj_N=JFY4dD#Xazi%A63rA6aFQLwbu4 zQe%JoNo8or1xX6I#xub4`(vQWhY9*?4-F;QYe(y;jO3`1RlWOw;D2sUkCFRwWeQ(# z_(ME$5i2uoc_nEA4^m8p3(hh~C&?Z@PmhqrwP4RqexABRL-K`0ib&;^l#?)2+qx7u zRmOj@;PNq^K^-s}CGpzzeWWv_k1bvD9kCL)7Nj8uxHk7k*Z1B5K;9awM%8ve9wF%{A46$LE zl}UUm;BL;(9Ah~02S!qfizt+D@2K{5gii2OZ6(<#)r!WO~612_QthrrKAC`&=i zO*#Gj>jLD$bsKi7SBhHoWYh$TUMGl@WNdQ9kLm#A4EzK6=}VS{sD&#lUF+b``J!7z zc{SRyN)^Ve>SWsGRPD*hBxGcQ+f;chr0vE!Wr=dion1D2>c`mu;mvIki>A4b1-|v5 zkw~!=kj6bj)=0CR5qa0#O<#I^_o zGL6N&1O10zczi9y_-_lrj4Jzta-#sk)?pel=SxhHD$Ggi&X%Pie_4C8vpczH+|jJq zpI(57OtDmW;dn4f5;KK6fj-;v^OTy1hwnK zUTL0Oz@UW@qkFR-k0&GXk?LOxn?6uaxu8>nTSOh_cmnOj=IZ}>^0rjAB44H-U%T0NH{{T4Edz<)A?@rY0`yIomu8}2rF?#dVt*AWK zA&2~KGYX@G9FV6t2L$7vB;lXdUx)aY5Jm=Esfa?<5*DnW7qQ9|NDkF{0_?!S%uM8r 
z^KI4Jtv&4-l$uh&4r3~+Pdj#~{jzb!f92P{xdn~lGj@g7u4I)GJT$vT#pSc`4gmfC z0Qbj8((u`uAwH?;{lVHp>D9Y2*QKC$H1e7*5X*7niQ-ncAqrP!*;qrJ#PMu@2MWYsC4)F6MGhal$0=h++0R(e zbl>)Swsf(2wAAWUxY$bzDV9q_cm@5^PWbXUAn}3L!Ot;W_a~fulZjWvUl?&iE^wQQ z#VX;7mEfBsZD88NlWSU)-BGl?FHLImM-t0w_NGK>Nf>1}Ko0HfKTK}XoO_rB8R*j~ zOCSrF_b-M20HC~QiBCk%IWS0-btNP&V_l!}2h_TlbEIOu?e%X<(QmGiquRIJIz3r^ zY301|n);2M;h4Ke8ZnQ^027=p2v9TTKN0Yr6*+Sh6r()cgGbTyrop!6ezC)RO~d{W z!pq@286Vk{GKL|!_lvoDl9bh3#6NR=b+6t~!33~arA{O28nL&irccgzI3J`RIQ{zb z)Npx-Q|Aw*KbYT{{Yt>%5*rSu8oFQC_OGHeEpDvBO-jrU4MbA&Nn&M;a7IYReBdgt zl1>2Yqffy2{6-YbGZ2A(^QM}Uu;yZ%zv?y!GNxhGm;OmHyy!YT>9lp!?=`IoUiWY_zOOOT+4*0ZV2Fo-*4ilnkKD-aGCF9+~B>yet2sUtMB z01JQ+kx|S5TGV>;k151)d`2jo%s=8A=1fLwmi0Ya+&6A&)4i5s4mA`u6km4dS1jNpFw1KrO)anxWG9`JVe?V*Wr z3|p@?itT0PGnrMRP+SuNsb6_b#DlcpC@0{9@zteeNJ%CBWwboMh!{PjX`_%?tq@OJ zFB+9HI4gxL!wlJ89D$6o{zp+~Q)WW8*l%rH*@rygX%S^Iy*kh(kP>qgL?HkLT#dNF zJ9D3mj0}!|OV1Ny!{^S;7+gZC+gZc*19f{@RQ(S&E9W?pa@X};`8A1w``pomdvt!Ghg)osZasbzgs+6Sn-k(C%6 zCVVDxdWA((0aE}+&ffaNydtQI2&B`6SrEik1a8=qfZ1kKlwgEooB_Fc$pfR96;-H* z_v2qX^MoPYT9W!&Z8}tvPfSVudh?iPKwYtz_{qpS2ntjW)0`fi6Ux!wU`Hp-txaqw zMUL>4H4xPgRbjmpvOyI&T^b~?vQN2`WHBZ37>p<&95y(|LZu}|H!9SMcu;9u>1aZb z-n(w6YW8lYubOq|t*6$S%OZqkW&7z3iAxGe3dC%{jzOTaJh6+M3=p=*y zkWJVIigNz|En*Qy?BJ5;4U_Zu{{T*`M5Z6r6J-QYea|Rezp{c){K)C1eo+G~49^VF zX*Q`0LoO7ObHL;r{{W{+0n-OCw0mFpUhb%GeK4(6r8T6{ugsPikd$cTWtCU~hzBJ<`#pNv$pT;dvg_lt@}I^wbn$q@PTlGBVsZ zlgaUpyyVMQXaz*J^O9(&tP|V+Q(Nd1dQ5v@Slt{sHNgvWG55`Hy zBd*ur$srtN2~ns?E#dpb6UZp2_xyD?krEh0E5xj2e5#0$ zvB1s-GJpGY4D>NO$GLk8M2ku74F^ez1*-&lYz*ua5bo%%?CcM?vpZ)P$oylkl(@bi z;xY55thvsi;0q5R1W1acKB3~>N|jjAL6ockj8bKD>M2$(jmLt%(hhc)Je+~nI*C2w z;CYJ?>D8i99yR)f+6&n|$*Aa($s<#VgH^CjJxCZ68*%%4P7mrrK05l7fN^ATYz(>P ziXoIg+&?$1MEINkMEP7x^fZ)BC#t`k6(Qz`k4>a z4%5@>+}B%D`k}SpG;CH2KCKih{TR%ufIRCMZKa86d=k!}A36BD;goSHUyZ{4bpBzu z)POY}qnNDHW+)b!bMM>h60uLKTlGv(!yEz;BD!Rim9xRm0eC;(3=bJScp*fD07)jT z;q!l$HE6u>{RX{8mg(dkmf}fN$EZ<=3_;-YxPCc2{FayjJ<)$Jez1o)bXTlCm3c36OC3J)yf 
z0FZJAJdcye>bXj+0U&k$JZ;JlW`Q0Uq%zhwq-3(YUKETe*|~Bzc~HT!G7fSNBOO>u zmnelP49stRL@@(MHl1u-1r~IcX?;b88!p-1?f{P)hHzUWBo70qrm4Wn-k&@8bB7QP z^s{!NN?J!K(x8nF<=Xf>=f)gkgSQMZ(g39jBd4eBUFcyLD%?*C0UIa1T~aD>Br-(q6vk3983eWnJOWe<`2CL_3Q|m>qDz4KzwGb!hZTvbw6&vNNOr%V z%;GB&x_?xT3jhZo737Z@2b`X%=31Iz@(=U(v^fAkQ(Z--V_~v2QL$8?VEHElfq}T= zKOdf?N)%N0_v{E5`q>hsZ4xZ-ezCD!dPjguXUI6lK6p6s)1f3gmOnUSA59XZC}nX{ zBb*lDfXT>J3{DR?$B<9p^noDIIrq!>htd}uzMVNHm5i$+JaQ~@VqBu+6&c|Re2?)1 z@9xmf(B7UTN`9fP)h6_}Wd;=GNy3rI&m-h{>NSy`ig5n`wP4}9kdwLP zV|5*o*0_^l1%c1E!*lz29QFZ%xiBd^Wj0j#% zGm)MM=gthIB_gi-Y5R?>3}K3G9{Cb$OIoyz>9vUSBycZ=XOGJPwD1Vzbt)2-sVQ5F zeD~?r5Qkwh2Z5Htv!r{K<2`$;B_Ly zntQYhZ3CIGb%px*x8kngl1^?c0CkLlQvK~Z715Hc~wFnGrUEBNE3 z**7$Ur@!7Kf-3#gHR6UyE37`LUP+DGH5~F3?En=!zXQ%ZgUTm0O+9?;(|DJZF?ML` zdJ#OPXr_S5lDvU8ZE(9=ju>Pg82$cwQqrZmfMN?XJ;S)=3}F@fiXASLtvB1KWe~|U zk%>Jr!Gmq~dGa|&U-tMY08_Q-Wai@Vn4*kdBo2)&8ewi5(dPX z36Pc-H$MI8yVd^yieIZVdk3&DXp1sS++D$4>qT8i-3@02C9uQcLgg1fsmR9)#B@K? zpM^1>5%Gy@Qh&696){=_{{V>U*;BifSQw>^md(kO%(Xw5@UpDBgzF*NySPp_ zDv!bc0EqL)_v`djs^rBh)+K0nUtxA2)A^u4*UT|&&Hfr8-d}bR}2q6Yo#UIC}QdxAJP^2Nh%If0jR0cB}^i6tR1)6{@U2k z#cy2i{^$N6KB>?3Yg5xsmD^f02ELJ`$0pW>vZE@j83A3FdpRR&vz1jm4!r*W;6I0x z!BQto?-EHRu33N|?MR^0#yFf$#h7VaW=;SlOPMr;GAdG4M7X<>ZE)p9gjqDo7O3|X z%33rP&v{OLeM?X=BUkPCm;kYyf4#7vc+LawW0EpGk!#L#wN1Z%M`8KD<3siL0pTd% z*kgvmfF?|=#kEP$QsYFYQQkBxaIdXB-KRB?+TFLQYBuz0vPmWVLe2SHYF1kt-)JFr z@#hC5`M@17;qT#avitI1DVqkUnbM?M84l(dY#uD0w8c zVIYlnAYwMIbN+t-e}BhZHXjW;7O;s)bw`-3VKq{s)7pKTZvCQ=NB0x%RrN^BQ8D<> z$^QVKKet|qgyDF891}Ye-kG4=_}|Jojw8n;>sNcW|<#(4ciYcAfS<%U!d$z&tRCm~aU4_`S|s;%AMlScYRH zQr5vyAWgs{4*{3<&*$T-2*fQQfLIX4_I9Y`dBE!f*U{#+V#=d$X$p*5gvTv)RXjuo z3<}^CBLiqT@tON{p#eF3L3bqUwQBVzXiV`hVXDxD=Y%vVG9U$~SymmK1v^0Z9!c?# zeg{;0(^Z#e(TRO)p!Vh*!2)2Kd`|}8peab0nh7Lpd_#k~FaK*3YRP2bkF9CFH`qj=(t z)RfwV6^UVj0VI?IImz5f>I~~333T%3UmI7Z(B&}YixVxCDMmIx1kuQ%G=bSz;A~H5 zOa*Kx00#h%o`obR_KH|hyjAKM#xut#y6J9Y%n?8a{1)v@wdlL5>{VPwYGmt{NPMG zQk8c1!sxAIYD!tmk%wGH8-f_P3RJPkI2i|yn9M(wV$Rx7*UGNk+-nJ==^%Q#WboC~ zSWi6 
zm0d85-=~%Y;~f5Z>565toVPPqsOUVrA#{V*tj`h8+(#0>GNBtC0SeFR9s+J~qtE9j zpb1WdX4`G8)0WVy8PD+_bm{*9Np%Gc8GDvtp({;e^x?fnWCNCC=_;Nu0X&{M^k0Cm z3B_?0;s^jrPGQwWhu2J5Bmq+bcqhRpPl; z8Oj5=ywAk0#FAX^SKZII*Z>`T+3`jj883@YRxF#kN&f&3(gz?Z%)8baM9FJM_RsSc z{{ZGe;xh%5z=tDhuo)aG7UTjk#&h5gj=1i`y?yKN&Manu<<=sGLtYcJV{B%UCJf5T zhj~5#+*l5I&V9gOSX_Y4-2VV(e;-1|1S%sZ*DJ>nlB||3CRRBlXvjD?QIK(g`uHRa z0#6v~$-}5CEd@Vw^xm+=Q7@(HB092KcXE@dlr&jpjYiN}`~U{^8E0ioCKm@=5liasl0iJ~QBCdFlZvOA0OQ*M0v0m?vh6FVrnc zU3);*^q~Zm;t;jDC3GR!B&4Z}Ke}Tl!jf@=j=LX*<);P2WMWpJJHa}Bpu`_K^NRC{ zC@dq#nwF=!%NTa7WRSS{=l=knzLg3p6^iIfQZBzhdltWa_w72_*QuoG4qUo2_vpM;$9^;hhWO2j`E8GwJLnA&bmdG1Xew5+?~r?L%A}KW3iA% zhM+9F$dJOygnw0VLa!dsa1IW94sHz2SN6wthU`X_Cr!@#JV@;W=-SL3<=gSx-)0K_ zzoG?_QZ@ZvTL6suP5N`dIma1>ZU$}raa9hy{x7+ z0XAcS?KvKQ=l=jsy{QTyL?k_9wQha0$0TK=_kO6G1xP$F#~^|M1adLQj;^73VyxL& zgnHlUm(?vh>R)S#HLZ3Uz1LIIr6o^nGhHMB70A;YXDzqWl@|^l5)6kRc`wA@3!!UY z5yBzfs{=#+O|jLPjcpRjSxCyY4qp8t-J@!V)WL~k9)zH=DQiW)@usIkGfMB>eJZA&|PbUYcrllt_ zSYi&ZPtKsMN+r~mN2S>7&}VFgCLWO6D6T*Nm9WK z0WM1hwR9bNL)D|^OMhz|6(>Rh%P;jpMtq_NAe^XTcHo?HG4eX5C3b)TXZ@iEf})aR zLdClXVKB=qfB18TGN=kww=UhG`SyF5s{!DV5W>ookM4)NSEC-6gK}*Vaambxq$@Y0 zWJL2bwPa{=RnMl(%oo}LAhtOiWbFmegdx@$L9BgS&hXcW(QeIoU~4hS9Bq$un|AVW zI0MKZj)Ni;2tlum3-1SNOd>AQ*?4QT9-HmOOA(NXhu%5zr|UR7(f<^7*sFg9w_E)%7b^S;M5E zjFYeoiSXZmn8RQaeWM)+UUC$+Vm*FurqzdsV_BBOvQK14UP+N_MJ51HLaUYlaB!{^ z5O8tEdUXUO00Y{*{kVFFXcDz}s>ch6Q3oLddz!B}wpE)=_JR4agWKbZ76riDS@4nE5-X3_a zHHajLuiO{@p4Z=l!A+-(5HpSkk>L4=8F@%myt=*buSj?wQ>A@xtiV}VP|5v0(U@IH z+qmQnq+=(@$n(ciN?fkbDQ7KOxFb${xoHgxN^Kc-gt}QVg^iGc7b9xAK>@z4^L9Hbn>`1^e!L$Ybfshaj-7iSY~sK%nGkslD=b^9Ai!?LDB>GhsN19pQNhgyAt7d1Fw>B`RaV?XKe#`OD zrexr(i5Aw)PYP=M%p*voeM$O#OX`k~TD?zB4N8MD*Sk9@RXIM%0O6I`V{p#a8S#Ejyrc4@8JA5ZmV z(;@q4Yt(dzL;|#7$P8Hde1du8{O28emu8F(6=MvR$dwQ|fNR%U#2EEn#3U_%pz&(87kHyNs0=0s} zVE%Ub#W`8$t155cBhsC#{{Z09`oE}04EmLh-0j$o9bRopOENFvg253}jtKYl=d-`4 zI2U`E)WI0@-rU zG0DmHvVJfz)A1kcXB@^L$-v4)lWfdXqIU&ul$O&b>_bM+;Ylu|?Wr z$fX5EXc`kr!+=sOLFz$x%f76C8G0^@P3m?KgSj;cL1U^AT7__V_Kppf$HEYR`D2Zz 
z)IJLSknvtIlM%$nVIy)o${+1Pzx$I{ z_u46O>;C{tsYtrsvtpLFugt3{i)6RNME;aj$EJ1+0#0(K1fg*!e z0@SxYrap7xi};_7Vt%rtTJ6zOuuIEwx zBC)AIrHF6|;gUWuG1V1u$w>bIq=K%b55MspPrzIXXTv;IJYyV`El5cSNOnq;8#9m` zfZ$V7UE?+%Rr|i?ko~)->T|I{+`91_PEI}^!^)BI=bt@IgWwaE zdrVpE)6Vbje<p!UzxXN3^oV^$>h3h}qOkDqf79|Pq6$)_wA$2_>aV8VJX8BTT@_pZ`8(#r1r||c53Jqq)j@UvAfxsK^~QL9_1T( z4Iv^u!z2cF{{RnOTk)^NM}hF;H46Up(o&v9s;>ZP%z+esAHbd|jx+HcuePA&^w@0y*^b@qO0{&gLIs&U zzjY%8Z|UbdBU6FBSKwrf5;?`pIwdF^&Kq3?cRIB-XM00f!%=FcWVHaawqwAwZbtSlZUMp#PjW-M12rCZPM%W-82G;`_&fq{8BZ1V(kforks6K-4^&VF@hI+J0LLs+h1mG-k zuc}Bsqa=Adi(|uONe6MyBycm-8A^5je~X42a)J%4Me0@^m?ep6Q?8%X6aC3)KmmwA zndB(KfHR%HJy?{GYLH2@H8iW}`q`clBGA<>X7vj-O=>dSrqML5JQ4`xeDJCUFnlNq zaCsv{^9*$foPJaJlcJlc3(@eUs93Ty+s3JKKvcU$j=}HoPa?b zgVYL2fh@4WneR?P;RX=dikiZWB?l_49e{~C#>*adw&9$BK-vy6H)qFEOHoR@<@D>x z+V?&X#6rK7)N7Gh2GmuAE*h*P0*-R4w|3*VaaoY784Zv~WggZt!Bl1YpJ!g$1a(rjn=Chjk$M0(@gRS!UuCusU~J^ zk2yw1B$1ME02n{H>%5Bwf;P|L`{`X?>azOwvm|My)0@+;R#?P>Mxr!jvpeIr$;re2b~kXe$YHCZHhtm*)Wz`@vZ zJ-{D#9B#s|9ChL-64tPgS-^cQe|6rmq8wf%VydqaswZJL!Zv@O#t1|uEOmcT8)V7iHt6V_xfa@zzd&f8%LZk11voCA{Ld5 z^AH$)>(&^9q%Zk2b2~*mmM*&|rxV%@BS`rfTyneF%CiCB?8fc6GEAQ_N_03&(0x3u zEU1Y1o&Egg zWKFCJv6EsEkbgnI`RmC3I-4?B!=(o0RBb?DdIvsnVp($0vYj4-*qczXWo_!hR+3ec zNi!3T%8bX5#djPNk`8`4^2K7Y1vx8}(?>RcHi-xg25EHY3Ub$qnnN2Pm12o;=}OKR z$U)DB$r#2!#{lD~*Ep$JrWeuQ!{vWw2P{oN4I7c!ilp;WBscWTWY;@iAw;0AJSh^i zVesWbgUPwtNRm_#x>12=CG|9;R9d|tfYGDLA6~wmFJX6;nP^5+x3xJWnv%CIUB6RT zeoh%O2>hW_KQcVIAbKm`Kr&PdIe5R%AozkqvD{da-|p)f?UX=E(n?8eNF@xn^T zS^c@m+N3CbzBgsO{8LkZi#I<`kMxM>Y-6y9DPLc^mM9W=H^*8mO63)Xoa+YCjr7IEo`Uce}VmW=8tD*j3L*A>Pjyu|f-*)XQNTvjl5;EV>%Vb~{C3fS-f_wu?B&iwFElN{L zoA{lt1n@?)sy%1746#il#vrW~i1w6$O0mZ!fI`FPAOdsar<9eJ4`AE#@OHo4!zk7p zzpBHgUK8Gp>Bml3o?5EgQ~&lT+!ou;7PiY|UwGx7k4l z>3>YZNh%5CZfudp0W3=%f4@ww%(-0w_xx!Jx<|`2@>gfthACHbMDmFplny=LkU8Cf zpPcyVm6WfT*WZ(QLKLhO6s-fr0F6b9KF6^On5AEhu1B=T#^4CfLHIc738%9mGyuNu z%d8VX<{33d+iL9OySU8Iq^4Ce62u-EagIEU@<_);ohfw@T3DJE@~`b!O%LjN%i2^H 
zWN2r`#q=0KJSw3;%P8c90djnSo=ECrHe}qYQyicGTAN$jc*I#kZ4hY|nDo*Gc&Ao> z>6ztJXkCM@&Bv4AF)T>uKHjA!bGQw=3xL)BIYCg?Ap4K$#*^O~S_sm{ov!Kro6^=H zlM+-LLn!m^8EkCxPqRwmj{jq#U&>T7=1Elp?>QcYV6L z-JZ?w=wYX>gfT}vgc^F1A1n{)48x710G>Uq!TW>ZAB`|y6Je6d0%f*yvuJlskLjf% z>Pm5GD9Qr0PX7S$`&?Xdt#?20?!wISXg8~xhVe3Z*EnS#i(rZOXtbc?k?B z)odp}Jhy}6^y98>XTi8R3Tc=i2)}hhomjE)c&;(i59>ZGK4K$uPp;av31)3sYIjo1 z8DyJOkiEhPAdz5}VtL5lyB`?w*LT3Nk~lCDW#Nz#HA+&IUD?{Bk*p-*%0UWZ_2=hb z+ekY=`r)L|(kR1fjrX@&yl}ImvEZ_xO~IJ{21gu!mtFoF;=B=(kufR?6d{4PH;V@! zCsdJ6UwHcO*PFLym-#M<9eL!8!xYm?B<~Dk{3wV39CAs{Pd^=2#C%evA+VAOKkna{ zNH5Xina3(+C8oV!)ADBP}Z%d&p#qLEG&{DVepR% zcJ2Kodq1|LpSQH=)Ftb2ykgYn{6Op{49A{W?Hpirv@x`6+oz$zMOYR9!)p`E5%?#`{Qf$?n>u9bth{-yFB>-&rGdK$rG~E#&6A;sJ4O{t6P^|G<2fIv_vW}pbq1|` zcK7>37Z80*&|ZeDlR_c4O7du6UJH|0 zfi#H(4;g6yJH%k&h8u8FGFW)e#&MC=WXM-)Ggx!eXWN#sBhjEOTFjB&j-|NTX`ak zk<>^D2`dBFt+msvG&iQo;&Bj)Sc#L~^31q)3!LycR>>IW7+z03G}NGnac!;pk6Oc4 z57Vre>{R=h=8D|2!q5e-(W8yt)>DED1C9VEf%)o0NM3aIbQ|CIgA7MFedtEE@5aoU znT~P=recUdBo=N#JDI$Wau|FQ)67R^vK$tG6^1D3pKmdgMGQ)-DzGx;np|K+s=2`@ zk;VuO$jCiJ!D%_w?|3T5Z2i5-%yB)KW*cLcD-nih<=Kz}s}jq*DoHuvP7jWyT)8rU zDJInn-F!B&Xkl23nw!>X0h(yv)l%{l-WiVv3&1>nGx5-c1v82_Pgrsq4qMgL>{gO@ zsRyF7B8eMnu?w&qWlr=YgPu6goOC^sf?SIH{>|>t#s`LKx`3KG_9)9@FejF^nN?<8 zrHe_9c*g{uNMJF7I-CW|K^aa{M{ghuETq;2TVHKb*qPt?baDFEb~fQ0afaU^Tyhmz z@P7^Gj;!Hu^D$G=he4{80NS?S$M=9)2tuYC0RH;gJVoD~&pUbC)P^x~53mY7Aj zpG6J=C-zkezvb7}5^(Zx$V|B*l90ooic->~ASFh+^*8m1Bqtde*qmS=Kb(Kx$5N=o zwU1eU3p8p}`h41r`71`}Q?g8g67wnrj2{3Gf>bk)B={$<68Og~-XajAK$QBqx9=8` zGQ?l;P@l~E54Z2X+sQo}(nP{3!?0Q7EKxxHO05vfWb)s?P4G}$tz#5rquX4N+5jc#Iy?_ejoTN4X*aDXVw=OraDO>g zJZH!rdh$<>r%1^m3JzUeS-$NO%B#llNS|6Yu_Hwpk)i&OSsD;j{e2oHghDMhAkM-dr=BgSUX(<#=q24l+sVB9y2yS-tY|*PwzVwPFKS?fTvA+c)THYWFF;D>U*& zF6M#2Az4`Qy*bEIa!%es>w5z~5ySBV6ND_J@?b{9U61b(C_UrK9p&3sI|I07(sd<9 z0aj=&g~C~d@=|33l?(?ec~imb=#CS`@`f%^L^s%cq@JR_?13Pck#ssaObCe)8_^`f={TjW9oQaCqy+zBrh}4J`?B zN{^r(u42l70I@($zZ8x9cIC}1Lt;DTZ$h@&tVXBQOgw~07=Tgswhqn2o_O)sm-eVy 
zM`o5cBS2^wwG^Z25wk;+t()$A0(Rz{LgmCP+ON)(5wLQL>F`tyhypQWh!HBtDctw#+URWU#?w$iX?s9SXm=LxY?*iT#7x@J@Uq zvVF`B$?8Qg5+OF?EFuk!#gDfn@JSg^FhJnrs_R6tYU%q|%Ex$MNrh6oscNb#wx3$H zyRrIdHR?t(WF-h#f#e=G@-SD<(nnU)Ei%f|`Jb6^$CWGfhy27eN3~e9D*CnKbtEiI z2IC-H@V_8sLBTlBj(V&VqMXcI=zoIS#0>;B`!T^SJBUN7ss(j&V?s+1p^g+WWXS_9 z?Z7;ePRZ`uuiM1(+(8icfzhmL0hpTFd?H36*tA7}04W&ro(6CRNY9R+DiAK^y7M9g zjbQfms6{GG8j`M++F>DA360K*2kgLPd~tw$LX`$NZb!4}tT_fD{{WG^*WpCDA-Xam zC;^f4jx&#rFgd_JPCBqB_fK+%t9f&Q$yazjzKW@ag9<3r;N65vQ>xW&5UU-wfr)DoN=$7R19I*moPvG4c^JX!70Upq1p)wnPYvt#h6V(? zJP~?jz0w>@45wo_KGs&q;P^RT+Xo&xh)Y^Y+g-i&^@A+X3U?)W?Z|yND%hD+M8(!M zQ-hW$qXB)O5;1~)PaRrb9YrNdsO9da7+xll!}=BBs}tLG4TW=sQUZ|5I3VX3-SU4O zQA%?XO%Ji{p&CRsuc$gh8^<^D_i+;J za$X+$8jS=dYd)PZ7*~ztIDcp(BHk_4e?@K-DR|%ac#`0*tf7 zz-kX5jP7Cz{lhG6l=g+r26!ZZ2MRHDNHH1F~3h(RuL24y{o4EL(t%-+?86qb4#`xPZ#x~Y%2PQ z*d&|+SRCgVuRPFBUN}8P( zXOB?s>H<6V`nTP_KFrT1ss-W|z~)`@|lii6bEXw8;M5dqp*; zU$}p~2wSLtlH!C|{NZ|zr6!+Nc`s=ihJtZD7+iYO&mH3R|!^$h7)EnH#~#Wme~boc=nXSB4~mm&@)Wl~=_a0ZZ)W z;t&)KLV*X=MN_tPi0aF!)vcgOB=v7uj68Z;qXja0h)Xz7J+deOoPm$qp_4lSC@C}* zIz8*4{h`>=wqQQY8a(3iTdW8Tbt=<0tlTke9QcrIc!(;GJ6q*!vOlJMEJ0$tQn~2^gC}(cbTo~*R7Epb?w=2PFMH_DE1*y z=LJtV9(tuaA2=iz=m2lu);N`XSI2m~!i8c?xniox-3{ws<{)idi?cMQ(sbQ}QIM9o zVkH1X$C9c5#zuZTbMuU3E@41oT}I|9qFT6=@5N$2ApD@!`JG#>`bAn80Bn^2embTj zZA&Cx3024?ykngHI(USgp9tl3~c>di!uq3dgOm=OHF=~3PNbS8^w(HfBWse8d z*(|sWq+x*0emMi5gY)E(0&Q8i7D7VF>>6ER*F{<}2 zJNhBwygj%ZA@=!p4EZQV*4nrjB%cQpN|BXTV@BU*wXc3}9*(Y1<)tuNYYsE_G6!@~ zB#c}gvTfc}WkCHTf}oF%3`GS<1!mNGcnW(yurS3JR*IbWG?oe!Sdk%KSyY^+@r(~> z003}vf>dXrbfRMgICiOPw_iVzq&pK2K{TuD!CFUv*#%y_$7D^t(TcHOEBZ5xjDUCp zkt;WPaykyY#;vvN%wgMlD2nf=KipU<6#4|K0U%7VVyD_cY+!+mE=N4VW)!7>1D_|; z-upvk4_I-p=+yPMhQz5-*jZN~s7G{+-1`sKIVYAweUfOq@ZNnJz^EE-3eY zPDg`Q4busND%N3+RD>dGs5j0R&5yg+q|aFOp?GSAeB-81Cfr1qFG4=>9;oL`@P}XtRUzX zeO4NkBd1oKYViAI0IYB#hiM(%i){qwX>Tmb2;*-}{{Z<)0)dB5Gr8A&G=T{z(jsOy z>S#}53-1ZFAExqy8Lu+1%PJ^fOM|xxLYCRjkO~r#l>kBd({o=Dpf3hNXc`SZQDVDl z6VF~KoGE7aEWE02P<`b;=khW$(}0yJsLIz+K2H(lYsVngW=ddQn`%xBUZlU(^yV2< 
z222nahAdae#&MC;89)+|R;@pJ?N6IQ65-&RI#XGKoR?yFQ`8bgU)3I)B8)7t%1*+- z1_f|J1jr{i4iHd~QmUF%Y4c~J>#ZPiz2GXns{pTMJ$+4R22W2IFISf&wVtT(uO?KAGy);D+=w>rA>`cA9d)2{!t>aszB+ zjzfmeEJg+oU5~;Td9(PM2xgWX>9`M}E_Bdq74Zs6M_;9W(c|qxb*($bry)OE50?A` z_y_)-d+uPA<%;sgahd~;)2wWYMt2@h=ctOyMENTo#rh!x`ZvIB*h`~gqeWH&BLY3Duu2OcmdA3M3U4>1B*v$YZd2DSZUiU z$fhV{u^S*}+D^|?!D6GHPXuR@mMsi#|b=U4+^PSnJCm z+E;TEoRw3P$Br^vg6=E0r#|}H{{S%#&1eq9mi2gJv8UgL2~h;mAk)w&Ts}6C`-TfE zWQ>p-{vK46$w($+<{Cdb>0P~uWgVjy{wOuJmrv3(di0~UQv*?bQW;Abg0!$l9>6gg zj60mXyrM^dG-3COGbRGOyGNMJ-liHx!OtN70G>MZtki?b z@#$~j*IU&z{@s?gmk#^&BsVO>1pb>CHmFI6An$P4i~)g=zbCIL__qO`YB6O@000sJ zVhJtNSE;-*TsTPU8=kq`8oinngcWXE#GuuV%dB!hyn2ij`@`?gw{XEyaqsKRCZsr) zxr_sp>*3MuMyyWHSZ*6bP_w6L_UEw`d8B9yJ&l-0!$@<6D8tVgDp;S4^`4Mcr*THT zMftT3?OkD*z$dnDErC4^ckO3b)p0zE$faC?Bxs`!5r9xZCC@ktTNXm)u@ev&9~$$k zzmVq8Ko_BhwUE{xSRNfc;Rn=InpBg~0Z~R*ZZ{Xp5gT9~yybJ!3jhp+kFDK}{PfYF zvX_c~)c(Sc>L+JcT}7j-au*`Ls3Y|~Il#-IVeWKgXz~v{1_vV@Zg8F{JB3f}B?mz) zMFWjG5mr4#j3OR}JkMw7{{V7Zvf51;Ba+vrVYLe|v&#(+MGG_!Dz3vbC?C^|u^yyq zeauvQDC4+OC``=2vjak2jsao^GfER^6iagEGYt$%?Yu!)&tTJ^7E^Eq`#CGdVU1WmD zhC}MPUBv?J3=0rJEHDN$%CYLgqy&X_Yp5msJbX0VP>5@q!lthT7vilf@W~{QD--SB z10QmRK5`fj$KV`wR1=pl1%c7q*!kW9(97w{@}*|6EXyeXO(6{=UY5lO!*Sia!sp|_ z;~h%5geSECbn5lB8(2*7^jd^ftgRs;!wZEm%3eZ_PII`Q@nKt#F~`pXsFaniUTd?s zr8*6&Il>i(Yt;rQj8*6L-JfE=u#z*zSIPeX&GEbE$5E+8%aGLP>~iG}(Da+^Xt9e> zeZHwE^jGvv3YH8>8;D>>vz|fsWR5zK36>PBiX;LK&N;Wr7SR0AUVpcaCNQ5&iOlXN zk{K|pM&NA%mF@ zIbs8xf`&Cag7QH*$3m&(BuPjh_&tHz)cL*P@`DlAjcKH|WTzAp`iU${QmKt1Rb^*n zPzVm=Diufgj?e+>Q$0&kmY_SquFQ0xATZ}oARvPZ2$GXZt!Hy6jbp%$7Z(u60|ws9 zh6f;S&Tn?pt07oRuK$dvqZ@>o|{T{MmCb=OKsV`v-$=(!BNz|-2*5{ zXB%s3=r9!M^OKGTnmr9`mqE;QfBay!F4VHd1xPLd@_e(H91`({YP zNJ-(4tMEVwa9HPV4=1H5IClkMhP!e!*0uufEd~^A5}HVPpiyT}uM1gs60KPWrU)v> zBN@OOeW!!B$j=p~l!X$Nk_DVrt|{+cPcO2oCo&0ocov}f z{Gk90NWy4oPeanXW@+PuM2&D5pKlAmBztnlY3C{jjCE3a7D>t&8kQQf^Y?qh+|~pd z+iCV5Ws*ppL&nRrGO@|q`n{}u$M+oaIw@#S0ho_^UAFXrhY{uyto9^(EQ#r|$pDQA 
z7-dqUX+Ni*&hLYh)L}pol^_QO{s-UAkh;P&y*|{#?i0|CRcNjsHx~);O$yk@XAJD#xaTI);^C*Pe#{ey8W(Y4WsMg0&M8={{X^N zuPQkW7BEzt4oda)*TCP@85rq*gT|VsggcSUZjV&+5otJ#=n|$YD#ztt(OZwFD{K9? zt=-fqM=L{HQDjwTZ8?=+3=Y{NRy!n(ysIOx$lJIP*SISZl&O_tVk!vV+s6rr__-V( z7;rppNtAS2d&p4Tt~9gnVk6{DHMJ*}oYQHVf&Tz_k^cbUEHFRfXq;k96XdId`RY&Y z&kjI7`7s_gjS1L31My6}=Oo~DN$(G+HqtBfyN33a+`Zcc>ZefHYT9XQBE;$%B~1Dx z;PW01)0~5W&sj?0q|H}ktFf;}`f`st;=cgT5|RF%w@kN7DAc>Ehn8lGl}&Pm`wt9O z5Y9MivI898XOZc)8$Jif=c+4|xo5r`-tpvk-WiSI%E^k9P)6dOcHhb-Ws)r_tk$lR z9B_^9NXadp03RpdbS@Ax8%82hsp}HuRGWMCF`Mc=*EP*XYpLzJz{dkG>8PLie=mdQ z<0SdUGSxJxMSA?Bw3C2Lxk5?&w?FPY`Eu5R&hT->IXsMH@_hdQ-~RnWSko#nKtYV0 z1N;1R^@vF@wN+&&E4TBWim)b-;8V1mGiUty>BE?hSd#2@rEj`5crGku5|w1%y>f@v zmGA*L+*Jq}BjfYe&%f10l&b_2{{H~}*!Ay&5ZFnoI;eUkFY7gZU8M;Xi~F`sBw}<~ zY&t1qM95IhlyzW(B#;5;Bf+I3D*LA>tv2gd>GzLAdPgIU9Y;{9SjDT>rw~-L6=;^r z6#XwKE=ToCW7q};<@XVj)rONbi#0>k)qrvgsjsD)7OY@alJ20Ity=V?w}dmvJ&E{0Tq?i*T>Dqt32bly z8S2cm1``Rkz|+>hAn+POblLS-#-_I{&Vp09XN(zSmVNt}7ca(J18^X(2M2@EP3#t= zt5E*{DtoK?LQ9C1)q8?nK7@oUR0D6aZYX94hT%lPL=sfpr@O9QE8-4sbezQdX0LiHQh5rDUlvKI2dv8Lst7z+ZHCmLSWcztN zAzs85E25C4Lj`4FpJ>AJ3FjlP0!}3AD%jZi8Tufj%7h3 z6p*gn$_d)o_{hQk06xBwN|c}quMo=sBs6vjlo)|0(qaMUf&P6+6wxAq0zI|#r$b17 zk#;_-UqsZumA~A=ZbdK#TT#vb0E&+}0F%3VA@~G)h$pWY__qf&7`5U_8D?dvs}LJv z&A!GXmQ6x~ADmk4%`$1NUI=80I5vYyOMb2Z-cf^PivF#joSb~%@xd9QV!tuUYe7e~ zsPu_KcYJO+MjkcwV~#dBDx3m5kU_vXB=gi|D_?XTSNZAn(i4FMTJDyM zOrudP&8XDOf(WOb(m>I+z}}fSP~Om4hDO#-uJ>&k&-m9q% zA3)Y>hL$IfxHR^)6Uxh$+@7ET$t)L~08T*}Jzl_KlZ08AnMqw6ydLG;w$0<#0GH_* z-_-nlOMm8?gcWqj=7}prx@}miR<49Mq^z)_t~{VJZjc^10Cm2OFOKc|LO~jU0H8V! 
zT(pSPl!0cCKWjaX+Cs*YMe0Z+-z1=JW|4~mpn=E(k&e4eKN&NegZx4Rc%J%8Z%*u9 z;nZ$2+NcJfBn+v&)VAeyjD_$Vrp*48-MIMTpA;+NG6b@rcOC)=D=B!@^#1^>6(^F^ z5-~$7G>dA|sU(CV6CobvU{2N~;NeLmXRa1^3{p;DAC>yC_4Xiw=pH+ByfmkVF=ZNA z;g(6vn5%9df%^#}q; zLPaEz@?#sanCBTS=LNYOjC7}PWwPeXwjYcBPDk!kC4H!>H&f0p__yf`_hy2`)LxSz2NvYIxtpCdSYq=QJqmGiQ#Ew zjg~-KSoVLu{+J*}!M)i4fr27TsbnTv&)WU~4fefZ$%Xuz7c3ZO-cnM= zNi1YMvq;DE$?6hAs3Z}zoG8w;JRY-noz}#N}8Iz zXA@XPonrfAV%j{HNFYTDvX4|ZyeP)h%AMgrz^aL2VHtotjeY$ztVl~*8k$$puS5o` z1@Q7s>Bl8=Wos_=bLsEOf@CU70fD!JfJar-wIms7{WPVY%VDKVFxJX%}L0M^^ zR2EfmHu|^$gJFozjBW9Vl!<97Dg=3&=;iHu)O3T0%{og@q%}QEL?io)6Gv1=2m(#M z@Gvki6mU2I5!D&%?*k~BH!ADzSg&x<&04kg*KfJqR63lQbASoRCp@1R00V|Ux#~43 zPC*sv@73)IsSN7+GD~7KifJxlXAxM9OnXoO!I?QAunU043(3w7dY)X|rCCKe>v8E< zYuiY00K83UHAOp?Z%M4kuqyq`hEwfdAx;#63lKp&c=B=y>JalnQcRrSAh-(GPEOirAFMuFRuLwrTv&=3j@uR$WmzFi zyI`p*7xyjBJn@1v)FM*yl_tY`YghaEQXJ9_)AebgrS|C48ghmpA=@IW09kRyLv7o% zk_i~VJu;M`M`P?Cxu-hA2rEyzTs@hTLoCQlO3S^WU^bO2kOn;9gZUnMuC!#LYTEDo z?+6$I{-H&+kd_(~C7H~Eu$aU$RJq%N4ls;3@CO*iI<}KBPdgmo%mNor}geoIEAFlKD4Y-SEHK*2(J-a3$}&?aVkh7@g^bNgBm3}7VHE=>!$trb|I ziv~IDEK)j<2H40vF$CawIK~b+6v#u-PEMdwo9_gpv0O=YQv(Vxt} zgd#N%?XKa}<kn$4|D z*}YE&+>M60IeQE}OzxE92P#jyB$Jbc<29!guQbHWB)KQVeNREw1UoT>QI=?6s*_ zp3-Ha61;JV2wj;0vK@DEkWUJ8f(h!B<0&8#pb}Z=OIOq0kVypErXGUb>hxl38f}HI z6ly-sFk8}PW+Z!n#y5L%G6^Hb2TY`ete`E~>u;B>ONQ1R*g|U23kf!@%Hl(@jHD7o zoa6;w6z<)?$P4G3bsE%PxR4v^)Za~u!h+?bR!fUDlVW)hRc2`YH(pCK0fNMZ!OxZi zjydWvGxFsOJoye)K2Wu2?9Szh^vyz_t##dtp;ErnD!IsIe_>n%;%dA9T@5iMnF8)xL+#Q(Dr+i@k+Bq$jPJCemeN{YWGSmqL91 z0K_of*X6#G_#flZKmP#foJ_eJfAu4;>PP-Dlg2n^7mZ;*1&z1pUa3ECV&q2EqU(B3 ze&`X-k=MOgg?Fzm*_Csg5ubzb!yX7YU_keRGP2NI5a5dtbqDF8kCynC4aa;rjxuJF zmFT3FaiIi!tK}7aSF^=F^qlFoS_RZJVp=Pk?JF2;E)h#GWB^J$6aBn(B&>YdGr%Rr zUy7NhGDC+iC?lAxTq* z3IHv_pFL}*8!2haYkS+xqtI}_9q=r2QzsPi-$kb|^A0&$@saoEaoV@5v^qq3y$;^3 z?W+p3=kAzi`pg}eChYL5jjMr-=crb$A3SAJlKx&+c8YUkd_Jklnnb;Plq@P*-lC8A-?{ z7~J^$CFhjqB&8(J@8BFBhy5=OE>D>I zK_}sU7^M-G^kgoKlqM{(;KK+*)VEHu(-9a5CQAu*=2Jp 
za((&`e{&rnhMGHJ>(!36sltXe&@0Fh3k<3Zdx$JKKOAR))1fMy*-hz9Pt%~*u#F(= z8jY!AtkO=X%NX_SxGs#SN~(nig5RJsF4RyMXF2JaBr?gZgVutN(`{`8^>~amY}%~S zRtU)u+|6B1T~rijbvW8L9Gw3E#4xx$5y}*OwO^ly&Kb3iNgT{zGS&-PU|%y37IX}t zV|FqLBOKu19Q>ASfYp!6`#rq=P|)e6B=RJ+>>)!O(yBtEhF@St7&s)H=a2B5fzY~A zmR-wT`u!oyM_c=fGSvHUvql3)D6pQ^I3K8SyJ;LR&m`dS)r6{A!Fn5p`fUE1K#D=N zi7C|Sfiz$`b9jtlU$8gRVaJ>Hjh!Rj8_KJnV~Afr_tcB~|f zdZw@;glu!eg$xJ*PUXuFjP!!nDrRKqzW%rN&@S-ol?7LqQ=d|}YO5w9tamg=3^ph` zcex4%?Z7A`kk})sq=|D7(+7{xgQp>A0$R{C?=3A3oaGC^(U{nx{{T$v2v}uba~!b? zl>-}yIpd&S9Y35a^Ag+o+Tr&t1mp;ii&}-Y;1ahk`8T(}`NfLJ*pRA-)l2Oy-ok(4R3U#H7m zph<>w*=+Xu!89`11eRs8*;BBwQg=VMAhP72KL8&Fqn%`i1bo2QSAVO+g4Bb?qf&;E z_UaNCVwI(6qL*gi&9vYGLIxp+w}$!P6VxBwm_Rk=Yo~>V@J~oDO4csafm~mfNR^m~ z$ug-7jvoPs_?3?sIShPp((RWmZi*gl>M9M$FeEsSH~#<-`VFq~?;4jROOn${vDRsu zY>H;w+?LORn|$So(qIQR#lA6Es>8Zxf#jMFg`l;oE-Gx(vSsCmx}9|kkawCeGKeM{fRcecBGKpyDSlj z>;2SQtH6>IA$ZZw6D2ZA;A7)C>(9PA!p&5vIe#Om+d{)HjoK~9n5DR0k*m?}DwUY} z?y6+<4%JPHm`+rZGC5V}BRLp7c*kKej!^UieQ)lfv2QT5!&*mm)2DH4LjaX(!q?`M zouz_d{YTyy@BzY)aoxv|2KNn=Q;L?kqJcxu*lFd?24T#S0no>d`;)cnN>IfM!?+9x z;$jP9$OHE|$6nnp944%>ijsjsLP@X?-j(#1UA`9%@qjO-Kt9yY_w>jxJYH;1CIOocA6l&6&a`CRCl&L{6bMNn`f5VS)_T}rk zU(~+n(5yxpZ(3Mp)K}GP96T)!NhA&uL}Q;L&t013@F{adpr%n2zuV^%uLPu|lBOPU z<^2PO)00#rtp%I0gG0J^oAH_rv<3xy=Qo5u8zRDT!NS! 
zgTko*XD5TkI>ejBCK;JfD(uZ#`}@S1V(PJ2TXm{cbo+!<0<|HS>$))5fOD1rl2_aM znE7LjX_J()Rz>&s>ssE>cA$@`6e%=%hLrJHM~~B&(N~Gr1TP7KSe!Ed0E7Z^4s+0x zEKACj)bli@Y2~a7kZlXuB(CBq1TH~~)pjZF04U?|7bFfr@J~HeLXs*)KJ~v^A4n|Z zn!%Fk(#uLzHKdWGl0}wwRS-uSDuvot{pMS4@<=LGKyPvC&{`6J@0*sub!XPz5Y?{Gcs_v7&zQAO`JwC1Ok%!k8P%~A@vZ(ouJ#(qpewNwY9C4 zXyp-*>c?~J2OO|r?Iig)$0Mqz7KwzY(_MNGesLgoXlB-lEjrV~D24?q>6b8w@>48~ z25_L@0^AId!)`qqCZ=J`{ued(*O+J$S`DilDHpM&wJ<3xJ78H0Fx&2H~a@h9g;FHEN(Pm_^9X@E4Pi4h}kJn1`P%vmbpQSGO?^`WR~L^<$$gv~{%#xs&aN zB=rv~xl|HzHjqp0`8;^(kcAQuOBTJqZFG848Q!rUe(bc~w03m)CABPKdD^1yni3lf zr+?`nZOb}G4#By#=uYLW1(#Tm)NfbRCRrm^jRQv{UvSZfh>U)%fe8WB zsKTyS0Q1jLtR*WZP%Ujs({^rrw1Rhu7;XLJHk!qcwZAN}GU^hjb@YOIL;;P;gzkPa z%vX)Lf+ODdh^$8yplgvel}_S|(vE79x&9^C&#LYDNl>xjFHka7H?L ziBi;C{XfU=?_UvVU1odbT5z_$p_;NWDHJOl75ttBeR zkj-y*EHz;FXm%HbS6k&CT5-o5GfcJYOzIV1{CMBME;g3fNZ=EmI*=tvlBE!GQ={lP zJ6w4{y<2!q-B_i|FCBH%XEQu;tf5v(m@pB@rHD{Yai4&5!6zURXEB(ja2(c~g4NO; z+R)Cct7=+o5*c)fLOewSy04^c6xrq^TgFs^lAoBanIlS`bNd zu!kXw7$s@Z(zO^Gu&Zh+?)#qBW`ZE`ykLsM5j2PTWCa{DsK^~!mx@%fL8vtjJlx!M zCu36rz?U#LaCVNNtF)fAsUfJ*D^{4)O9$*m_|bzF3RneHU9II99(CzmHVfytylG~nH+WhuJW*I#Rn9f+C1sjaESy_oWW9|cvsUm8XKp0n${f3QZ zydsxAeIQA0h2FsT$HL%IfJ<-glGk> zAL|2BrifW9=#j|Usz(l>t^$iQvGpQ(6;~UMeLc!Dc3>Ry#(I?#X3I&Dq0zP^__l|W zKoALWA8B>xtc+{wH>A=bh|4T9nC2yxH{B$yASAX+xW@x%_VJ#a%`*W=l#s4pKsi&! 
z@Im*aKCkQQ7b_cQOGyW=(b`9lspSi_d>ri`1IYu7k|PNLNkZHu+ormEye%Lry~9Aq zs6}N%Kh_a3l?&lE)o1yfnhJlqs5^yD6{X%VS1?Q3Np&hwD0i zv-E$swM|>M=}8Z5!D+YYzjU*;BFZ6L`r%#rNWV?}2*Jjt3gm|VS;QW6M!Qz)ONix%wV7Y$lyN01< zHQD(^+q?CGl!l!P;s*Z!eCkH)5Auuu0FWLTykuZ!1a+l4V+@1&<@b(bhhd&);ubsF zf3#>%GCwH0RI`H>T6CthI0&WWIXg!INB)EUy;V!7#_Y+dmYUiu^}9_Zj0+f?umfW# zC=cZ4k_Y~sT1>4>VXue~uH`xF3mt`_O6NK?CO~i)#sU81fywH^a|7Kc&L90_8iC!O zq2gfLor|EO6-^ePU8J1HkbZn;{#{NMy;OY6M6Nr;f?M$`{{YCt^fXE|NHu8GtfQal zK!cqAah{_jtMgVV&c)B2{{U8ElzEbVu`{LBC3mnTzeg>cet%)pAZr=3=2s|ywRsx9 zd}z`HUq&r9*`4x>g_3&8ytYWoCNc&`>tw*b7I$_$UbHo zZBiXSONzZjtWNfM5#*A6?pcBWtU_Qe!^h7gjN`AFfy_o^_}faquRd|^#<4M@fYpDY zB}wIiq0Nf$ud53Bp?5?95<7^4(urU{^S9?&oiJBRviDC%M7?%uou`TXgk0gu? zoB%!DODQl)Vp0(mjEs&!{pRa)l=heYpxq*|-zn^yY~@ zpgQUI`qkNm;j0DuZ#9*9BlRjNENp{?U_m^Cw~}}ud=v45)o=m<0M?ZEv^zsT&DcVM zB^0NPu_2aVg+mMtp_7I8DBY9pJn(v-vOpPydROFmTWdo(fTOctQ<1{VmL!63LV)-Y zDwT2*Af2cQ^uPq}Ns=aZH@N)Kv~ zJ;xZ`fEknvgwpPiMIct-cRCujg3npskm z2xn6VI`8xv*097iB0ESLwGC};UaJEbiF&e6D)d<^52BcT$Ipy;V>S%qtApJW(fT9I0_ zt;a@+WKz=Qw`EI!8{Nkk1pfeFGv47RuM7Z|S*+1rOlub%8|!ayc8;G7 z4HHYy^oUUISfQeENIGwJl=yr7N2@q?DWh z(V;{?oUiUqe|~?;#Jn_#KaoCg#YO~eRfEOWC)&I<%hvj~Pauu+}=0Fqxg+PM66$MIw&C=i;rgWb7!MZ_zdX>T)K5w4*! 
zMpgj;WI*n*m&+3xFbv$V#zFCx>{?FTt?j(RkSq-;LU&Azp5D-qHds!V!imAx1A-d{+`EMc&#lMf`e z2J8X}8P7a;S(FlcEm+gnPZ~oF;!{o3D8&q0B$f;E`cA%=JJh!-=Wr?rEu5|n(0KCPlbV&h1Fe%HO)mu9%Dhr?RvjE&4v9btk`c%F7X9`>XNFhPjT8VgAF>jiM8S(npyQ0R9!Cm3RdPXyp&#z>@gXS8ac zOK-N;`a6@XA~|8JB9>`2HjXG`b*lLY>>1yJ$KJbH^1KWA$r(b(BYBYfk z40A!(X1zkNn8!|ervgneC1AjiqXsj;W1b5hf5m_fBE(1u1On-MzpqcV9t;xjC*IhS zc)r;qu3A{^$?L?*%~kiJDoubu2Y4y~;3)?KsLZsb3JV0>3l>mgQOq-MtPBsQ*6us^ zFJi%S71vV1`oJ_xptNk6<=+WKCY z)GaikcyP?^0f`g|Kxp$JT(BFmM;YTIX*~+0g)g);Bz5%g{i_X{k@d@#k{c;DqW3F$ zsT3Eg#u`UPk35o2=I7iw3NQm?^#Q{nDhrm1Y)c2}r7f%6L1ub9A7jB*rZS2@Ryan*Fo6G$oZapkuCHisv8BTIEukVx=Yl5bPcj~FmvaE#NE zcCqD^Mn@`lbt2@aA{2$PwHh}{bpBuhs>DDL4O-$z@EEMtf^ShIioV8)4&D1z7naBz z4}ix!XQ@&%rzi#6t#qliDt=Ig@O2n2OL3mYrm)K+ddTaUho`({xm-xw&Oik6zy&j7wpu-|*5tNo+ds}zpHvxnN!GO$KuM;n}lAU+1s!Ef;OGA7JIdoaH1@9xkicyB{c z!zhaP5!P62Sd6Yu2ipGtxY|n-IX(#2nVrXa>c2!biuSfV0fiQAkOV4;Ze z2EvvY(M1J9sX)o9rkbW4-0ZyeIgRyhybGc$nfagd>qFlJy02PKD% z94x7cDq0dqNH0eW+Vytu(j!`c4x-RSN}|P@v%^#QexA}s`y`mwbpV5q**unA*(C4> zJmajz%t}?j|%+Bf^K@4zLmnR?|03OxOeaOvP z$7Vk*9BJ3V`iCU)5?oe0scDpB)94gilO)U;#9`{mEWiXwB9XEP5(dW6k+YGL(-38{ z-r(E<0(I&QMfB1Dv#=1@?g^7C0tv;L8k!RuFo_VHd??EW0G->oB;@B2c0htxMgZxq z*RkbWQXDD{hfqs`87qCYdE`lqx3w8UB#XFgf%hC_V*mi8v_kCjFR(^P2Q0&Sus9hR3~|&dk`QvHp*zy`^0PH; z;6u0~u|Aqq@{?fKxmGCwX)4Pln%H$Yjz*V$cOo!5aINrE7UYg5UK|9JQlPb-y+OFO zHG#@Wg;nicUA3ti;@O^Ev8>geB@#=O9+}G!!DLT(+r~U+gVkA#nRyW_D>2rOQBQXV zloiX95x2WrO}5kCxn}HtZ3{;f5Jn6$lO%uBkmD*gG>SL`GK}M?Fu0Hsxoe>z#^GLe z)R!*;q=;ipLrhSDjBg;W(AG(A*p5BhhFkiDlprcY6OgCIe1a94wH1-xa>3h`D)pxD z@3a%8G#ZszZCrt8)ZmMFce2G~+(zh|*w{F2+nd{v*cm(}H}SUcAlSpY zl$GwZ?WCj?W=7tVRa2P&IrP_pRex@BqvVs-a<1?*6GL!K0Xo$AbJd~xN5@aAVMA4> zvBMIEVKOX@AsOA4DmQzuHm-BDamhV0rjTylKTBE@0~oJ9=GtH#mT|Vl`DyoQj!^0EcRE>pB>{j8-kaN z!>)w8f|rsD2N~p z$2ldwNhi-liD^J#9%DNcxyve1f`kEjk)pPDhili&WMC2pw~yba0s(8Q$0GilgULLO zKetK|sAyqg0!(~@dGU^gqGGBDNwqcpmSioU5FmwUrjnOAyJ)9O)W zF@l6hX0ae+@xX*}=O?_5Nw|k6E-CS_YP_w3|_}SX)tZT-}j4nxz%gI4vnrUZtehE9p6P=&19mLIg0UK0gtiq=eLYi$9Qurm9- 
zPEG+|ZVqJyWg=w_ZDu+jDmN;;I01{tY4+6%a{aVW{rsM+aICXLR0&SvyMp8$!B`OL z+}yAF#zw*{p88NG5Z<701t&cj;y9) z_6ehA&3N#E?ngn zBo}84{Xih=Q@jZzio%=h>eK6)AzM z_0(u|BSxn1Yz#naQ`o5#VugX342aWDFao+`Ck)In<%UN&Bc{WUvP{utpcgi?AG9+= z)U~SmwQ1z30FrY4pcYuIhg_;7#&NPxd<+rrd>nKWFzb|{7FZ3cH6$HD7c}*WT$;qD zjcz3MYfB&VBV|6{63iEBvjY20;Dt`+z!_%1#?mll9neg+1-df|zO}Vj2n}ly6Tw{Q z&lK_h05FMT5{a&;U9Br|`|mq&0*vhrq~kpb48bs|2QGvTUrqV3fh^f#r)RJ<=cnmZ z)RWUyjpc$s@|BQ{$`p*qM&btGFag2qYF`nY6`5$>v>&suv)-^7Lq>V8_OfZ;%cBNs zS1n;b@X(T|?SeS}02VnU@IE=)$47?78M)O!6S=KE{_hbHq(;r#ucxf`;7gIw5_$%6 zUBqHR!A-<@&yny>I6Y{}#1*hr`uqMNlp$QfTFM+@~MbOptbANgpSZfyY(P8^WBaU1;{# zW6wjN+``DA7IWIKrCTkjT8vq-6lI}dxVnI#Ml#B!Sx5yKvA>$tV0wbJcT;%>AXbdR2gO-smY0SeUP->QST; zB%gL70o=_aBDC0yw0HzOgk+Kd+6m<2Jy%+!u_l1*G_Ld-4al@ODurqFZD|iJd-rA3 zEj+R#-f;G|qA?3xK1p^hw1&%NPT+93Cy6MZFq$N62&r$)vuU%l)X>tpRt>!Nt~I~B z(I~wc0?3Ih=XTAzOuR8FW6nfmhbM;MbjS#jLlz_xO9!y=-rS*$*ocC)uQcx4w8_>u zRG37TRI+jaRgH*X%aO@AV}dvYiOiC~XY?%Hc0PjjxEi}hYaKQvdy=~Y7GZM|5}^nC z-N!!XU7+xPQsd7+(-hL)9Vak?N~DrW-n0|XyA|A>c+~9s#L4{Lj`>0cWxLw^ulc1F$P$; z)W2HjYoO_9G~LWN)hx?;SD^(>A~-2W2@&qEEb+c_OlsR>+S|)VzjGEO5t zt7*x+K765+8U*c{*JRb?g3ZyaOFYn2p#m3;;SB7ts!B?!-dqB4v>ry^b}mdw#RN)| zFT8ao=VsBgeIX=XJKMI~MV=W8{{S#B07synsF8%BkOUxxJBp2f9ozyr9ZN23#7@i; zSFzLQt!qxO%nVBEHuS4vBz--r$ytbj>)VvHl5N_o+wqJ7OCk1?ykvCKC(BaKR5OM@ zeSY?|I|3kUue$QVHkUr0Ru4?a6v3sH)qh)|1gC?AAaR493m6Qbg#bs~7)(v!HMFTJ zHlG}|FGmFESS(Y8^$`Zwke!*>Ldpik_G28C&rXF(T7uRbP}TRZvwh%OcmqqnKAm4v zmLI*&FSR?Ll~2O;Ea0l>9g3U zy^Uzmtor6CG6D%o5RM838;WG$9ti`eR+5FNrScl=`-g4r)a1}Fm`6XK)~OXr^Is`q zXv`)#Vk^TlGJ@E`sbo{M@s`dA8R`LsTEdorKz$mmz3+H^V7Rq9VmQq2O1e!vsglyn zLjmcl6+^-G2{;7qRT;o1auPDLho?pt`Fi+6-j;}J>#WOicrL6FQ;cu3C00;b0l`qb zd$7cT=Z^y%OxSfKE?Fe#{{T2lb%JQcW*7`})`Ch=w&nYY(ne&-Y^t2?V5^Qn$r-`w zH3ThL(4zdQ{En8ehISQ+4L9m8r`vO?=~rxNkxLwrdQsbvio{L?Y{`wl`;}CJpq!9+ z>gFC~+&trmkhLU@L1XVyepQDHbg4QR4z*i1p-SbzQq9 zWj~l**{fi5t)AATN!A{|nl7O%ve-j4r{5acngPVG<~9gGMYx4!7~D4vr#z2o0Q_|;i2yBTr$P&#q4w~d z;zo?N<7iq&X(c|I1}I~$>FY94G}&)xpl(sUQ%Ght2 
z)_A6I0k=?xRFQW@=@|&m1RSr%;Bkx+rpldEOdQyX=t=PmcGB=+g`>9U$9h?2kJE@l z8yO=siboO@3my&*&Q3S@Vpk_|>QrV>Bv!g~Ooeoz*OX3{9w0+6r0$|$UrWOqa= zKtI$ZvNs#IvBATGjCE(WAS!B$cnZHSpnxhOkaROoB{hMpe<-Gr#LS_0B38yo{{YNG z;0*k+IUJs=P^zahl008-mGb2h6A#z!2%|=)46bV%2+d?y9;(NX74wo%x~?`8ft`*!Rs7C^8|7*-NTE3Vc7oMi9Zt&FhSgW-R_QA{e6Fer+HN?@@nkiw9PmSnG6 zb?G9WXys6%s~3(`U^bFT86;nuL|*m0~ra2UhX7NFWIic%K&9wNvQ03X#s< zqAI&5mo5nfi5YKb1wjkrBBTWrlG;DMTUV~c(VXjlMsNK>v0u4#7N?@?u)T3!TNWay zETcBWk(7`MDPMe>hTXf4;zm77;ctoYoDUI_TER%THXDa58 zgc}%{`g8S(HK}cB9myc4tkWb;?%Rk1192b?$WI~1ARmAHU43K1xSkJ>;l?8bd(Bj8 zIj3~`i15FN{svwd2Oq(bS1a`+^&{>%H!INUEPyrv-Or!T;PKVQk0El^jw+vzjz><= ziH7Hl#7`dH2s!9l4JyqD5#myOWwH7A{{Z8rg3RI%thla77$+wm^ysvTt3BwWRkH<) zoyjDz2U{AgnIo`ed`A^=95~uCq~;v{lgH1;Umbr@F-CvHScIeWWKW0(ovY{PA6EPy zhU_>N6dRQ0KlfMb5;mtx6$F-97Oh&5hn9O0E)iSU#ad0?8GWh=V#ExcyL{mZnLSM` zR6hL+QMcBe9y?++F)o@EGRBv5*R>=L>a4QYc-7>~{{T&jqO^HJ4#wVb#(B!aG=u;Y zEH3UImhc*DSVNRUjTGufPSjcm^y%r$yI3tvPcgmECLXJ<~(JFCz~|Ml4TZU zX=3H9{-@P}he0Xgx22>BgB__Y)S3Ny2Mb3FOe zV-gp5Iuy5R#+^Z9OS)&KAe9c|0dmJ{u_3mSTfiHAqZkLM*Eh75O;S^_ug~3egeYPr zHA?8lT2ZP`T|+}UD{^wjW3zY+pdZ!e<2@d793zgu^2RILjiq+{H?r*$P;0D!Ihy!*+>CO4w>hBf_=mq0$7MW9#js#ZGCfOCwer z$!TU15h8)fWjkDPw<*JU@xYBqm?xA0vvfaWzoZBtibGng(Nh(rfU|~;m`Nb@9u(QT zYN2AGLEb?Cl34MRx2X_2ZsXf}J$32wh9@i;HBDwSJbs-@;DKcl*h5``Y*$er_m`Ap zEJ2jN9@0>eV-JSyQc{uKXC}&`NgzLukAyi`P2g9J?mc*{vT1O&@f?O)=3>M!Q<1+a zOJEJ$WFNrlq_m|gI`?!e)ah0gEd>)YX_qWhp%Bw8rl|ytBz08DS;7@fj~qA>X933J z`ncPTyqcvgBp@V(E1_|5%bubLmewCyu;`-1VmPWRH>A+RX?Hpm3>PY!9D=9_+*qG> zKs{BQrIKCUz4&VE28|31XT9M*uc2x3ecV;!lE$89hAHf4X^ifEb5UYUUK;_F2 z;{b3@&`HN!?C~yIh(ey1(DLXluR2y0;GD%ipJM&gdy-8vM>CdV947;Ol9E6&9F4dn z`QQztm*q}WmO2gfr!C)`ydamUhU!BKLgDn?)uLe%4@9Ja!T$hw3I5Hwz#xH-)H;l* zM6gqm9M!{ZPeFZc4k#fFZms*b3|CqGUrv0<{X)^PAgYtW$C8K1PDmpi5k$l!6^6dL z^VgxH17?Mo62cCtR(8>s(VlygG?|VV6(bO$lBvC807ko5WZ;4@#PB+|nOZnw%7G!0BCjhGl*1?ia{IS?SF6%S8z?i>b5hN}lMbcm&$tq?(<6%K=US5gL!hB04o}e`-lppjv>l;h4scpQ z?6UU#$YJ}(HdSM>B#Uk4icBbQi0q1rq<*qWjiLR%-eZ|2Qh^HrxdEuy5?ntzwQCBZ 
z1%~_(>Ci=P-FlQGjoEHUp0d|+%E1D1PV$Y;+>i)u%5l?61kH4$l{lvK*Gf{H^k`Z` z2f6gilE-56LM~mAV_E%C7;kYZ7vpK#a6SO}C!t;+5bEsRlI5A&pD_NwgyUG2mPOVz znPs`DThxZ5T_d&k&6HimT!nw?V5lT4gezd2fExnJ))AK~p2U(Ey^f}j{6IN+!Za@0 z(&^eWPhBfST5FEhy*b8L-GKtMYat7O3EQ2dATD_^h+ZOMRHahXVL&?3?@eoJK_r1l zYHBxscQ;8=C9TeB-}vOwL9rF@Ky*AZbdd^ zk>N5H1&MjLo=SiS-_Os0dJ-lAa@1P>jiKcX@I-oxR(U-dveK+01V=nmq8%ZRM>H_PLDcX4% zb?`~xcq(2cC^=S;utDU-jcr}n#DWzxnw+l|pKASSwK*bYr*lz_%RvwUmPcG-BmtBF z6m10((mQ?AP5aGS;gw{cOX47r z?=0onh~CcQ2rzc#n2r+}NmQwbQl={@wot1X#`1&Tx! z!^fS)Lv3{3m*H`r3uRywxm8FWUnADe@QHX5T>!laCiK%#mDntvp~;=Lu`B93{{Xw% zW%f8pSa0s$RKJ!K?GT7g$0Yt4<1tNFu#E+NbIXi0sR6?q~#ksgr^Sti(cx_RSz>Nb`_6<}6fnIsZ7F&R1b zk}}fDLXwpb9)i5Tc@FEGf&jXMuavbQm* z+DJG!$?I6o%a&ZSQU%Uz$>u{1#q=6L6qkFzwAQtaI#?yqWYb==vNVf2nBjHf(q&P( z@q_Qd3{C+jfD-{ivk94r%GArl8d9Z?>?&m#jow)~4=1wbS&+`+4AHotl05nBbIiA1X)( zjzJ-YMswAaN=6wabAy87ooXKblrV|S1Vh_1+H0~j#Zpkg0Pui#ja%n#HxhGoZ+5C^wHhl`@YBMQ)YM=SLe{EFws_IOX2Sw}0fCkoVl&X< zyIBgBf^GtuhNLs;r;F_ZmZ)oM35r_wY4J&`9i1=U+9Rei< z06>1MEY)gMyEjLKOGjJLf|M;K>CtM}`)NH!BnAz=8aDU^cVuyZNgjHPmCRFR1 zAdW?woH!w&=rxEnDGr+s-|2@%_Aq}@cV;yqO9o=Z0)3}&$pC^KG8P79mU{iY`cf8P zX%Uc8dkGZ@Xc*;*Bv*#Q?2)Bmg)F4|8z&941uKui>cqulC3&tztit+KZ0=p*fUHaE zt4jWpVhj2;%Q}QM5UM=JN~{7BT_a#ImQVp8kQbJG@WFDB0g$|;^zo<Q8`&A|Pj_OG=u5wUg`~ z!BAz6Lx*ezNWniU3l4md<*<63%v|Qp-UCgyJNwbZM&w6Q9u=@K%vWsW%N$v90axTzd+K@?I3MspfS0n0WS zf#scc?Erz%&zUlEfSNpmQR;tvceEEV5b;XT(e$hAhEm)2Obdr$lfO8@!#Dc20Q1xY z1urt{IZ>@oFO^_92yQ4fEUb?$rKGaP;cZM;bc_!rbLZM{--b9mo~)ImODO-wMe42c!x2N9upVJ*A0k2FTSIgNQ{twn(u2au;6Bl*gcw4CE_ll7;<{{S4|ekGUK z;)s=q4DUd$yB=bjv5zbGo8ZYjT}Ksx0$lr%WUaOuE8ktVh05BUIPKG&Emf3zC5x3| z{{W%=zB=?Z%T%=#s4)@qtobu=^9-5EDNwjKj@gfLe`DvUpb%KYCmxw&!OtJ(=b>no z>d_+s5TtBWGv_L$v7elO%c2&Fiy4U;MMJeFO)po}v>TGH>hny|izt%EEOsPm;grPO zdSi5o_~be(XN-06kM#?M%;EfgeEE)3H@=c7aKt|EZ5hKy*B=LQ30VFQB}lpkQZ^!= zzlqa4o z0K#Ge$85X(>`du6egJ8Cti2 zAYr6DdW71l#|zG6Q87}oFi_-;gp37|L1Ne-GM|o|rAnYhM%5#0?nRpHcZk@g@Lg?w zl*X;}qgz+6Yh;kt5}!abzQEF|9i$K)w){48?ehJaq|8=&5ceAoFh^TANE^^ZwYRfI 
zs4Z$bb|OO$+trkrTiQbxjfe}&jQ;?_t~P<5pbnaV!0Br5t-v;+f@-u!(sY8%RW4Su zsh6tBAE3<|z|N|OLk3g*B`qK)+mr-?IwVd?37kSmvnl!3jaQ)ccyKg?+pDYG6+I*s z>-uUShRlXAs4)3dL7W}Uv@T9t;NvVN$y(V&k~&_lAFbgW&{KBOPpm}RWwA$2o)Kmm zaNI=N(x}=Q*Nv5Qpp6iZ+D1%|P76FN0Xd4(|i5?4Ms$ycNEkvYd<*D6?uQPTUfet_xh)SPO z^r-uO!qhIBWr!;aHN`<2%QxHxM@#}n!c+&31m%=|taYr3Sm`BrWV5ZotI$_l5=OQh zo&mvvv`uQCtzDz6R#~MIEB&)gGJ0|mkWaak=MC`3AcA_OmTZDjq8Kx~HN1ySn$Y=J z#N3k|T`OF#wV;9USk_g7DAb%r3D2abA;23(9moeIy03_tGGhM#x?GwHum$uUe1WEr z;Vo$Cwf@T2)E?U{c%_Lq2%0HjSn$fs<&>Ws0;dCL-H=CDWKC3*8B)$-t89EdM?E4X zIk6;b4u1*kRp|!sd{+P z$a=)Vnvq|<-gt+D$5t76_eIM28Q$b@*iol9QZ%Sz|z{lH_KM9PI&) z)X%zQqDti{at(D3A6g2R4X(Cw)M8k~)}^WHniVM@REfPwrjAG_a)i7xY*fONTM)P_f5`e}FRsbd5cNRIw(R;_^auPR3rcx8~TObZ^@ATcDb^)Ly+ z;eL561i*7;soQhVUWa{P=x-7&boUjN7UYughtjQ-ZSzPLJT4@DGq>A=j#Q2?GZK`% z;$*@}KQC<8qeD`{IA+pk6sHQ0EO0!Qo(Yx-i1`T)>HW z7y#-oa`&wrslEC{LEO2B8&g)jURd6Dqv=Xz7a*0R5iwEiQJfxg#enR z$;4yDANDOtlObH94f%v&9q%5swmv25AE}6_<1+eVAlPd9eiK#Nm zhoHSR(%ml&)-4)!I`wRrCUGQg(K6i)n(_07$W_Cz^+wU0sN8UIp0fs3Awgk@a6Bq$ za^P3@hcwoZB|TawwFQN%m{Zkg(ph8L+m#C}s#|~t3Enb1jAx-twICImmVZ`iYid)< z8W@`WnvXhb6yA-9j@5=K0d#gC1OWJDY@Lh-M~skqu&g+ivYY5#!#1arYriNb-WZe} zZn>?dUfWrjTqT;mq^zY`WH>{?1Y{C$p#1F}J4=+ZwI-vRY<6bcT=QLbVXk2%*%& z%N+7O-SHwMV<1MkWMJW6d1nML-~>6HbCRhE1~GYhf=eD)^O8?cP7{_-YKk-13x);v6zLKI(I_6amPGO`iyKHuqP1)xQ@m~t zR31v;7542RNjL>fS!zi=8nzPuJnS^8G>I%)k=DGhNm;MwC=Pq%-*GTD8@=;Y!w?+WgC#B zN>~CLh4$yBD3n<@S6U5iuf5>{d?4yIA5pTh87$mal2$^NVy@n<(0xYr7<<*EWKvbz zDncs)I*k$(LpaLM)xSvV;Qe5;J)&Px7aq};d8GTAwT)=Zc8slDx&+vVB&2&mB?AG2 z#t9_NK6JvxYOrP+4S}r%Y4?ZLv00W#E9rARsVdXBmLx`p?PP6-8;0pOCP4(LQ?;_Y z`RA(rk`$cDB-irtYG`%q2)JmHsL;e~!)jP8Y6-RETHxV%G01bdg91V2PCf_08FP`7 zK35tGHtnXC^D!=v^&0Gc-Zn(^9d@Ntwm2R>Syy&223I)#{{T76NFdB`9KCB-`S|pP z0Gq@#{nHMhk2aYMLdAI1DJ%? 
zwMTK48C);=L4+tkW>^>*;(AT?k*ojyf|if zr?+>*cTmTJljM5B?iS#mKD&fo#sf8 zix4jb&MeIsTNUb!WL1J$;Es*s8&+vvOal`}7{;5z9Jt3UNd4m=Ek!crx|o{LgI~ez z%KIW1N5etAdXT1;tc^~RE2S8s;iE6|%KK4rSFp?=?t?Ii0fLWY}!mj4$kjvdWW22siusn2x^yNF68ZPsx=V6*03eAm0>|+Q@1k- zrTeJb4@p>OD(uFN<;#?bQ@g2C)grAx%I6G)3?(}{P`VmxaP7p^#B}{( zGBf4cc4DdoY@lJ3lj;sY0QBipnaF);2z`c)T7#yetSep!8jH}Pg^f0-&t9SWd~}bA?GIs5|yQ;q=hQM>@3s|(ZiDV)jyOI1=aO!(4LA|YN2A5 z7|@9xISQ1Fpprb~6M~z#Knk!pEiFh16IDkByu~@#?@H*<1r8&LsY#=1R-vwor{DX- z5Tcio$-%?y=KEL+A%{^FXa3~B%0s&l8)JLgw{Z@Xkw|eY!7ZIc&q9RuO@#Gn zOB;G%1C$9Icx{gAt+8?nfw@QnsW9_aCISP6R8v>$a0_*{Vb~asgSBZ?)b>}>B-7-l zVmivTrV*pC$`3LyUAy>Uvjj6X7!%Z!XU$)5tP*bZXQ^_-W-j!lp^c!uN3%ZRDu^eF zvPZ)_76FQM%WlJOJHWtD>Fhhk2*x^zYgQRb3-495q2;4oo}A%;coN=$Qtq1cZisBq z6x}6(_K4ziU|KSZ%dy-GI*q%yT;!gC6PGSvn( z*U(8Qoh(k0prjiS@;e>DsE?VrzXHZ^Nyc6!E}axVDf>l%7j$5G231^-$CJ>Q#xyU= zBo&O-tS|T@0YC$i-bm-7xiL|KP?Nk=`~Cx^PowM5LM;T8X||B7E0||sqdQJXNxd=w zO9$^Idv)(uVzzF zS%uJ7QjZEIMh;637&^N0eE7=s@*1U0DP+4F*z@vYH0xOUSP*X$bae{N9E>Kg4U)1# z#Tppp_WP0J3f`avun9QJ1O6k36r6)5*Q?v>w?xw?HG0kjPz;5WhAa?%HAiDHX83h)rRnc>eWpSNh>U0e?kalrCLhA(N$F}(Yi3k z2rM`lCvF(O%jS}>)Vuw#w@%OM3y z;u05O@xTMm0f$wmwA8wZDlT7{ceTOs@QDFy$73}sLu_Jtw-!%Sq!ykTkd{_(z#-g8 zAFOR{*aYEvDQZcT{ltQ74^CC1I}I<&A||jMDH1qin!%OmEw=U67xX;G1M%9@TI70oRMHp^a%vG4}Je6ir6CgI_ z83JXDI)Tc;AiKFGsZM-@J5i3VN{lmH*rQG>dX-qKO=4*i?R6L&37-N0cF9QoG3&1Tz=6L%1~@+(QR$^&(c)4Mygn9Mj!v(#z>c?6EhjCM?k>^wh8n znZZQL(o;8yXmvf*mSk)1sf#NvpF?W{r)5!l`IfAl#*Jq_YsY zLO}=2((@%HmfZeDy!UZSLl!W-G4v@uL+PRQK?pGE&e9V+AJkMg2N6jJ zHA4d6R?XA)-Kyo)OUmY6FnDdX7PzfEG0j3C^2Q@1$S2#inWaXvdy&=Cp8G{5$h62z(YSXi1dWVLJs}y&Fvisc?%j=9fj@{$lQ5Kr z4)sX8ii*3itGyZk0@Z^k)1#(IB(^EW+H7$(XeKpclFV%H{WimIF`zIYWF!%}2l$5R z@-ikUOmvf>6}YJ9d)ex10hx?)wdkx{r()inIEq3b`-npr{WcAqiulWAH*!WuMIK={+x@&PU1DT(%Y|eK3~hi{EPX<90S4YD z2)X&P?WT-1YxEjHp+#CnCsEZje%RFJ7OcZYKS^X+C73HBDv-(mBoNMsPH~aJ<2i;@ znfSRwB1(&!4NbHb1h(2%1iDRPQ0uy6dW~&IN&a7`LGNjJkbSgnjj>oo8j^U%_V5rA zah%AEEW+Y)NCLps+U`mDHC>@*8bjI~6U*E6S?I@RSQaL0alfG?fwwZmeVJ)o6Nh~5 
z*m(Z{2#RFc0$~#YXSR$ly*hf{1Om{Sl_%6$8L6#$k*ho<-zyp-2N5X2X%86A)3|ar zf^Z5-N{?i2O*J*-+tSs$!#lycPPo=IQC=B#yz)t6)DX;9?UADeH=Y+eg0|e9_)d5O zq_l~B-6{r;R&Cmz?+y#Ybgk-ewNRCs#PZtvaS4yrje{=Sl>;ilSqKZygOG4Y%R*M^ zm#Ev#kAV+yR)-syBGr#tX_Af6JT>IOX%I;c_KllwmQvVKNmgar0S5yB=V4VS8I0)+ z9@U`!H-;(>!WpM63sECRvBMBzBZ5>1Zqc3CZ;YSQpT;~8bLNSOCF{8d)A#Gj3_&nU z4WCrU6rxm&#_k((mzz`s77c~WRd2NghRY3JVqQ~5TJla z-OpAMwPjLLpumls@*3+~v?B)I`wdCy%R;S+QTh@zjY_$cDy~qoD3k!FXddE9mST64 zj;8+9^|ev}tp)Vv-%7(5(kfcEl9ifNaB9Z>`*F(31dhjaewhoD!jvbGlb?f*gFZ$; z2RNqmA2+W4aL|oCe%_?-Ud+Qwo7JpJs{5Y@cw8V?3b;5N`-eOpnqg2OSyB4X?NbzL1=UyCw`;R@+vu9d$KdfAvE$ z%VX04S%Z6yK>@d8AOND?AyWH^31oo9pRT+5QWzwdgZ@LSYZoGhmDx>@i|NZ7C_yJX zND=}v2R?tp;2yK3PL_o}Kx^a9ppR#0u||e_XSejql2n3O&ZJdiwpUS!q=H3K3r8dS z7_LbHQ^8&8<7$(3ZaB$bZ;&X00}2X)^t*~V)!HM>^ocr;qH1>@%E>ZLW&VT%_E>EbR-< zQ&NGXV2dICm^%WvU5m7W3i2XxJ)h%iZrrR(@ zT1dr)ju>`FUb{?KGDw&qi9!Mmz;Zd-0GQ59l=g%>^A#=8m_5fXa4hDr6Wn@cudM6x zYqILoQlVbV7N&-KcFWBg%8R~vnnUlBH43f~Fub4uanKBWxuhv_Gw4Aen^LUw*0t*u z#Jqu&r`8)z*P~LROK`ItthDU}erD(RFQIIR`ag>c$YQ_t47YD4t zF$kEVp_f%XY+iuT^-(TW)(wdOLWgeDg5|4n3mSzu<@Di(TTLWnEFnFtcQj>pBKue| z1Pl-xr!&{tg^(*tHm_D-MzyB9LJke#*!220?8jbpf!a9Cf>`V`l`3*~J0X5@8HrJV za0tNaik7(~kZ4O;fpMUF(oThZ8n7)-Ypbcg_wJXW8?+TAcrqjwbS#H!sEF`2=6hsz4WZWCvUAX`qpbt3% zt0_>I+)(d5+m$>Ao7Mzx9lfXOOvyUeqgFJT;+mVvcg+yw#|&}gB8{%caZ!VS8!CDP zDpAfKR#K;6Yz?^IozgMf2pHNj+2U0w zB?U6WlUCZb(!@5mcMSv@#s;;fT+-5OMJrsfVZfTrlmN$-*}!5CIU|9CpK<4+lZi=A zQ9+=+tM0u7IUB=PT9%6rA&v;*jZDrxDH;e<(+jZ`jsRTl2a_2*IRmK{vxrt^sYb|gjOEp-*>lujNs9re&#sQZ(8;SsMea9UdF!N>3OB##V zfooivQQ7tcK@bRI73$V)J^ui(oV;$W31^8JpdvU)3T49sCm;d=RbwI&3X*gt;gKXb zP%a6*YIdOmLhNj9X5|(L`&Q{6%vxPGJ21w#9y=tlh%BCqMu{LMMlrB(;|ip)BMI(+ zgb7ZS7Ry(ol4)*wT#aFr2xuchr)C?kWy48bK(RNd?dzDGB?#(OPGSv`Kp9|#00*A4 zilnKf&9y*k2MxQ{-fr+gz<4rCV@Z2g*1)AYZ-$_fCfO*-GAT{OkOYcUF(3|3ek#fe zPysat?0Pj@R;`@UknSM8Jz=M%$3sJx`HWJfa8KC8L`*QoJ*><`NWsWmqtsN~M`Z$WQ0@xtXHXr^GQN+tG)6gH zr>KmX#$Rq!0Rh;St*F$i*xn891Xrr*eZ8RQijc!foUE-RZZ|y69#2hMab=V-T#(_g z%ExdEo~)?Oo8Fj7E#aYN{GqM`9V+b+xE6R=a6Gg-5GrIHzZ;7D 
z@zi+tiY)wL-5kF;#n?atv?+QS)b zq*2aSFS+ocP>>rB;kNs^Aob|~01NzUjre_iiCI*l!LH2Pn)JAS?U>NUU?gK@8558c zlX3R;(J}Pn>r&sidw;v3Iv;Y^blxTb5?qc6i9QXV!&GPByYj0~)SnGQc=e5ny>CPxQU3s{-sV*q^UnnN{{W|2!Gw8Y z)tmB)2#@alJnJ2^s?%(tma8b0G8qZ|Cy$VDe{6LG`6$H7T!n`U1QE!Fi`*{|aCq4` ziDs!#K3>`@+EQ87`*%y$E>W%)c8TVOaI%R*rs(3{Iop+xfsolg;0YQ0^To>;EKJ-Y zRC|Pn4j6*Ow)W75v5&UpE47rFjY0wQ4+vh_(rKx>(nj=c${IKzqDX>4xV}pRxj-NV zBOs7Ya4`?!r%WPQe95`ZaYosV%f5^m>o7wb;nft~g;x`P+sE1^K^cXNwYIL_CN=OPg zMk63KYILX4-6btjN{R{?=<0IQP9#CM$m^XA9l{KB+9lN z!yJ0$%T5d$=4cIO4D~#}Ky-My!!+OZb5@zB6n={0|JPgKS|tfedHLB)!{bk^E`G3f zIHhg73S0B=!W0UEjCXS6*C`WP4_dXCHdY;XdsW-m@-RqpY1 zocJMScC6q^t;e^SoA$lI0Lt^{?KWl3H$ zkn-Bmbd%NT%y|4cv20(0_|1^Mrv3)X?Z0M5=gwVCks(7tkC8dh$eN_eCJAf`mYSeMij(aW7QUE}>xcFoaM5LXQj5{gzgsE&L>R)c zUC&21N)Fw@plKa5SH`CMZyXXQ{LhhWl7M}-fjAyYGS6ZL&j-X>l@c&gawetqldsoHF#p+m5vB>c3+n1y@?+ltmYA?lEpXldvL zhfR8C2DbQ$68ViMlMMRAIvZ~w);_N#6YRaY&>B%C=&9sV``%wQ@2p$PQZunaqH2pX zJjGP3Bc`@8twJN2KkuoHX}6*Ro%;OyyI9FknAd~DaJ<#!jKOIR1Jr;K*EU#-8m}eV zNgh)Mr|quUFe_kR_*k`MJ1YHZ-#8ENUpJ0?CB2<~Zug$8QGNnk^?a*=fSuvtR^&iE zD;!5la&*@Uz9i2<nvwrZFSWdN?^pFh;4`5xGMY5Wx;2d(BFYwk{)T*@`8t zhu^-OGS~Z4*1q@wc3=q5xwpzgjgV&QY34+AzJDzcx$9wf4-dCSPu8xwZ`OSD5izMx z22;N& zwX%kUE4N(ZTPuk4hFqigQvmE=(n;^}{~Q?CyRr?ktft!to2*-bAX zQc}dqO(S_1gmxkcz){N)iFGKB@=H}@=A|6ngjXH+xM|0QzPvma^C-e=7xihq4|eqI zslBWHCAM&9yK`12I;&060CdRJG1H5UrqYa742sk!!==xr|~32UZ0ou`s=Nd@2>Y}0$<+yPKk%x8&oJFwQ6FJJ_AI4^ye z>di+ndKeF?KK;~q`BqzJ?nIh_4}g{Gm*mS<6j`Y@{YJ%A(0oj`_3A;^^IG`?zo3|- zrF?irDKb(A{0TIAnJ!)4EM#;%Jfg-&q?PPfqCThkh)WOI4+p2R#orfz_%o+{|EjcI zSi2mvJswo3my?$=BMt!$*dm&SXw&FcM^V_Pr<(LJ!v@cya51x#OG6d^c%_iZH5^OI zuPj$;+;8?5+g_P@Kd!b4Z_2y3EC~qgsC zOpm^c;>a%tTglYcF}HJTWB;{IiLOkL@|_v^tGqsuBWJ4rnt}D(g%nH=rxw=O{;i(G z8rm_nc(SLZs*RM)Vl70S-d(3nNND7m{n~o>txa8JTpBAug-p!K)9rbGvBB|gJbo>m zstsc0h1IqSd4kmVZjokCYF}bmxqg^<=6|1r;5@lZ}P3z?~PK0LO{_;h4Xq%_E?vm$D_=p|m! 
z-j;+OQtdIPN}Ctbrxkw!RdY^F9H&m_bP6$a;WZS-&%jyg{;FaD{$W9(&+Jtn**-Ur zynpyf&r!3Yb#x%hNl51~MJlS8w|h_iYL<$`ouaBq8q}7+Z?CW&Ypk5Fs+q9JS^W_7 zCpcVY^GOV%ZKDN+Yg4?q5CP#(-O*7`Qlh9169??{rJBGf1lW-N=-nrRHm%HcG;jQU zMs>kWE;CUrm$>n(gUJugC0Wld#2BwGSry3**@)7Lv!sEjcllwAKkB-Xpn_rRpQ;FX zQHXw~3D4S5t@yWT<{K$#Qi$9~Bc7J7ynrxOMQm5!uCIfKEX_SfyB=ak1;^EvY6}aP z2>UlJ#{GsW4SVxE1opS6Zbb@YZ46MR2mpoiC8Td-a7-lW)_rU?*f~O!=)A*IUl6

MDHhbQR%S#ky+YMQ9#@WTu3(ar@=}P zRON~P`_%2Uw1xky2kS$*Y^4pmFfuKajjCZRLVb%D{Fk{7SwQJtp#6On63RjS+}0zR zHUsh^Z=H|mXZ@6k3$nNJxRI&vMlQn}VcESvtyeOM=MYCD;}_-+od`N>WRD@@p_eD= z3HfCb%JHCY#S~kkeeTF92istHk;jcy_cl&!VX}?i4FY|x6n-NT>Y6m&3Q6^%NXN!wFGy?s2|j9c>JAyd z=A{p^QpTrASVP*USIl00;h~rxX1x^*aWA|1A3)}nS31FfX@uXnDqdi&(T8L7P|$5` ze9R)A*9s(HRk9w7${~B};SoLv7~VZEaW%#}885Bp*Hys(Nev=G9*8Ao%9v;wRqQMK zsgeDfAg?u{v1JpEE@o;U#)*q)oLi|U;+E^@WW8T}=zI)eTvNolIYFJZ1*`5*vCdKKWmT$y#`rj&6D~1qMi`KE`D&G^jUNAT^+on;)fK#(?8w1-NW{ zh*7Un&h0qaWk>b4{Y?ojwz{#To+ljxC^TlzPtNjzAyE z(hX+O(T)}|3PCrhmRfsG+-(||m7h;1KC~1NE3t+7`SWOJ$~Mt{3($7$Naoo|+1=F~0QG4(N*>LH7oGCs|)>CJI+K?~pzj$?b* z`7Vv~SZ1PTE`PRPn$C>8=ZRa^2KL{(g~-S7J6HeG-DqGF3M%sp2LV4BY(y@K_+-?yKHd`fs+@tUQF5hour!D;cAq z5-SSMEfH1Rg8NXZnrF!&D_ul)w+w5C>-lzxd+(U300jWKN*;CfC1ph9gRSuK&P2ZL|`M;{Ou3%o?PW z!yUC!2cYZr`jaqwd`OhqgkZ&9=a2ZInKzI)(h>wIx)EYg)o{)!`Z{`PFwtD}$j-Sb zTyj}p@b6k$P@vIr&_C+4g?Xth#xaN|q_Hj`$&x7t5TnU9y%~~HIcf;|1veEqLaj*7 zvh|pK@g=nRP9s&XK^ek4RK-kO6z8NOcG|LRu|wZ9g#po%`1cKX!KjKQNZv(pZQhsZr{*Ox&%Pi=P0i zq*ukw4NPhz!{H$<%p~T1Frt&YauEPOH5T+osSnD$xb5yzNu@?n(!TC|7;;x<>um&s zp*@l1*zk(BaVSJM>BX;2>z**hNM7eog8J%cpJJUxAGP0cHA)}zW;vLKkBF0R+1I!` zJQs9+cdi=4tcc;G&a80>eB)r~tG2Q}h5SloSs=qJCcYZ|{68qzS%Lr)nQ$cZg(S8N z*d7k!fZ;If^*>Cb!>)3bcNA>3sNUU=|DyWhX>nM|2R{caTT^tf-S|Rqc)nRED-ERL zEFF#+hcm~FfvJf;hbZvBhdW9ZRl~*{F=E=JC<%v9I#{Ipl4mO;j3qWA!D#W|Lh)QL z;G)FfKL6E8c#_q}N-$QJBVZz4aHrs)kt2G}RL?T=o)A`#J=^c9Vd7P?DmMx)n|!|B z^SnStX1I;E%Ie;Y_$mliI*P``3ZUbiFY67LIs~~4O_K8@1EV~;*gqg*T_WQ&8{_S# z?UPw|S52~NwJQ^OuUiMX7?GrYEWFeGykED9u~efUA=gCN)zA>AKyuetU&pIDD&m`H zo*nY#h*x^mE|A<_Iegy1=FO1;I*X3kh8SS>Q>i&RhUs~D9^2EFdmGeWJh2cRD@OvI->%RVnOqC&;R4fM~T5s*I9@4PRA zJ$x6*_;+^>nkO3OUceVuGYJf6s37Cm@oioa`F{lRZN6|rswWPk`!PS5l17jfJV~8v z^Tul^>BXPinmN?dxfPz_4bLX-oR5k^myz&mTD(rO6z{e3R0hu)UC1c;Q`^qLM(!sX z5+@p_iD)AJghY-<^%q8&(p#8SE z3Ynj%Iq&-}Z>gdiE3-t$1;koqKBuwO-yw90%~wYqx?f2bl|EV1rO+|qP#2W2Eh0|W zID;WkQPXj9R#vB$twGTm&CFZFf67Sr)zRWp{_F@8i*uIuORYRdUXWhZ{WlWZSW4x1 
z32$cop=p2~K{oN!%KRONs=e`KQ)1-ZxcQvpJWh}Gp}K0uJ=3BkdW3r{U_fW#_yP6K zMM%bBz8sh4I zu#w9Qc;R}XO)ELVaE%J?IDXk`SGt+d0Px>Zx#(aK**g8t#+8wnWmU_PWKaIG>waT9 zVczk4)pB8A^g@Q8=;S}r--|=j9vqw9+U?a`Twt6Oei= zNC!@T)LrJV{?~kX{2;Id369*o9F`paJ|^!{BBuUodH$m=;k!J2ErY!$C{svVu3{k> zIb|4h#VH%b6V+R^BrqbvbN@%n1~_VCUE!oRsd@filXt33U>%yCr4x@%kcn?y-%F;- z+h_h9HWSRQ3AmI2Bw=Rp_BwCjkl+y&v;N|F-&cxY$GO-L8!6 zdcU<*eQ^uUG2m7XMG=;k&0(F>K;sJ@QFOC@Vn7=vNf5oCzZ3BMxrx^KSaw zl{h-edvLyXypigCBJBOZ7}v|5okN6jV(Xv48LSZ*`!ZJj1oQ?WkTHnEcmTS3_5z@H za%LormJ&hB+pM&y`lQGeg?BVS?zy;~!M-(6@8lxUU7X6Wv-cM(s3tP~5>Va|=-^dL zk?+3R{lk9dXz?=Akl7MJzl9v#FgcG9?^-|Eso+(`SZvme}*PI5M&~7V0oJpC^bU2n(AV zTf9~!m!?ssmBEupxqtWDzgj{RZh#EfM(hkf#p=&r^oYsZDm7H{TbXTu?Qyb{Q!XkF zT$5Gz{iFiI!JY67SG`a&S0t^BlZMsj!0@3Mw&D1U9V>wnsr7hMUVf(&U5A(W(rsMw zEWlCYHY;J@@24}$4ippzC|{EJaYVA%`pBG+Qi)dDsW#GIG~e1F^Oir~80iv77Ia@V z=9_LV1J!;>1e@L=b$r-LLX6j9=4AFT7DSWZRs6jE!7PL-yhRstaSy5ZB0oHYT{v^K z0kgc`c$iDL?FEfWg^Cb|`uVRq;9RD`RD>=9kuLj}8}1DOs&X=qEr!inUW=WI@M!+p zc`jU&6xSj^0Fz~AW&tGf`pjj>poM>YYZza;3aNP6YaDmlv;u-rWed?%v016=*f{&l!NO1a=ef1)$OR6~AxyNOWU%8_*%w zY`g`lH8lFGNFx4;!o#PbFpYDL(AR<7LN#T4(JR4w0{XccIO;g|343t;(PqvPT8_z$ zx#z86WtCk9(#^@)F{P=$e!$W&q}Vqgrbk0w(J3~YJKt}~1<&=S>1fkr_BL6fjT=t< zplmB^qff4mmNHKdF4hs*p7kl?kB^-FxrK7JJm7_yPGsJxw)9Fu&N?@P?UvYG_+oCe#{3rnntAyvZdp$iv zc<$WS=l8!aMZ}uaw-OdAzK!xxxxl6kcw7ZEy6)KoF-dt74oi%+lrIG>JZ1lnd%`yT9!m4QLG{Ibb)URK9dV0^@F!iguF^jc4#dvC_L zosq5$L>i8MfjZ861mWr*3}nlulMjvr^YifVNe#K@$KpBcBGiPkSQ5uut(kb>r7d)C zkmuEyQ9_M~I;l?y5cZumWli+uLPKzQFD+87Zyf;=d zzi_l#qgyx7p-GTh_f|H#TN+CYQ0ah!zyW5g9!C(#;Q=YLR?>>AH`79&E|$>?_Yzq&GL&t~=rUl*V#iAzbuPl_kaU z9yTU5FrM`|QaO!&k+Is4i?>ho^kDa`H-fxRIGW>Av+0dC>8R*Cx$ueGSQ0-Y!>k&6 zAlN!E;_QQiK`qZ3WnMnc9Wvoywt;qRvbZ=xY&FV%Hp5_bA9(0ktJ zNqOjI@6qj*gK=|;AD&}!KM`Vtb_0l+krFU7<@fI=R1Fl2@})&FUsPs$KL!GY!|3jR zT)q~>)dmhhKLq7{=JTz1Q-(AC{+xU(1ru%a?1z%TB*dif4siY%PHcY)3U%lyKmLSi zPdA58FNNQ`(6erCl5xd48?0}p5l5#KkXuP*2$`L?L5(zBMLn%GV=f!sF`Ze&90Uwj zsq(3ex5$w;23wO~P z&rwD*)Jp}%?r)U%!MUAOiER!P$YwTDBdf0kmXoN2CB*TF)wlY(;HA}C==bdSN~{zc 
zUq<}vpc)APAa2*bsIDZE5Q8&-{twX7Cwd+AtZ_?qKL7pNVx&n0!8WyK#fOAfI7E{> z741Z)k-d32Nl&{{mN*K>@z+$tMa3yLc0_}txP=lDJ}-w_G|5iMd;SmbkiZnolleaY zY%M9KCQiNe(SrQ7vjtgmz{`pUl>F5qUjYVskDtzM)HQnKB$!Cj-#8%}+~X2KX1U>| zza+t&kJbiRiOPurfJB;3fh>c;UV2(1>?n1S+W4jAy^6d4!`#?a*54vjQU*C zU{tSN*Iv{5wRU7xYh$I)2(BJ)t}FTaIE!565Jh(Iv`5bGt!Swu^}vMrDIMz{Clag@dS{3MW=w>5#8X4l zhKMD7)pSZq<)J+q+Qmtua~|eZd(E2+JA)sOK#D$Z#FahD@X2Ey%%+B!W?-Sh!b6Sl zp!`k%R=ECClE*!(R>@#1M!5nXefEno|NGnMKKJshhmkd7EO$~HIqH#WCEKsV7GPJj z9+7tzWR2nCX-1n}*(B8+@cF3F?B#NIYX(9mcJ--L`E^stN0~tn)&oT_#)!) zAk^{G)nngh`PFwGAUzE%%ioB^oz!`8f4!dQ#Rk0%M9(Kone0Ki|tP zcM@<&jvf!_OS34UFL_?hagP-Xfbj~PxpzM=3p7DHJ9oLDdZ>C77$EpE9So!jy9;D? zgm0AHg}_i<83B-c+c2KEYuQZ;e2slF(m?igTe1ux+@0y16a-}y3r~e^q$m37JwFuP zK|yQc$VPXSa8~eKOdMI*T}I$&Xr>neXJ7BHaS{n${WJ9=;acT&mSs1ZqHR@|aI`UB>N(wbG6xcqKLp%}_x|fDJwath z)VWCDKD2poXGlp%CpNzNlS^D5Fn|?@D|atvB@RvH>JJ~NH#y!a*2N{3q6_mmzIA!paJw6;O2pt)*qJ9y|QN!FvI;Jb|)3HBGHw22!_tJwK>DPz5y*}72J`Fru9 zy|x3>5kTDuwiE^k+Nl`@AEEg!Bi1p8@2(Vjj=Xh1S*xZ}HCn}_=cS-tM^P)#ALB+%JcIf)JS2h4IOf z@{k8hjt5=T5HI54gDTQ%b1REv!B$&l>qnVP{7Edd2EID2%H#ykC(d{hD@IBd=FqKg zeC=%ZLMi|$vzAVlQESp>B7}y8$-&m~m(^o?{uy4na_P0(WJ-2-cW+lcsb-`Q!=nMwS+1T=#u+2%#Gp#pb~ne_bPhE)=iEJN}!x9At3 zf4#ii_s#FH9k_D5xW1i0fN_)%cCfBN`F-^lTU>uv49`X0Rgwb>*SQf-@U~+YHC^~?Mhq-@>~8OGG+ zPgW3F9xZDtmFV&%kN78MEM}BWTE^uqHG4vt#gMq&*yV~4K2}+-w?|Cv8o%_7b$=M<9N?l0a3m%u^SD&kxX6g5|2su*3TS-L!wt1pP>ZLFG`JC2@bJ&vn{r zge_VxPck=NG5X*7D>Lpx|5i$50(42jS@?jEct#qfaYxgp#*x^2Q+{gk&J)cjy-+!U z^8h#9Fdrh~p#U)l+U05ljY1p85VL)MA|>2WJi4$j6xzZm5I73pBMUF>s<9%d)f=Aa z$qiO)i?{_vle;OEUxN8xb$IZB(Xvaxn4q935iQd6K^S$HD40nB@(6ZnN~5HYjj(6> z6wR~ty^iuf?P8d~{Fv{Z`nlNHhKY8uzz)$;zI+gjzRi*oAgvzV(x~j_ zpRWSwG#FOw_3_}WxcV%=ELJVL{XMA$PH#k3S#?HpIB4@4j1U29&h|qKE*$bI5E`hw z3{jd*Bdc4U*ug3;r@o;`nd)muxeSM#C|rekCqn;*w`!5V*ZuYfBq6e;)Og$(x%T14t z+O=;ykYhc-x>Yh{mA|BAJJoo47jUD>zf^JTHhjKl=_)(k;P^%XqYY{Q)a$2%Iygk& zUIc<-qA>WP;*R(?zk4)jLFpc*R!cR<>@V4~yjf;>J4h_7i_dn<;oQK~tZT3)tHa^H za@r+_EMaP)^X3>EtW~!})3K&G-2sUyb_V!U-oI?>U2C9S!u(40`^r 
z2n+BFb(Ypf0Gvgj#MA}n(ka`CS2Q+{GTy?HXi6JvnWqwpJ})^|2mTK*D(}t{Pm$d7 zK!4PBg+bh?4Usu82Obz0pVPmZrq(>PR&k9L=S5A0D_9_s+?RsO`*Pg?yT1}81y6QIYR`5?=Q5~kG+ABtDb!~M#? z^&)bka7aj+fdV zZcNzCsEc7pFGB3=?Q9Q3q=^jyyUuAfRf4V0W@ZF>Ooo7MJ}x(sssJbYFIP2OSG60j zjSB_QXI6*_+N4jUKFM(UKE@w~(JQ%Ur0HYk@gglj2-u`v0=*|wA5mwEwl=__dGSlQ zS&3Nr(grj2**qYIJI-cWlLf=UXy!Md5l@7ms~H~_!o38Y_zl+oqPSN2q;aT8Ab!TxpOQ^_1a(KCh>F=gt$ZUE zN546Yl!7nZL7Qzz)kpPZj*gCPQk`iq>brnYoVlp2(bt51K2`mqbUBRxh_e{wXK;HqhU|^WwfT3&s&&#wd3a%AYI2_QY2us+@BVg)gW3(w_5&){Vnz zP%w<;2w&+ggO359BAg|vnm|c6FgAD!%wp$FzP!zS1(G7iY=J0Dblz#zlFsjZyNYk@ zu^(Zn@42`l%(^{1zH)G_*yFBvK;}?%ID)eX-Dy*;#+071n{YSN z$hbWlvYfsPn!u+g>1`m##jfx==#jv^+mwn!F1i^7#~a z!Z)hPRvI&{yIT1oRsr9u+DJ18jn_&kB*J=c*7-KRLg-ze>*dcQWhlYU* ztT+mtG~KMV5oV4C@pB9&PW?etRW0740-Ud@*iM{hko{EP)Q@(-T@k4H=jJg?QLW)w zFLL<3l`>H$!;zAt{<(Ww77$^i4qM9ir++`E5$0@0L}&6#;j2f5!Ca3nC9d+DJ(RYJ z*R97)FzHS?Zt3fNgA(*OYmwG(lm8W|r%V~=jFi@>qLn632CyV*{2w2b?uEdlvKGeD0Py>jbga+wM*b9LpR8mSQ5s*DlATNC z4_Iv}0C+SFEhN^*uReg=J|ZVpZ$#%)_ZF zpq8~-&<-6~0>H1VD5)x;h@_{8Kgqu;h8&G1B zd01wwZpNR9LuB!h20{2?Nbn4$s_kguQyIpcd)ulr8VF`6M?`tNOL8l=E3wwQE6>`~ zJHtsh*r6$Ui#(KBFoDLVHaW}wYv5u#+0A2nZXDZSjc4fYN=3Q2%DoX7`qmj~!h9r& zuvljEld8k14Sy7syJo*!%n|I+lMo}U_v721=d5xHC2QCt@u_-O3(V4tL zkqXAx@sM2=yozf<%3l_Z<)RZ&@qsvl>tgbvX`SN3X~hKHLcj3-Q=Gu7Y7|LD}^ z1|C|IFm`UX7WeZ3B&g7wU`)j3?s zTOwytck*8t-Ps?2pY(ut3k64Hr9MjbS-CZ(6*I0?-zhTk2bg8FLdHV0nX7}j)Ee+K zZEySw^iIj34P9t^#*@I$9JAzbF5yL%n9M1)pm zx-soqy@!kWg{p^u%@L|~YvoM1LzyNpni>d?vH!{#sF}9yUfyT> z(j#S2_p95wNnZhr2vyiW%?Wc7tlG)lXtb?otg^dQHb`*Yd0rj)1HHa?SU zBjtrCAfE>>9~<+F+2%(v)yZ74R=0JX(;bX$#GYI?B)m3yrS+(_dCKnb-^YX#+B!DI z9xy}vxCJDp9~6?}HkTj8EE?~RN81*x(OdN&1;TQd#dkbbF^w(i_#rFA@)0hdM?)fo zxSGWC!LK_EDDhsJo1T>Vbmodh_2%*TbuZ(z7D-*`YyXKYGZq~wU)_6DXoaxlG6-#l zhx%(*Zeg);@a~$i{xZ~K22Jf{*B%@+Z%4@@antA=*07QMvY}FnCp(14BK+WysgH~3 zE>M`%{_}*Bp*hE`doGQn{ofmLsc65iMgX=SQ4!Bq-)#8GG@ZOIK?B86w{8|*=?_kd z<2uMmn?xxA1m$N8qj&Ico{J=uDZG<6&12Q4m@axPI*hdw@^+_F`p( z=|WiOB6~%g9_%rK6;aCk>6$I9Ti(ThFSm*E02Pi(w=Ctwqy~QVO1mVJxh>1g8OY1g 
z;Sq?7sIP$MgnQfdGqCEPnDg;5o$2KLlN(2bjSZhx|D?bPjr zskWxk&~r0ndG_S)Um#YOue$M)@?akKVcXj`#sT#OT|<;j?pYtqVLKz8U~feuH5e+JsQEM7tkmDn zVE=+$CpnK5Zl+>($$xE!yTXB_b77eaw{bjm3gsy|7g~#GDXnzgD2qQPSQS#+ce{pc z_2lrA!6PU_&qFFBoJ%eIQJq0auJSqEdOqUj*r(jCpz+r18w9CC%<`0k0y(|2KheKo zM5k2l@k8B_vc6$_zC&O+N|=Wm7J7D(nR<$11$x_DX7vnmm?tuT)^ux|OEgpKicy&~=A<{4Mxr>WB+={*pJF+Pe? z9bSLZSWUy6#>E@rX`ZC}5^_DR;Gl|v043J8~=UUNm z2rP=mMwTD#t;iuT<)JljQ8RWk&2JFBkfxGwnRP;MFe&iX7q%YqX zj0#{)0Vt(8V5AkwTLYW#YZ0*@R7oqZ`#)Dn8k{%TC`27mwI>QlN_ao1Gk(bmX?aVgSKnP_y z2o)7Z!3%yWfE9q=c*{gyaCP_RqwgvNQWG`j9n$MD@Ee-(&^)u(4WN&2K z>jGeqMT+>}izic{JjcLx#uJB{mAF#We zN!w|{*O%x|yDab0cT1|cvGhAYQiekKH(Rbsw%G$F{VWKMS|deY?7^q_+`xvUjZ1#u zr&Cg0f(LsX>k_7^hU*bx4xA-QNMsyK=$ zG-9!=EbZ=_Od5&ZKbfeQFr>aR`o6&4I)$jR|IU;SoK8)TX_hc;*|cDeIIx=LXM3i!{6)ARi zd1_3;C*1~Dy+&IdT$^ioq}2mGglp2pAdH$n1r&?L*ywpvZ}o&m=jBBr!o+iboeB!N z7)C~uw~}XXpU6922D?Uyx_w`F+ecU1ra7jKe~;(T1e5>jO>vGYxI9xM@$s6n4&90& zx_$CCy0|MWjK{|_>!3D?6T_D;RE%fXx`7boI&k=70PTCPqI17}u1*(|#^7Fa1Uhbh z5$V}dC!V30a%_L&&vT)QwoHX2^QJQu4NJxGo8fhgOaN$@*q>%Y^>Hb^DTz2?dQD?t zm*S9<(QJmv^-o`}^L#DyX~o0e&hW=8@-U8m)4Du2<*1R{rFr;h4@DeCN-R=`p=h<~ zEiG!H9F-=?QA&HR=X-$h$?HFx|Avus^*HfWRO8@@$KJ!Zd297%c$NBn+g2b|ax zq#4}&bg{Ur@`S^rRElPYCs?L=qy8;k#>y)rJYdeF+sr&aK+Vm>(NR8Ii)by1hO30i zLh$Y)xLP9!FMK>9>`GqpCprS>kqe?x&kBm~u(CXM7g;}5srGq+EN76GA_EJ0jysuV z9G|;NQFwXx-ZQG zYLE9OW8;2uW)>|7BfsdHmUxF!l=U40+|DWBF7+9*=}7wHE*>5Q$+C$Y$wrkd3l}!N zlzVT}s-~%jA4(Sgp?$4GSj|lEODECW__KYRWp3%XSc_q4!mYEn*Rpw7`;xD|c8)!usAzwBDaDf(s3?kS75 z6nX0UazXZx5~;7Ea;M0O;Wni)-G}FqP%^S{?~Vj<&BXW>q2bjFiDaX;_=&jP&Z1hk z1;%dP|BBd{6?f^~_R}-u?6cCw zg#lldWp$niP91HVty<4?Ah-5=7w{lNsSWb5of59tR@d>pNw6M!6*t3NeH`U{g~vFx z_wob1&(}fU!=_ji28=2DRZ;G4+{tMnM?v#poxN0y;L4Ee^M@wx!`Ju~UUxyM?db{! 
zC|9$TD=4NoDz&Y7sEAx_t{|(J-<^k=I3{JZ|G)g@{P=jaNgF$UrV+wa_<7Ib6P%IR z4>oj}w9~E>S4l2sHRorXf~rMkCfr?HQH`nYsfGFFA!)=);y2#Kc$TblHrGb|;4|1M z)5v@4&yV)!e)(thL!y)lnA0=eWR`Re4X)d!b#B7;P6Vc{%%0X#?IV}81@TReNLJ4% zxt!+!DNY=dt8(CS2C6iMj6L>TA^R#P=ua@`2=9)0sz5qJP63b%=x26QkD&y3^MO^}958w!R8&EoGWhr24f~9JB4JXWm-9dHEoEChZ#I2$%AfF& z=?CC}!oV7JOj*V(1=xq+@C%)dBHa5P5%E5gUo6q>)oRa+Ec%tGsR z;B=v`f+adc8t!i#3ID7KbSVSHgI`8P{vdb~NG z3C~5ZEZb{X6b5?--%!+wjtT_*ce+%u48_NWunC~^H^6<(vtU(SQU7D^pXn2SLFy+3 zTkOsErzz+t2*IDf%-df$^!ZvYiCZ=>sk2nvP-ibwV8Eg_uYvvCbBCe+3$P5?{{xUf zZ@&PkSLFT;>*mldBEM(qJk6=?Z&p7{Ge*s~Xc)jG00!U!jt9qAM+`|pBCmgYUJNft znv3e>wDnNX0U&lI5?HgaZ+1_1bC9kVaO5kx@)+g;!ygW*bgMsFRCtechAl%HJRRv# zbY~FTl175p+(y@B5t%BeU+{su++1WY2RTv4T>AKFRN5wlRffHJRlQ)=4v`s0z97|S zk4+PLt2?)$=|Ll~a$D0)go4B$NMz$G4+Qbk&f&_J{ltR9M;xBL9@@2`rR-q)efwU# z!n|R9dY3{wQJ2*%7~0HLwt{j*n~4}vkU%|AM+{n>M#@g1t5&1v8MQD)K`^NPrRk+h zYEjs)Yf#@}yv-uCEg1nqD(wWR!{oE)YAzR&da`&*rON>ca+ZB5&z~`=FoL&(6Tb94 zS53vL3=W}fq(q9-FcAO>M}{m!aj?KHSg;^ua!OtmX<1TI23NDvmIIsDrzkK*B5^y` z!#r#VYm2`^ALH+j_P{dYi$*% z&)rq6JhGVf7~^R-m3PYfnBqb=4pfi^+$ae~W^T_<3X9jPYSiTnR1IJb#MU(J4$J-K zn-f)|wl~w($E0LzhhZGUE$&^}!SFIqj-{NEsmPeIoD%wn7Gt)+=ucRTf>J;&4BPIi zmTYJsSyO(snBi-wc%7kxrqUI#5Ga!b5D6>u$4}YJOr?N=D0I6JLsIU~t@^=-i#DaL z$#YG%TGGVJT4uQ%!_^i6Z&;VkT2~taEo|8!GOh<8LWmZ{8Z~)=6mIs0f5TOf{>qrF*1=uRoaYi2@%t4LfKJ--$ zjW}wKwtYZoS(9lIyZiAG+3#P}3lq731b}c)Q^{JENJn)eyau3M!{3)s`$G`*fb=iq z8j!069Wl)FxUXQYq!J*H2#PQU#qt3FZ5ad39iOT325Jc_RRu$bG%jn_q`fVm5Q0G) z!=}mY#7OO_LFp?@ialDZfx`m4$$_|T1gR^=-k@BAHB6HAENo7l0CuN3yf7^UHoljw zoi!tM^`2;q6y{l_Nupe_VV_US4gnk!w47t27+G?bW>264YE{5(S;c)sIdUljN3DWs z)QUErOSI`Eps-15`vMs;fJoe-30!1G6pnh0a!yqo%%t088@9l;L+1UKs%KXphZCldaUsDLjZdi^fw>5!1n4nT4 zuF7`j8P6F~GMOhC_!#OCl@UO|oqg9^KR7Eytv|ceYDpwVedQKcu_vu)By-mFAJUjdBN8GD zzae(ut~Q9+k)Ez64zw>aLn%A#;8gDYO;|vPtamrk{YK#K7pqO0k*tMo*Oie~ho(bq zVtH08xDWWb#^6r27Z9mwLe<_0CFtR`sN`t&r$`{o)_9$vrdo?bX!SN(R^){uvmKF% zQ*kPCL1u_FYK`f(axl!NJF=z#liMLI8pLb38{Vz+VD4=PRI47D9Xj-?2;X|eHD`>j 
z$HEo}LYCY^F)Bs?Wy>+h6s8W@XE`tDO5VHts2T_gQXsXLkF=}TAA9T5HeMO7%F)Li zWd+E2RanTR?ZIM!dEgz(ojF)zj4l`w914BSvN zOi^Rgiwf`V$y~+%0OUlVor$+O1$Z4&kv|cGQS21uT9zkguj(R>r{_}!Kvh&B-6PS{ zbkkf@h6<@5S6L=R^w>k^lgE!uLwk?A3hl|TsY}L8PDQy^^sBk^zS=m@!zS=WT5E2t zeNuu?4xeg2U6S3_f-xe-#g4kYC=1YOR(dt8O<(~EE(ARh zsinO3maOpV(aN@Iv^E+jU;z*V7~q%og0dVHIAe@)1gt`k5)w@f-kRUSZA$0cx)t+#?eM4P(TdWR&Bv!9`hOZ7Ho8B%tvW5rRWVWKx?6TlR)iD zfgm*?TQEmdBsO46aj_E0Zu^mIl&vb&Nh(j!DnK|wX68OG}ZLGz<@6d zYaQDKIEIdC+@*LQymHX`Wg~Yp*fQ;1lActKKm?2ed0R=6W+bg_N96_eph#O_X9{!cF~-=W!BTQt7z{GG>VLz?lrt?=djY1vYD3ge zY*@nqT12n6x$oPDQPV9->19;lNf=Oe?oFyV+}l9#psogfD8J#+o?3&D^li_`c*KNe zp@!-9WmA3~Nh%69Y(qJBf;Rh~L6Ku80A#5JPa8ohKp{!!&Ng5MO4LeSH?zKNX!qXZ zR)GoHJFE6J1k!43o>KJbq$@r8%T7;885TKOBx8b00CD<@94RNueY; zcMg4qygJjCf+V2RE>MEjly3^r&t+_XRQ~`kdZnZ-8?%Vc<=V)GISxiMoyrGMcIZ){ zdN=t6FdXSuP$HfN5DFA9NVlzaR=YeC+$mQ1j?A?Yp0t?%0C^v&9BfDH1Ox8z{{RaN zSTjhLQ!J=Bu7LcldbKP8hh+=FDz~WWwkTNBsimN+9J4}Mr%aS$QPdIwulQRflz)T) zoTUUME>uc+fX!CE`-WpmmuViXL`c=wv)fj5iL6I1mbMy7@F|)IC9>iDg@|Qy>7P-H zo_Anl8S31msG-#PZp46GQ!s62&DXO00-0Ebz%{H3;Fida=d` z^#RK$PD2oQRmGx-i&BiVcTLG+ZJ{k~2x>Pti!%$snymp(OnVS($sU|xj@#UhF(a<$ zD1PH}6Aa4lC;*lR(_Nsc-PCxDB2nF!{lSzF0Bu=|S(%8{-%$<;HG!iQXvZRb^2Uyk z*NRCZa;(0av}@`v9H|CL6b;CB@(U6=%|Kd`%NKIo-HB})gMS8wI|v_J*K6C>ZNW}$ zJfy1!+t{rX)!p0$P!=aeFX{v&$QE-+2tE`ZRAT(LsfGj? 
zk+)hJEmKRgrfQcgtGseopu1)pVBDyr$+vI_t`W1l1xEmZYD|fRN?hO=FmqOSYYud^ zp&f()TTYIku)0*U+AUcmmds7%qeA&Wfo_C3+XpZRis*eSq^yP*E?gs-s45>t^NJ>X|Fn6F#=(yvNMV%085v#2GA2S~I3daL9yQ!C+RbQUu%_h*rQEl*-Dg(S_2{Wy zr(u5Gq^(tBdQ1}nMTr!V5e!!WRY+z4k_qckeT?-dwC zNiDla>>`dR5J4R~C)BE3Zo)e#+VgE31GHsiW?W=-PZcN-7Ldu@4)v&d9ZOb#L8Y~a zt?S0*G0z+@SF;MH)29Cb;a&Ekg=c0Y<(nr8pni}xl^s@48NkZG2GrHfdhcD!F^76M zfv420*V7=lU~M)H>nupK;hFt4R%aO}8%Q4k!#G^7Ixrn(T2%M>>CDvX#h$PvlM(Av zR`h)_6`=-WEX!Q8M;1wA8OIqVydRJ}XQ@!UHg{PX>#Z&|rtpA{)Y7jC^V}Mn3Tt53 z)7DfXX`un6_kailvkz%v0U!cNAY8;5WdkZ)3+Ylor$KY{5Cb-_?334z?M&{MwD_1> z0To=v0#qg2Hxr9r*j^k@DxhG=G>OEbcXmA>okJvq>_;LI4d#Q<(QmmrMi zAbtoAu}V8TgUPS3Z9H^na5seNeZf~ryK=M@7SKcz+Mf-{loXB#+&K;jQJ2}c5>y|a zr!fWYW}&Y-$sv)YVOhcv?<&AFL6l^SvonGI z9;~K(;Bu1h(xB`ZvGM>x01ZSvtVyQ`8X7d3geelE3&mxXrVF=djQ~jT?;%Fw2OQ** zb4-=Y3X=QZ#MOl!+?mV`P2#n$_MCSrX*P9DD)YlFt1+zlB(fPtrXw34}#@_|q$h9d{lIq$(~>)B4-G46J;ui03l)D`5} z5-<=bS6q9f&KEf#V;yWKh)PjRq#&HfLMS{)pgTLn2}yS(dd7d;{j@ZTZ8n{#oBC{d zs|4*;>I*nP%*u}2u)K^PBxH4@nbPx=Aiax%{(dnZ=Bz|kd1X#J@rKjFkxi&;r1mtfdo%@1}lFRA=o(}IBAoWgSlPyS8!9n!1dJ_edl+pv!dn)C9 zYfM{lG!-nnn53$BmNspL;w%EXue&@9ZNWWQUM^XZr3WlE13GPxm8gpVUQ7(j!7gR(m7tSm5i2|X<0X~CUZvL`LsJ57(Nc!leQ`tFWIYN z{1_%52J!#3oOA`XZYiId#wIK67q9W&TY7}M{n5?GJbrO1N zBYfbIO^hXvgQncGjp#fU}65=)giYeRiRq-W`qk5B$JsEQjVcw3(Bi* zJcIrJ06C8nB~smMB!fn4I4)l1pJM^i)QV91rBYSp0Q1QQ1Of-`j~;VmU<-u+ z%-sj=01QmY^>C< zZxR}0&|kfBI_slpuZMd-YRMgU3bLee#u=HM84F;k8@VLo2dBp+Myg4nVRNcnxTS|S zCXkXGMXr=5O6_VhQmJ-o@rfguC1?_6ELORAaD~bo`azjPXL5`&Vi1u!WWL%}0R{YZ z==wPiXNFNqOBk&}=Tg#rD%5`QEYB=1^Q3bq3c-Yk&5|V89Dp;BKyDkX@V~>YS(F6_ zBhyMd2d1sG6Fdzzp%#&%Ln1*snI%f*-0?@ha2_Ec_kp?4Fx!$9a&V_M!h1kbLiN^- z;MER#^DPSSO}@|5uEnF7hjnRl039sTk`;pFCEpXE2YRvDx16p~wqw-XB^Z@q5DJo$ zo}?FXqYW4u=uW?tq4K2D?FYeEPtaj|(Nev7cv?;RfRfRPV!#rJh>(!sAyK=F}DBD-y7J!s2~O?F6|k2fOyO18wx#2s2ksC6#2A za$KsP>TEeK8bCnmV5~93giI$cx)SZ6a6EZ|SdEQf-42DrsLu7J)M@F=#Zau!y}6-~ z?}b$va9Dsprm7KyTr_9Q#7RQRfc&Map*H^jmgKwFP+`&@iiPW%zw;5Mh-t>q!k&b+ 
z^yU%3WslQIA9hI(Aq#~n{um8`Krv09+o|RN0$HeBhO56vEI>K~Q#)R&>-NT9#A+BoV12 zU??237tj*m7bK7~F|13p6I!>Z*OtYasIko;rD)9Ru^dRzxG}r>U}0K$9b1k|V?9un zDL8^=OF+=NbZR?;H2(ld84;NzSsbB_ z9RlqnoD$LRP^+#oWI}sB`McDYt*K*Tz#Z)3*AT~YcS75@sZr4ltwuvt7HaD>lTT^z zo2*9)DI~`WF(ERsp$v8$p>$v-;s6CEmAxE0I3|y6_M{j}Q&);zFRNN3>Xxa`s8iCO zIDJK0Eb5b#7ve{67Bj+4(mIlk0n*bL@`hr#m7$7 zs$Q(AE!@d5B3l=16~uek5rl}HzwXsah5)NEz}wb3aKKDTe<-I%zczhe?#!x(iEixI zbmZ6XHOq&%Elo1&HQ!L3^%}PC1Q2LNVS2soHstoDy6Ps;=)vpPP$YN)?T#mC0FQCP#EQ}aPm&m`9sXPMkPN*@PwW-w%2c?SwuaATU1 zgOO#JH+Yuu@uwqph8D~rnc}HdM7WdDI+KhJCU0^Shux!5VYcu zqNWuh#1bn<-nJKEQfkDFB0%aDqtt6zv2L^#CA498mdmfLA^ZYZpk?+djlf`?zIu6S zRMg~9pdN#uHr$R?gzXU)JC3lcx@hfBt5qgMhQ!8cV`J)j04TUtW>FCcGs6LZM?}0R z6BH$|LW?+{YFEHjPhB9x8l51@l4<(hlN!MdX(}exY35aoN{T>PGEP6HBtQwvGbcNX z5!7nViMdqaR$z56LI$OcsMWMNNHHx`)TJ>AWwgn5JYQ!5ymD3B9_Uwj+#CWJU}1!e zNx(S>r4xW$!YWPl=D=IY$4a7dmJt`bPL9kiW=&6UWH;i0R>Rju*jX94D&a@?!Cq6G zwhKr=BzB{^4xQIXEa`f5`w9x27K2Ux-B}{Ba@y%~wMk@xS?SAU`U=}sH#@9ykV>%( z5S0=#vH+mm!%20ryQm6!F)l@b-L;?wguNll5dhDjM_)~nT{*N70)H-yjP~lbnW3$%+jy_zEAlT_2-Blq72b>Xa?quRJyMu{AkU3T+h|LbxzbV|rnk zPT?2;uGL-O@sRD3k`|&yP&7Jg!A zYEN746F#8~%g^Xl1`WGTNN)!gRKca7GOa+|wdeYs^%S_9)I^EWE$Z)fg$-qutxBP1 zzh-zVq>+Uv#xh;pp!Bdo-oaG{GtIPOO!>lG)wnHMS(&Oi5CwtXM#umpLpL1g~0fP#SDmG{X%j*wo z38iR}&tp*ae&WP!6ha#lSD5Y_+G9LyRyN-j)Q@otuG^6A&sun8NtGxmAT3)LcBstO zYW3EF1g5o!mAxLJr$b(znOY~QkoMKsNen*JP}>PrA=@TENN-mM00JsyPiC=@l{=F3 zBAOi>bsXXXr)UaGPiB^-97k<=g4q(Y5hNrMuBQM3k{$`;^bOpA-2f?;!6~hcz0*Km z^o5z>#9I*kygr-LjbRc^B#X@Q9n8T(ev(%>&zv_QusW&zrIjl)ojD2td$Iay1Wu5C zmb#voES6o+QBabj(BRwbr^nEsG5VHLs<RVY={YNFb) zYG;KN#3DQikEGGzMouJB2Q7iO8@c3247Dh!RIj7C{k(cWkl~;`OYJWzAH0HWlN5<& zAT2sdNFW0nNA0NvHKJjCV&h*54s$+Yx=V(kP;tXr+A*r?Md^4&4_M;4iqAlyof5^_lm{vp+AVaN+-ADH;`J$hcyY}JHomMI8aNvBPEH<9NytQC7E zatKj2z);44pV*6&yq03aJ!yelp!EYUjk`Z1J!39uNh=|Og=x~&cPz&4=SZF%O9EX()rvat%IZ=o)?+&g zs&ONb2ecI#S-qff zfLIjZ0&~>GNOnjx-JkbvjC>&%A^@TE_WqYcPYq@pyX%IM#TLRt5iA-h7jP>sH@W;M z2PZtwh{elM&KkA^xq9-mfH}DEa0|r5ld90E90%@aNEx0-w(|yIw2hfK3y{AA?-{|t 
z>oJz49OaVq3M1Si^MoKwq(eEvSOVMb8zW5{jc>s_D-O3R3dr}S!y>tg-OG;XuZ;8g|0i?!Gi@`9+P zvPc}{0L_i3HrK``CP0%cF)dvOfzsbg!---d`=jV&-Mf0t8pZ^dbWA4oRbW74&hp4# z+ku`5U<%|ZJ9^en6dkf<%{p~2PHgq%5M(Z5CXZsX+0=wGJ-ULV{`j&Al~6>>yO6k5 zet97O0E<07D;#7LrBqQO`hsoaXh|8tXs_xWruFNX>?E>UhDiS6D%+$gWFb@bak~Q` z`DGd8b$V7XO)j6yTAo+w?`T`WRs7H~k5$1b;~+4Bnnfxng#l@NvhF%MUztIGZh+9pKF)wXR#TkR+eXY)c;L zAc=O;XY&`!k4l7h298{M^sXP}8FbeJycaH1`QZKhb#Bac*uAEmb7 zHdhLKXFX@k#3?e$spF@&tQtgFbZh!`%i-s9p~%S5%>Ia!&}MC|A&-Lc7HoM3InDqF ztf{i*qq=+H{{Zp>N6-KtX6F(B4Ptg(HnLaKLg`Np8KIU+kiiQ=_QO8?%_$D2V})D= zz*PhG>Jx}r08F|~OZa+^dc(F_M56ark)&0iRQyJ5r4RTG3pavMf`QI7|(9QC4Z* zJ5KSuZ?J4q=%}!%44^Ug#!Dd^xa(x&P?cwx*-Z%KO-;#BxnWa7XkrbaaOiqneI6QG zooM5)X<`$!t?NY?^vX2wzSU=S0E6u-BH-sdiDyndqE#r6tu&*%9i0CF;(VZTxjRD7 zVQJbnr)C(h$9>>$ww+>KiC`=4Y1K(A9dN8heX4Wwj;JpiGHMGBV4K*6=z3K2hZKfv z{VJPSn#(QUR=t>HMaU^4mj*_1m?2Opj0qHx#z6OSN#e5b!6c@jk46At*OaQwTs%+&3+GaHmIfVO03|r#WJa8kr1)NyA~yI0P`C7u$xNaw4gUxr9mR9H*8tZ^OD0y7%oB-|PJP=%jv@zql_ zgc-8anMpP@OWnz#)}T~_M*zWu0$9aIP3&7bB#Eg_Jo8Nrg*2ew98xeWOhto|OE3cYNZh8C2;=TRjM)yE=)m(eC3&l{8{w>iX4hR>j$(uQYABW0c6s&@mu@ zKRZFl2lSj1oMWgHD4~#)m-kie|lNr>67CY`U0?AYrFx$HFKvogP>@zX%lI^y-B3L)>b&B0b-q|Fn!E<5eU^mLt)RN z?i>OUX4*3-#-0xnnJ8FXoaD1;+Mf-ppbrR)#0R88$)jqGPMYfT)48eWk@>j=SW*=n$;?a6R0boevZhF$uEohB=8sE$U;{NM6mCr*&R`xV ztLYKjwQc3MVyJQVHR&x@X*M8M%RJlOG}*MB#hDp~?g=8i<;s>zOQwVxRPtveJ2ub+ z6JraDQLjYQ)h`Vy_uNVYEsao%wj|DGlQICAEg~{5(z{gc-V24UC5*w6th%tEV8Bt$ zg-O(m%)!ixQ|DNV)jOUYDy1Z)rA=JR8=dzc5V`)FGD@rMATyKgCn^E?!Rj<&4CO1d zSS;2rs}OHtuBEhwBnt@px%)1*qeEXYgmg`I?k*7uUA1nmo%uA z;C=EC!~wkegAK7*$DSV;AoW6gq=W=6T3t6{=9(J-C|>qwX9mL?!kn#y5%qUKI5#mtxSP#!=x?vI2Iv$|wUIHto6Sntc3}dYvJlS#;ok-3!V?nrP zp&v+XS+&#Of>-Sov3(uuVsEM&swN1H}LI^2_X#({mruNwaV>Zwr* zOtE*SgORhdF*n+{O=jA)_A+Qff^?qw~wjifUf(^BU(8%NMKq3BH>i8g%*q#H*y zs|nh=gqqb2DK(u&)CQ&PLrT(887UP(=|El4RTw))G8LF2s}{&AwvdF)k|g${B?t;6 z+|;#dKqbLqOKRp71~!S!Lshffx(L;8$$sqhbNNao*blVTlStfMXBZtS(YbDN4Ap1nEr&g&o+bO$V4PP}DUV6VQWJrAegw z=p|Vsj-WFd!}auUExrS(e(O~ 
zX_TqSEr@krR$=I_qJ0IvxhCIXY>lkj9SkZ|mJ6NhxeDwE;mZjTgi2?sSuSoW z%t+iFDj06=8WAQVH3rnbmo!qvWi=|bWz;gW$ak3^(L@g626tthcE~@b#FZPfDZ)#Z zha_A(fEZk|YIzG9@`h6C1VjG-GVaY1DNS0JPNi1KG1ZaPfpY*XqDLnJHUJVpbtHr+ ztCJce;FP}lfN;(QyTywDDn*!(MN5*<;d{luP3~&;=xWvLY4D<6De5Po8d-?OcOxSY z7%oGQ+gO4~@wSkeHbkXKn99u6HS#8*_S93HF=wPPtoESR9j!2$Tu&qsLLC|(QMS7` zq@-_RuYy!<pnO&+t$W82RLC8`5&k3|KDXDhF{kJCmT*36}vCAx19X)vj z%&CPccNZJeC*eY=AY(JglRFd_QQZ!Ay(rFa;Zqdm0WB_lyjEk>;=5L@y3<`6ld=h< zhy+t3pd6rYgO72?E6#Z8C8VKhN}DSi@;Vmu=cE&c1VK>Tx@bgXuVN}>%29+(5Ru5n z!wUf#SC7QP%KFfGbapQ zSu=>YD3U_Oy~P!MxCf>ZCQVUMCB?QiX6LY`=C+9_)QH&R(_=Ed=KEbv8t}-J(Q_lp zu8PV@$VCyCVBC$-V{il$c2t0|F)LXBJ5q(np|cyGquMJoK-!x}r`nY%&o8KvNVa2^ zvY*u%Vqe_K+lwwFQ=SHKw;di(f)#gWeyqcg4?}*X;hq%@rlxf3)U8DRoWeq6fp@D1 z+5jvFkcQgZbF}9;{1Oyp5Qpb$AG3SiHn7Ax5>|aW7I|aTtH~UN>rRriEsVwn9LC## zR>P3qLC4I~T3O36vaC7ieqI)Yk+zUtth#erE%w&0sVY({2-RiwkQpRc+<>H^J4htw zoP0o)i3(CB(ISppUw3#SGgQD#WOjg1 z)|IH%_tK555ZV)3(m_P&bqu;X+gOL(!7BlbIEA&pa0(yvvTNTWy6!1u-K>}t|Rzz1FDIc_&|h5VkK66YmAGcJ|BXBD@g z*OVnNsI&$(8ajPcky<;V3D8CsBJwl4hxF8)xGK^GIbLzc9GfOw#T84S%qnyM27PV@ zo-nM@VyRxtm!p!r^4&_&zpmyG@gi<*Bte~)z)gThhyRc_^P*nP9 z2G6V%wJg|*fu6jBO|@CgNGnLrSRvRXe`Zm=ML<|>C2^7vjOigJT0zlH_NS@!fi-~X z%&@Dq*kqZaknR&$1xtb#oD;O<9Dq41j&L!ZGd&QKPL}EX+h}mw!FsKTj*hfosXP>t zIK4^Q#v}DTP2RI)Wka;xKLUzS!oXP9NrlG&X? 
zq>Y`;PcyA{fho#!2zJJhx^AG>HA_+^r4SPuq=$4<8Zxuskc?5)wlUmBdTn$FG0-i(-tSbZ?#t9PARh}wl&6hFHa?$jj#yuipSdo}wCJ`&M9oqn8%LeuX z7;`xZT9@8uVD1IRW)#)D4l>;oTm2KRC zjNot}!=ZE16F9LH4s2-Wp?4eT@M-}hv=6K4+DCCj`h!@i`ktW+QjQHPPy1&PIEq-> zFsiYWwN;2vIYXRo3~#D@)BvZliBL8fd$ny8Ngk!`5wK_0DAsfl$^;V2Cb+|5OoY{4N_UJ0#FrLinrL;hM^?F zy8YInqswBAda?n%`=w7fQURMW{ zO4RKB^4BD)buQS`wBe%t#*Ja2iW)3p^x05P87kXlcMOshZNqj-g&@_KtTq=9HFBmX zxk`(YNFPHfUUPmfvJTOgLS;0v3r-ZO&Vj zSb{pA|@=AI#d8{LWv`~DN8*~YAEpI^F-BOrxSLjp?65rq`6>Q zd5@uu?Fm=i>7=9DP7nrO2_&f9oOPV8Dx6{j`Jp2y9w3X6eP~Bg&}D@p%MWSV8g-_W zrHl3?0^9{)NFH*5ceE%-KwEh#TNy$?_!%q4r?d@3iyrYgUE48YSTF^hh!3m`Dz|S< z1Qx8LiD8N6sw&9~yeWX{7C5+4S)9mQcgO{BGECyfb24tl-u5NcdIkV`)(EzdCs0Wh z!#p$U5HMsbayt&M8vv*oiEIZ8fw@ZnP6K1AeY)kSE?Ev8x75+9&!J-4JTw|>+ls`n zw7Q0%=~y%dyxz1XHrp?>1|eCM!*brjNF#6~qr@d)lQ;*VxFiA|tnN1(-;ljvy*hh( zwxrfzj=g%1JZo`UH)O(%!AW3?+N%gQsjZSk201dgrCl`$of7@VMWE%~$uUD#cqXhmU)z1a=@Q4W%f z@>`sY8y{^m zNlw)49x(o#Vn7QOl@93r#AFn5SA}kYa#WA+B$6Jq)K>Qf;)TdzXNduLV%KIJ3sYMZ zHG6Z%6wsQ`q?09cD6SD59Z3G0#=(~jxT6L3o#(3)v6DoThL}60$)MD|L2;W$B9$#+ z4(8${*G1FmT9$EPY%Q4n?pdNnbhL!4AazMFWh1vTj)LTxbH0NVK*>9-{eoJkLO3fw~A<1<7RhgJ8ymY#l0OXKZ5>0D(BP<7V6|HZ!=y!9!@+n2b6gku5)9Ne{ylsYv2_=z? 
zvF?x^$8j49?pAH1a!xvAvOf}xwJsgVHF^OKW-y^@wl{#1cZTV(lN) z4FXpFX{zXmbrcFIO*Ep@fE5-=oq)`WN=b#_;fk{m0qTtWJn644QqEI<(_6KeXnAP{ zk^!iTRGP+<*~<+nwB1r0utXLMb41%GW&;gg7=eyNW}Dlza$6wbHnv*OtVF)(1xBW} zccQZz00myV-X;8|CTn*)OQ=%Q&3dLeXK00Vys<+H$&8(n1yxeTO78tEr*h<+V+jZd zQp>$7mCYY#a@^UiNFcIHKpJm#CX)n^G*(*Lys>GJKA8TT5k(*l%67Dy!@Pl%IU#UD zuKWwrBpIm+3hz)T<{F0r;8mztRl#@%A=9Kz=zZU%HEmk;hz74>f|_&3HCVk#tEx1v zkVv3~j54YAvo_aLv@Tn!D&h0xkdUmPl$Hup-JFKFW+dffN|z9M(gmIbyQn<%5h6`f zTGMNo;~Qi3Cx&ymp=FJ{5?66`3ZB9U&+8;%2v2B|60adwa7(!iIhy$iTbk5ER<){K z%9_5fJ-r~s7RXvwD_TaHI_3RnH=Mj^ibLT*8;Yhgk!KIXXUdieQbpK4gqw}rIAU~q zu!S*To#0AZ)|IMiI)sUOmANWXJL7_8j;V6!a9TBG%LkAaI|1~mulD$Es(M+%8Ux2om6EvlB!q^PE!oLB0EKP zV6_1k0JFOXH{^M16Ly6ntw<7mKCP`v?9)*zX41(TSg&qq+wOfK?-MeCA{1k>TalFo zh{sS#nxQLICe;Fg^#S!20iR&dE(v%CQrBNku`NLam7uL6{hfF)hxFGV?Krf8vH?jNxyq6OCXC^erh&X+X5}AFwQl!B)`pv8*~|-GeJ@b62k;2lSq$Fn_Rq)36dhf zw&hF+(O3k=TMnl?dFs@@$Sf&jbNFV_ zY@;@DeF(F?E=hXo1y~mXBeetvA9QQhYF(|V%TBFGWR| z@}QSey*J^cWI>ckB!U5}5pck$J-RUsV zg(=!^zSQ+AI&o_nhxAk>W%uD>GP^R!8FwT>&!`bou7u(+$iu2eW?%?4dI!n{8n^3K zq(%~6&~}@m>ND#r6fo1ZsjXP6($sECA&ePnyFppi<7mXB6@)Y8al0zNscYcyB6eA5 zAZIV5R;R?VYcaXj8B5CQIzgA!b<9j6bvJhKv809GnaV=-xMsz)(a-KqfuLda7{5KpRU`dQK>wJm)uY}#}ySM8vR zHS|eH_DJusu#W!LAim?sAWbz%#ATRoO-h`d+yMSo5YaEAcfEhNxkAOmH0ZIULRzn6 zhCA7!$xy|ga=38g3y|3)Vw#Xll2k9gR$+a}uGcSk6GI;!^Q%=RjuTr>)Hg<~Wlb$@vyk_;qLCXHsfOq7x;mb#dMfv!MU=ST zQwk+zDHv2xc-*_l-Nr%2TPLU$hntX1S~Ad4s5P%b&fYX=Fc{JYhq)Q)?`NLUMqsQV z`;<`307AZ|!Y~zpWl{+*9y62Cehe2;U}`~Q)!XFusnQAS1`D0P43gP4bqJnTh9pUF zjbe8!tWo1!D-UUGg#>a=KvOVD2|bm=(`116ee5c3+r=^gygs@$DwDi)tjA^sXo@>- zE`gcEh!!5SjAISeDa-D;HD~k;`V+VxIB+5tUx>pNt8%uD1!|B;hDn4zgz~hp z?v6AE+*!7>l5voPgOa^kloVEpa~5+EL2v*pNzsF_=5}~xSdnOihq)@D2XKiH5fT0fpA5S`)y8e>q>4S5`9Yko2b~@ zBZlp)OA^?l43Vr#fD^IvhHqh3Qa~6u!63{NDq=GbSu~*{r>5H$HxBIpkU@gC_ND=`tUccYkZloq9D#X)5g$S}@f+t{z7C_lC zhQn?k?N%K7N$U6DX6Fh4t6&_ka^phOXSg(|5X1vmhSjx+>Md`&wGD*U?;@5nRz(vSfpR$o3~a*+O9?pur$nqmF)CVt8C94w9N(4b`WI`4BIFoN zuCaCP=8I7kMYPl@=~3C0WVAQMw17rfA`-=qs4%334ng3zSwMDksI=xFbD`R=X1E!M 
zrKc+pZ z0S%CP)WPB;s#;9I`cp<~g$H`m`63xbi!~y@uR%Sit_pEk79P6GD6a;{s*IIl03>0! zlW710BLD#LOwCOBh?t-{R;TyWibFF*evjJAsu?u<`jp+brp+TtY)u@ta>~V{%s!z_ zyR#b>Tx>ZbBb`|@(J6hL^3-c;2DUEj+Jk63z^0}wCr+y#pWF6gmMXKwU)L{L9SmET zv~1^$20NT=9y8#D4U08Olmdb8D4JEn7Nr=_UHb0>o&dAlR&_+v=+}y~#aUT`$u8Cj z$#7hexm@MB8;SYB1E(yRB`c+9TYVcw_hDp`9f}N47&f-KeDzOdjbwyll zazG_tA(#`8Nh7FJF$-Q&Lk(X`7t~(54vh|T0Z53}?V3}|Pg9azKU6g0vmA{gS%qtb zz$MX$4%bs+AiZl#DkDrH*=7%px%1GXd}2LrORt$Tb6NmJsMuip9d7lJ{lRbsGA9 zolTbgaN@h9He*9#NIqv^+?`LlprC1aImYcv29-53ZG9 z;1(|AD1ulTi{*yQFCFbUTBz%X}?qsF9386~&O z+&7Cb1hK2VA=6vM)3?hev8UcL5|Y`BNo3e6k+uYjDFZu|inis){{WL5t~1tuW)^~g zKx$N3%U7m{pEx@l(GjSLp_fNW9*oo5V**x+ypoxT^wlJgIRyKoE3-T|yn9Z1vn?Wd zS~9aWk0RhR-QC5Cly(Ia!PP=4hCGgibi0N6k6|dKucJ?hdR<2uqfB7=$ zxO2FigWv(l$5iG>?3SMJ2?L$=V%BfsuxVmB#20bv^*ej)V@h{cd(*`%v}aY4$EM53 zaEw642v%SL!?s318uBpz0E!9;A(9I!pgzIdsRFhQ0%&3h`{zj6rGgeqH`X~~FqaJp zJ42aQXf7E{ZktOIp^iXd)w9EKBRQ~3yX&(Z2rdh?TVn`>krr(VrCl2II@}i?3)Gb< z2-GZYg{BfC6SvxcaJzsoTx3g)n8ITLxw6g1vn*hwMb^3M`Eq#q5GF*@$`c%wyq^c~;CW)~XXr_opY=RUSP^R_)9|d;-fDUqm9MZIA zDq1ctR|51lxGg|j>|#oUurSi9TeT&ccH@pf(IZU_L=hneVG5vNXWHSs9PU1Nb*P{O zOiR$vn(uy94SgYnOT@euV!w6+cKk8XLnF=TxiFw0F@uHL0QVkS%M1~>nzIs6laVf> z%}VQ5CErR5Uhyz4YGHM|iqS_e7MrQbV8|Hh^3NV5P*o6mg;g;51dv&GhLs7}ffV@_8-s;q%qe8_kAv<_U3QaDLS{b2KNYsXk6#z1-Te!`rM@elj z-4=hg7tDK^A!J4{p~OcGBNC%50U-4fQi+L53sAe|eRr{H0QPn;#b-#a>ph_&Yr1`E zTAW%u`fK_x9-j&b$dLm~VI>eVz9jcB<&YJ?0ClpQQgG^0Ry#qyW1*!@Sug9Y;wL85 zh}ruqx$MJoB-NwDQ%hYF?_-VRXTv~}xQH}NQPm3tE9w*JgYN5IULy#lcLPgzsHktv ztwkvQFf4BodWU8AH+2~6>R0M%x>mqz*VL@ox``L3nALZyNb7*HpBOAl1p}cxR|aO4 zO9TTPt3j>8gJ)iC%Q5k0NX@yWEkfG6LD!cH}c)fC1{P zzB@H(A!&9ras@!=KzFUG3&Zwd!3HIC&!x2`xedtfS+KENX(#l&s(zTZ%;PU3NYP{{ z&cMVlI8XzLXGu<0rN3vO6>fSFSUjKyiET4NvlnXJvNcJnBoIel-D+l2>x2a$^(!tB zq1+?dNZq-Z0gj;UCS+GqYZlX z=HDw8Eaq~m$7{13CndD2nw-F6z>0 z%QCceYq8($3ANruTsS9TJ6Ah~KqQVS%Eu^63GDAzZgv;5inR`cB@EYtDrgH+Q5I?U z1+67)BhNj0*A8Z2g>YG9G6epr$Z}hG3>0MU>SAj`ox`Ok8e|lw7Mj=wy)124uRjP(OSjHD 
zJjp$CtgNxe7MRiL%N_ZriYnonK^0^qNbTu*dzqxcWeLs{j;>-QryDOiO9Q3No|XhL zEa#wjK}1#~rMB_UENg8nLX~7eBDQPNE|x){H7H z_G?3xbBSF#+H2Zv3J|`db)eqSfez_RWn9Rm8BSp$H6_j*ENpN*n{uQrWXnXn$Cibj zjA}-;AbguR8pIs)B$CK%S(fh95${^QqB$*>k%NX-OgI=~HxjEpctDH9ma(nDW|Kvw*rx?}p)Xk~TWo2oU3HMOd?^wBJKAeU9C8L*8c z7%J-+kx(lMA|pE(scfe89aSpAAfCnBl{I}!lc+r@48{sIs7qm})wZZ&WQpOcO(3%j zPBMxVh5?CV?gJP(!OliJ#Vi3aNhRv!k_i`fA;8=CCeTG48Z=v%G~3qV(<~OJ9CFxC zRy8{gSRd)TJ{t*=2Emt8jN3%ScYAi`*ZZ&<`cej>(b_%ClD(zV)~puoFWQ>LT1v>H z1SpK*f|lFv+yKgxv;)<1C;Nl644dr!HLve@g{erDw?*}sr-k3qwO$8nU(2hNTFx3M zgbEJg;ze-k!0sxnHk@-2$P?Qj0_0q<{Z-lAS4S|}YS7Kz)Tu*Nv8F9uKKr`QKJ8-I zj+|ya*VJu=upQT6_&h}tDCpQV!oAtlWY?Q2LuJ40Kf2(bI3T3Xp2EuAKUSS;EM)?yaL6iBVEh?O~D|a2s20+%U-}1`JZW0A_@^qlTjw9*uDC zLugr$krk*fS15RfFyz>UCvmQelRz6wJu^Mv-Wz}_;>nPp zsV0HKgOujV4UMxg1E>zsAW@Qmok=qmO;iM3l?dG-KcG4p5lj12 zsQ?npROf-7S859+I1b{P_%XKkv%@VxCLFt^Sc^%OVz!Dc&01J1y(E?r7z~fN4opV@ z2;PNoO0&kw-AOMCAg4UiM8GJ{Y}ByOQNnu?@!zpgVl^T50fk_vo4-O;*X)AEacQZ zHi-K|p!SFzOX>y0mKgPID_hd%w_PUF?b*~77I#Kf0zmmJzMk+lgD6)#zC{~aak_2T~+i)ZYQpy($MmJ;RV;JhV z!}g&`4i2=YALhPRPd| z(5Er6hqRDe1{P6ThJ3s})Bm zBmzcP7*T~*VS>EQWgy9z5?C$$4btzM_AK{I0Bnu>_R zG~CWc2w+vWFHfT--;Od@t5{rNl%CC5RzP9k3lPm}u@#|ryg;Oa4L@j& zt*ytW$$HVT8(6Zdlykf8L~L1Ok}y=`(+YNhk-Lu^1j$RAl?_4Q&!{#1Pgl~g;IuQN z_Z;%aZED(Ah3;&SyKLHwHz9GsKEaNDSKzSdr(u9>!Kvm&3twHg3R`$~q)OYXQCSiy z?3TiU)> zHP*58fH4TnOrb-%s}FzV5(($0RJEyHttm|nhWfVPJ>XJA0S>NpLwZ?6wd9GdS#MUr zERhU3U>n-tD=tD4$sXJsh(8Kk2~0yL?AjN5IQcPI_K515lof4R-6)VouN;?DhWmDA z+ND)UCk4ZoP(e~YPe!>)T2n@LAZ`G1V9Xb2Rvvg6BHW3IQpB>7cE+tm1?KO;eP=bO2`oj*~%2A?hQYK5OcqNX>)5<<+Lq|Vp@gKk*2s)h~^ zw1Lq}nVw#0jK;MWdedEsnaGA?q)c{~X3KxkSoGK)g`fN^htv-B(PP^jp%H^QiB=X0 z3kNw|9F5Z?ElSR(A@vt}onEeL!Ku4MsjcCvouNV+FvY1Gm$c}t5}F+BV@PBWY#D3| z4Oq4|BLJ()?s0_?&&9O9+ED^w?v*$shj#<=g#h5|$QLkrF%dgQwx2;wK&^`RG|JJW z)-BkHi>rWRCE5K$vz5kKqX*bm13gxqFwSX+1hS_Wbk*c-I#C)0E1Ksl8^!v$|MIQuy@o` zdJK{_tNmdUT1fpBE6gEdAJpF&A(W~D9kGmKqLn!ril&Uq zRVWQ=*|rRI1lI7x5Ie9nyZVQ3%^Hnbt;sCa`>INmy*KpQMmuCPMzVWnZV$EiQLux8 
z7^ype>YCD(BxFqi-i`%`)LgrV4B#<`N~>6&hg;G{ops&TIqPBUfnqo$ffxsf#-&L< znyTdcxC+DpyR82J!(s~3);6IapUS7N>Ll%`4@i)dfbl7((5tKBD;hY|!Xb*a3#uc; zWeF>n7?GTkJ;aiD<2^$2WlREE@{(MUsePV}$PP|n0tj*;Omq@9of8wso8ohXn z!!X3824{K3@)!*J6@0$+J5(vW1hFkkm`-O6W^3v|I{{BK2$zVBS68zw>oQM&Pt}rd zNjKM*C=`CR9-Dae0Aj##w}3ZbM>8=|1;VQI724sgVa~Rz0`-As)wQcOUVr5W?WQJ7 z%UW-8uvbQTgKgU~Zf;ZHh1_@!Fmh$ca-!jZG-0I%=`=cVq zV-$C3%VJpM`+ZD^{RRcq8e$}3gbz`;8@7dVc7g?olGcJHD7kHev145-);eh9Kvn_M z>*_NBD&A;Cr&p@9?#GEhf!_=P!UaCU4?D{M7d;4&gba!z)$zB9^cQmo9KdCQHEnNF z(5za6`DKlXGc;81L%IcpeX@p6c|;5uN;4dKV5Zz@|Eo?Z@g`I%6bBsi|(zgif4 z)T}E+?m2ZTM&&r6tu0PBJgSkRGP1{lR}2AS0+1AjINCFqie}212~jQg*1vYx=u{$+ zC1Td6s8o);I+YqQSgAZVl50`UB7s9lwYH)xez1@gP@X-5&g@RSB)-r&1wdvMYWVId z!tX{fNSUM3D)eT#N-a$dOFCYT&I7&S(xi}LR`#I{kLGh&io_B$vAh)^fv0f^53z%!ajS+3jI39McFAJI z^;&efJ*iBj1cE5))BsQ0oeq%Mm{u#jy*-UGN*acz70Px5jO%6#5zQhl3aczAyO3Zh zB2rre2nQLyp(qrIYJa|$=d}&Y5VfLaN7ijt(zN-(i|t{NVT{}j9C7c(SvTN1l1DL0 z01vR_Asq-9Wn@rRWf4~t2T%ZiJwwBgpkwPpR@LPoA&z@gaVd!ukc+7CH2!%YcTYKI^@HQLTt4qrk5s5p7ZrEYrA|gm+Sz=#MLn``3*cKu{GK}D!rN$vL6WOHqRZwheKnNDq>rGye$wH5L z;4L>y(XHRNCbe@xS6=-Vn(2hA4OMZ;s(qwR>e&G8i$OTvdwQZ1!UFBvSXJwHv(2xU7|`L8h< zNtB}H=x;;O>Mn1ncyYWFq?ltC+L~B_?9K^irvCtOakqs)tTF8XVUp^~i6rpHxR+&MS0PYmaLYRJw|*y(lBH$^l3X*= z*6Uw%h~Gg4K$Gh?%=IFR5X$nqEN44cCm7r_lvjN4IOk|RP?Lw5FchIKSX-l?m$ME| zkyF(kJrh&abo!9`aM7V&J>ppelP^3wh6E4lV+d4X6k`LO92opil`?H}sYRGD=o^-= zbG$(?AOt2I9QK0;8*Y(coeexiu?#PP1aP zR%Cy-hB%gK5A`r&fseRjZ@>X?02lyd=SF54T2+);{{HLsh53V+Z%&3!U5K$LRj#%X zJ-bp!5?zi4Fc0+96Y@Abby<@#iW`pwWpUE9*w*kXERshk8s47{r`uY-qc-nSzKK1^ zp$+{cW+9nEocl>$SI0lw8#p9oUuFro>vs7>S0*%_2Bw`qWXGzukwi~s#r-a15n&z} z^q6fX?s8l`nE-N2Ds5FDC>>Z_pd_S%ZhR_F&b4^4kRK!TjNj^qWmD|ENWPIhim}VS zm!Nh$ajQ%>G6qw%&KKN2;UB(eU@=RYoU;HoE^b&@^1qw~rZh>go_fnG%w>`*PoyRSFBL>pJ6_hW1S=Gj#8B6+tlpGt(@wsp zoyFC6C@jPP$B!7CfVl@Xh!m?-6KiBNdz&4R#?P9 zF~=pzP{p&7Nk2VQlQiPgCM*CzNvAPim8}Q>@Gok24e4N}?I`u`24|co-lZiAmHciF z!M{2Ehf5VH63W07A5c%yB^79yr>;BMpH4L@n$q1VgBdJ}0CEtMmE2pBIrb(9#(Jy} 
z0zupZ>fT(f2AGqhLA#&4b?qv9Qq_Sy7HO&^anC80q_6ImX5tYMb|@iKf?{{V+?T4Rag zCSda@?@a>-p!Z0G2UA7bKEcqnKJls})1aUZq;e}nm^_${cZS`SPw{8<9Ak`gzv@%i zrRc(rc^XykTfqRzan2uuwDj9m*5r`;qqbsLRPBf=qJRpH7-Ci!Y>Z=%%{Ag?rd46GjLq4ZK zPs~4@M!FhA{{Yj|4V(93`$%G|3^6iRk8n{NF;fjN_)@czn!6unL1;nt;%QuJI3c$f4hz*`jH+>*;!Cm#A2w6?*WiB#O3u zF(iUb!Q|TQ?mjri2OUSnvf=S5?9v@B0o8z3z8=hL;S{CHa(#kF(klM|%Am3JCXkVH zC8v^ECQuSm(EB4L)B=1KAd|SU&OygPDMeUH*ip2tO$|9xz)~#&ND+NSn{~*ou7%HE?Y}k4h6cXH!;Y(NDERXHOL|CyVWY`@Wy|c z7O!SmhUa9C6)FK33P^ILfXNu=fO=wa_@e=SEMNcz>R@STfAALiD!fbgb?^3PL$>;Tcp|YQ zeXN!qKT=1DqiI61Y~-s9D<5)#aDIBBg_JQ<(k7BXs1CpoLUi9zVswWiD0r!9^ZmxB zGD#Fq6I7!rs~{y9rZOD+zI+l#13AZ9^F~QSDg8?xyIaUaX7O**ZdAQ6i-N<_rW2C9Ds1J`n0`aBul9@dr`9iv*!O7YdRB#lm4 zk(r)I+1ERZ6Ou47_{MyaN|K{Cc$T_=AH)OUu8qzF&^Ur4tt4_ys7G?tHj_?*$~-cd z$}wo;!j-{MwU2@hetNSlAt+N1%6_`Sd&M_e)MTNmY8UWqO$}C<3FMH1QXSYNFl;eU zTLcr2Jzm4h6Vi!Fqp7Ico?wbS98wrUHG4r*N|CG9lkRi_tTMa`K#=M7BuKn-5jMwI(4a5QGrySe;Fs^sAaPa)_3qGd7Vq*m@KfHEz_V(oN~= zH-%Z*pVTD2_uopti&0At;pwKJN`tfGxS< zj}X@Nnh2ppU;ICzEQu76xf|9%AVObuNiClzsu;v2GgXu^v+`hDW-nS+0Va?=D>BWY z>4Zq})@sn(YD(?sJhHy$cT!IBNXNfCapSDX;7OiHse7BSu;e;5YZfit(cj2m7#xsKkQ+G}>m^dhX6CQGYn9)q)0H6w5~himYMY&~wEp5~Vw%*= zA%(Uxx+Cq7@yiXr+m_=eJwj%xQ>a_JO+2|hH5x$<_23Y*QjP9IZgsa7j>b9JgSvWk z<5j_6eizT4P6+5q`$c71K?Q6^?xXB!0p^B_H4I_JiGamVSGIK`(@1BhJXNBwFeD2kxqd+W zwm|SOFaYZGu$7L`iwA8ie%HR{1QI}pE$h;2^))O-K+N@b8Mk^+M1t=F!Q2*V>9g7~*Rl-9)UR?Cu?(p!;Taud-I0kH zgXA0>L>x(TCckkzT7jYgbtMCvL^6hHLF`~kZ@ zbNY|%jY(-vAw9Z-6_N{B4qaPUcJ6X%|*ttmN@px|hIyW3w% zL4!!MRGqt%we-DG(x8?rSIC4Qc$q|sy9&5(Z@iojkPqiIX2?z!SxO0@PhB=>Fax|+ zJGWcZw0eEDE}f`PLiLR*YXmaXDBcq zs)xaY_0{*~NM@u)_p?bZnz5wv!Bb9>GfI*YlPaTnbaP}Mt+vnY2asjJ%5FWo{MTUe%8!pCE0rp|@bs@IJJ2Hm$R zNgfA?&Q^8HHDHvvy9R=4JQqW`wJkz}qgnLgnt$q|#;v@paRp+`3uoG=InNmINI6)U zt4afxwf*ve075mllS=J+H)Eb>wLQzV9o|;Qrww|{Fra~!Y2BFPISY-WAl>{dqk~Q% zyUJ7=O?~o+t2(u}yQNqYyj-+mI0=bWLc4&wFjw3M=_9}%I?F7ytPm+t;zs`fSefe+ zow;2vcV+E1bq^F3H^(jjkyynOme1!df9v|W=;g~wq?H{(K17eF($He1pnmA?Yn`{C 
zt-9Wu`Zba@g_NQnOsEWY6=F}8=iGTYJrswVJRlsd)~23cUU0;)h4yr3P>N@1WrS3$ zwj)?1WX!5%b#`EP95Bu^x6VP&K{aIngzEl)cs*};5wsUptWqwer!r+HnKn(5B!I|^ zB#ei~;?V=SgAuv5;O!!LGjiP^&~NwV+fo>snCw)l+e`OsSNn+txL4DbX<9wY7DI(C zfY~|7z{v5-XEPCqn3bUkpeb^|Rmr~aS{RMfG}?NL=@F$!^!YU#FCCi{GYqbg#_Sg$ z{GToQv4sp205qmz7MTR!AfwlBzOaA~5}GGrX_Pg3@A4z;5mUhfkrAP9=RDAIHdS5~haVoG+ZdR1L_z$BgAJi88m zsueWiE(cCg6AQ&>yZgrehpRr7Ut=Ww#&lM*D?3cEMHFs@ zPV6h?j#vSop0sdyL`=db6is_P?M8B2XKxrvQ&XHuce)PQuNBHTNu|>3>Ojb%MoA{8 z>d3p;7G%iVhb(sSo(2TSN~WSwv1k7P$+V#GYQXC8l56^lRqnfUtT8;b(pceX6t|}y z@;xvK#^36kdDpIA%I+|_WMQv!0R<@ z%OsSBEIxkx%uJ98V+1{`8ihNiSyO#2ja|iPOCv}G4B04heKb|)Amfm7dL-i~&QLQH z_8tL%_BE~IRHZ3N${O((R-tECoDQ(+EsU?FXIT*tb`6R1M?x2zspJ)Rh-kwl^20&ed zwQ18*!OptCB$DPKdwW5tHo1F9buC_`d!dlv$c)5$fg{Mz9C6ihwJ0WOy05h&Km>pP E*$X~lF#rGn literal 0 HcmV?d00001 diff --git a/examples/vision/detection/yolov6/python/infer.py b/examples/vision/detection/yolov6/python/infer.py new file mode 100644 index 0000000000..060f29258d --- /dev/null +++ b/examples/vision/detection/yolov6/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolov6 onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.detection.YOLOv6(args.model, 
runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/detection/yolov6/python/visualized_result.jpg b/examples/vision/detection/yolov6/python/visualized_result.jpg new file mode 100644 index 0000000000000000000000000000000000000000..d8a7eea96ec7c3a963ba5ae892be14c51f4c60f1 GIT binary patch literal 205296 zcmbTdd0dj&|2}Nfo|)5PmR5c$%PF_XwcPkrlV)a4HsXTFH0~0T`-U*nqNS;$rX)@} z<(49n3!;E9ipwaXxsRYAxReV5T7t3oyXX6Ty`JZv=a1)kZeQFlms>dRbKd8i>$=|8 z+4^T|M&sLC&aTcH+qP+FYy-YDwzwMCHNM&Q@AdEfn}4qz+yA}q+_7W(j$J!^Xa_zFo(2NMdODOZ20?`;O`! zJFa)`ywP_TE?QaJ*j~P3cm0PO&Mr4yZ{7RJ3+(OV>lYXl91GR~ z)3o$w&olD!UltT%UlrlrmX%jjR=umPX=rRBHIrLfDILttuI`?{d;5k*M#sh{{+Z+m zgtK$=3m-o%E=kwcKY!Vf$v40L8`n0C?f)AVaQ}ad>mU%XDvM?I=@5@?K^5{B{(PjH?;qW?EhV0kN$r} z_J0NTf5*kq*uQ-nFnHS!YJfE439jEyg-hGi<<@#-%{k^c^#vg`Zy@y4?&{}F)a6a4;qsx-Tk8T0ksS)afJ{Pd15wXnKkGp-4-#^SY zy!Qhk>cntT#E|(kml}b`;MJn!^p02JFT_LsiEi$3KaB7eMS*#+0^HFlZbIZ{5gMu5 zi=VB?Vg7ReJKj`{xHs13?dWW!LYZypEx#_U3K)^elb3YhK85BMh`Y4ECt_^IHoap| zV?9j8DW#YEwJxe&u(&ljuGfxty5nM0EBt8%Q7y6=__X9&Nlc1S**Exdm zLEq8Weg@`nbL`2|uOtLRHpmL-nq?ig%FJ}r)Y{!)XPvQ>K^edbpxP^#uuYtG%j+`O z=YlPb!b}n!R=lbl8%PXT_Zg$PCke#6m%NYa-E(VRtc^6xHoaQ5u4f$#RtdYQbST5p zW`XE;tlvbyK1M}KVOtsq*zeWdP4r=rgezIV6tTiU_2|cMmUZMbH@Ok))*@638isSV zAx^_^tzypEjZV2#`e@*zi)4x|(6oO>tCXlqvRccKo1xZyHBFi1h)wu>O^u3=jZ3=x zMOTGYsI87WNe8f!n?(WpgdK-V+40qILtExlJjFh zYhf>8z6GYq^LW>*M`Tm9NtVW#33o<34bsEkr#=CPX`+w>rJ1VSns_P^%cQvf(eR0W zQjSbIoF(?Fqt_9u#WCMqo-bBeA3Ef%is|YT3fZK`=p5mtmMKHE2Idw=^07!MViVs53W7b| z(%5Hh&fyDP9xJ`>2d3;!=Ln%YiA|zD_J*k8W_=@yF#k6-o`B-%W-kgY`k}r*HkKXh z8(J2B_hl&ZHIIYqFdcPm8$2yp&6y3S4}3Lx{>7u78D&t6j||Q}uD_6lDw>4IAqho5 zuW|Cb0*=e)D;I4_PRm7t(t|!>GH=K)upoT;A-!4X8jIYNFMS8fx4p~van z69$uCsR4#rNtP}<&=K8u`*4ZXDpvlc?_;IDha2h4$TX!*RyP9eUq{ 
z0JA=eRE{i`N9u}`;yRR;lF}D~4@y&_`|LZE&iz^KgY0tN64>+{ufF>FxsKj*bKuPN zq)Uydul*dU<++UXI5hD@fVRDWlUQNhYqZ)1iT9S}Q5W1@t%x;OW=DXDb7^-?(`z-AZ~g{k3BvXhRC zG(ufBJe($j6zsXb@$;hf8y3P6J8E)ZM5L6tzr4c#xX&yhOHY)T_9O*`zawh zH@+v}%?xP&KXASIk7-{R+abh_69=2X8_nRFk&C@2qMfM;#@98?`=~`^3l+Zz0`ZZ@ zl=W)ypNk!AlXTkJYE*ayh&e<}dcdbC2P%;G?FCr4Nd_-e7Qc!VY&LIc#KAG0CDvoHumju(N>{(7At=}l zpEAhntf^tCi3{pAwG@+{Fs&f9l`#cb2f9ecj^N9tB#47l7&59V5s()bX{$jUB*SGxbCkl!uL#PoEl7G+1S>+wiH)WjkM)FfFQF zore%`E@OI5=OP7ueH0U_y6fOFv!(IvBK;KZc!X5_Emi?3RZ1 z_3Cg+g|IE{&)z80zW(~nXcM#xFZ%|P#H{+YdZR$Z5tTZaUi5VKVc%7G#QWe)QGT*pvgsZp_J@Ep!rO{)c{ROu7dgUS+ z%&B}_?MLH9bW?G>8(`Heil7G88%e3$$h1dk$Zt#uvsE7>Z)seeu;KI=-Wv9YOu3xw z1Oc5m{m}yW{o#O*(v<5GUb+O{<^2`(T)B8!GXAIfcCKyFd9E)V+Fx74u$5uu_+G|+ zhUTZQp-=B!8x7D8ZIQ>!y!N&AAV;&|s9G*W^==FeR_pbc5y05J!l~G%#a?)$ccx;K zGWBFB#k;QAO^W>Nl(?nwsLF@ytGp#itw_t!Dnjm?GO3@eMc>0z+7@yHfefJ|g~r%1=?%$cjup>)S^q3Efy0BJ*COm$hu0ajstT_-1n>iX>p1 zvPyOHk+d+USb`c)bTB#v8e_0bsI4jo&ak~_!Y)P z?`>{|CL1)lr&xAbzs64DKvP5jP>5|UQEa8xdX?ygK;@=Bh}oZ5ZL*-y`NJD5iqb@{ z@|<%az$81WUdJRLFm4~%;Zp4b!!;9U3J7?G1-WqJpUsHNSME3N%BKcE6`MHz0riO` znB-SEGN5tnfv3Lc*3otIyfB>Oum$neP>qt`ij;YgQy0pm^hab)VMWC!r*G=h2Cq^= zM@fhSYT-h6$T}##F}2be!ICxiP}ROlhINXO^6*C{=m6akW$$sA2lbnVpVG~6+c`4# zUg5EkN}oKxPHQz>+h8lYr|@f{HT9BmI&?)PFqHN@X;L{aIG%@J{)H?JD1%pf%w2sK z!Rf@_gP2ggWl`Ml_x~O0W?byGdcco^OaDkXG%N8@{@^#9-$*YeBLgxN@YK6s;98y! zJt}f`459Z1xW=xbej3q(EAX&-pYUc%`Ni=@^!h^HjZN=)ue{6LgrepfAz&zpDR*Ng zYd>=^Klggh0s`Y~=KA})R-;8_n~>-ZRx6EBJU>;#Pp0?Cf2*1bSd6adH<)bRpd?J9 zKyj-4{fr33HR9K2=W}GGmM)s^>$YNBGky_%ez|nVl3zkkR?J_6q3^;g*PYelkGh&j zzLU6Pli=?PFwcvcm5dI8dMw~x=jGZsySk><;NWwJJa8M`V{k6m^N+H#uj=K=C1<2R z@w62|(flRPMSOWvg0vjIkvUHe&P5|P7xfX~DaPtJD4S-45EwiItB<|MkTz4$Lsmm3 zr8Hl|tV&>4%b8zyV5s&{LC^U6QLX~Vn0DMog#gnCS@x4{#AB_;KTQ_*r`-ra)YMQh zi3EMyK7#*3bvX~t(6<6W-6DnQW;$Mt{8`$oAX@gfaAn0RKP8uJ#;1`suEg7UeM$)% z^-MS3v=UJmh!^)zeKvMtov?V$9WLVMh;bR#+tbS1udEhQ(A=|N(XMY%w*YLj@ghz? 
zi6J4kzn5oxT^d;L0^48$$xurAvpV$O>$YDD!;WK;0FGB2&ljTzegWqt`z?^!6}1k4iQwXVO~7`cXdilgQnQ0hy)(;uwTH2d7P94Ke>=^ zxOl$Wpm2z@8Um$9uX^RjAcFIw8xh3&sxqgO%kiMbW%}cpUP5e}liJ;RVCgbrcrM$q znQ&@eHTSS)Gvz+A0F5jv!q=*vpta=bvlA=in2-{k;=A?a@yMp>0C=ymf{b%5E1u5^ z3|_~bqy}vULHTA&UtmW@(yrClIj~A|ITNcT17Bm6H&*5^kmizLE6WnqRZ~3 z6DY@RJXRImJ>e#4YkPvti^%tPW}liY3|3fl4~as85%^Xz0$k@s>H*KcnQq6cGF)xsHV21=DqKuS_&*|BC1-6(SEY1qziX z$TFxn5MIqiDx1RYXI17a2y?3nRsiY5a7`IR-_P~Orr@JvuUnDk5Z_PFvuy8jzWZ$O z1XM51QQKa}CzjrlR;C~Ltzz0g$$Ns&ITX3(cX?b;Xb%qd)sKxJkg&ZW!CoBpr_w`P z8Z!kLeVV>NF?b4!N|B$DawpxBk^*CdTDSJfiPMw) zd6p8}PV?tSWhG}OOqAwPmCJsgv2NAYpV=pj&9O3XR)}gU%hHy;)=kB)|54m&5+7AW zy7+s=E|Jc6v%FlB9|4p%(-xa!W(Q{@-Gl5OQ2IL~C%%0N3Y8F|tDmfFS{C%kOh zw54&S@UpVIH8|UPibZjC60=(iBCuWXRPPOpyceH%e$cBC3;q!2aWU1+g~C6P@@Cp7 z*FxFgVi;|ll)2VG<`LagC1Qbmw-nzAuqUD@y&@nn(){p^6MLiH`EsW=EhCW)?jKY6 zrUgZ1E!oalL5e;WHPTjjp`1(w)ngY9Kc0*1B2QF_ls zx}}jxyAUmM@}=(^%1{zZE2L`uFj}WD$h!7 zMV1m|Cq~!dD$zKczXyHNCVd7T?=zkvx;7m`&+NZ3-3q>97_!Mo9Cik%&Q}Il0)Qa% z$`Q1QJYV3F9%ryUv0-3fnWgw46Y+M*dtB1H;CiN)^dPi2T7)$3#kun>de`JdGJWR7 z&YB~`F_}>~NLA2iV8cRFzW2)T#DAl}qYFyo7)5kI;)XWoaC?c3{Eg7#P@)G*0dBQ@ zexFLMCy(mc`|6;&L&5-j2ptmUg95?$TMhPi+R9Ow{dw zaa=2>62V6y`l-K!*BMfB3kMUhXZ@@)&sthqS)ll4oX^C^rCRD!?Uiz(0Q)6OtI$&4bid1hmU zoP2%4%)#Bzdm`Tij!du;={q;LE?8Qo^1ong0JW^yK5Hph`=FlWR)ux{TU_)@h$c_6} zxbZh<{f)2$=VpK&UZneNX|Qx);>dMmCg@uSty%WHc90|obw&;sY_s%h$JB%+^Zs1t zN0M`5u~Abz|LeQPLsWB@x`%BRykG_JmBC;6&9+@gJ-Mw=*2&nR^qnxw@fkKZ_`&_! 
z((zyDEnefK9J9o^rvBh8kZj-w8cQx$Pr_l1Rls_uCqKGy#65<2>NMrUm%p!l_pK}9 z;fH>oo;ADLT!f0Nv%k3g0vp|{yf)-;eM@8KHPx$Ft+N{mQs;u%wq3KXEs}0xtsL|h z=lq|s?0DnF$T6+YH_JSnZp=SV3hA7&)t(uZ;qkl}Qv# zEC(U$f<31keSGx!u6IW-13dSi`gcvOV|91MPJL%A@Vvp???|}2OTkkvzjgf_F%kQ5 z&v3@e)`F&}h4@b^WZqzXZP~?qHKTvVTKgY8MGsm}9&=wn%y8Nv;dg4K+-__hQSPpT z>y3Ub&G_ONUZ}W{!SS(*spK?mco%Nlj~#dxay`!ajm3=vF71xGg4fODYDSiWkx*Ml zem^wL*k#D!D!eMf!hOEyW0OZhU=)0<(N*?lnpEILU-zD57;I?-^0N#Mf+8{*^6x-@ z5sdCiFifJhf8x>Q_L9p(G(9rM`!|A>mW?sy+oiW%9o=Pa$OF*-nrx)CoVhH_z8Zu4 zm|R(}xPk&nOP`vlFMi@T*rq#}zV%;@?mRt=I$l|B_rup>7HZ>QfA#h$w+D5-Cs=w- zcYE{-JghFv4kWTn`o4d;G+=?XUdcl=l^wqlEAz}7i#y%z{wcAG-(ETin~^=Hh0Z|T zgyv~h=Fh?4gu=Z7k7E%DGF?6B7uvv`S0NtS=UyZX(W00e%l7TtVt;oQ?O(V?Kcae# z3E>}A52$segWVu*K3Ed^9Cf!cV2NY!m;~Dc<1uw!CRfa7wdSpk4@)P{<@L?%8G9S+ zwdqw5y`>QyuIJH1)aeqIK7}>A{Dvfn)BGqA-lxo3nK#Fw!hakKfO+{>zS4PMNC8Qm)fqpFR^R-fYJ>&P2bRN2Ymxwj^17DS-7R~ z_ZY&|044iw#9HVQW%7HuUHt|$r_W>Wz%dm|Yuxl#kW-c^DEXgRY z{$sW~#h~T6oB`sqaX@E4T@kWVZcC0(h|lV_G)O`9BQL&Y`dvlmHq}?OXSgLspC|}p zp0P8mN+|s4&GLUFIV&IEMSL5EGs;#5S0=_wPeWb&$7s8eI@3g~qV;TcZ=7>hSum7k z#L~oSsw~Eg)VneGs2Om@Rmo{UWnBSV;lbUnhC4$$% z69h~XvM6MYv)sQX^S5*!PP)sa*no2uf{D&FRS}>ELF`2a9X~3~=rr7fCp;@ep*|W< z&|^l@L$R99Lk*my{G^AqF9_;7XJi4{yWo_GV9Mk|UmGmjfi;cMM8JZ_E|HDIXFr0D z%NuV=JRW_9v$VDSmPaBA6<=;phQ(*H2c+oWb$yn?@oPh)RRlp+hO}G=E|KA6hz2oLK5b>dIVN4 zUKvY}7<6&4?WMcq%QvJ>`Q^=ovH*8!FiG=f5D8}IVWGVu_d~Fbly*{m$>v`vvp}G* zf0k#&ojH?(?E$96OoKxPp5qS2CsnKUZD%(!5E5PHIIitW)}Y8%5|DbH2W|dZ*jNJ} zj~|zkXb&1(4LNpAQL8GhrP+Kif^Qs2AU-o_5O;{v0`%3q;%>B+4TiIE$sU$Sb}#Aa z#VsA1^aT%GIqG3r?iB*XpXV1~NG=Mf$Q?w)IqQe zuE&Js{S&$edla`mf#NXp{Ad!TN6`iL)j@j!va=WZnDVrQ*oox8J>`Ky&@)O^dhDka zv*XqxU8eCn|JBB@4N=63+pz z;Ayfvj==x8N=S{wbZ!QCWmes(ylBPEC}{n&?)8_Z{AAh4-@?9Gm&rSxu1jBCw=@<_ z&D{rgj$rQdIhs%jV`EF>1t<*kE6-Hw@()-HrnRW5o7mUy``7Fiu|+1w>&+9<4>Bs9&>(C zU-q!^CZiJ%gDn{}82oj!B5w^T|4#e40Q|h71up(|0!9l)unm+?$(E@LoErYcX3i#u z<=iTVE1=56QM#Ub!mb09MzgM1WJq%TIUQQJ;6~)s!;;I0YPmh)>T2lIDY`r^%kf%LG8x@eV=jPAUoeD 
z9CQeMFfpZ_(#T0nlPBr9bPZ}v-x^;wL__Q_;TP*B&*XvHs4*xO6!U^HQvz!`H_`b- z3QBL9Hveg4)xqUdmSgv0p`$hLc-`?gzoF8K5=d-BzL&naB$exDc**zfZ4hx#_&8>wZ-Mn01)hZYJ$ zV8YD=^J%19_c8hdtR5&u1z zTtJjxx0zR;I0Cn2hbjXTf9lIsy)t z9L+dZOTaV+K1VD-x#cjvMV&C9{5sa1Tdu?ut|4sQZM?lDQ&2xbXAIY7 z-YXY^WRd&T27)b(-@xke>Am;)Ob#-AXOpSConfs(^KqLxb+D@c0WPZDNK#ra}G6S7;J!-E&~ zctr4K5jIjRgKORxH(8lQYJEz^rmdLD$OUj!8+=CuTSf<}7jj=b(qoL~YMvh2NRxV} z-ES=#_gX_;)r7d{->j(_K?JWKXFTca?Q zJ$l6gG|cH>_Bty}6qvry(wZ+1L>AA(c&?-ZOk2Qn#5*`&`~EsDW>)(^A1<#KE)X3> ze(Q2X_mJ7-d7Q7G&f0w_!zz=SFbyk4B-%sdH4__kk39jf73PutwYyp<-V6JIGiO(h z2#@&CyO3l`F_?->pU=WP4-B3T$lkph_j5EZW8!t`#c_^oXfYKr0WZD~-FxkjE18Z< zGEGQHI+ax+$Ggj`19fv8l8j-?_pA!PzRX|N0JW!pGwVwk4NhC{ z99qq)%=Insy04&Oxj+34osrY3}V@11CUUpYu zJp~Fye#tuVuz4)E5$=G)SM5d#m@w3He&Q z>xS+^whhu4$txZprLBPkirNoA1yv_tq(Z)E&31xb@1Q%#Eq-Ul8L{GolY}~sBiHXn zQ_1ByB()VYImKVyywj+-%tQU6`m=Z=V+4aRcgqutVu^FH&X-=_?6Uwb*Z84UQm`9P z;ncAQD`t6o^kAVW!(3&>VN7qL#Zs@0kuT3JHj*7j`5i;zfB2RN4qMjT0AwIIZ*gG^ zRXoUnY$O%(SGF{kL@4AL6n~Y-BKgu+7ZiLZDS|NGCMYB~rv}av+r3}EMKqYTS!ML` zr-2#_XvXK@miyr-QVV!DU5sWD!0GcQUIfkVbmOO`N@>oiIs{7tA%|Pj99$l9cYs+bk7F zWp~9n+E$V2I-WfunCegvbxMsZy#KEs^S9gmy&dM&t;$10j%Zw#FZER%04?+QMT z#hLFn`F+XH-}d(iy_B1^EK-n7*@kjQB-N|8U&9stTWf$7)N%^3{+s{LjTp?6^M{{a$`wTfYgGuuf!JBG!~_ z*4k#Nd}C~rF1kuY1vL+OD{R`WjT4J}I`O(#V_P1At#2dZo3qw{Ip7gDJE#?w{N$^V z(iW2*uxBRg?dWLBN=&5m;`?}ap^K!j zzLK4~dvcXTvxM`ul77wm?n6ozW=QMr{Ab z7tO25Ln1*d>TkyiZKfEOF3lc5wGSwp$1i>P^mAjs#hoDvXZ$DkH)V6SbKB zMWv>2d2r8U`wYG`osw3ttF)4NvSz7}pof4`i^{za{R~SBB_o*#KXrLr&x-P1GS;Gd z0Kb#zdwUpfi5hku>fi%~ONOn&dVYf->2YmyD@Jm){Y&=d`av=cc23)3f!^0Gils87 zVk-E-YEyOH8(R9tU!$gHa)YL$jGmaE&5u^r))TpgNUAIWL9}tO&%vKxy7LE;L8qts zPVT>PrEkC$H3VP-9n#>p%Zy!0*NX6Ft%rGu-e6}R#A+FstT1`Orrdz?#h8=?o`STV z(_RY9zXNJrIiwwS*x|nLU{$ybvuAGTi%x-h$gyiY=iNo-iEZeRys*$qpLfIX&TyjV+E+*_pX!zKpsyC_31bW5v|9AA^ZTx#R^PTgZyB&yyYpf{!#T3hG;)S=+oWJJ;ZjOc-a| z$1X>|lnl79S5(Sixohb?gBcS*FR9Jon8T!B-qY)oDC0brsSn(qHRb`a*d&6A{{)j8 zg0ieq3^?0=lGxPOFdvkhk(6BxJuy=2Zarst-RdDxR)l45Vlt63y6pNa<5y6^m 
zKOM))$+spn$*M8+>XL#skmPKi9$Yy5$Nd9dI$~p&!f{0{gWz(l`LGsvwfq5bTr5G9 zqLz3Djjb(DS3Xag>Y%!?p7i&*K~K?M!I|^WewKP;njz5xl7Z_qFktX>BN@wGJ>)!g zmXu+ignCy-_L)O|!SG<_V$8mYCvv2}gz)kCX-2Vdy!ifG)#56)!~4)w-B`%x&yXy( zdSkwh>@x)D0y&4mZQ{rI%~s>%@D!_tRk_0NT zR6WOMq@S}*fSapY#mzlls6K+-p`Q%j&eeV)jqqeZhRuirD^%p8TN;Yp9x3cl#Vz>r zpdgx$Lj|;t8t4@Q0A`>{lz>115Bvx}%0aHcOpB*#y(f51B(j@V$n+c&Y28^WW0|BB zt7<;OGLt|4R;Se7S^BHm!auP?-=2Rag_X0zCVjr4gBrLtmy2^}ttVvylh}K=^xj_> zP5SvnTlZo5IptlcZ8~nymy+TW3)2MF;7)F#b)KtDuR$S;KjO@9I zi6BwPQ+Vovf*us(Vk9A7W_;qiBdJz46v&4h>+Ws&8VH!s0Zt>AQBPi4T!_Omg&3SB~uKAJFJ)bYjH+BZb z_OIeN6ihEDarHl}YSpbDpQdyTLWey(tcr)XG~S4ZHgX^5lrDct#Rf_yn1zj2$wq0` z&k=LaYv)I?Zn#x1Qka+DWy9ixNm>`Is0?t+abdRT;gWCD1!B=OP?R~UX9dvh7lfc; z$UuWHcsw}#w9hQGpRhL7qnc)zO_L%9d-$3-H%yb9+vS(bRaDeBG=?bnaX2?Nw|HR_ zE61)*JW)@ca*`C3yqOUNGW7QmO6xYq(=+F=+;B!sU&O>Cj3?c^1lydy6#hv{^ z2Qxd5gPtcEp9n?9${)jhVdc2w!*^Yy*IlZ`Ajyw8X*N#!^0Q^~g_AJPacfh9uCH#9 z*_hAziF0{3xF^ZV_fsrA<-hJNGK#c@dC}Asctce|@0tBOq;U)+xL6ODi%tFZnogEy z6}scsof$SFmTGzo@J^9>x`;dE)R_RPfT0~Ae?CXcm{>v7nBrIS2Yj7YqH2LG+=k#= zsHRCXm?NjWrO^Ayvz9{E+hw{IQQ zcrNil$K8oiNL|f|YB(GQH~`HSESaMlqHa^C9xer&HoV+(^to`_z!b4Y zCD-jxlj7znE7Ma8S%Dd@fX&A;8gCi7wp=T@L&sq^&gDE;A6oYQUR5&y+4*6mj+P0D z&n?S>MD)y)#h|BvsSGgVL`WIwm_tr`7bP`t-ZA-`(4?rS{SCc>Y0oRBWkt=KB{41$ z!>(rmT{dz{BiJi`!u>o!a@cbix9;*@-kO4mGC}UhgAcVIwt7&@x~O{U6^ImLlIS9o z7y2N3J|f|9#v~s`4Xiunf8Cq>QSIWba8CsJkX5f}_W?Y!Pv-W%fh5`W#&os5)?=eO z!Ne&Dapd7moROt;5?(-mIGZtdhF0pD`>^nwq|VZW;5qzmDftJRc)ft;jjv8G2!~yf@7yV~ zf9K1hT@@`{xOpKrzClR{0KDuwa;55eVV38~TZ`iyu3(^;VD|c>%_x1$;YPK|ge6)8 zfv0Bo9g7zQbE$wo#ayaJK6d(Eo?9dTd1~1MQLa2k$KHhFw=`bIrM38$g@m65hoRc| zahOc8_YkOIaHUOQraVWLH!7@`7`pTeaLf_Z$#-yVp}xDp?e`y)PDT<{ML(tKdzf|C zSl4I}F%Qa+osDt<0a}di}>QiMCv7` z`_O!eUwmM!%-GwUG8gKq3IKAzBE@J6*sXvh77NYZC_K=E^t}?oKOglN#IPJcX#RY5(D|wnd!6LcU^=zhx|TmSinIC z4U*Y0cGpe`hPy`p6MFhZO7`Bnc`d}dQpO5damH^^eO4M&h-m_s3dK(X4AQ-!+q?Bl z>FFtvSY2Fd(akw{ar5&n4dMMGGZ0uUuWAr+B{|kmmmZlXC+R;3D z8V3?W*v^#S+hrQ*Gg9XJ%?WwEDUl|$5$hrrg5r=GV 
z8n8141{PMMvtp|Lo5nf1ZMy&2JLs&UWM}WC*8QWGbdG%LQoNqM;A_vkBqVRx&REQf}XgoG3KC1$oa1o(t+G<26n;HHDJfRRgDTg}ri{8yhMmV@q# z<<}j;6^Gt7^-Db!~DiRq2S!NtpwCiKH!TvaV)%RU-JHAbAU z@nM*`NW2}jUzHzA+_9v-JWLeRE(l8Z*L30e@|@1^dv=gVu0E9dQ5KUO*+YfF%`LeV zYcONve^8b`5PQ4-&_BVy^zO~QLa*RE;T4Tff_KjoM#QnNK6Q9zaYm36>SG_(TJp?W z60ztUMslwcZAGRqU^{#^DFt04H#tx2QfEyoK7MV64u_MYOfvZK?l!6Y)Od0>%6r5? zUuvn|i%*?Z1%ewJi#r1x=o409f)QK_>v@0EaToHxk@E%K*^!^Qpay;JYdfJ9rR`# zZ~P~3Y(Q_X=e=SD1Z5_7lj<@9bYMHtV-y&m=cg4~1}p}gDixZJ6AAj$*S^8pf{)gJ z!Xf(nGVLKM2dfe1%9!MR4%v++kof?En&YY~t)$C+e{h@C1E#SYSx(BiNJ{9v$$NiR zegS`8<_ET_vK@5FlxO9ev!%P>2FFDxkC@aM&>nu5P#-c$@2xQz>gNTMU*XR}dKO$x zz|W6wCLq)Rtro#;9?z5l`{_>gm9Vc2J6IOv>O`fl?>V?V05KCA%z0eu0D?eO1t(y% zY1+WrD3u(@n!#wG0c3N?Y}LWzUE<*zqt?WepU{_DNgv{l)iupA8VmQ7y4nhlBO?E! z7+>mJ`MdAWqsL&9yQRNwX}qt@QJ95mD~y@7i8;WLjE2WvlE~^%>8aJVf$#n^7DQ@E zx=$YwZF7dhzrZeBH-`6YP&Lc=v(*SQ6f!=v)Fz>2- z%|>S3m8UQnhv^3srA`C*N~KrMO$u&YZJo1{T#(g8TN*}i@C!G#+x7!Q9%x7VC8c?V zKgxGp2@yi9Q;b0eARW}Yg2Sqdo2b8)%~Nd3<#N~t8YbihJXPHBLARq#RQWVFjD!gI zAqJnKexX3y)LALDD3`RanvU1(p9SUmX`6zb*T&;GjFk9!DBnVA@V>2Qr6P2OsN=65 zDgDNps5>yRaVZ@0kJ`Rv_-HqOWzEv0#2?X$bB4(Qa>Iunb2{=BtM$wG8I_H9AEW9P zFSHU4Px!hGuiU+3iL6<}R*p07^rQl#uq1&oQ(%*!9hB~hIK9eN{1UFB6NL0i@$p+E z2(!EwkcHzrRUdhq3?A{4e43Dm`KuZ84b8C{#vy$CO>Gen#$k66hW}oJEUrVCT5}nM zwVreM|8g1PQfhOX8g>xVZWgOwMIFKXJg9r7fygO`z2H27#Q{gK^y_rJl4Ia z3-Wfw(Ov;Gv=hf)39WXIA|^6^9DQJwLd>94F6$o+h8Wv$s8wOJ&l6l*$yVHipY95l zL%gCI-c<~q-}Db5u#Y=W%qM^gnxNLC_U=3TOcC?u0rgOiR}L^ErpSEbavN=wV2%-( zN_9Ih={Bd1(Qymr^$_+>-lO^Xsn&naWCwQ~PRo@MS-FdR$t~hDPx;v%Ti%W#2SnK- z#p5~sP*;p_neE~HzLn}$9TzsQm&~@Xl6Gh0{*9PTECl~#xqB@i$#OEmXv#kVjyA1{ zzfNhTbrF$(#JPTZh&-p^~60%D?umGzNJro{O%M1$X{ zyTA#X1M;&%-6yE+6^ZGdq}DN7l)??_LEzuaWyMq6T#$3FM(kFvqB;m`6OG_oqo3J6 zqn$%yjC-U<1x9&b)ilL~0URed>te`HkYm#S<KRYUCeFWy zx0y3_>F36(@_(FSpFjuUZRXRXo9Dfz_<$fmWVI~+T3?iHldYGPSof2tzhZ;bCOB@G z=L$z0Y9l!&lfJu0E#EA}mCKs(kvZ-OpTd-J@_97TKod!rjkG#z^|C z92(utqJeA9A_e9|j(Kes(Z%M9?-F}D7`srGwTXD)#7et>e?q?8jho_Bwh7&@ciIkJ 
zJ>lfHY4g3y11{>7cRz3f7I3j6^GM5f-l8gnQ`5n#LXUs)1x>nFy()^;huH|7vk1N| zd;Q|Sr&kzM)slS5Wyh>UZvFT#ptsA@^ z`dpbP-C0Rg#G-bj?IpR9>#_c0bw`C>p|ELXEnz)!YRI<#UwQejRO)r4eUG!gCgIjJ z*-|F)2%68=rh~34E`-^+JW`s^;8~8I zeFldkur`OF{-(1ohU?wzsO>Hk$C?_vYJp3R!~&t+MfcuQ_Xk{QmU}DCDX$EF5nY?& zE^?~UXpkC@cfP$=#lbCEA-REw_nRwyTrVuZIO)ZPf(#$Kmy&-d6EV!6m{rGqiZAfX z&o61(^uCxke8=8qr8dyd^VV?Nh$lA%4SSw53)4-6cN$3Uf%cv+7V3Ay4K$YYPL8+( z`^$Lt8i;IFhzKj=!@L0j6V2-3=`$$94iI75Xf4l%@-$d+ zJEQ(@mPIFPt)`)8Z52VqbF4AX?Z9uOZV*+a(|_OzCErRz+N2ll!<1)dJa#U|ERH*! zB!^8tec2dbl4ntSm)nGfiO=G0gKl?G3=Z{ph87h|xgKL;~lEZPiDNo}7 zw&bwHq<%5ktfZfcqMmg0dee|Dr@d;fTbL%t{6qo#noG!2?CQky_AL$l|HILD$0fD@ z|M$J^X-h3l?Uv)_yyd#(`ZTP}%odW8aw|6?InhM84K*|Ks;McZ*IX$gIe-*}n=?f+ z7bXa#rshCE3sA=Wz5D&|;~^Z*Iq&D|xn9kCUVG1LQ?t8^Og+`*E!mX43=%5K;TvnmUa71?HRR}E0v!og7 zVJU#yR}O^~iM@*CzfB1{sBP)zq)Bgwvyf+Ph&ck z%^IO1sGv|A*svMf-BG~6EdS`_mTUCz%20K{4PlL8hU}QH4fc|dNR1suPNGIo&&5yELs#l=5Ww^EwJ)#d?eJ69XNnZlj zg4E&YmcC7)m-PP1Ooic`Va{giRp^uKr;SkC>P1mNg0ONprG4Nv{yB*(9ZpDJ~(@hC`&UPezeH)>EIk9LG;*df9Sy7JZrOj{-nxMP^UBw56RNko;)5vgn=b zNwFZfZ_KAgSqf!+W0#|Ey(`^Z8Cd1yp^UX!Xst>^vTf~qucT&fM{9;>=AU~M@ZnB_#vQwbToKvHh= zX%as!Y61uB0Z$4|nyET2?VH2?TT{143_sk2AAZ-3Myx?cKM_+lO4Z5O#TIbvzSD9nDVeeay@6-)mw(rZ`iXkN%p zRdlW3#~dU&WcDi!m)5&H$_T+FX%{@ojm6rfSeOqmQ85)B8>Uw1UfC!fhp=qLl;thf z%q3DgMM<$te;MEI6VF_v9$ps(>(cqquk5c6PeJ#9Z;~jIBHj&PjZYT)w5ieq#&eT3 zgLC}liN(OiA;>8}Iu&@*l<=6)AEb`HJzdeuAzu9bvFWliEU}<%eySd+U`Q;+7zv9b zeS;lhm4ofoX6zN^*xzb$G=r($JbAA|xK*4UW3|O2NOI2A4cgA0H_wB@_A~lXCChZw zIPVxqg5;@&->9uIk@Aol%rQxLL6Nr2tE-9E+U#?sJ~6K*$gH9C@E*#1lN0}rwW*K? 
z)Z0@eY`t71iR?pvo*o#CrYBT8Z~AoGm-&-Ph*tFuGu8bD2k1xSgfHkP&1N7xb$52v znOvIq#4F=;eJZBg&?_DC57~=nm^P&rQZa%yKe})Fc!9iXZzY7p>dm@v6dewQZxhid zsWfEe$U}mom+@Y6(GeqRqQmzwY>{|ly9FWuTQHT8m$@< zH#47}8#GiR@16V6IQnAp_q~XYKfg}t-|%h7Yw2lhxTl03C&|5b;qH)`j{GR7~hw^QW{s%Z?NXgsYn7$#9}_eq7q5 zfB$2_M08-V+8>d&0cC+3Rn0I}0z6vn)KqoTs)KmXzgc3?-eNb8q`^wFZd|10;1}!N ztT7Brfm}0o+ChyimkUMi2w6Q<=j@aak@`PGvsUYGv>4pr;5U3fMSuLD4fCrGA+K;{H;S76^)0) z3UA1))J|#~hg3v(d4U*zeTKY9@xA!GVYqPP9WlD~GSBMOKZ4E1O9TAlM)`-|!D(c) z73;q>iHQOkqPzK)!cZAVm$-gx!4_QHIhVzXxP_wlFB4^0VcwLYiqWy0$Wffc>MY)g zw@2qW?910U2*_9P^uePr2Nr^~g9}Kg0u#q1IcwmCXsi#+!3m=S zHj>(KyW@^=PY#(-hOHJv&aLz)cf(u-G&)kqTM~LL4Fj641tp&T*#D9X_dC`=v>df8 za1B%&bAcdL55As-N15{Yr|4I1F}`-Ke}w0F(}x)#fQspj@-qsp!Rt{0BesCeJ;F3q#(&e)9#4zZ?m1vlISdOg6A%4jBk z$KPl^n#v#t{)&Y+gC7CNP6EhpRJ?<6eohV=BZ$vaO_A~nJx)caZA2BEMDieO?s~~t zj)Bc*HFMDA19o#bhq1&NnaZTgSWOpxptfV%iniLgYzV#wf^>1VPWsF3vBw0nY`+8Y zgbIP_ztn5oiTI!)i!HtWlpKobu@&SV)L3t-fcUh(?a{KHHzVQDYjBF7l|sNTHjy5l zm;u=dB>Z9F|4F)9t@J=B7@7OZGBqlG((K_s4rYAj8>8=+Ll5l_4=dC=>y+KL_dbUo zb9u{l-XHq=+SoJCU(R`39C%g35r*f@^S*CtwF-!u6mWvnn_o>5-Mg;JH*7zBX}}6& zTPX+`(LQnJuKV_}+P*2#RiWN zm2dRTb#Jw=#ufeR)2qWS3DN7IYES;WH&u%@1GQ6+I=Cyd+PRoDAZCinsm`cdDs?hW}7PH%VZlr$g9s%)y;rbKp(x|b+W?G=@q$s@pfav zxOwg~>4522;>!*DjIK|etBU)mu~T`*H)=AuvHDx?h)%^5f9u)UrY9>)9ud=H=lfx~ zA%~MT?a%yZedssK(?aPD5~=Hntx)%h{9)-BM*H29@31(Or#$7>TFab$);_uk#yMr~ zU;0lUgr@q1+=nIWM=#uEM&8_RZ)6$bOR(do3ZQ}24bXc_QuN-C!Gx0Wq6Y%8+Yzjl zyr-?LAf;oxVm41FJ+~;H&`UBsY1IDO=IOyT%^%TWk9VFw7kK1A#G%Evy4x z)It~hZN@6)A$oCS+E;*%RB;p)(ik~j+~+Q_{|j?;0=_lW&Z8o(u19u_{f{I}D>qMj zEBeLaVsRF0Gn? 
zy%$!(G9TUiW#b>+A8ULcR3ugWc0C-5W6=3Nx}I9{qKPUw+Ox4ScL4lISM7H@ea|#R zoj6FLHvV}f@KM(I$>npoumU zUG}C(sE?fuAP%Pdl2q0A1JcsSxm_e}vR#~HTlaA6kaPMHvl z4*SgC9^dz#X7%q3_869l+ej9yVNQ^=l%`@<#uKH*WQfvbBGLX-|HAiax6S9CcPJFS zd40=5War?~+O&}Ky?>n*96L8%@UN;C>lBwVX_bM*TC00;I~~^&G5f_c&y>e+O}1RS zkR&R)+{lh$S-Uq5<1bT|O5QdV@DofB*K6Nxwr!|ehsS(gO&F)PTaDi4{zMShz6j5C zT77Y|VeI684i?L?dyn4Sequa`?%r5)vCjT8MiF;AOVgwL@S**XDcni_cRuk?Dvz7_ zcG#EVsTMzcZD`QuT>NZ!k@=5UrG|c!DzkaB;$(<=o22gnU0-fw`C~rc2DkIWp)|KT z>d@pn{hL@HQ&@>psWhw>X69y+<4_$oe^_7RU&eauX79LI#e{aMS}V9{)b6|_q$#34 zc+iKj`&`=H>nWQ-PW?jnQ`6FfiG;^~Ee_+5(w$*8_C^&nUB`Y?JD`^k-eeIJcRt;0YhJJN#8JL84X z>Gu%+@lTMJTJ~#d|Ih7@kD7_=<86Zj+&T#lS~y6<#es~Lz??MR4|J~}XG2oPcVxeIqQ7~=6$WU9o<4eiB4_j8CFW1H zr=R_=$=Omq7$rG8k@wg!M&fXe&aSqdE;LKnPfkimHy*9&7`oy;#@d^A7q-Q8O#e)tO$woY~ve{2h4%9U_)mwOLKTZ zJbU6;$4q=Etvq-V4tk!(1iVU@+eKUDCSCmf5D< z3%TfcL?bRO6f_is|0Y_%y`4@DDQQ$_4wz>$m(7t&b&EXOv~Ld=3vU6K=@#s`p|~&A zAaEns94-s(Va$D15LE(omx2YyrDEuMt!AD;4vBw}(0p@hW>ueN@LupB0T(6qYz?9K zav9?~wGg${Y7l^q+W~pg_x^Z3Kpkyzi#*U}AZ{rV=_*)_xbHgO)SbeE*9(v|MsqJ0 z1|kERhqy|Ds$+$U(^hy;7)L$xLTv8cTvRZcXLP^xz296Mj2k~6XBj;1LC}r!>!8D1 zsOUUuxCqQ3lMRZ#`@jgXczEZSa-G<5RtM>t-KrHvgNcwlz3AV$W8L-|`ae925ELPe zk(8%is}?%zB3}3lmIx@-T($c$j@*=0X>Z~G{yJXaDGJl5cyWIY zf@;l*mES)w4EyU6Mb`hV(O;qTAMp+C5{9;XRZF7=FuL;NHn*pj$Kfdf@p@S4D}!E| zH&!Z;R%T=N%Pry!vSY+;Zn-W;UcciZWO?de1fW=g@P@pCzc)tMtS!G5I}H%L*eW-p zwu$ti$g*$TD_{i=thV5KRHeHal@dW=1y z{M!l4YGK(MFn07AQrPUd0f+Ge^|v&i?cE*m*XD_qT!SQdJ-ov}?utV^7HVG*^B;~o zSx$dFheMaj2^V#?KaC~nQS!|*Lx1`443D=xzrSqR;-w1JXRkBy;-$!TiYY&jtK-^XpB(r@n1ZPzg=I%cVIu> zTnSyAJyb!LZye8C;08$)fR22Sj``Jj59LH|%bPfc`%rYzo-#TqqLSzStCoGN+g<9> zi`h4w=*0+|i03y@w;lh|{bKdmM7?FB!U}U#el1>M6Gxczs{N;|>ly5F{a_3Mpa0#> z_+9#Kulqv{c8}>^?Bz#y?Dt)(j};W-{NH6+CEcssv1?NPYVv$fb$Y#v`1#z+8F})) zFS6r$1lrN0utsJP2VP)t+>O9UfRr=q5^e3;Pu&Q8c3_0+_f!hsqHcG2Yn2?0S|gvF zM76?&=bPam4wPCES2OW>HutVTH^|zj>p)%4Q_V5=1CRSsTsPg}tJ5W2(wGJ+u(Lj+ zZ&VN6d+K~p{OpAwMBE9K!JUshLU6m{qG!z;UW6Ad#CGaZw}$0Z`a;@cySIB-`R=NOxDU+@OC 
zqPACe-skam$P$MIru2J^5)zg^7JrhAsn{j2;KIoK%e}f_}DZ>Mzs7D zfPgerx_Y;|kD#!T%cNd~dWQx(ILSo758vchj25$v>L2vH7cL(Dt9#dQ_(UDq*IsCR z&0H(g{)0r+1%B7P*pHu-8%ET-+r+eXwG}Y9A!E{`PKooLg|DNK%J}p5w)dF#8+FGY zY>Ll$dPgS>%8Yur#`2n}8~)VsfE)X@tHglN+pK2m|H{54FO%3`5_gt;G0=1$2QRVW z3cjy%<@(;8i1f;{xv}({7hygH8ufWs@S2HVJSbLB{qdAHW&8bZ(uLDmQ^>Qj5aUH2 zp26|mHRzLTvQ<)E#EM`y|fc&VqaI(Wo-lEl2IBZ-b?|o)M0<5%D$~Q1ZOj9f#KE+E+@pc34ejX!GNq&ax0e z_IW#A-8#I&3<}z7w!NiBxz_me)jwo!$JE!X15$016r!WAMq=SJE1HayDbzBceMdg_ z*%pl$UENc3z2If`6|bp!zSM`>BE2~AD#XI7xL+BlN_O5OADp-|y9#Yh-SN)R=+2v) zahm_Ud**9=>er_sK8ekjMw2&f+kGuzQ~3@Z#T-yJub$%sPji02G!P zZegf12rf8u45>)3N8ly}v2}(1+&5pozLd%EFuo85or$IyHfW&i>v8eZ1K2LW=IAZh z8!vyEE=nuA-W2%m9q+KEC?~I10r>mLA)$-(j4EI0U@idN6-S?S1O_at+p77l(@3>U zWJeiB64b1F?!Vthr+n6wc2MOE2@nN*!ouba-+W^&*9jFp47q4!1xLGSw#VBd)TJd; zSQVY^xB^1ze;pm$+AcnjbwbI~k!M_nAI(#6!<~JHfLvXdu^2as}bRTDGmK2a;(2ct1S;y6!GljIDbBL^rN%pn0%0U+~w^?XL; z`Qu)2u?Ok1iJN3S^+`j~X!dDJOI=I?43EGkEj?Cbty`vO-PE#Z*p<{4zCs}2rS7Vi zrSN)R%x|^P*;A@1P{7C#TD@AvNo~K?HZW0irw^=UOVI>qD-JBn_!z95Nd!?sf~5%m zleHsUy3u^SAhOZ~Wv}*E_+$vJnbkOuh#G}^4DGp^^2PX>@v_&A^5yXTxvmP3dk5Kf z`Vu>bv5=>*;qsiOsQ-Fyt>(%cndJbN$(Cb9;|enWYz_C<(b`e02R9F`T#psn9sl+E z@9k;i{N^{KSO6S@z~A00uQ4t2j=GLVv3P2uBNt4&U%r>`7h?r#|5wgOsUi%M8im_2 z>e2$dx*eB&u^(O58K+y6%aMT-F1bd_{wp|%Xku)5q&F}H_43YHUXyVqZgd5U%6;|t z+-epdDz2&#GTH#D3alOu%xNs*>+?ymcduaq*s13YGv~F7wuLPk1@W_OhdV(|vUw2q zFYdc3a%2Iou%Fi&41pGHY5>j5Y2Fr7NoAQi#R1$yAWx$!)E>b-TIb^?rKrJ* zMeE@$TPffjDqh1({$yqj51lBo$j^aJD;TOADBE04(nRx%Pq(3+$;ZbHR}st+{mvi(Zv{%q6>^`8NGQB@T^0wIp3w2kPT z2 zLA7{8|65b5vG6|rjmI00FB&@&%jdjxV2`oC&R@1EjAdt|^P*`~Bm^Zw!WW768e+w5 z_N(QsO1jS?T!!PQWuV#f%6UB9BTa_&5X68a*qc?UsclOY!N`f!E2=1*x=Qj%+CNZX zTx=F)R8JX!x#WMniv5>S`<3Iba{;3jvJfURY$De-OL7~epT^vDx(u0n;7ITNw~~B3 zs(Nc{KoFaZ>NEhKu|%@f3x7kNB);R$MuqFjs3V+@|14n$GN{UI3Md0rKpGe-A2oa` zDbeEh5M?lh0Ca?dgohi&ly%IEX2M)nfhf*IaOI0hQ%JfSJNtRGX8{z-|0sjbBYU48 zPAHRGEjj9fd-p*Qwr!PPq>rz}e)RD%j4;I~a_3QejzE++H(alJgbVWB 
zI9^yLKJ6jqzjf5E3jKTTL`G70GXx_=Yk#Wwyr|>EEeP-m@Z!l}$OQY*el+U*C8IKmBjhS0#YfjI-W7wx~3jy|Sj>a#()5sIVK7Te~KMjLp$|0oWCER8^$ zAsKS4@4(Nu1*muME{6DuJ5#Bs#ZeHc%q&UD&pHo9%OK7&1*H2a2x{*5qp~TLqh$3+ zxd{sm>oxTbG1jjh&c~12KjLU6BfKBcC@X0M4;eNvz)Nkg48|sdN2^gr^)=ixA8K=OeN#zv*dTWdnu@*HbuSG zV9C_djaOe_uvw}9DKR6U&0qyVy_yNh1LM+vYov(7KB|26mbgNm4J+Yfb7N|FF0QVd z}0AdPM!uZRWJyS9fG>x6FmFp2wj@I8EOJ zRP#30rSv~KYATJE2Ra+(hs&;9e^ih)S`pZyGgLFOi6SH~;jvz*CP9Y``@@7&T;NK1 z<{_%T*IC1bDXM`=v#UW)n>_BEST==!1mS?jKc@S}PETFi{$=uSqqvN)3w7p2XjBgu z_XVzI#brDeTlr!qHMJ7_&9gR~i@TxZvTYQM1(G`Hl2@`V0>yhR93vX=kW8w2{j#N? zpxG3a&OiR_{IvULjuRJ?e=r>nxVi);il&|$x`qjoL<}1Ql7eh*LsU#yRVjih3in!p zsYpLF+ZbYy++XxK+ zT!ppt+S~oN#+>Fgf$;4iG}7S(9t?YyT6kV?z;>Q>-gP_q-F7X((FRdYEYQ`aL1LwGnyx;1X}TEYrj*yPGxw#F`lyI=EKpBUPd2Mk>w z%J%Xy;$d%mc)0$OF##@CB-aqD1TA2?CSS^ud4%ZRJKKfh`LBRmB|t4xErUybUp`Lc zR(WV9z5LWbkm9mjJs1m%YCUkkbW7?x?Jrk+K9p>m1@R@I8lfr?9`FT46Q%Np8}&(x zx0pt?i=7g~5IdV-Mm&Zwfm&2D6qa+LV}~_X?qqxk1er)in;Ms$A53bY;#fFa#P#*ygg1n+kAYmnSoKX0)i4UL1Q)M8ZsV}3H6qf9 zuKUL43}xxo|3O9PfZnK~W-jAmo9CrQL*wq1RR}64$ZB4PEOwLf(^a^I_4#;(B-NY0uxhdHR2(W=?)QHA2F~ z$D~Yjy^mOTmo<%XYsx)t5$^c8JiXb96d!qtiwz#d2B0R?Rv;17B@tbd>}Z*QH~57@ zp&mgosDxt;3>);5y7+7}QxZ!dm_W%8fg=-_UN2z|Q52_I3ezON#HPj2Y@XE1aW_#` z;m}CYa;pl#B^4;b&N*HBJBQzLa23h>mP;DoZ-BSQHBag?eU7@BXa)q8E%1r(0YMJ^ zBjOcNfe-fPf>LDADM7v3Bd~7939VibfgYCd6R-03+AyqFQy0DCdK-79fngT@B`ls$nsc~$7qCLUuufq4?J7Iak1=O{=9M5oq54w0fgFNpaDoEk%B~LcaC=;J3qFI)#lp z2niGO%%DbX9bsqf-pDtD-kDc;r;{@8FD*Al!9(8Xr)c!)S_w^ij$WAeKD#&oD=o~U zKa)}2^C41k$-OPhO=O{z{aAQh0&hh<%H{8q%%oJ~$c7Sqa%E`lsv&jvXg(^& zERAyGYZ0Qn87# zkzm)a&%!;|-K~A#dBxXT3zz-pyDEs;^{cPToM9D4EfWjbR5!TCRhy1m0e+AE;gOIn z*ARmxj-JvhDx$OLtPbN5BQUQn#Ru>$Pq3zPpXu`z>9~gRgv7wk>+bQTci-%5-)mlE zd6RPX817bi?mp@%syec(QLi`=6i&%<7^#RQgfv0*1bZY-w-cmLiYBe}f-^_uQSIy# z=`F8X9u4?;IJFFEU$~=j>>%{_eazio%<4CN{);L5j%&zUy~FhgqwcXBK0AmE%yqHI z!Nv6wz=@&&?*0Qo@sHsbw3`fiIEO1pD&|H)jf-Q4-ewXl*c-R_B1al^?;7Bz)5Y@5`dod~; zG?ehUKgZpU#PGtgId!RGzvY|nf`(I7ONpPUN*a!)5{Rx9oOu=?X!NEBcyYmb2aX{p 
za15>fx8@@y#ZO^TK$#k2Q#~lQs5~azNok)S<4bMfyQ70_ghtYsz-b(b`A zhGCu_VIaYgsi4*;^c&+(T8fUrlXJ{vU!MhEpx7(GnKSYT%MmmPS{bQ)AU;dbdp#jk83om ziNf1JNhHK0C{DfAy}(@XY>^8m1ion6=e(if5#g%ypev$+zgese&gh(NG z32c@CE)QG$IC@%Xq;@sE_~TokYSO9h+_JGKhYsN|;wydpGdox4sKzCx45liQYsT9M zG|9B%HMfSUEf2XT5~nX_o8km8XjGzzMpLha>Q2NG%4TPyqRLr{4(O7P3MxW!h`PuC zR6bV5D=x$;?LIK-PA%2 zg2bDL6TNdi7%{K2Tv2%_5Ro4gepy`kyX9$VjlFa@>$uAuQrg8SH-l}q;(@POo}W!R z&wrT#0;S?A$n5?iKPic7r#q=iolnFy#(-em1^NT^vg?3wsjgMBE&PWC+^Q6H-My3B0=Ae|`3*!~ z;?jO=$on|5PF6+3dk=(3KmAbc`h&~DP(y_Rf+FpuRLvX^Pym=O;%7lCtHtT%oE&e z@pJBtH*<6-%={XQ>UrIyZqjSaaZP)Id2%dP9_e)ap(3(gIstn9#g>9D@?m491&A%bf(n!+Z zO=V-uW0(*`y1|XcMqg(MDeP;|6So3A&nsO88M^q)-!{$e&zLIOA@7+34&{h1?_9=r z4}Gm4YFz~w&~+i>j2`*p>3%Hc^;`Yv_fJw~S>%~{*S zLJ5l*$0gHs#2M1UG~}DEnNXdSr@fh*tajhnx|b?wd9JyuZU%9JvHuH-+tr0@ND&%+ z5sfNZ;oHNo`j``+7G{QjJpK`o-L8|w#f6;HQ_XM}CMoXw_(MMDbal-#b-hbA&4gTV z*{(RL?)Td!cUQzR~{1uAcj5a*(jugGV{rFrE{^f*mnJm`43a&~olS3ZWCZvMdO z_Nvx$@#hJl$?w&F8^zo@ws0nD^QqFwz7M(S%XR||ltGxWh~DJK9ItCaSh3y=by&q7 zds<3u^01HLdNjQf2?Hb=uKkYN#U;`L)hp-2p#_(SQ8l5fiTwLzvq$di$f=pF&ys~8 z!!kdg0dnn7^w$mSHQC|$0vYLuQ;~9y!K6Wlr@pW9^Z=0BG`>*_W>TF^Zk)OX6;pCz zY^O%WeJR4WRLkPT+Mb@eZNEQ7@j53wJ6ty5P`LlYU{qwR-YoE?>L#GXZSPlOh}2}| zF3=eo>i))M^oHORopYdMSAj&HE*@Onr=cZ|C$QacOQfODcoXW{mVu76#Ud@W&THs>CL>S&?bI& zTYd#P$Sk;lE;LM>(T%W!6i(UP{p`?l9R`Y}1T znopHNa&t<*{W8F8zlVNUz5P{5n{9JsDAmsL*VNpYH!b&fx6}PxA3yi-;joXs7OrobcktYX|t(D^Ut=+zXoP!6VJTJiWper8B?? 
zp8{oQ5S{lbkM{pyX}hb(PS8n65(3AuG8)OP{kNwQx#eF?#{uu_`C0+|(en>5EjK5( zLpT2MvHE&h><={KSZI%uLiJk78iWDck7ab=aQ^F&zdscB4{O=4xFnG2sKqMKamC>M zwq?&zJaA=C|e?VfMDW<2h`p(9Kdqmy&;M2W-R_Q(EPLsLzu zby)bxpcLW_BbKV&`k2{@MQQi6sY=MA{@mDdAGa0yq<+hhQC8x-@9NEx;VAUuirOJqF%lFEg++W>Mh9{ClEj9Kyl~QHpsJm5I{+X# zG6Ww549DPvS6h%zC@=RLHvcY=7wQH#vymfSRrK47ke`ehBLysO(Lv~4Kuu=wPn8(< zoj>v9^Z^Dv$a9<*qef%6mFW>%x-3;4P_=h;H8&41?crd<03e-SjWfu`FAIOKEeN43 zz@pPZE7-Cb5`e*J1&dPLBqzRxb!l^Lf19E-H7^}{msxVdFk23bo#s>%Wwr7$L}`f=LSe`~bZD~ONW3CN?|!J91G zDp!<)>s>);5?FT7M1#qSI@BbzBwTIUa%a+gE6a)W-x`WdT#4dL#Idew&dT2_MKJ{~ zNAq)HmAzFx3`=3duv<6HMt%F4PT5C}$Jx>1>k0#w$;Ww}m8XIQ@H4o?mME@Tuz$Ge zhGG_I6t;10OuCOt!#6SPW13Y?6HACHflC?a!#Ffb;Q?xEZcmJ~|4uj&a@B+4eJIC^ z$3P#dnfBJ5f+z&`Y9_3+(9+FzPtafOHU(!}S%|47P{ zYHf!b%pL!(VkAs^=AmVI3P808Pq_U$7Hcf*CJ)nnnMxOcRAb+%+$T&@8it#jheXrC zVJLM+OvE-ne=pfKT61pp6eK&8%9Js_xY&B7OxQhq1M5>|?5M27CvqpB_4Rmh8yQnH zOF<4UO))&v3_aXXSdQ(v66;WDS^c#sI$86OXI`%_!;-DGQlix>Z_3}f{<0fSPh>Qv zMb={ktD2F>$?dz5mIgb!WBXxlh?)0b5gJwq@y4U_r>#~yR&X3IpErU%bUp96RA53c z+-3NrzQZt=Scl4|Lo(54>k@_ii(?GYwOk8}aQb(B)(?lM=t6f?;}qQ+7yVIyRw{?L zq1ior_tJDKfV<~c7+>IG0})PO+ly-r8P9l2FI}x3;EGlZ>$!*B$P}S(o!#^bUBp)~ ztoW5=l2%BQUV0~?8ivqf$I^Y6@pkDEQ{hW8Qy>S8!^c;ks-g)$e&i+ha`mguO84SV zl;Ulsq#4sx=gEuAC6aiEf)0u9llsB1pPQd6Xg_n1r)u0UN~p=UVf37;`euMy?m58u zV06ldfZ_^(RjokNmL%<8CgziX7n&PPnVXfSTfBbYidqRI(xUA7V`LZDllp0|A7_5E zLI3S*)Tzqtc~4Y(yQCrP6#`&4pF>GsBT*-&Aj2|HiUOv|cyIhP@8&6GgxuH0CB$>* zd2YC!qjd-xkDpfb5S6A?+yXGc&paZIKU+eE>)I5o4=&I2hh(|rU^;j*sy$z=$MT$S zSoLrzFuIm~B7VPc@_JFP_bps@*hLyBD3UQ+JiIHs;A6dgE#AP13aYOlOj5hXy7E2~ z-z3LQ?w+vhC~F*`vVYn@6GRZ9iT7Yk5^e!UfyW;WkEITlznX+kpe)dKf))jf22~7S zmpHR;HgHhguZNG@zqaK}sA6AZ?q5^E@k`+0{cCZhQc;5xIkTdsPXo%e&=$CI7U2Ly zpr@)mVC@B#_&2a%nJ6M8{%)_7V(ooCBZB&K&QSL$hPTL>?;VI(0kO4+qgybd(h69n zBNUVif|@4>UXq+&6HWx&t0U!;vXjc!JAl zl~vSxt7e0V{lek~-(4@ptXAJG=J?ryk&wMY|KS-w{wl#OxO!S_JIFLNXe!WL)*AQ5 zC3|s~arazWC@qDGbA#UH@x?sjA70PgAan@GB{+aJm<846#$gFUO)DENVmdQ>Ci$a< 
z0w!c9Kt3zU|jvHpxy#s>dYktb(QTf(uG%7h+(Uxo1XRUe-gQby55|SPN$AFo!NA!g{odaItUs^kEDa7Ak}U_zLX6BOlumIQd?-Y&`2OuNtAJ88%wma$-bjI|_GTjhytuXP`Qp*)*O zZ%#U0`ARL^%yu67bM560DNi8owm60i>bbq$u z(7TkWto_>40f7N4I8iRa8=@PU{C016yo22A#*kI%K+Ub%=%&~ZpYdTB6E1}+1Xil= z#H*y%BHehPLTkmwM%o0^77dpVEE~ATe8um6OFFE^GK5_g-nqIwP^K9sju8x(Z6Ew(uD%=L}g%i z$t4rG90a)op5R2C9H9Jb>IV>x|@?o>qt>|XxUOr!s=~!+}Sr`WA%KF{%0F!o44!To)q+dO(A>E#wSJwCS zvsuUO+OBN~lNW**A2&^~4wxq+%xP?wJ9tvXRtsI_@_VZY;W2@?dqTqN%-YNeaXH24 z;)Xv-(`V=FW3N9raUYI**=Ol(to7=|mFee(QvpPt2}sonzDE2hkJ|XCeD1`N(1Lru zA_AgYNTKP4;kIC0Q5C9M?Q{3PasY+7=Qnhh0e^);5|elV3Z{D4djPBRt>Dq1-@G;~ zC%X@pwCpIb26?vWvpoQ=fCfy$fQ8zC3lrj}AH~xGvUHg-5Y)s9qlKm*`*`eFpI?&| zU(_9&B`J-~j8%#Fi9CQ%TGT=;)ZB8jt&#S!ljfr`?dXi(`;}|sQ87RQGx}PPQ<^&E zZbZr@(qgAFu}jrBdzLIemUt2&UY!2$d2AHHDPbNXw{#R1Au3i7`qL|ncpIcikKhD zNT`x)w^J{>?a0|xm(C9k9M+FFvES3RyE@I(^}xsXhwQhSza%|&)(W9J$UA^yyMO*f zhq@vnUi?*_eY1aR@L)r>!mI0i=2vZFyXEur!akwL?@RpiwcW5FSUvo9xjcJ{n#}Ib_c;q*r8jy7b86pe zaD)Iyci#BTPqV_3&&ZF`yajtL+WtH0If~#X=h)07d(qWz2P~By3Wsm^WDlJW%k!j| z6X6xo7H{VRel5K^#H8V`l!~iO^^GsipRwQ7g_?GCTt*&rbiP79+9&>He>B+YKG6){ zH{vChJ&7sOq7W)Vc7|rpZbq)P9iw_deQJul!oj+xb#;-;;9@2K@PyP1h^0 zK6Rqxn2VT;gicR}-F%LpS-v1I-*{JR8JbzU0${`@OA2j)f5eyqd(7s~64B-nAXEwq zXT#;YNiu9GN@>P}M6U?iPdGKP?1st9dX%a~1+g)2N)2{|$_R@BZr{H;6_2A)YcXf) z28!GpNAy*@7Ms0Y9xooQr#$&Nrb#zIK?EK-`pN5mwQhxC8AT{z)x)^au6un=~mU}+ z=!U+t7G=2BN&5As-1C{?Ho<;3V4;fvp!cD<8k{do8IDI1E2JSQNB<-(>`7)hWo8Kx@u!7}(4w=3*BIoWD^SG<%Er{n_LY%Dj2 zYLk%fO&GJ)NO3++&ku*U&)?3%E_xppe~+r+vB+pQV>Wm@1?Z|Vy~BD03DZacTE%(Uy1(-dj5Ff*L^{a4I-dK+%rM>*?06Hi98W;h zMNTRBLmhg=q13P4v?-ox)k?aIYFbBz*AB$Y6MaCWuYiwmZx`&fHYKu}NUS9SW&p{R z<%JK!Tb9W8l8WwQE{$we;V0cIYaEARuyokjXPyoA3g?>$ntJ6YC4wf1Jr~ASTiC=2 zQYz=i>>$RXxJAfL2`U-ojLiii2nmqKH*XsCoOr)cI3e4tEQCZjsnIjXBzX5MR0+(VaJ?z@R=hER8Q4B-LC(mC zR&2$G{F5#O81eMkcMtZB7W1gRUJ$qiY#IRfFg2V8KLu8=VLnZW^)n^9;}5z^f=6_Q z*{KmBHRpovveB2{ubab;Y%gnCYZJL}ex1L1>a)%L2W_OcWg*W3 z_d;DIjLlzuAjh>!W2Kz1D4+6|+#CyU&J9O0hwcO3GProq&3p~{-DUgdF}s%PIOFkl 
z^SWlX-S*ge+z$4$vp4=9M`s=gb^ibH_S?2wNlHa1wj7nSoLf0|D|b?47}spMjWJt} zag3SWgb+f^B5XN|apV|-VPSWvtsZq6AxK5lv^A?Itx4R8)Fs1AJZ8g@ zQIFT#A#YU28SwOGtr9Nuq+$~&(r9`9YMN)z#4*~+B)@_2Ynb6N`$vA)Up_l=mTr3j zZD{`%qNsBF{G2MO|-8Pyp{8ue-J_q&eQE-{} zZMh#~H8~rfDl+Fs3m%O~m+zT5Mcx_Q+M?LwQx6wFR?rqgw?Jjp;@Ka&M&U>EwSBm9 z)oL%=ODps*d|t}jXFYq$MF_6ivsJ}3`fMa#_$&JOxFG{F?_kteFvM=wEscz9PXE~# zQGKQWV&s<{%0jk*nE@|ON$7>XHZHm+)JEG5w_TsZQ`uHKbBlF|vd>c|B8WgB zKKuoB^~K*(r`2%#ju67kO~r31zQXmxE7w%gfN}3#xX0kN`QEM{uuo@HiPlY78m>@- z@_i-J|Mcy9{5Yx>ruD(+@{kNnT7K|wny`*EneB(zilqEimhjw*(`XJ6KNBf&nH?Cj z_F;2b(f#Z$`Fx8fG>MnSSB`{#iA?u`%fHczorT+NtT%u;TWoSq=#sb`(Y}P;Wf(URIHk9Eu9)QNEmG z8jXFoG6D_a$5bbE3>}~T72$U}3+iLxKgzO-DCl>6=!}_udTJZDmOYDbbQYL}Rj(-e zSHJDHNHcZB2W``u63}IXsNi2@id|VVSyu zlWiX|aFAG?n+DFupY2B1M$>Fci$hb_P51w^=UrVZ=~CV|u^l^jhtHAuKFWWjul5A{ zk}ToXBM&=J?p1*BDSG5ylAca3l2J?}f)jztEqT8f=Dj2Y+$SLN(s<-Qt zon|{w#W`jF&?m@F+#F&TQ(3U0BM%*IbA1%Y8E=?dV8S`jU8ycknMu?Uv33Uv0m;uL}h7 z*MleuMtV<_O-CtnCx#^^G&!vC9>&D=zvM<9uGq*-NuT?f!H8?2tkpy|z#1_XX z(ySRV1sk+5t6+Y4xe1c>wMp;nzfDDRge%6iej&LRn?9sCYJt?EWASMp^QO&PuUbK2 zN@pw;(K&CbQE?>m)uFKY&1)`a#%+z%8WA2F*6mPC=T32bZSPQmB6u*R-vznztB)2` zdU@Tdi%VaAE#BZ{fSy~m5Cn0f`stKJ@@s)c$d<*k;!{pEllO^^$*HDXot5(?)H(Kf zsM_&=R~7%)of`1n%>%nP{!h zA{FJ+qd}*lyA)i{0gcdLE6L)Cxy*{Hb`*AO$)Rk?GVmuesSj8t<5UttY&+0AEN^?3NQ+>Hd9)8fd2|8hib*zyx-BqEq7{6Em$Yn*d$RX#OB{QB>=iJY>0H*b%g(FXyHmL>_GjaeT6Gl>=` zDzqx&q-RuT*X1`36*qb;?+aZzq5tixfExcxzOAFA%VH1;SVCq@^U}+EPs6j3qHA%> z^u68YTsfNCrpMX+-aKCul_?se`Z0H?egp0S)4UEr;zF03*t=L(5ia5WwP76#S1G ziiN}#+>+c(qQ(8C*doN+<-O}siE$<_Hi&#C959M@9~%}Bt?0f9F1}vF_1{GJDy@~I zcloQgpsZ1t-GsJUn!65NRO(et+R5xBA$N!%@K0~uD8^+mB_FXh#6M8z0k_xxNcYcX z`7>H_u(Ng)PUY~UGQpGUlWh`eos{8WQYjA5Ul@Mh?5>kZyt54mxw7#FzcB=zvMf zmlxB4leDLgY6^^BKW-uiYoqQB37cqDBewzk6g%;LfVh-f2SQ0I&!A zNW5MW+kZHJCGPR&z<>LkmGn4lc`Bq}pg;&ROM=&G4DYuQ<20lOjVDgCecehL-lvr& zn$dAo2q?QjZw4hrGD(}El{oiEqOtdo6R}R&`FtM|*=n&4V0NIl1|lNAs*2yttM7eu zEWijw`SgftQhB4+3TC-4*jrCfVQTWCysX;nRW8N9Xr4(;9Z0~j(js@^=j++KzCg#k 
zHUUXBNkwaw50+0&6Ebjm1t*P>kBE=Oi+&mFnjF7fjQ=hGU$`Qo=h|({fnCI91wSZT zn`d~B(_GG;1NdPrC)dA-KTG2qskSUs4Sq=bLlk1cz~{92TFzsQX#{J}H4uH}D-E8>CI;f%87qinRSOu6 zMkH*qKUi$Yi^o39^%IJ1=n+TT~z1%dcC!X4bxx{m7^(`H?g(hwQd~ zKCsXiSpWxF-;i%Wrg59bzuXlFsZ#J*@L3V%XJDN10@kkUfJ?e#`=-9Aio)=#g=9F4 z)V{XyT3go{cM%a%htj|lr4*KH$q;#vA+U9)Dr5Ov{|-3+>A%IRI_`mQiPID>fzI+K+l+UIsF?~7b+G2MQ%tee(|m$gN*D(h<&r|WLi>sQwAVgeZcZv(HH z5tAGHBHVN|lGZpwrYPBtkxu>ko__pJDPoYsg0)gQC^|76%T3a%9-eI&tD$X+-`tui zi8Ba*%#*B$8p~JhvR0IQ^JhH7&->JJE9}>;>!(Lnvi<#qq$~x!a|Gd8t=aR~?3^Ov$ zDQnelK>t^r+Pqe!cZyf#%$R!p)tgq}6oO-W%0g)04Po=CCasA!YYT2AZo0=yGa@Iv zhKVzhtq^c+uEx>UbkSF5@eDf$Ssgj0)TylNczm*Hi>?Q9uPHZTgXRbhNej^AwN^s~ z7wm}Ky(oXlCi}JbRgPkyDdn02Jr~hWZ|g?j!6tBrl2V^`$vOKa@@)=5*!4c0Z@O+h zZhAAKbbwppD7{6(?U0?LGl`rX;|V$us3!h#aTQeHCe1ITd^{0Tt=bp!F3V`-TQgq{ z!!5#gFgw(yb|*lcY}#>**7pp}Iw+!Jjo!G>NvLd4z)Tyog<`G8OFlB zGHbphUeC~5Tk9v~VQK&EgHX5{U^~}TXMd}}+pkI$;kgk_SeLavLe!gEjg3T9)-k*s zh+)=?=z8r~mq>t3|E60|Tn#Tgpu6Sa<>Mll;_^Qfgzw0qid4sQ7mXmcPS5GUo#S+t)4`cYz)GdVXH2>yuC8 zWf9pge^*9P)KAaBK&B}(j1BR4&9sJQ&V}H=BJ`kk7G}zYwo$gQ*Pp$j0(y;GyL;(I zUi=-|`hO;x{@Zumgck~gnOwd07RpK_4c!H}|48l? 
zebMvL)eywB59kTYTM`|R#Uf_*SIxkH^Xx>?ccjg+NY3Q0$=8BQ&1DTHoNA~buj&wFMZJ(Twj^s9|S_=b?R**VZx9g^u_3mPRXh7QV8SV`Hv*#fs&6dUMJlT z@prIr6UGW+0<$JrkzU{s6LncM5F-NYZtFT;T;{WLV|vQst>|2KO)1o4(2Z!4x0COv z1I2>J7KMm(igK@SYs6Y?h<-fXci%`~0#W zU;u|QTPd5vP$B}nZ$npNR+KHsL=fzynuD?$6=ygQcX6Opf^OjTLb-r#(+O zgs`xeu=yr;lV-v`31mEqX682r7)WTOsy=c36frb#++z`z1B9~$lKo8uDqu#Tq&a9L zDT9Z+@9+O_9|guBaz&LOO9MRon&V@o?}?{4;U-!#k3rc5z4)xjfSOj7X*t&DN?H?89P~&P!J931* zt9sS>X+*%;h*^(!td@LwukpD#8eW>!@lU<2WRhp{tyD`1-mLvh=_H>Zic`%`!j(-0 z_|j#F9P_ZD)@pAvWm-o^u0zH+$Tjryu>9}pL?7gid;6lWn_esFjh^!|3RPsFg4mUY z+?ysd860-dq!=KW#{s)>eQH#4_$ro*>?l6erd1YYHKX#qz{HWe{dvSR+K?8`4k|Y< zv4h#a(B(l{!znXTnPWS#u&PDJ{ zz$X7%7hI5E0}E+sO^lvooT7yGg9nwOeLdJGI`_1PwI@v!;kT*$#ZvUy-*KFy?wxz;Z6Dg20y&e8p% z{21Li=epJ>j21ZaS#{z@%+FDz57J=~HPb6(H3tKaA`cR&XQXy(Rljb~=ZHeebw>OH zh5G^DO+Obpcl&NHyDn8Incau9UTiKaf!i!2^nQDL_`as|TL+WVy*Wh8a&6%#xxnJ) z8h+@Y07-3WDsW1_MTA7t*Xpgm^zNWQjA3OJrHkGqYNQV(at4;)c-;KbM-cT85o?rg z;^Uh2cF-zOov|JNOcD;svGleNc{9bB&!;j=KzZ-VYCsQ?1i&vYY$UpVTE*E`3Q}kj zC(|rO-8VEgbr$?xkUHq00u(UY07DX3kGEKzAWS67mXwt3YyHWc2#AmxrfE$=hcSa+llIRoA#y_(kBr zYVp~1DR^8f-eMZXj6D{$YUNnQ?SPAN(CpRuM$?;o3m|+hwB~OJ*16X6=0ufe=4S$r zeQib4j6OD|N~d+R(#pi`3_^R=T~BY%s^!?wfxYKaXWXC1%kF!aJwk!k(%{{c?~v6P z4G^z9-qv&B=n`ymSFXN( z0`3BFCeEu>Ou+`*rW5(m6T%M;X4K{6-%kS#Y`KBLX|-43y)k@jYLPr|sV8GeL}oq6wEuM$mN;V$vQXIH z#m?syO)xu|C((z>0I}oz)It=8<&mZ+aU~ga^&NRZRX#h{l;``Lc5rczucY#PSY-c6 zVzF!?_l75vNWLU8^W%9JBj&v>pq^MX`sGi|tH{uyf$XLB+*fj1>SG+W8%OD>5ic~~ zeyL5gn2rB$ACdFcqA>2llkf`mQkp4g==5&=?J7EY@KXnsfTj6;d$zXi=k)&hZ(mw* z!%zqIN6{4l>A~FbdAF_R`R3*k1SHn%#>ClAy%FEnRUuTP;ff&fhcvLM%Gfj^7Jk|6 zyF6BS`^^0qZ++YA%Wr}-THP6_pY>mbN~tJV=Uvq^y27@^TW`T3dn?IMunF`leUr(b$#cyV?-$!}O4Sldkbhk(~H7Z{{F|(=|@T{zb zD~dG!wL}LV@aTK@S8GHkLg(!f`xcvc>b^@ox(y3JNi9;~c5nBt_0HpmWCcmkQH>D~ z2c8o%@GcG7*X(tBgkHvN!Q%SteGXSDJbfMCQBd!jMf({$53dd4^LKJ@PvS-_316~< zCA?C97aE?B(^SuawbJwJIig=OmlqKx^C-HkeRLMIe6l3XzZurrPrjXMA|$!8yaq)^ zskN5xi5nM}CnX^bXmyfaol+f=< zlcHFj+@xLO2;0>4-5um?MzHsX--*U|wXfI_ADt@>YrC_*QtpNoqnM55jj~@{#|$0o 
ze&f9ixcDnPPeEpt;z4iE+mr7DrBB@IQ?gyE+d$W=ZK~e=ya9VYqunAH1;UUAqg7~& z`F7nE;T#U!_3wk6>MKFD-r-VhGBxiKeacm=#O4i*fJ@gQTS#CRD~lrL%sl2KwL&w| zPrR`!-@f*WaYD#M$cLKhxl!88K^JW5<6h z@+sfyJ^7~zb7{1ZU(PvAqQ!}bv(KUCko#VzuN*dh^;;p`u4p{QzLpkak7^s297UVh ziZ+}^R_BkFo#_+(@(v*rZM6+lQ!y#a<_i5gFlmt)Hef+)vX)<-R5ooHHOZ}?zA^Er zzM~&Zi>C1)k)0@{SFXVgO-5|X;4w!Gb~mEs<#D(FrGhfs_$yuFt`8cD4LpJlOMHvl zpt)Uhz)r&?UKPTv*9Ql?)z*N=bpY5!Wd-s4k`&HC=h=)u9~j&~8w8{p%s@Qfk^4TwovV|;AuedaR6K3V1222R9CYy0YS(U0 zX0_D^j^2=y3i@B3leOP3&^Q?o5_ft6#!dE)DmrWXg(x(Ym%ElTR{eV=y&!lO;|bp( z0w*{j+M?6(`C-zk|>;AW| zAZRH@W$Hy({)drJ=AdhJZhvjSNRRW=QzZxhiA;0sg*ADDjCC2wkjhc44@F8)-r4P~ z+6b~hP$g)mKy8Gs`%UB#XO9({WxHfG+$8J@fsj{fKjt=&bql0tNiJY$#f07`mep6-`x^V@Ftge-dJ zJFP--8D+K+cDxbjRcLSH$^ZL0DuCY{S=V^);B<)Qv6@J1^@CDIAbv@l%H3EDhXp1B zXc>Nr;7{((y!;2sVFA9w+hBA(!pwG2GE&!SX+LNThrsl0K~sCiMEct9b&tgj5L7_j zB{%5)GxwgOw{o&p$K>5w{^q-p&89tOSa*@Zj}ZMwv4~haY1e4O!kH{9nJR9S>BfT< zqUbQUxWi&!RCZ@%Yj`hIFzCZOA7yIxF4z%%hG^p8BgTm<9hU50nqk)bWp%N_9^yQ_Pj|-HM=I9E;d#pcwE&h%bKv^nA{%j!LvF> zZGZ+fltBP$0{mBtKNX4pESu|z!Y2t{azb}YL4of?X5mlPO1&xxrsnpCE^MmVnk20j zgE>IEx0U`?jBMkA#{ny2FF%*qin4EVc_Q-WNLq-Ii-I%CadW(=ZlO4xLNF>$dq}Di z=yC14rYWgcftOE z4X9*j<}4}hMLo>RkvQNdB)j+E2}FGk$ah7~zuWS%?~Dm;oRlaDkhAm)`B1q@#LIzE z!0lPwpu^Dvl~v4W3}Vn55z=|Fs@PaL)oFNYf>V?>)XE8dZM*WC2thKVrL!TcXImSLU zQ^8a7C_jZdAsjrbl<-lB41|wHv4xWwrKMlr+iDC3 ze_(per~O0VS;ZTs<970&PNqpJSLXYz0x^hTPM$vPWH3*>)47#MUyo#4?ef%7+a2^7XQ$wpl}x?`SIi)scBNKTxT~LbYhQD_4e93 zgD3c@Z!x2NhVnSfApusl*U(;l741xZx**W`jO{Rmh7m9un;Y!zNEr8(M-;Y`&-CYM z08hK|Qe^Y{GzL!4)S%y>mq+3Nc4Rc_zM^B-;15HNMQ2U!`;(A)>uS?<41|7RZ*jxL z1scgg&h3(eN1*99^8efC`!=Gs+ZA-fE9yUg&4MNKde{Ej_bzJD@c@Qe8_ga4AP>QE ztJ^G&5lMpL=p(L~rN8v-NFmFkb8r9i$LjICoEMnQq9|sDE3gCX1Bbw-24lX$JluG$ zjL=^67B$P15%48`&lv)2PrqOK%F-SEt1t}-_R5<~ABLjAdsLt#Rh%TzNB_jTur|!K z_;5LDy>oPR&!nU9npT?PN6yt+1m_1_u&dZj|A!5iQeP)yUG_h&Pp}{Ekbn9t_{N8x z?`>_~d<|%K2O4Nr!>xwEA)a&0;p5TsdgnvWXq~%y)@IM0z!iAjzSL%9$Wo4rdlUJX zP`DDZL%>nrT1*BF9d3bSMq(l-_f|iPDr^h`s~#q->$+Ri*VxuO8M9$~LEa0(?<9Ts 
zZ`qDK@=}M2jOoKx2(diMd(|az>F$_x$`1t6Xxsb0-nUK9_;NmA#o!pzrxqg>hwp#em%`@Q-po?P}q3;BEAl132@(Lk5SAMz0<~7Dp_`9p|0k{|J`$ ze~H4m5liuHu{?^^*cvAwab7aP-?4jUGu*FXmDZa$y2f6#dV9Tx)jx|rv$yo2_9?)N zB>j_W7Bd4&DpKGjmdzJFituPhby>(Qn51b#B3onDgmTHqlfma3o+br$9&c{A@(?~z zB?3!K_!nSNUg`GP{3+$*P$w0nw*u>tR~?&%Uy5V4k%)1lcbi)%*}k+31t+zumyF{# z4Yc9JFx!?qq8=|4sOwimD^0O4sYffscM3_dd{<&{k==Tq@|aJOW1b~!N#cLDsZbcO zdW{NOX4bEFo~|ysD49Y8@iaJ)?9dpyR(Q|J$K5wxeTncHa^#-)Ra_RKmoam2#QZ1p z5$^w@BG-FjBFf98i!(T(ui;2SkNUm+%jf(xzf(H<$>1EsPl3i{56M_Zm=OrCuHC=# zrqxR)+iDVK%&tCiOhj@;g50$A{7s8=tgLB---1e_w*v4LgI(YUSF#3gAU8jeFsCHt zLb8tvk8{0yD~+?pN>E}$AZ!v<*XpuswwrdZdIjXtTt&4<`^wh%@(^q4Z~V{ijHp0U zmB+yzHGL#>7r73j;j@(VeD{^#-~Opri#DGqG95PxPl7fEhW?}vc4=3Fv!k7$$@81O zGU3N8FTPyJ5f_GFgOqAhoSDJQ{cQ#hY1pTP)%J5kU8#L)4Py^RkjkTnX}A1Nd!9dj z4l#Vp*K7UQO+1e|zmy_T6~rucP^7j;xvdV4&?Kkvh}wOD(XH+2$FFWEnz`udydi8V zsA-+CqpDp#_RDiEXI@R&JRW%*Z6e6z?(ma&R)vUJ*kLyh!AGhSm!HX(Ipqc1Zk|-E z7Tt7!`1^p)TQ|QZS4%&e>X&a46nAy?PROra>bJ$iq3v7u#0!YkI3(R(HK+ef^bjXO|D&^Q$`Fu;}Q;++C?C5u$*A26OjrI=>7|r!Ex0Sus z>^7qCw4-l4_lg|vON?H(1&uft*<#)@i%oLJWITElJlq>shIX6Kx%a2rrfi=$@rk9Y z8}-ObVf@v@@E+`JxdqqhzkR3OKYuMcEl`T65~x^>L{JkZM8D384|)c+=9Xz6f2JK8 zFn*4l;?eB$Br^TyFOE9u1ybi8JO2ZdNTmrO-k8yAsBYLnL25@U@~NR1?ocG0(@iK} zb`Sk)HTXRu5n;!fSX^e;K+fx)UG#_gBMVRVP`C8wuCX<1Wo+#oJgUBsSCYkSiUaO@ z19XeShZ{RSBhqzV^-!*CTCcdd5BD-K9>X(~+l@Sb*i&O{6HlfzeE6nQSpHeV#wmb;HY=&G$Yz zrq{@Rco}!J{@+%mo>km*){(M*ygpnOz)H-GM2@_p)wA%gqEo;h*tFy_KFQvrfEMPE zb=aebLwTigVcpf1(FjDKuh2B@{Eb9|BM

@1R>MICm_vp#;%vV5#sv!7?zuha7O z&F~J>3&}j)anJd=3voj#D!O%~KME$u)?Ju<**m@3 zs||wH%4@5O-5LaDvtJj91uSC0JbKgaW2uPl{EO6RKtBny#8~>Dn`A%wmvIEE6)CtF z(53V<2QeRXS`g3?m=T}gb9(rA)b1ahXS-2y`%`Qr-w7s0eBLgraPJl?_t#Sjw22sH zE-u$C0zvuw_|%Ku$+W@rFcNF=_r9wUophV5^p0zHO~tK{%G9(4O42NzdHi5H!N!xLp%j#KLVY}G@~ zP4}0SFAOJ2Pj&z?Ez{G=FoTYq?XQY0LCSRfc4)!U!<3}vsQ+@e|Q9nSwI8*#pl%v+? zacdM%iS-s;IQt zCix@4r?h&kp0zrimxB}A>vR15{9$c$ETaaBeR%g()(5B7II|idcBo_a>vf}30`OK{ zG@isZh7pTZ))r>R;l+$9BIN`yiPa2DKh3t9K8BuBIAYGXVlfO+ltyn{(9$YpKl(>S zwRHaQii#|FAk{GnTA;3@XjFSX(t74XuIZ;UcFjqGkek`t{#TSqKvRjV$%y zh}$)-B?xox5k~!;6V?x7u^6qJurQQ7igqRl;0v&)-twbwCJe8$U9FqBOR0X%&&#TE zFFFOlmHfja+HT}UB4^J%*1|V)tJ$pQ?mcByoxspwjg<)u6rEKknv@4EPUL+Jom{FJEm_kDS@@*5E<1 z=hhE1;6Y^$)r87EQ#oTk*|+ZBx+X>yC8A*G(Jl~q0Ha6nq(1d>R-$s5;=TA+NfxjJT98Czre^23#HZH+z`>5@S4C_s=JZJ}2HY^%Z|3S75)6b2xq80fNgv59%_(1D+PTT?!6&K9Su#!|<9-NKbJ z>X%o~=$s!6Q^Pz20k&Wt-qSy#FWG!NVO$}!7ibLGk|x%`*!)!lfTeY`my~6zzhlmZ;tke{e5!ybYHb}ARnmVhAfU+n!)3mCL zQ{7FW#HaxE5tS5NE1}+Q7}-vBbJ?c$oVdBD8TY2cJ_y@JifqurqF54*N-8hbg%{Z@ zQRb>7iu~RL+m{XgzFHjwM<60t;#IeR>~MVmMS}q86d6K)d^T+UW>9^w`_3OmwsL01 z*MYjIR|g+`ov5A5>s9${(C~3|Fja#IJhaH1N=@z;*Ph93aisciDTCwLK#0FdXnx-? 
z)<}PScI1styU;LI15jYkWP-v(p0Nw`2$*fp@lf;wc!fF$zv|Mjs^qVpxt)3jJc^;P z91N_L*~MIkxF(rvMP+y6k^{=3VtZ&IncZr(#?`+y8^s0&V9xu%w3 zXP-1YM`>sylT$B<+Dsf+mS->;iBQ!{DAd<&%72$Kfnca12q2qIT;V-DxBLj?t`mZ0s}lrJTVP2vv7#tB}zPvytA#K zYY@mlo}ufZgO9Wgb_ZL?{HaqAOZtunimDz+TeZHssb@GA)FgWEzP5Ra7{>~ffCo($ z1cxDa@SwhMcm|qhShUi}vLxQtmUrl798frfK*tmwni7fj0hEQPEO#++Y zKPLZPAu9^b&FUQzgt-Xf=VatoOlGtjvLvUshGe_T)vHuOo8P-Bo7A@iaSQ@KLT?cxGc9(RmR+v^HwnP4{3HG$r$6*o@b9Z)^=8E@Q57 zr$d(-p%BP0eKV^r*LmTRE z;aLDQ5dnn)U5tdYh}Ypo>y|sX-s1LTWo)W88aEg===D=KO}HEez-F?L`M7^9_Mz3Q zMJfcr@|;C{u`F4W@@%V1h^XzXJu^ksO+7z7^Ty<}WjnQ@4U+Lw>u>LGtEU?}Ub#=q zM|+8LUXOz;o$Og+hfpR3bD_X~QWw#f3t|y+N$o`K#R-;e)5cQ#zxQ;volkK5kMy_H zH2XpaE?u%~w~j>{M!33qQGVRGvG+0El$mn9S)vY?6U4%M8f;BeZ7M)&va0dorUDts zN9lnoONM$M*OkAMtYQ4AW8^l|u!&a+*lV$XB?or)wB5+b;y$6i$VQx{8s9Cs!oARi zJR%)B7b?rVJK=pb;?NKOF6sS`ehRn#OP3Dm+M`^W7HXk9ZNYKvXNg7>k^dN+f}w-- z<$F5mC3B>+klvd_`J2=vr*v+Ag=*tQxAp_7)^WuU`bE344#D(IF)R30d*Rg!Nc$68z>)hZ2(B7t{Dj9rRZu;ioF`|(i!e7|T_qMPXJ z5}mZT>k9+1CBO(>dyd=t0jnrTH~Vq^?GucYU}#>}@twhRQ}W!Y@e^IGh55CBw+RRl zkaVP8aMkaPR6=C^q~E;PVbD%(1npmt`A6FILkwcJ$V;k2<)r#GJ5Q_B&~q1Ybyfkw z>)J=oC(ibr82Ga(`7k+iwx!iIAzIH3zJvWsvtr8LF>ma%U}F)fO9LggQ~pIxPJsVr z*%JoQ7`$-0s+b}d2<)t9pM2qAPtQ}1`SaI}u~#Uq-_$Dq?KAyv-L~VFnPYEmGRVD zk;}*PCi$qz#=MZF=G-ACLFODjRf%EqPM3T02)8lprb6y{9~&2|I|}#NZIL@?Pr;+@J2swFXfyp@`9J(6W`@cO(rVBZ_5b=Arv2k-$#QcA7hUH_PQpeFiKkXE%8At8D)B+Ovqo2ozJk zCPLZ_ski;?VcNs2@W6{ON!ziPBNvReGLVvd^ezvY;ism@1I(T@zNM&n=L zYR3QV)5|+LrK{lL?@}hMOS6rj3|oYg$oV z4m~H711iU=RFotg(Art_No7T9%P!<9v8f(5Ict>CX{8(X-@XxwYQKf=*7$a<;Nk`L zk8_2}Q~qPf?V&@z28GbFZAR=`Zv~%J^7jvUe1}0l<)Nyox}rzFz%LeL2=&qGge1QA zNcDE1j00BC98>=KgsnM2e^>w4XpM-TR z3^S$eQXE1r{+uBCjm-Y#nBcTK4!RRsc>FW=+?6R)9cijHKk`?TqJhO*zbfRkdV=sb!pNYr*R8Mgz&h!4X?klbN8rygc$o-vthTGDG0{;o1*L0n!Og83Di1 z6=r5m%NP>4tzL`$;|1YNwk92kC1Y(NrZY^Im$M=m)@XOcw288wH+t4?q*@5eWpp+WcUa4IbOqT zVx90{)LG0W(Gg|cF|-k{3s_IDy&jl;NY8GphO48#l8{S#OVT{ky4|y;w~9^Ivg_e4P;2gjRcmqAq4{^OH*oHdjji^xxfV0Fk5G@kXv3A)8NG(j`|HN| 
zlqQuAHW5&B%~h3sdTjKK(CYOqCDN+c!=;W`0KXZl-gtKfQtvfw-#UeI_?@{zN8%(k zZ7Tc<0eeo57SLNPjeW>Wz)G>(lxATG(h>8;XTK`C>wjKR{wXi-w>&g8xfJoK!L|vi zXoFr%E7*y}A3fezud6xWWBsF-t<^_}Ab-OFh$g2rdb>RR#D8W%ddCxAq{f=D-%zZ} z-n91ryP=n0aS$E*md1=C_U0FUtf&_KLPj2gyII7nETe2UYS{INNLag&fS~9uYb_)H zkjRaisu-o`RadmPG&iT|Oo$Kp&F0VN$Utug~Rvg2ja_cBTs{@8BZFW zz7p%VQJCuDCD^G)#sS@*E9~VVACXYwHIm-NUOz@m!`jdkzSp9`H3RFXl9Z23Cri@> zC^wkYP3fdA8EKCWoW+Ag+jI5#iwDvm>k(DPX=(^kO%eELf3 zO$d*>;F&)$UTC=6D2z4~=4Sdpkh<4XFYbWfrKo_CfKL{Wkb#0KALp%+}kEZFT#*AG2Q)o<~4sJvtR_+F4V zhi0|YinNFO5ppK?E@k-(n$5ifoDL5!HpHDOh~W?@ao1ITp-Da|AIj*uFqcjM(;sR3 zH7sH@Bx2Cp-d03NEA^`DC5i)b2k;_qk2q$#w*^?C&J__rxE;yPT<3l63i7GBQnAgZ z-n2*G*{3t-TbctG3+dFfpOpA4;+X^IGR}YoMD*xy0lZ^pB+9i@S&f^+*dA&4EHD9k z)+m(KFriCYJdyFZ9&9mlpI4FG$${cZfarq*&}JJlK%=-y5IO3>WhNmBI@jHJijA%f z9vR1hmEEI`L`6I+Nict)B^-ri`$pqrct4n|ga?1zfCyYS^LPJsaCeQ*LwW>_p)*q`QUHis z&D%dcBj1YGn}{(J?C;upDmlv2EitNzpLeXUt_-I7M8nk?ulR#% zY5T1gty`~>x5BhjTihvkE&LbWd5D|^lL3gTDD&9U7;UYCNP@HxE-VUp!_C+FAo;fp zmV(Qyx;Auc{Z?%j#SDnB~Z(^>&%RswgN9KD3q~ zx>O-Rbrqk)T=EnY8UfT(8n!3K&QasfcQp6@fcRS@<#dIVpP&9h#rZx*be`AO9USMK zLBg~@^qhK><9Bk5tYqS+i5f27Ld31DQ+ahIckBl-5OX@u`uVwZV@exOqX4VTG_YO# zx1U7RhU?{>2_6*Y((18QE(rX_s3DjJ2YIUzn+k-k|Gb~^zOMc%3)Z{n>1joR7^V4U z*%||*x=X<#k`G_{9jTh-*=h8H33wM*;h<-fjiP8}Zx}wdOH1sYz*Mf=2wL~(+ zQwk`A|8$WwScr4^-67i<36Ta;E*{|%wQokZwJ3F>swT2$CZTuA#TA*`Ur7S)i(paV z(wQqCm(Dm?v-~P|t6@1Ac2}#s?1OPbTh=7H$dg-}VtNl~HkfJTiz2hS37rs;*jcEt zV!Oun*(F2OX!U4{35So5Tj%FjJIp)&5EZHLphYnFx+_(Zrkx9R=jyKt@^Td{mZq9S zbPu>L3pT&oW_>?tb|_hQ|7WzxSPl8ZNt33>?o*A@N13hfYX{F zCs(}$XG)v?8pi4x(xVw^^{Kn;mO$GI`>W4arBu>UdQ<`!U<^HLwsQ^RfAqEm;2I*O zW=;LCGO+=(?RXrVM{@A8pDS;pmFER~KQdGi#3#CDLVKmOALBuAeER@ZNXfPOW|4Al z5g^Hmy00I|h_qALnoCag+4o0l+DszeL75JrRdrRL9Qmn{{z3Agr z5W?x>^|)MOuH;l=RNKEhp>&lgpqlyy+k>`Xb_O@^Hy0;C1=Dr7XIxg_$fTcMA6OpA*p{N9FyGl3m%12lCD{1d6Hin*PW=>wh=l%yj9Ek zIcUTs?em;dJKlrm_=gD4WVk&`b8Gpvi6Zks-Q=SM0q46H^&7JtzU=;973*%Rw5|p2 zydZcXH(MZMCoV&KReB1f9O4Y_2*T!W5Q*NWQVbiO!Qjm=Rnrz8e}9#g{?FwbT6*uA 
z8!p^?w*bL6x^`>OcHT6S;=G=v#pvgZh%KZua==fl{fy@~A&+5^f=>Q@(l}b)=RtUu zp!S1>U(1uz>@Nlmar;5fWPRpQ4?Q-GWOzm)x zR=wx2E)(ygsv%V`x4I?Q3L>O8)vO36C5PHy3$U}j>}c@l!}qd+<=l;v zYv@5SiE#raCs@&Z*JsWNHdWW!np_?1OvsfI zcT|H+4J9Z*y@rA-FLWHKwpi%>AP70(R^Ub$doxy(E3LTq5#raHNg3~z?NeTs4+d#f zU4tGsL6);ZT%3GDxET3s+c+EDOwc(HPVdtV`2sqxYPI7;T$AN)fxsh_O~+GT11;<7 zus6&hx?~9P&b^1Y!<_oPTjz_uL3HHmg(-A*8fZH45W3{!$_j!2FVHcMl?Jr1-T;=> z`dj)CS+Aig7D%#lD2Zm22n@Rz8`4|}dXdhxV~+VrYYx#APZ z{ThFFzp(@b0@CaG|50=<{!I4&AMe-U?jWfYMRAu?&gEE+cXcDVr4qx8yELbn-OiJ- zZwKTYVj<*|97Yb?Ft&0&%Q+jHP0q&VFk^@NcisPi$84YLbG_fM*YioZQFm>6rMbE{ zfUv$^*pHI0At$tgjL@$qM0>x+G`D9SVHW`)8%)2oKBw4S5Vqh5Oe-1?i6R3Yx)Sw_ zXEUoQ-)T_mdxN(R?jGsaf|2^zAuc-%5A+0BThjYjn zjSOJO8@~E?rkZZNEmd?`Zrre)rDz>c2u4z^>M}26nGKh$B((tjl4*YeLdP!0|G&B$ zqdJK^TOyCcB%ri4W51hAAO|<6^h|G+jD=WVb$$-;=hAnj;EM4?(jc(3`^jC0lfz*v zNrqkS5-4dUT)Dd-&27CY+lDs>>8BQZ0+4u5cU|A|I$iAn_hffmW*M`BOp#|Qx~Xt3 zWtU>+a+V57TaFH!6HM!?o7B)5l6wXYG+$jbd(sR9HqLmysB)w*XA}gR@+rML&_Dn9 z$&Qr=<5izV`!nmDpIxt~#-z-E#=}O@ zS=;&y4GB%feYAU={X-6Pu%&cB&rEiB?QnuZ`wyC{UVQIBlidx_hQa)vi!G5%+;UKu zQ+d)b`jl;u_ikD-0UaVXZ8Kj5n6=hfP)|rNxnnoG+*M#GEYRaa%wXnIFlGo90*TBp zM4p2ATuJqS(*kFNxlwQ;s$-g%vw?mgaZ+$q*lcq9JZooYO_KBAhtC9e%Qf@0&67q? 
z$LX(D+#wy$lwXd?ru}4tjhzVa++xh!@v$JJ6$}jEW6T@0WXq%3EAm#o3m`JDN507EgeSDsYePN+||IbJVo5D#HDuny?tC5%UEV#3#owD)KngOU z6tY@3eI56Y2k7immWriyh_zOWUllzs+wX4GSR;6HU+|l#$4qmy()_u^n~CEL_~kt9 zn5MG>rUH-#^xe-py@jAVZ=J*b`9Jd+B}Y;;@NT3Davs~Ji@EdazK8D9s=Z3>ig=H2 z?SyMR5*8+YH^C83{_ljV==$xoGjBUAT<*yzq2cHg~p z$Cft5l<^pUSwVRN%xKuB`(|asbhvQbC02htQOHhE>Z3 zE-A5^W>=QOC<|$mAfxIx@D1EcBhxnsrJaT#|I9R-vx@5h#_HkEEt)el=j058#iV0A zYHQO5%j44uO?)^?W`$&ybH^5{zj}V8a!2fa=BB196m+jRS98wi*oDo)YtDb99*R7| zG>vLwRCf-Y6Vc_&8b>RmBrmQvKl0)n1wd!HvQBJK`I5$(hLKJ58HqPhmyr|V z5q3XBisojJ}El;jy_$d(~i+tT0@YAYE{q5g%Bh?{dpjd+{mEkv){Am^oo@B zA+7HbE{`3&?)#eks>A}GN8euA4x07nyiDslXQtA6{eD#CET+}%7Qi+%1$8Oh{Th3B z1e^U!xlXg#5OhX%$!`t@b~X*|zn!b~Sq{Tc|6 zs}u7@(-r&<&m=s757;7$_~UVG8*ZZzqdyc)me#I!Ps%%>{gf@D!2htC6O&5roDx_R z-d(UZ%nOL~{G)P8&ZB%$ zxVr*CU1#4E0aq{rx^f0zIgN76u_gq6jhQ)QlGb-Js1ERC1a`@yus9FuW<}(JNM_TC z><9FH*O`h2=Jj4};Lwyl9c%3bz4-@?bGxL!+crbj=|ID|IEC9Fe5cspBg-w~J^6PIWwA4(0ITj|{?rRW_saD@x8SitTxODGe?K`QsmtSC# z;aC~HlSF)Z&u()|B{2iU2Qar-B5>v89Q~GmI!b-=;zt0N;39t}>J+kV zHlrh7ph1q2XD0%qkQMkLhtSdh7yufvLm5P^zkeUD96 zTrM8=kaNY4xOXc~zmyrj9q4ki|KSj4=y-S^4JD|y(YmJ!D7#Ue#7ZYYf_tO6r4QO*Wo5VEVw%-))Lm{Vn8f# zMn015X!^4+I7vUBo0rdZ`aSca$5jv!7B~vcR6JW}b;TiWr#T2+NEe)8PbQL1EImTE z8h+xHFFudVPA3QQpKLsBgBQAcbNjP}#)biC9s~Nlru-);SyWu3^|9RZ; zV;ZRC{d@2{E*m$qH`)Zt?E5h2;JFy_aj0G54f(W4+WR_8WWob8%}+Y%^_(8ZUDn#~ zP;5}&w2b-VkJip1f|}DW*6gHyKsoZ6XXE(a6Z!`T_T9T;ArXK`!1;Wp?Z5)M4|tmp znCoTCSe38PjsnBRAL)SNp#8mIx^P~*M|0XZN+byXkmS=f4^@KegvOWzVfGS z3HYABiz8(6!SMQj?niY{DoTgL&0)Xh>afGw(Qj*)_dQePU+Q&Nwg>5w3a-~!o(Yc- z)BQ@#JbblkE!}OxE~$%I@c7*OiU)zEM=m~q^9LXMTU+e#DX;1Aj zOf%@Zp=$z}d@a%!_0TUh>jKJ5N^Dn@nvd4D{6Wg?FktunP}&Maid6eav4Fxto{^`# zDe=zjq8BrTRN1}rU!O+Oljw5hz6`!45+L)C*`b)v)2&!(jFU(UxFwUhEVT(Cxegf6HJ_yfS&|za_wai;Kx8N^Dyzd8 z5+%*(b(*%w4S%5JyEWhPYhsjD1ey<{TEGk5R+#9P!BPJnsP5i3cMyJI?bw}Kaj&N* zG~bv5J`K@LC4qvM_{%Y9j1z4k61osC($uq9d|SBrCssAokyI2Uvc zsa{TZX*ScW+f+`5{BYG+8~&@DZb);0_lYLilV|v5%Qt+@j*w-PtBWm5nG$g^71tU; 
z86I-KlWtK4esmreU(OaRigh6OZ-rP}uH64;w{fKfZW!sSKRUdWW$yGbV|a&TYNq2!rwvmP8qe2FNmc zs~abH<()kndZEeOu7{&%cOQ&Z3Kp0Py&NdM(Uz%l@Lo44=HhAf&Pvn7gj<7ke&DvY z)6Bi_kJYNT0y{cb2+SU6_5LMBHzf+LNYz zS{ZQ9>wV|!TJm0&3rskR@p{$gmR}&aoz={gGrk5O3OWv+wkfIBQ>ye8SNm*JbD+sa zzg4OIAL^awnci+BHNdSs$J+w};I#tB3HaJ?WwQ^57(WG4?roWP%#AmVd3&Emn!gk@ zoUuA1$#6l`Sr*>;YAKz`1K`X@yo6^^V3;WJ5#~4|Zwq23%TB_ln0=jZ9wA^H zZH{!bdqQfPdK47y)pS2;_U_x+>Z{v%aKb_56l~R-;>JpMEGsFlX799APP>m)9UO$J zwrWYguE%QTrn9KsLSQb4K-tc<+~+e5lgQo?EUn&+KR8M64UN_dzVwFpFxfj}lWA`Y zV*<&Z{O5+Jj=hcAI(^*vFB>qU8#gv?DF~*YX#nI{-2_8w4~@aoG(LzI4lnYl)u$&!O%=JV1a`p>pEeclx0Pbsxzeh-*zuC_#$@IN0iILVcDnKF zddSNCe0^%7Q1s$O@!ou$UFHvR8{B^=0k!>$x$9=tpJ&9*ZyQj!{kr47jGci8 zYEB#eu!M#5tteMrV{dr|T6;vgCU?Wtq$7wT^5?*gkom8D4;k?;Y{QvN}@ENUA@f~*kda2wpb5{CF8#$ZmNtLmg^Rh*j8<~Z4z`=BaQN&4=MfuxqGBv z*NU4Eez5ru9oP2W{tJtfdg2P)+$IhoQO{jcG$?l~4-Wd~_`^3!Y`9W!bjLXIHyXE< z*6+iN2%m>Wvox=0Yk+i2RBtHdN47o_jmSLaDa;zaM5!Nb%|C}!aB7|mVFhme$&33E zS^6-3VmWyAp8sxS%vjrkS-w%FrVMEb)z+w#G-T?hCC}Gsfj)Tj$bazueOKDBg?hs# zu6unHC72vns#NWPg+>Lh&be{i+c#2F5qgd!O9F$~_W`T<_4Cc(lDGdyn15{Y?S6>j zWpeQ3lsnrtUwnEds-JW@_M4m>O-Ja^3Vqz;-TPnueTPs-`yiOrXv6Y*gAl$ zR~GyO9Hqg$9S#iABq>(0afavf%xCScfFZ3JbMjeyPtL3mlJ4D z_@Pc2LWi}!x!6+vnH`%*`|naDWj58SlxBsv+-|5ra_I?x>g;he0=0X4D~P->G#u%Z z<9WSj>F~wN*Exk%S$FC{q?*h`kE=6PAj$WFs)k))KyeFW%a-dZ9(`i7GRU%0pnUt&Hc~&wJYIw|wrGH`cQ7CA_mH_wr zHNKx;_$%CtYv%UFFqDc|giC*(+}x81lRsx7IV)?&v)gIVx4Ep{`ocAhW@S)FxXPTk zIqBP_psp8z@vWL6>w*;c@<=h~dLCN4J>??!LdeEt-^Z{QwfELsuKn`lo012%;K_g~ zYsjKnhqKrLIm&I|s6?9`;hFBJCx{6>SZ>OVjg$(h9Iq|7gFD7ANdQJiJCf>u=)L`w zc^l!I`Z?+eRfGiu9hvEim*a2k@Uo4D`Ee(mQ74VH8BGmrv0FQ#0 zg0zv0kK}N3i(>noFpQ8Wc0mJ0WwKJ!_476#R!jN#&b{TmTZ{B>`?R%o-I^F z8`HwU9XH4Q)^e;ihvBHkjI5%WExiO>1{f&YiZ;e1zaJ4cH$ms$gHA&%pJvN`n0S({ z0{MLFJjv0KUx{^l#2h-bNKYt;#VcMIEBVzwUVx%+s412r=?u+e?rMXVn_T_0t462E?jg&%4Si+#$*rcy{ zQu!wrb?0;HeV&Yrj;P+TE3QmS;5*KTqME01E1t&p;5ch3Af2ek9nlNgo8V)F5%9g| z6hJ=Jf&*yBPlfgM`LP~zS_SB$mC(XLIwLaMO6i*>H>b0CJS!B=CpRViPju-7|JHVk 
z|Nc}x#ATCagT`5De5E=0Z$3{VIi^NI;g$3d4B$Pk&M~_skjHe}WRx*zl07o5s&EM_%BE9+mj2P+GVfpf-J* zTjm@E?9RAk&dQ>mtz4UJQ}Y<^Ek(=;sF6VB40YA&)XRZBvT$S9rHYr+JV{$!OrPhI!$gPNC(CPt*SF8cNBD*~nxFl|#&(Em!b|iF@wL<$ANaEboTszY zz406#=)Yi4q<-WE8^$zy$sCdqSaZ0!CN7ScDqb!1VQt+Q!|ynwSyj(u3)T$Ay z$FL;`{az$3#y4BQ7~E)mh)d@-=&?We*+j?HEjQx$Zo^U5oJY4acZ??t&Y z%U!r+M$##k2a)$EMDr291 z{#a|`h$VDbyxANW|#Lg zg8Ad;(iX^?{1PCE@!M+oWRDZs`&R;GF|k?A!~s#ULdbdUjipT5WocE80EkD=WK$a0 zaG>!yLipQ%6P453Ulx(U)v=Ccy4nJiZI!EEgM)*&0OvVfQw1n3F+u)RJC1a`hKYD- z4v+6*qm4&OLEPnWI6&n zpXM0r!jW|uy5vWvZ7B%peVh0R0GL`43*;m8wBxeZXeJF~mbqoV24{R=RlK82|I!n5 z>K!dPePpH9GwVO$il2U?b(B!^ERFAJ3BVPR_@VX}@Je=%R=3pt;LL1R)Mj^0)dHM6 z11IYUDjYH)FsmOPBdmH^^@Z1Jdxmn~0;scF?==q6XkA$L)y;055fEEUcm;cPoK%I6SnaK zBrDJp=9+sB#W%&&!JdA(%Uqo?vpELDz!h6^*0Ac;AF!sQ%_{xgMS%yy$;w724J&*> zpjFRoi=nukh0(PcbYyXTXfHurIKCNL%W}sAPM%kJT}XU2(b_uMHtbB9{%ZvIkd{(w z_u!4$ObehdBp_x)?a}~~N_F7Hbz{l z=!&ARK$E9{V1hI7(_~V2%d*Ye=@)ce&1y@S$C&_j&n|-qvbB~D%g|v*z+X=iEPxJyan#HtJiT%bgL;=^zVUz&9qz3 z2M>kt`2m9x&q!AzeXnPyFUdsawoFcK=AX8!4=YwwpyS-3Ny{7U)^q|eQr^SUca zlg!Ivv2E(PQ%otiPe(m+1tb8;)l;#)y(RZfc37n{^>Ciz9ky^LglO~DU==dBHMz2NASpCZ12(C`(PcFgZrs&hele^LeO9mK(*|IoW}UonCDH+O`9I>H^|5dk&R_d?cND8 z3ux-?Sj{3@Hvqz5eQCRQmy;w#Ch&QXEqSivwV2uPkKB*|EEXmd< zfPV-fAxMkP1b&az{kS#xT67!40|*T^-_RGmH2jGZe{7=;PEiGBy$@1$6A)JkGg}5M zpxmYoT6JFohBt@b^TgN+tzK3f>>LvFxEYr{>PYf78-SOA`{~UwPKG-5=ng`VPMyD_ zNM; z^px0ro?6xUgCM1=r{hYU#^IL%AoI1!J^&WIM*xMhx48X4xWN51P|dLlo14+LXD?Jf zPk0jhu&#OZsw2SH#y*Wa+22d^uu@Di#4H%@lH5F8g&#HQv5)1Cp^ouJ+3}kLme1;H zNU^Px=>{8_QpsF>95FSE1R)sb@XIS)-BU!8yvepag9bM6Y9s%fb&2b8ZMl0JQ&VQn zE^~qo3y*l}02N75=qtY*Je@DX_Eh3HG{t|CRYyB5Kg;Dru|@*+Y@Em~c^N2LG9oVH z#RH*6&d7|O>Be_=Tz=QaQQFLkn=FqgG4(kj;aE}TmQ%B6h~QI%sNJiZwpHp=w^E)` z-ovoYqKKia8KCv}3L_eklNk$+Fg<#o{TYj7YZi8elx&PCCOJ3v$SeRvj3BuQ2cX}m zm^c8aI!X5b@5iucg@>N!`HrEJQ|UJ>Qb;-*YH-SGJg@I){OJ89+xqS-}}ByS~@qL)euatq%DS zcVE)6&aMZypZ`$c2K&wj?TiASO zd4)`cHb)(ZlZ3JXR8b4oNAB`#%VG3$iD;61ZF37DPt>{b0G4ypUvu08Tmd&=+Vr7y 
zd;UFOm3;5%mVyH!3p9_=BqPouM?24YOG!z8rL9ovfP<*Wd)E!cnYo80*nUk2#+=mw zYRJXu^Nuq>A37?J2j)wmGU3JPI{wyH^Rm^2&1$<4U%p}BNH@hXmoa3>C&HGB&;JEoXpe5j|9$to!bVEX z8hW-Y>iYQLYb=t=MYENt{kzgeA$g^T)Or@IWvDjKSguAeeIyhe9@n0Ngc*- za6bef9w+1!Q6d>7%V>cKuR6ek9*pU)LVy*WC&Iux3p?koNuETDSq>0_)YU-~$k(su zNaDRq%#y6uz%qleaMe!NYNrm>;$KBDYic?1g|3MfG`ia5$#R*xdj-tKZ=Aop=gr!l zOhV6=x}V3X_t_nD7ImOcdHm4UPs>iuH!aSNryV>N{T6JlrF&ZIWIqY4d%3(k3^X8r zsTs}=;zsa5z;#D7=zcs{{9qRtI<6`?EL046)Knn7g4?v*2h8c zAT!kzrc^m4rUE|aFqXYSJC|^`CPvn=4(dP|wFP2qfC&MZJiz>2cy!T;*G%`*v;j8t z8|@?;&dP>Kf#^$>9HN`Aq%A^XecT}m=nSkQ_Lcz$+7{~QNsh`{j#)%ZE2zCI6+iDA zJX)Kgk9+kVjH@3l4>MKM*S60Dp$+%kx;_D8*CQ8&xhN3rLr zY#J78qtlXbr{dRiWW4_7Q$hnY>v^omSx~Dc97hb={h#0fd)j4S{Uzs2i0o>vQ70^6 zRZI#H5C{Ho^@-CjN(bhw!NMgkf8$B>Iqg`&KKd0R>I%~4&xWrfGE9AP9l_ef;KINAoNs>9QGPc4er*1?(5BQnV4AT+Ewl6b)d}D^gJ{T!rYSb zE7N$Ou0@N{4HHSu2~*3VhX|arFXg1|QR= z8H>=?BbF76uS?xyY?<@{8D*%0_Z$WQTd!AbG>@-wWp>-$k9Tn+=x>`k1ZR)UHe}yo z;C;@cV=NrtSX^j*KJYl+%_hIA>(^3I8VI;>>?X;&M|omp-!FUESijT;qR^QUoZpOH z3K{;%%bxJoMkpuK7;r%~@1yT<$pFxkQiD6Xx9pMt>On}@6N()`T3g=l7+Uatz-3o*6 zIhtpM8480v-7#T+sKTp_S^YK$neKoGs=V++E^!`wtum8LE&#G*rSAekOu*3MK!5{P z9fxYUn~A9OH!f~rF0A0DwS8QK3tuME(||Zu#J{s7li4_jh!Oop=7^8Sq*o$Wm_X6= zK0B{YIV&V6Ny4Gw;iSRjneMlB7GiqgCw{5k`gSx-bm9<4dd{v3>%}OIZiWjnQW;G7 z_@c*S877rPBuZ|#R)KctE+2`$M268j4#Fhjq>VhBE;!y$IFZB!Hi7} zC@}3yu3bR5&^papR&+-AFx=A9TDI@olf7x1EmXN70+NzIQ;RbRVUr(l9AYYTOA~gE zxjZ2|Uw&*S;mbLX%eHU}yNS?+!9(QR$%a3nKR@<~#kaO*cwOktuw<4z2UxrtdfB8* zYpO-5&Dk2gg|X+@u*NwwK1^`Ve2yw`<~h$vyxf&D;sI}SC(y0gZm$beIjiPV>dTgl|vAVHF6Q z_54eRUq18E^mt#LZ8)*<)*KFjwHsuEVKmuM_;$YdN=nFQS7)AwZQbaz#?aV7T??51 zyM8LG;EuBE+30}(-DblOar;pTe0v@+^tK}bY$sTCtVo`3Po;J$y1K4*JiRxe{b4U< z)VRTu7SQE2OP1$RxLXdQZk|=icTa*W(7YeN;+1Af-<$$oic%dM@PdI|7(o72xm6Oz zQc+}B6UT6LMeT?d=zv%GixqyJeR~y5g(o*n$E&26X)TZPa_QeG(`HwlNi$zmUh1Li zyCK~fN&Ql{)9wVKI^H!Xxt@j%)=Rw2lO5LwO5ydqI$Nngt-hP0?~&Y$roAHQ7HZyE z!AE+YDpOm5J`Wx-Q*fQ~HM~xzdgDVY4y%@GB~)mTy+xY6c`hsnb`zkY2)7(2nO4#D zo_T{qwsi6FMT&|Od}?9^q3NKwNAd)WQ5(CkZ|w_vc(uxz5Y-(J#xSooY_cVL_47g= 
z?TihrW^V|%W+&iZ{d-_8lrX!8viM?A!Fzo#W~j}@ExlmcS7!y27uYl3by$qp?~oF@ z!2rlD1Q5+kqK+dUM~PH#Z3;H}5OZ8cbO8#^VgQ5s-BiKACZ2^Q@&H$9wE5Ew%2DoP zVGbJVegR2f{>`aH?%H$SNHsa}yylM8NBXY!UYmUf#tJ431Eze4G5~k`wlqsx2cLWF z_OXUEvvlq&-Y$D!n;t8@{~G!FPhJQI!T2;@UoB>*AsuFR8Bdtdw>>kP#ubBwH1{oQ z|5KZDFXL*XQ+(sw0qvdG^E`kP=-1=n>-bN4t&ifiwbH~Se>|^gsjzuzY(7cf>wJX> zwt^*5{m7sl{{oJL9b1Icj%@~UCxpmrk-`{gLWxJ(POxAJ(dzdz)pt$vYa zZO;j-kf3$}Yq%3Vi2G)PAtCYQ%^ zCD=7&MD+y-%*hS_iP|^{AaBAp@_E-b)^O|;=B_}pgY3KU3Vmvu9GmU_JtbbI`sI3X z(hK-Gn9-;nUZ=GpsKgnpBuKM(;qO5R3IF}{Ow*7_pm?4=@U>h8F74qy?s7m#s9Fy7 zMEJ?OiIY)KHuJ5j1p&q5?op0JEh>yUc^+T4boaoHH+- z8I#IXZ^Y`~j9y4BpMt%xs0@vR24WloboT5mWM{zlox9b@K1eaH5f4@<8q5^k0?3>m z?0mjQr?Ykv&k9NxpUOA#oL*xML8UFZI3d<^Jwn96_#qTk)s+H;R1NyS3f&Z7MWuw>o z@VL#Sn#9w`PJrKCJSJBL+&|9nr@|o4eR+0>1 zoIJA4zfd@`qn$@F77>)jB2n?$T2chZ26r9PJ^=qj1l~d?tI*$# z&18^=xUD?-5G=}aa=TiLA~5|?6`P2HhA5m(&3SWuW~MWBa8!#*1V5T@JsD$v<*?;C z(fktn>Ch3R%;X)8n&7Wd6hQ7i=bO177NgHPbf@X9=~DIp;`cmRA~^i%N2P6Tsikbw z>aWJ6ZKn*-tF8x0;noqV`Y;+d^nrS4j!!9mc_rJt!k4%F4fzpY`SkYV3VqLy+GykN zX6ut-?g|+8ZldU_H~Cbvw%08B;lw!L7&?O|2Lk_Vu!iZ z4t>gQR-gIr0T^w$iI&2=?A}Q`O`iPyddYMEb((VfDdI+Lqmxqh-G5qdzNxw9bMC`? 
zp=}xZyR;s|o_k73lUPZZxKJ8no4wq-n~LW1j_6+%ieyE8DS@u}x?spW%gd0Yc?s{92vUUwMJn z{_kegD^m^J&RbqQdF-&f6^Xq-kj`iQ*uaG4; z2X8i?GRl>C5`{h9VLM`rJF24U?8e$G(~sQusi`{dm@Ib#tL0f$wNJ5CDGd&qq)Px< zXzXQt4trsrGPsXUWB3<2tqB@0f(EAh6aXL7{0uRSKOV>&dYr~s525DcjlN&ft<<0@ z@`?CLbT|fP*iQtu1nWEb|3hCgJHhjWxOe3rMumS`HvHpd2vb~cVXQCsQSQTwKd!uc zlloTt-MQ3n`iuGExEV=DP6@e{b+P$w5O&q*6M<0ENZP8>(kk|-ckcYsiE8})>ZP+$ zyBzJF!OXuGuieirk<0Y8HNzTEM*4;ZawYF8Q6)Qe#`C4lAZndaA!`ro;>^vbLN$TA(s;QqxF^;LxJH zgsU$}iSs8-b~t#-pFHf@qIvEwAiHaWq*xaN-*kNkPfw9dDf76N-)CghU^y`iRSWc7 zVL(Cywv6T}*|mx5wujMlcm>r+Hy9NRhC!~HYo0GjyOou_eskA=o~=)zk|0Eo$OVqb zCTa2R>@v*Tg<*^;!d73Q?;XL+)Y>8%Wo%d zpvrM|K(L$}g9O+sd1eRUzeU)v!1fIzdQ{?)hNAU$Ts`$emV3X6UYaaYctV{Z_xkKxc5a%9d zyH_CXV`E#gJ{l49@d1WucM=J+C-Wd4%!teWlQy+{&-5=YEBzZGCF@j0ojzGO-=avZ zO)-a{7vypeKpD8AU*VR3j&ik>9Rn!Q63ze*xxgEWAPa?;9fS3cL7wyPv@S#xd9}Pi zv|kk5#o7Vq(M6iTfUgzH&~!1bNT@zzbGe?g9H{O>t8_fG4oxb^^`z?FC9DMyFpUnd z50e6Ei5TE*Thb>_Y4Wa{?JVTvx5`YG@j3G3vovtXQ60r7oB8+WfBFCJH?&6tYKl&7 zN9Ha;|0+*M{$+9AIq);%!Gp>t%S+gz6eEypO(B22#nZ;^}_N8cUD;&C#71O&REL zCburufiB9}b74iee-9i|xX<1iV!~CdzBWD=jgXV4vOuVWg?lyY4kG0}l8X?+HmzOx z@1;(e_juQ8;_B!uFB+D>3%LPR-9(uewU((SW!~v0f{$;m-=fCvkwwQ=*^_PgXVBg= z5Fo{J7MYEZ5ExDBM;NA9gS)rGuS7e999Mus9imGhwFWb_dJ*6t&^Y;ci0$J!W=hA$ zj#3sv?z`ffxWA8v#nwYbzquLg`)gSH$E$PDGBp`Xo@d&=ET4GnL!T^AU&FY^gm0 z9FeoWE?V`+o&z?P|8p9|WOMWNJATP8J8nX34{zPWHWqoMNkJ-WfN|Ekh`S<+Xil?- z+t_G^PR`d&)>JFaq`vf!!#BM)9eU-kkSv~}lsq|246RJ0dC?%`kTAGA5W7?2SWMI~ zxba(U+GL@sqZ6v4uhaP=$X7?*w~b4gA<~dTYlcSXOqN`FncjV}A#GDY zW-(}T4DG&K#Cti~m+M@8B?C1jz2NES5X+~6U}{22iNT6xPzrI<3Nbl9 z*`1?T`C-8P@dRyyWoYP#52#xgQgn1GB9|4m3<$fJJ^=ZMo&_k2%H{rEk#=@daPc}D zm|k`wpcUUj_W`zN;b%A%-86nUTdU$YzRrjb;Mcaasx>|A40rQ2{D!N6L47EoKmdLi z_2W_8&3u#`6Kzimk($-RfESEf zXZ33@e^u(y+d~GVMmiPzeB$b3P?gxON zo7Mf)><@%68LhG~55}AEf|wP9G^Uum7z>tigB+J_;Uj{Owfp3O1w8P6y?p&%-xp-M zZIg{nObTp8r<+JxMR(Q18^Zv9GBH#*gR3l2^H=Sg$I{E;uqC0<4HP)tr|7}Bf{|m1 zZ}t3l(oJp;j-U-JlJ^3wfnpNB&$6V5GWD|$R_Q%yI6?D^XHWIb>*cEvrkxE@be(m_ 
zGHmdTFVm5=16~Ac`QDd;af?)PtS_M-VDJJf4dQO-C|vM=pJ!aH4pX?isuK6BQ(h^a zQzUo|f##VGgZ~E#CgK~%mMuTmMrF64uythmSjk1FBA8qXZ1IEOT$xwEsrF>1(q5E< z5Kl&XP;D=*@UkPXTHhu-}uZo>d_mx+9Qwg@+KHe1gs;n zEwkZQnAWkgsz-l)?z4D@CC`BMyxr+APDkTi>zLz8JH49(+{~VorJkx#u z$Gfhp!&OpO6d_#YRF36Xj(2q-xuld0!zyPpOL88&uL>bt2q9ER%q)j(*laoHu;k2U zTPBmk*ociC`hD-;U;e{m-+e!a_v`R<(F`5Yw0$^)(ah-=UqBlTQ&h+Y#d@PrKI$FJ zJ(AZ*HG%StLKtigsFKOM0iTldoL*MIL;O99g6I*^je2He?xYCWOII(Hrt`8OPdm5(0!7=3} z)^=SQrlAzzy^k@U0O;<}0x~B#$txn#8;dVaDo@*-PEYIav65C-3f1Rt@eC-X*x-%W zJjd(>X-5k#L?eg7NUq2Wm2k3!cb9R-LxT3Ct}X2*F|KTwi)!lz z1`%qkl6CL!9Wpl>bwp~g!aUTH;UPp+eBV{3t(6(v2DNA3BxOg!13eAh;qVepAVZau z3AAw9x?@$+j875L9~Ms^TDmw6N+gxnCOGL;V?Mvye-9$#8M$&|jEnlXRMBXiwcFJv zuprb_EP3@*;j*f<3fd7hdLg$$Vq`X&V;;HybLn!UT|8ZzOWj1`CXi=@>TFkaVfM{Y zYnvKJc1Rt~w56nL8=}AJmlK0}x1xwvePySor+0;Lcu?R@$ym-jg_M`zS556w%1=GQ zTvulLHIPvu_nUr}-uO1=BA2ZE7-9@9@FgeIh-vxzH+v zX9935t0*ah50{4H?w#6}Y8E?W1x+eQ_qk^+J8TNVJ!VKn8a}lRXw>-ylw;eErp4qQ zGSU)!8!GSe{gwU;D1^xHoYASCu%0aZuw4)HArfX{cA$9d6+~*1rsNgnKKvjKyEnKP zk)3qT#}h}{?wP<$Z9AkQ&TtD<0!!WBF$A6!9%5kRzRKrjk&(hd0BQAe&oOI9i%g^w z247Rd@~~!V~xy*Td-o*sh6e z!2?37r~J2X6<3Ohh^=LMf)=r{DcTkyUvxjxvi3z3eCY&*L7{RN-dgLq^Vjm;0K}Q| z&BI4#LTz1!fD1)f?SNmPoZ=U}G52oFuZ^dU{r7oEiuqKNYkX#cVd3w&J~nSXxm){z z5ODBJ^bQK3jBVi8w+kgz*wSVp(!=g+am?++V$Slg^!UlZmOp;Kmu|G`^5!zQ`=th2 zwkJPDkDjCn7)3hN50O+Nfw|Ok>BWXo)0Q<7+Q2QEMveH^h-uR=U-=e5Z}kIwHX6pY zO<=x7n)p`s{3M(s=dt3*h|Rs%S2TKEarQ!;j)R?WtMR^PVIe%6fmfPYN~hh6wIW(v1p?)V1;2Dtms(L{b07S1M4*B7|MC&$7=PWZbc3Wq?RiVvrTzPYM&Aa6O!XLkPG9dDFV(HRwe$@ss1aX(!Zzxr+bN5UNA?CD&pK)VcAFTu56g&eNE;9~ zy%rk^mF6Ud05{j7hZ_VQ3opFgl)jtrI~<`|q=WnSY1$3ngJ*L7zWs7O-CM2%Pqucv z#TTSmyVoZ+E{I>wl~Y?p_MNkXrsc+K#SVZ})`SFD$I>m7>uA^g3opa8f|(a?#yedk zh-d{arp0XpSCiES&*uS8eZEnw${$KU(hSN|fFTAjkqs$3(hvho_z^fOh-bYuYn2%L z4R&%~vI*#@aAgNLv8Y5uF-Sa2*8I<;;0)(dZg9TWwY)*7fZ#y%1emS#O*5oK^oMi; zc-~v;Pq%?Y4$=wrl_CqXH0U-j`+Y1*)^gX`<(_|T$UB0_*~djQGH$rlKL^=9sw*av zp%Hwws(V$!7zaM;x#^548n%Km06VvnqeuaPo7V4|;S?k425?FO0S-8+&-`kIX+?Kb 
z?foUw+UstaxNwURNWZ65)a=e7UX3Ev7I12Tj3gBf9$ieYr8n=qwJSnb^eL;r(W(fT zw;=(PsNYRxOUFZj3T1(Tk{@n%MXfR68xu$hu(JnHQmXUUzlqeh5vj$}8Q?n`6%_(g zu}+{&G?yCzM5)vmCeRQRTKMe8i>cHeuI=vaX6+{Tg-bP+q z+5crVt*6>vlunfYhE$*5@gXTH_UJt`6TyPa-6}D2M)I>gzkYh|S>QfaDR-^Pp9U~7 z0%ys`_zB&Q|_oHNp6=C*MMtOMc0ZOIZN(zWeBkRLNh) za{d%S*8SY=M~g%CWv?bcXdKPOW^LDd@R*nB`^_jWEAi-SO1Pa+$8I%v6Li2$ zq<@qcXj9(;N!>&ECHVVUCk+#C=Tn0~D<67c4Z2N65D7IxAJIi=4-N6L9~Cs525PG> zkJ5|*(jH4y$&7c2k~%+-U(o6(DH z!A6)U=Szx_-lmxv*XUj@gJ%GHTm5$_!h|Dg^*Wl3%k=A=vV_fka-G_RnwR_Z-7F$N5nr5SrSo$?YPJQhL zBzP;J(7iEBVt_*3o%);XuP5+y_oo3x^+?t#mO%%hf>mPF@u|~%amx~riPJ`-@jHJL0StqP9zrw6MAj@7e4xdqe~vghxbP7p z7nfd#(ExlFT^oS^0F_R@RQX-!bw4OGV)yL63<4!-GEC$9fiEh&B#I%q7_TXKDA5aYb`|R@1D*2YTmrv5(4P${*IUfW+G^jCdOgmv^+SdA+3k^avn41Rx9%>u|>Z z()+t6PX=n)-+L?7ihTvNf%Qqj#Yp{;IllZ8P)t6is`yo|7sG3kCe{RSf+S_%2uo0Av0 z$11WIfaHOceVr}TLUAwfEw=v@W{7-4m?zcrg=Q5rhH%(}M{boZdxG}%3NAbMWO&pi z1A07;+xwztE}e6B-HYS=D7N^^mSwqE;mLe&X;j@3`Ls~xE50!xIjm08V#Kz1psrE6 zIkc(0slt&gGT?Mr8IO=fHcXoGm4T`ubtEOGrIBt8!VJF#CGAbE zO~9#Bbc$FBCqe+%$Uh-zg9^&b&mn_8`eP$d*Os|1&c$M^N2{YV5zqk5!U;sN@;PyY zq(&@9`P~4Rz-K=#$60j2gYNLFvwyHoA)SQ{Z)n2DQ`bfi2LBxmtIM4f-GEd-X3jdr znT?-E6pGw2lS=lLaGeVxgY0S7ZfG+q)6|Zpvt7hvrQaJBdm0w<=KIu->u*^hWS41j zk~YOQn94;%!?3l=$ZBuMRo$w}^OA;usC;Z z3zF7#-%t0$)SY&l>7|56(>%KqVt=4-;y2_?p^ivXD3~WCi=%jbooajezByQ=15dBR z*b){~44l%7H@ki(7hY>@Nl>ZwvTb%2z+%Xq&l&0Re!q-RqV#R zcMYsCP%Ek?mEA$baeP+JVEIwL7-;$E{HKUswM($U1+O~!i0jeB@-^r*y#PM2#^f@K zOD{hE`&{KhK%ybbp9TN`N|s0F?RYn35Nbz-XVp_k_P`0{h@=4UmN6CW zk`6{KdM{0Wnf0b$p<1`ajo)0hU_{ zf9_G1S^;PCi{h(%0J@vSI&7x?Oku>^0lyJj3wOTSI%yX@K-G-CZA`zaHa+3)4>Sjg zC}#JOo)VH&O(>X)!gd`^1`u;yuD^Vbwm?VRi|K!r&)h7zeAgeQZVAb?0N7 zf5q-VqSu*?)(SdD^KMl=t6F_^`s7CIW$`%!n{s1(F|=?*>E80h17tRySD3pzkJ1E= z$-tYm_k-jmP_|S4Ukmk)-Q{BR@`v%;V-PS+eU{_9hN8$u(2(k?W5<0&j;AN@G|%p^7uDUU$x$JXGU6TBM9*lm>IoxhsL|31B7r)gwR*NrVY z{)CH4_x)3t#x^<}TrNX`e4Q3+@QK~SifPu+Bmc0^)@c^N0w(Ii;6bU^uN52k^3C?U z8f2JKzVFG?96Bu?16GheS;cK@e#$?5)K&h4(RVyl=9%^aRv+aciC%hS&VO7%B@R<> 
z!EKd;+$;Da_hUfHU->5w-&?Wcd9Mr~i?zBrg7gy^A?2ss95d^H4x_{xj2p;&aO+v znJRx1mcnN~&9;t@P4l#Ce$%o4ip-ftZs1K5!bP(MnjNAu+a$=J}H{-jGg}q2CCg%zRF$& z7=B3@^lV9*5qBZ+Z(nCPj<2jCn-NwaXmmOE`8Qo%2TGRk_4j+RWo2hDJ4+76U{rU~ z$k|TmO_x^O$hpv*?_XiQCKXyCw+r>Swa+JrXct)ZH`Y~=?Xuc&WpMX1GaxOw;`-KC zY)(?noIUK<(iFSC`mQl=fc|HZ4kdkHJXz!5gHc8MEXPwWg{k_a+wYyL?Z09i?+_Hg z7IM>2qYp zPr<%2=%241-7)9RwY18m<%>H^lwwl76#}Y|>vs_Gh{%HNU>_wT7)gMbVt`W8(CjGKr$Neb_ zTMtmk;mapfUxHI&=jVfZvCrb|#AgI*YP|OP>|pezl+VLAM(>eYRVIA6KUVzZul@7v zsy6qr6P~-N)~?a{zfydCBnX&a&Xp?;znKxjZtx_@g!C;m_TaPwsF(3tw2wp>fv6Nl zYLzygF_+#pdf_r55G%fP8@kx{%L%1B<+!(!Pxh^C;h}4~RTaSlmdx!`lPTl{xTB1l zv$OsB+YuypvEoNiN7AWr$~+(@ha3x|YMv2Yee!wl-MIL$Ypt>!qIO`i?t4iT-#SiK z3?@jU7n;W1PsD^nnOA^@C+RXKT0zjXRNjp0%arM88*0mA!P+iC+W$IGJZJwDq3zPP zSKnj)+ix*EP7BbV=q&8uYkX8rO=(6X)AD=p#h6+sBV-dZ&<8^Z-UYZblzTCoCa8GI z$dJ#Wh7Q*onLw*mWm44ho)L7hn~Nm<=tzPk0#%;$PJJ~&bVqzac#wCyo2klAN@iz* zecJfHGJ`?U@vNCg@26JzeJ0+$XpR#H4&5?#+~_41ljgT;h3hbpGt#Go-JEoYxBX8< z#5>xIcL@0ekOWkPPYrr1TjV-yDn+lsS>~)Z++xF0{12(hL{zDIr$7<3>y!eKIOtu{ zdi?p4=EkkcfF1Co=M88l0ldG;+JM;E}_j)6TIWXwbyu4*gSdO|Nx7(H1=%kfs3Th_;4Ch?JJ77%4 zzVU>;KBM=S@Lo;Fd*(`;;&b@I7w?tB@VZAknO#87+O#{vp!?rXm`#9G$vH%aF)3dQ zp?d>?m@(Vdrb91U{UODm18c@Ri^|n56CrX(5RAA}*28$!4{%~$dd4sB%rcGBC z%E;ly^LpP`HXN$m^BEFu*>JK$5-9oC_N2?4-BU}o{I{V@)z+pwm&X$QN2;DtDlg|n z)_T7(sLEWByL%vzC^gN1!awc=SQcUahCmOEgsxvQfWjU!a3cc->P4~r@#%I*sNAf^|uEj|QFzo13#6$B^|AFxx? 
z*4yw_sOEv)DxqVb-O7_I2{KlC&>xtqFg-I&r3xh?oT6vWp%DSVixM8ARo!J1(^L2b z*o*s{i3%HaZ9ysehSm~2mz<>4+$Ojv5qSBBofHSNrJj9acdlF49Pj*gKCaMS61TMx zO$6ChfAGHa(f8op)7fmtQ@$Zuv9~}kLdXT=Vw>gua7vU4dO*g;uiMs)kYbbq`gi6# z8T858wPw1p^NVB>B4boI3~k3dS+}6t8V%sWppUx;lD8bE<`PO6q4qOI&!S*h+g*F< za{or%g`a!IE`l!p+%tno$z2>WCN^E4C7Oh9>R#idxC2M6(@Wj}uhIMZ0?npyg|)Go z-7Q?{$ugX8Ys@mnYVH3~wSQc0i8_h+3PW}kS*46)+t3p2qUMN;N zQITW!_4*Knrz>8N)Z3cHu5>_wBQZry{I?Lp&Q=g&JC7~hD!uC6vccVgTWV1*%%B}k zuv2_=J^b!JKChGT9PNJ%<*@xynpmVRTmdgui%-aRdKqFx5YN6u|1Q^7o_3BlCPJ=$ zM;lTKI*SK(wRyOb`u1narFk(q~iDfJWmSG9Vi9pnnS6@o>_F^F)%6Rk#j5W>(p3-GZCj^)b97= z%@K3JG><7_al!pf-Z$$0-e6ljn*l=E>mn8tTWJ@9Jr)X8qWA%)<>U7o+wOhkUKe%H z7(IQAT=qryq!Oi#?Ek=#D534(m;5Xrlu)Dd{o!9~!zELN#OU>HPt zqYwFHOXHe@@x7H4vp`a@9lqg@4D7pMF;?n`mUj4>yZ#~_B`f6JoUBamaw91_yrl%^ z+$~l~rsLgx$oK2Y?vy|l0y9xeERH0@22y{*e!iTQx?C{rY36(hh%~l1{tdtR27c7> z81YDlY)c2dOqi)Auwfky5{8iDZZ03&^Qh=WC`V_Runr*16dG0 ztH-(B&O3cBoD|{r5bI|hb0#3KtIhU?P@td6=`!Nw#w60j*uKMi&sxS6R}WRWjO9%^ z-+9L_D*Gt};WaT}o%LURz z)s_0irhR@3hjEw%^j*43)|O0^F=|eU|CkC3LcRCB@#Ry^fo)vdV4JxQ^KbZ}dItLE z9vcfI;BFtk8PZ_v%}m6+1BC?UYTX}TMS%fkMgj~a>#UtGD=p(ld#zsAcP?Mj$bOWm z&7~I#!9akaZGg2phG;GkF>HQxParUF@8od9o8!3i1*)}mbb=l{n?olecw$^e9@fnw z#K_sm$4tDRJ!(R-3W7xe1p>6S*99gQAqzWz*h@0A4%$>=qgX8Onv7}-mFwr!w zIB1ZjHKm<=FL|Ad?DqssAi`|>f&Cv1ALO`fq7+3g8<@-QVUx;-3-Dfobol9FIQ2yR zc9#WXhwMXDZn{S4Wa*hf8giZcDL5|nBvt(-8gbZpK@Se6F7$8oNhSjTZ_xun@wJ7x z`g>O>Hlvb4n28~)1OVNN=!GJ*Cnbm)XXIPvQ@$XO$(jC4=AWY3zSZRGboAm$?1x03!3rqJ1S2ZoE2KLm&OIH$saqbz zaPfm0TI9mFSNB4SqCX8Wp+z_p4MAdcZGsU^T?1uYD~Nv{0R~M7rEO{T^kkr3dkJfR zfx$MX@^toM`Wn*%{BX@Z6KLsj-$?@gW9ZtrIr&H{X+qwc4?;g^qM;3lUOh*qW5Pug zVju&Tw1GAn8enyK6&K+4fIdt{5zX4#;012J&XgffTX)H9CXNgg?Faz!?#5&?<{RET zEeH;OL#IQ~rcJ#o2MV=0JMI=neAFQVl_)9haL4}cr8jmXyNNJ}5SqSy&egtJJE)ju z@MER_u(Qsz^+K_o;U!z>mca&E4<(~#j1|BNg=6pPm?lE7wen@CNuR~}wl05nXiUo1 zzeC{l^Kasu?bnZ{O3#Dv`~%NcO}Rik(=3ncwOzFx6#R`&989B(>VW8iH>pCf zWO>sy8-zOg$nF_S=G~~_`WA6r=vsf@4pETcbD;u|G9o?QX*DLZiJa$a`xUGp*{_Uu 
zcLIIVv-HZaYg1azkv6DQyC%LVf#e6vSHJ|+{5a(+!z;`t{lW{S_d%7T;DlY^sI7RMZe*d^vW|?nHURwil=Y}EqU|2ZBPL1TmtLQA3Yj!!%`*V*vNx6KQ zLOWU?dxO~#gNh=KccE)<1h&X@%mLkI2EZ{!&4H=Ym5VyZ4)X9a%T4a5mG7{t5|y9d zfvlAkSJ%ed+_sfbvDiwl={e`Sz5w(6ObrvbXk*`1S1^$N!rx5Hr1_Zs+F1i&s!baQ zE9jDgs0)L8bsPGX#YE&%CD3(+qs9{ZCYk3)SY1pVzHMq<2$2ApS+aa?ULJs0H^31J z!>Q7#0;+Vv5eTMqy0X!!MW#^%HJBN>xCuK`Cxs7cuF{c!>H)8%gE#|I00yHlChxTlAwhWTKgNM2hT4qGSv5E7VO!W z)K&uripl{CD*)q{FbSGZq3xir^mW9C4!xda@*)|R#;cJ=!0RbT=^8G|~2!e+P^q1CPoSqiI)F3Xk zCCpx}*0XP$A55QLfKQ^L^2s20Erl5%)m<_}q}r;DM~xun)@7$>7*|i!4>}N5X7B49Y;s4{xPbmlHg+y*wl2ZEIij52mPj7q+QAz>fP$pL1rS~AE?()ej_ZB} zeZJkYOzpIjHKjdn-}c#_V1h=MJk_0^4DdXotlYBN{kKp&#oVz^JQb@YoVrkJ-h;K| z9E1qs8DHJ^ar`Q-OZD*lq2%i|VLVX^z(vLYDTl|ejXF>##iqQZC-WsXz~FqXDVc99Nee}i%-PN-9WRmXU^E2xT;!85&v1t#mqMJ6S;b@2fk$Fg-4ZP;rR ztcz_BXSg)7I@Dlsb~QGD+2qC4!;179^#IY23zS)r$wCM)l>S}v#`YXi2`D?%VXL3| zj*qX;T}h}CcJ4BndMm?PsEOIxew8@xsf2t#+7KO($TAke1JM0C!Z2&+nk994)3o|9 z1k&m~WJzF7z{pQ8Ljwbw+k7Z`Njq}?Yst$Blp5f2$6!e+MddeIi}$rTNES$SXk);~ zNr}SaP|o0vuYC72E4SSqOd3?u62}<6wCSyLgEMBP@jl&t9#dO->&@-s!#X8#iRaxz zAcu)F6yR=2j`WNHuB|u%D>jGiy7dRzVu-?ovzP#aeZ7`rN>K{krXkz%WMCsF#D8Aa zJaz5Rr|5n{_`)aXc9K`I(ewD{Q$y0zqo*S*#3aw|PlFO}Eo=8ge9L=W>Ko~SpL?pc z5tjeHI-SHqo2|_k^xYtiD}Z3hPy7$ZgnL#>Xcm$mbo90|{pO^#31%i1(Zw_wyDL(u zUCyYi#SP{3HgsuqCqM)VMKQXc0ORKlC7q(K5A?hb%ZJ=VOX_KiBzgov>SYq{0a#V8 zAf(!qJLCf^gfeTP%3QY6a$9E4;`X}~0$72!^k87a1<|JT4Ja`xd<5T+EC5amyV$9e zSG*R>e%}i(*gx85b78#aB|>LkLon9tsV;Kwhq2pMwfoxmUXs$>x(79QU_w%e~QR3EE8o zhR>UPhNvoWIX${V4Rz)J4@Bk!(7n9W(&mUVl_(kSrRbyKLjb|drtSicG}2E%6mfuy z1c^GRp0~kew8D0{ci*>Mx{Ri+SG7TW65vssNDHkuJxK5IMSRop&CM(iZV zsoX8?X-YJ2^25s)z0qywjy69Ssywn)&A;l@oIS78$x?+N?Z(R82>i&U7_Ui8KtZDA zVwdfow?h{1ei||Ex;WFaxyLTH!J5+RdL(27=m)sFyZ+2D{JQvZjzcjI*LmjWo_nDS z{@1)NZY+O#lN3l}MWH8}B`P;uRbwx28J&tn-E1BsThhNDteCmpmb~1r{twbAGA2^< z>}6O~!uPk#hBm#+=a?UV{kf-Acqz|WrMETu94q(5sZu<#=9azHBcj;yt!^jk7TRpo zXdT5flwOT;l@SuZhPP^!|JPK>+G&n)8`Si~Wo&hIE(GXST|Nmnv~bd%rk$Lr%qa{j zl)E-Gc&Kv}gzd(vdy9|p_o|&&K5SVrd)W*f0*{~kMECUHBn=x79O_2(;68LuRGL@& 
z(QeCyI;kY}`b6N|svy(DTNw4EH{w}FL+*?P0u^tcnFMkVz#f!T>myc?8U`E! zyrRo=>SnXc{ZVJ@j7?p_OS*gEf(Qr$06W_W)I-j6(btDw!gROEV{mu{z6@9_a`-O~ z-PuI0W$KQ!J#@k)!j~4RcGuzn*N9bY9vj6q16m|(Z8IE3`Cr=)u~uoeC1(lgG#gjw zp|Y^CYpaz{PKPT!yOlhBH9o%=T^m|pKIRddYuEg@5*Ne0D{^)~Z%E$Y8_`#Z2&xzw zvw&5WlyfzBl%B-<2@sp>(j{A!?-nuVJ=T4Cwqz3*(hB`dD>yRwjI!u0nvIpy1U9Nf zbNtZ?jPBpo*b+#}(sl`8j46Q~mApMDoSDaY9tTC(F^3pPT{g&G0= zxzf5e^~h&$TxHYpEw7Kb%)kgkEBZ)#c1dh5SKmw~wIsAsp!{%f(M3kj;e(@QuIWSl z9BcuFr#k5AD^F-D>1@mIt(o&M#Dbz@wMnA}YzZA;LJejMlI{bRh)U?1m|jbMXGT$< zt~+L1Wl)i{oPdUS78N^qObPOK2612nps5VI5;g94rC=fxLZzyFoyXzWT#lB=dXkRJ zZbIwUHUbkd0}UWm0dxeQ=PM31ynErhyBqJzvhs}E?s8Jk^#V003m;B7 zW&Ytit-K3!^=4^U(bHQMFKMSujN2}zWv#R37m}D0nsyC^z5Vs1jPl3D$;4}(ujK*9 zV}Eo04s<}&t|;Ad1@kX$jjwM~Xi; z@nyQ?zbkX+nW6aeCQFgm>&E&Ip-l_8Km~vpP~v~}`ew>7>YcOztNTg6G=l6!;N8Fe zbu>kFwGwn20TiWVSzt9hy*Tgksu7~|4=&;Ns^?XMOEH0-s*v?_dMYMHqPmznZAZoO z3G76sPHJacr4`MT0BfB~11^bejc01PfFk_*e>}cTL%TVphl4B4>#P#jcAcG!eabJi zbUuwHVx-ZQ<4sve&}b zOlR&mo)|a&8~*y5+Qqr72<`Fb>lgp?^w$Ib>9bvYD|GHIzCLUJ-Wud&KI7MddyFV> zH3}M6%UH4wXakA=n_GyF zmwUF30m?Z-&WzdtA<^jnsXOS?06DXt)&~+kO6SZ7Er7jMQgIlqMT+LztKBNv)1_bD-VZVw zTc|@SQ9oE)b?2|lhjt#zSGsI*ws$o0$?_!LB|qzFs3m}rsb9s8Y;W40!DVv~33gv( zny(k1oiF4t#4ZwG5+7d;Ko?`@{^E&d59B?>(o~@ffK=(#zA-O7ND%#Jmf>QvoodtK zJ}c*O_tyx?^Z;;Sk=l3x>W$p`JoTHa){75UR?2Z{$1XD{!G9iLJomq=ur(YieO9RQ zAk*@OkmfpEX0#Cmovhh3(4=r@P&u1tmd{l#kN94F_<3>-%elge%10$r*2_yI%e#Zy zL6U|j=n3&9et9P%W0#p-r!MF6j(HiumVq{ZCV=!bAl?ucopl4{lQfR{Y`^kO%-^_D zz?rufEo4;Fd(a_d?FPjQWq}!g;^#L0RUv@fmoE8q^6DC&>co@hO6wv%uY#tJ%EdYx*4OGsX=7^_eE_dVb|r8HousCrg}L;d*z9iYkrE z?LN7wEJPDf#;%69nl|I-w;xF=PLm2$hGzslRLz99F?e1n{qzog4Rmbo#k_O}`@vRG z+M{MM0>mxEx7poZsgMwLU$aHkA?hZ2zC1%mX&+jxZyAMRU-nyXMaupwz11 z2g9L5c<>*>hbOfOvlLP$WPyf}OfUoh#7|ge%tvVqMqmPvxC*=Ym7<4?v+sSWRu4DB z>dJkLzAjAbZhJkVUhlgnXHx&p=@c!>F+LyqM(?!CW>X8tDJC;BKK$~fJi3-+I#_j zSRLz#Fk01|=~?$E(Qt;&Y0LVoW$#1JCohXxJpvR>%C%M9Kl5}KGc?lPPANPNG4MQA z83;7=0VU%bXh+eoP;rn5N7+Oa9ydHcq@a68Qm%WwpF!zb+iK;0eIR^pfyrsMKJw;}$q?~vf=9Romre|AvJkj~Idm!aKW5j$qm;Kj< 
zCuz(jLeCVlu@(qz12S?__5U3o0%FJ9gvs+pDP42kQd=OQo8aliW~!X5OiLO$!wso~ z5;b&dYnu`ofISusF0d$ku)0~~brC;^pU8`ON$GA(*aF82ib4cLQ9F$pi;^0dkpT883)V)5uIG~w)gxCsI zhb+;=cK2QM`eF_h-436>{>OFe4})zUIP;UUS1nG`992!W4GZToErDJi#b>9dR=Gn# zUB%}TeEuPvZW+G}C6>>r0E@u&+|BJM;U$F%oo9i({JvMfSVV2}1{H=Q|0n}XpNQ?c z2n*QRhNKINLTmWN`+DPrYwykT!j1DPZn(iCwbsq)jf|#KzOIEgA{Hu+kKfJ16<-pSKoLqnN;m2)}PZN5Yu78k^x%A8Tc%6 zdhE}KomNL@x;m-%^Og#vCan{)FY+KBB`$~dmfbw%P-oXnyY`pQ512j#Kq4^#29g$e(n(r>Y)5gB5=Qr8e3H28WG~SBM%3I2r?y9}Q8f-Y z6lz8-P5#`&TUk7gVqKfHbf95!Z(;;)Ai!Qm{TvY3(DRW1We@5(>qPIY+Y_a~sUn?vI>2lbnl zZ`X1?t9PQCbj7vA=w1xY9Yk@>t1jrJM&3a^loG=QBOYr7Mk%CU2Se|Ekjv7s#Hie; zASUmX4k7Ay=}xYWws&3}w>`-F%U7mpm}jRJ@HpsP zGKvYOZi0TuS-Y74Jr>fn^>&aU30xD3ptv_Qv{*?)PUAeP0;Fr-w}0;WZkpre5o&nb zxwR{Cy~(=`c*p)rV1#a}dgZ@c)B!Xh>`H^Uza-FObxQTNQr7$BJ0+wlgK$<7)A_H0 z^iLjZqxbMDkKD4`zL)Qw(C5|v>Pmnm&L{$F*vHV)&!S72&HbBLCTQj79^^<=O;<&C zI1VDz|N1{iUGR+$N5#*86*6mdqU>SSIkWyLgmCYulMQ8!BCiy@n@uTY>tayT`>l zys0_6w0pc+0j)aK+BE@I6R1Z$)b+8gsYmH~-tn=E&4>D6#Cr#A=rvxVruhS_m*-HY zVjD&5?2dp&qR$N!4#t!p!|5eNM42F|W>ueOG*TQ9MVN0;gQBARiokjV;CAdNPo<46 zCmop*|576i_fhaD7C1Yny(SYp6$e9%;6*Wxo8i!AqoPw80{>8{5qsBj@u}w{Ueg=5 zs&h`bXKl#OE|2^DP71CWCF!}H9Q{&)*-=`l3-$&{%>ED|;e(AqoTh0)j?H=37 za-*bFJo$S`+7}|hB|J+lMK?rR^q6PVsuP#m&Kmm^%b7OhaBbTe)65`z~kQ+Wc zKral_BM5-KV+H7Fojc{kHfAF;10pk=oSZ(pw8ndStx#7+)Ze&Ota27qfEVM*a(rN6 z$UI7}ODDsuZ?Ya~QO&q|4YM8p7#>BBOJr}QG|WAoL$c29h@g~uM0?fqgc z|NeWVt#HKU`i z0fn>XlTadX+0cU{)^JD=qL9ZtaofY%8Bw%sX67kl`y*PsYE_%4HpaI85%VD>122;iG=1mhD+znmE8wNAE1 zy70g~>c=3-}bpX3fUu4f5Ldf>YGQ_Li2LQ)Rwr=hTahF1;YP;=eL;ZgZ89a+bh4Ef0qRUv+k9 zH!N;nBg^)~NCo7YZ%~>2wQc_6$4=KY*E)u6>jW} z&k3w1+v=!?d6x;FTLNZBkr)YUn}9?UIbK0CTjx%vNbdbx)fHzOW_icbhO(VGN~6#E01Ie1<4m3CUQ?D0T5Te;-WE=G&xeh_`Io)& zHRw6%-tW!rOAZFCIxxg*jc&M0zXWP>in8uZG;$}{!8X=kI05_xCw)D6JtGE<5*59+ zLt%urzKON#JzC`p^8%;7Z5lIF3?rWzH^i*2t|>kw491w~C*7}Iqsy3B2$D7?F$L{M zw}+JN{;`%tNH8|k_WaTwP62IVVg4H3FU9B*GCxEUu@eMW=OgJc;0V6swy)&1^b~7a zn97c-e@-AgfXmh)?MJo9BzGU8pL0nMV006J7kua_TG)dh56Wh4J&CF{i#lltsUl;U 
zAyCMbfn{zukr#V7$PD)#3dC%gx0<#|N&)L{%^Xk#YEZ~>57*6(d(8N6yeKYgxq za^i}wvuD3KfLtv6COX|Uv*5+-pW{^3=iabFv{SG}a~X4`aTqcylKl*)_x)-j!HxU@ z__alqc|-0w6$qy88okYx^uOauLiEvIXM|!1oX6_P^9*6l6tLvA2yMjDuMQ$7-XwnH zjP}}D0cJq2R6O&LBv^V0f2-5VYvyoS-uxIsJuQ;#bS4&9EaD$vJCA-v9 zSBvIC*?!K~yo}r{D@U`9!4WrhE&NxUK6wl1?%n_Z?k(~iN7|G{v~xl_JgYk>jvklR8(Y$$jnc4gk;S(HHx6z0CU zC2Kq29IP+y1qE{zuFXSWQ4AciWzxxZ(}}nyvFAm8U0j`;W7w{(&!u)?Px=PaUNlI^ zuz!x!7s?qTT}+ZObd{UJmr9>-(8E#ZMxaVDwn z0kx-FJ|ud1PhEWU)EN9V{){2?HvrcfALD)qd^2aJ+`&k?l9! z^vcUO6TPyOh;7)INqsUK^xuW+FM`gouH7(#AsVk!!zaJ*IC|u{v^4o_#~iG_s;6rK z_+XThd;q@A04ok9GEo0b7+`HbFJpH!SozF~yvvC$$oCCj>hTz=YH3g349&woE5K9v zgwIGLfhx#P3SCM%gVYt~e}#EQ87U$y$>lEKjl4Ufx9acG#a5)28TS$dA5PyyMZcGR zCL00P6|Yh0x!~>G@Qe zC)h(TW+86R^FVk3G?N=%-94x^Wc;evv|&78e`S4T@ytqiX1wnT?3H2ayrXcCH};pX z@GVlEz#9vNwoIAUV8ZiUv)GKn(COJ(J#XaXrXvT!XflN_G&bh|bv_lWkhZW?7v9!T z^l)``{z9FswVR6$GuJ)`@+f0NOuoM+-MTBZa~!d@h(hdpCAj=9{e;MX^L7dvl%f!# zLCA8-_b*^FD)ah53>*X@BGO3zLav@2HMp0@@jpiU+;E+kPY(3>xrYhz8I-IO+bEya zPWRnyxJ13)a`NHr=mjsWD1QdtiP)}>vvchIoE&zi`nq*Pf+4FMDpYU?|38k-#32du z|KsgwZC!SmrIo8Z%5&vad3~#;si_?kLsV9#ri5hP7pzq}HFasql+=})B_eqMDheyF z6wN%)1cAV$JUA^5Ire+@`v=T~=XpNI`~7;?@u_Mvqa*%_#p=Z+g|qyix||l257q@- zBpU|?JPnj1p`R$t>Jz;e%zKx)VV^aolDHxPn*VPCUl(brO418AQ4D z6z>6If8I_}gVv}AGhv+U0{YV;Ck^DFd zT$SBqYL!C$v*%Y&&))xIo8_6Y-HNZFcOq=%1@$UC_q$TT)X&q6EWyX&vfbj{$s(wW zcSmg&6(o~jHM9t%hV;3->gZ*+)4iu4j}S}*GsD%hWRhHm+^YJ01%93N`9#sXF ze|iV^=hL^sXTw#Ppw!(#_lL)QQ2W)TQQ4NknYqO zJ9??%(KS}yaYUuDc+{(6o1dfdrxU&hW}zX6$yz#B*vcOhmh-ExHuP6_Li~9ydf?k$ z3c>c^*f7eANsIQgLta}~`Ig18*`jX7#~Q(pz8<&E`Y+=n1z`B*rNrzgYUeK<{u5`Hw%HWv zBw~stX!VN5RqH9M|J}nd)zdd8ctZFVSp4@v!x0%jn5~%2PWaMLoTz)XtRZ?|2N0~r zYGM7x&D91IzyG_Y8k@2F*zAj+A9qLummo^E6zBOQxrfa4o(O+#=KOXM;_!+mlXc;zn9?4Krz*4Q3RGrZnbAItcPrd5av8&kvugjt@1~{QQ#eR60N)`t}{uA+B z4nbw#Aer;xpFoFk+CJ(p56Wv%-3JxVItVGJ`(HKE#5vj1Yv$qfA#gMG8WK)7ydpSf%&_7?uw3S)-#lN5iu_$t}aq$%0JWe{7Q5*!l*#}luDJGJvv@=k_o-Dr}ZZmp?zbM zj|usH5rdxVu*fx+g-y|IyJh-!(@m&fd}3X;TUJ_$Ux_%RfgulgH`_^P^nTLnLJ47a 
zM#CeBE{g!E(9pO0I4=xU&tw2KZ**-cP9fJdcO})gaV>SVZvHom8ib2&8!^Kr>*jM*L5Pn!JKn{& z$g5p`Zmnl5K1D&o@Kg;i1AJYRyGQju(4Qb^`*lr@gr9l7f)uT zMEI7_*A(dzzEiOFY!?-C0G6!bb-`_eF!{=A7bWIiL*>9d)Xqo@Wq~}n5%00IvoN_m zY?z*wNw<%inq{omfqpcq%^c}>e#=7gv@@klTI1@)E~V!}%fVc#g&HgbVOV|VVVs$B zqfZw@Z2#)0FxS`&4YPw_!l)84K9jzzrJ@y!(ns?tk%TUK{p!|1!@W>^Ow#6h$DLDC z#`Tr!8^h1J{JUn!ZZ{5vH6GByXkrO2Dbgn%iQ3x`s)}0VL?Nd)e8`+O%}B(-#6ZC< z*3z9$>dkV7GaWG6ony<5x)aTv7Ll4kMD*Zit)i4qf@{rtW!f^Q$hbOQ@Ah`$iE8x7R z(eM4IaD|*`OupIU(1(gbnL#LAZY&tOR^=aP5W^)Gjs+0hcVZ{LfFWG=_TmmgM(VE< znT&5sl()p!B}QUmIrRpDGS8x70a*R%>7*y{ST~`cFagHU{fbGLncS9*iO2E^1D>;@lUR^3QL}Dv!`&1neKn9EaNmY$JDG%- zcj`IQ+*A`ai$7?c#bLvAWpHd#A~XBIW|3oCYs=@ch3=|ai|I?Jy(b(goj#7;uA(}6 z(mbIT7erHn$(CkBKO|-=8z7{P?9;#QZ|0JlyDs1*D^P4Q^8roF!|90F1SE)D;aif% zg5R_wu54>OlsDd(>)Sdf@kN|RHRH1Nj85`EH;h^Qp% zrf{cd%^5<|l!m_K+@R%YoF$SVWJuV)l?#Zy!LZJbd-Nn5Y?(;Y9~A`D4PgHl1?O`p zx5k~a@5Co7a|y3CbrE|;tQuoSIz7AP;z&3cENAE%XqS)S@#&WGpP#<|j5FCeUf)T| z7FV%~P1IpNDo84d!>7!W83mq3uh2fr?oQAs;)jqFrU{h;&& z4P8Z`0HU{-0I*>Q{pzBV1#c8yNgodWQ+Mj;zp@(UE#f!NWG>Y4apbL zhK!U#f%jx%3f0xxhvpPCO9lL!|6(&2a}%+@Cl#9@Od{l1-13Of!4Z>^KA^;cU$9$= z$!$^;YMFLQN3HQkj#fg_SYorIePHb ztLw-QF%iX^#?Jz5U4osc`9GYH1e4wo_O*yNXP^5dt(1|Pa4NU!MQ^E5=ihH!GeN{! 
zA36@vEVC6!#oK0*Odj#i}@w;9tktkbiobW*k62;dqxp0XofBZski&?RCFWZfo z@3tTQ)XS?(2R1ezFY~Tx?1{SpdJ`Hce<-TVWT2qfR5MRg>b}THnQF~RxGI0nROIx2 zbY}WY>!nD3Z%%K;Frov37%UI=FNCPkOFnyW!Sief7pa)|cM^O{jK^1ik`&ylx#ZXU zqFAnSRC!=M8%n8F1 zkO*s%onLvqe^O97+c5I%VqO^B?ID3>v-*@J?D=QaqO#`tPE=YvxRO|V@84R^AX<1< zc$EO(98f`7fL?9IGq*gbtL7B*3(~0H!%7vy*HAYANw)vohUZt?x4ixnwm^YM7q{>n zZHC8K?7;-L_yMZ2Brc3vTim^KF1Ssa`2!(51awnN6)glpX~A!7^AAJR8qyuWO1p&G zCya_x2L(MXNh}DhZN9ACuX?^2KbLjIzU$;%(MC+&>8CS}1x|eh<88?b$Rh13tI@@# zVMN|~YWys_yn;TFT3>o-lH%n3L{7s}f-=%+3l3`Ky0S~sl*5UKJR9S`JF$5zFClSC zQff^e7b*$q#hsAfQ^am2`~u6D6~><`jt;9!>#5YPtO7L( z!;!e*on12#v9kjCG+EcMDmrkFJ_B}xbPl|t!!IkLAAKk(Z+O#pJ0aJyIS0EY%Y!B8 zl^bOWp46}%r6}vl7X*t^H1^sJQy)9>^eDsBis=v48e@#e@eJJso!C7l}okX6Vf-Sy18kMt&>o z*9?57?}XEi6*mXIZNeouzdet!3;_~>*;Z~_23FsJzu1>Hr_0%aQ&imn`Hz_(=n52&t7GJ!s5k|({B8gWwEYrA5rm|_A&wPb!sZN(G!gdZ0W@AFJN z?7}SW$kquYd2VN%rbL}K6}97_8f+oIMZ&c?@Wc6Y=q)cTo*#!`l@Rd0Q3z_!gNhq9 z|5R@}daR+n!lBn`ir`hVShM-(>4y)C#!dHXf05?gMs6;bArybLwVf5tN)h2l5xaM~ z^OBF*>^#03tz?k!!%6n+QN;miL|KPb1 zJ-}<*S<@M2{l34h)*(dqIDk68fKGHJtMe$EOk75kWIR%DODzLk{aWDYbC4k$-;T&I zuaZMRY^~DR;GNSC<`f)iwS&zFD$>xd;=O_O2|4*JK6-}`NYn2&HVpDYuoV5w2fYRQ z%RhSKtF56Mk36z1yI8P0;nrmR_op1r#~t-Wl!9oMn6RfLQu)$}X{}+;%Q9#AX^?P# z$K$8}&?G?$$+nqy{d7^Y-)Pk-HLQlzqOn_k04n6_s1${cR|C;o)R0*f8PJU^3- z$q`S#q3`;4O*AIYb1jN4m{(aW1(M3iFPlPY{OY+bx+5KMtp02EHBbV#1NPu>^Iyr{ zM0 z48&7<(Q~FGwBmSIipBx4vykptxw*2Ica$j0;+a=aC7VHZO(Ffd&ocI(=)RPFW)p;K zNMlojg>({)SZ9L^s=Zxa@n#nktB1MFu*I(%NpkPy0nMQ$Q`00LrvhK`+|SY~oYm7- zqSK>dmr6NhMuZg)I`eP|Xt9&JR&R*va{ER)`Vye!+?9|(uu9KG4PaBL;S=SxAU!ZQ*i z5oSj3@!j2Y#n5{bCOP!^WVPP}GQ^6N;L1k0R+zSe#pmcBfD&LyR-uU3qeEndiYMgs zYw708uB7iVQ{HCnEVA6Hu_^_7@Dvr02n*?_Jb+~Vxy06$C5K|&2ZIryJpbS zK%Bo`U4qO34NP@1D9Saap<`_rqZo(l)BHI#lZjm~>(ny9F>51f*Y;k1v|-rl^xA-* zru@x5-35FqQ9vyM@7|9YiwSxrIZ<3z=;P}zf2Q?V3@mqN zmdGUQQcd~F-|WtO{LRByh3UEQ3U{N8xB2>%;wE%}n#{az2*qi}F-E}s_@1`~kpHVa z*2<>yk_?PvfY8Y8WKAe9%*Xvp9T^v4b2KfF`zz!@inR|_S)dukB_P%K1yRC3+(Xi( 
zH2FJ{?!`h{eebZRsMc@AUc2rVKp=!JgM7Rk-DcUf%m^j21FQe$-!<>134WUGre2kY z>MTuIN6`v$_mm{EsW~RZ{^82wyA0VoRMf-BK>y+vl1Nz9q->p%TzZ+gAI#W>bbs;u zD@OaOtW|fO1v>Tk+4hD^Rk@Au(xB+R70f?$s@ADr3(nE+SJ8v|9dDi7Q~Xs}SvK_X zn4KNH)e&HRGIAlTy#;3Z8fsFEkp#TKU1Yd`h|jcsPOI4~8fF+1x~Oh_dRuyWJ8z@v zj?XmhU}{P4>QoJKZQ-M-^st)HSuaC{y2O_nsfGz7IblVH*i)6eP8;|S)x>!Cxt;;| zgNd0CrLV|@-wr=qbs)*|qPBIDSA8P^~qqV-cZg#Qd^rp%D4GwW^JuwpgR*Q_T zX^ZONi*b&;lzC_!N3WO)iq%fh34KVQ6zMGnJG0WMjeS+|Xn9Yd(c0W3jz5VQq z0njBjH)47MWN_`)z`0_7M*!PBmTJ%oN$?s^>!js2#1s!6bhuLf1X0T&(Uicq+Aw`u z%7wFDxmS>?bHrVS(J5`tj~impp{-Ux!fkzh4(bf*_QZT8{uA*B1osHs0Crm7p3!@v z9$XUp_ky+O9G|Mi%YncoSUk-Ta&52NJpZVa3`RF;Pk(u$9XSuvh{nlo=lHJDhmY+>Zr+2jh;v{%oZ@Gyja5o z8}v1dGY?^0F#E4qETF=cq%nTcdjk}`Gt(Qj1_ zuUtUYob=&UM(XY=fqXC$gb0J-g_y`;H4r*zN)B;}yH;?2W!;14!lMrxoYj{C7EeHZ zW)?ZT;HV}v^AFk5p+R}W!{%G5c`fOe@;eL2Lflv^QM}?4lbDPKci8X0Wgh!|7k{$N zJ@6cJxjIf1`;ufv2||uFv~9C_OOXL8XgPgGjjhx@!`uj*=~3WRx23IcSGSQFK- zeP@6p(hL5D8h-R$0qt`H0tvq4{hpU!gTLL{JpA&JBOglc<6wjXT%9N@k=(E>l}mQF zd;h_4DyX-yO*)ZWsxA&6-wde;o^PRE3azl_zE*e|=*PKhR6l%+{!s;$aku z5W{<-BR1GGK6)z_Y7ac63)66ZV{IOAk1T?G%$Pc9KWcWyL~|Co;(ZoozHX z1XK0zKVVnKDFD^EJl@*4@i}>$pI?0A^kpcjbF1BOPGGw4TC|vw;y`14ArT zuR)J_xQuB%W1f6(YhU(;^J{M04L5RuVRAq1sz|>lYn~{qHj018JwV)0CpydtU-dmY zzqHu4@LUj&-8dIBRu^dCwWZW?;=&kov62v1u*hUnCs!R_>GyWEZs9^z$$q6ddN-lQ zV-^?0yk5c6mE*xH zU_IK7`RaRqtnWoc+4)n2E=jPs+>UTc- zGP%-st``&Wy9KJ0=0jke;V1zaGAuh^6V}5|`%rlHvwDMbf7z6dP@SUt%%JMOfjAkk zYeF@Hz*_!qrz0k|y^1&3Jon71z02k~qw!ZZyN=}t zKeNPJ(N6T-Fs>rSoWwXhIzSS45LKZcC4jN*@uPI_m+tyL4C_)4WZEVFB>hyP!k*HSh z_?u&t4MLlIWiU$FQ2J5ySn#Pa&XH%Ry*l1>V@T3D;e^`U6aQ_#k`{@8aFm!F9jj-O z4yygT#@T--@(eS-mU`7FlG!#>AxTp?Haur0OA+kbK)2U|*=uaKZ(6@!cw^-4o>SF2 zbUnnE6~{kL{n8b&cbpeoYPakCo{BtZwq0fLziR>-UIl1A)$OWdy}1}T;%aZ&sX8Ow zk9nVSQ#H|3y}8Tb+2V7JMcQ;l!D!x>wCJ`j)FxE%$*&&4YG0l59ca7#+tNX>+x}Pea5`u(es{~QlZfle4Ef%V0L3<#ow~5xk1c1pe3mvWXU`g zN!9xA)td{lq|?1pRgQjlX%A0?FpC?-7q^dOS_V5X61tP{Bb19ROYH! 
z*464Q7J>S3w~9eX_4AeQ&kvN=|JJztIm0!%2pqkm;$t7W9+Z>uUM;6Ucuu!6Sd9O; zJAC_AE7fze8VJ!Q=H;Z`uaYuGwYl}DvlAX$g*37cY+HcNOU4amM zXTEJX@gn%4Wx(cb`7JRg(oc!ZM|L!cMv^j8)1vGx-dLY7eEfZVtN%+vN>^Jf$K2Xn zSXAp44Ql8I&*RklB&E{9_DUy z#gqPb|0K`O=O62tMIa*g8daHP97>}{7Tqn(m*WjgW2vV=14>?{dzylr%TWQ5W!j@R zm!{lJtbdX7%6Kv)3ZGUV`0Ui{iG`!*T=D)2FLtM3 z7n*5yq&1WJl-0<_+Njn{sP@7nKmWPu>5FKwi)zyra@myUjYaV^F`-_NxOIFj4tpA_W@ z-ZBnqK7;1}t z!r0gQV53hxC$)iT2+&DkZTXfXz)QzGNq_%1e4p_{W6Ny4#V2KYE#(4>HKW4#O%jQA zwGYaU@pB=^cbDU08}LXZFPYY}ws((vn5COj2E*HM{d$RpaQ_CrC z&(}Ua{d!Mf`)ibwu7UpkfoHSjZJHx`l)G7q)o#b|H6g$kJ1@${&u5iincQFWl50-g z8Tg|2gAMt`?AKZpDG*0W1>uCQ`V9$E43L2i6y4>1JMD&;yMzZ3a-BN545)l^~#lk-_(F!S6?JgDI#}n<`7T zISaMSlKk?62 zYNvLz%|;tFQ7*PXV$2aWlugU~b4sI6TucdH5(co4LsOL#S-TYy@b$>8ZiyTwxOqy! zV0vxHrV+9qYH#dUUxSCKYA2Qv?rFd5qH#pOsowGhzD4T^z7;+5zAduq%e~z*=-r@d{~jl;E(A` ze?2XlKSIIQHwhtXDA-zwuIfjsrSbG$3Cl=e@P}=Z43%Iqoyx zC%i&WrKe%KSL~Je`s4ij-U|QFb1Un|>kloBE*JZd8Wg_-J1JO-3=SuKo8TPn_u{gB z_f1#M1f(lt>wMBBNavU~oxyt}5mBH4`O7W^Hyr(qURLd7l1y_v4Bl!FBNFOjU3@l@ zSF8U(k0NbuhQ7Lz48v|*zQ3jRa=|au&9J#?k-Ka_@mJUH@Z1pAVqm1JU(M-Fjp^#3Q|qGZkm6sGjV`L9`;C-k`}a?3 z&yJr-F`RIGYHIbV?DpO#r#IYbe+_AMi%#GdtzCL>*@MEb-tlB+)~T$yhC4Dpf;xy^ z^eOG4+m@oiC7JHdE)N!JFa04q)|s?(b7^>N?2cK&laiYSxcpY68L5RrWm;#7QH8x% ze`<)OxS5nu!1iAeAF$vap7%!5JTS&oEJ6o50*+>>K6&{Cp*j(-A~lLV^INWKh0G)sS1s9COl}qGzF_i9>^F zD}h3^h+Bi{Leh+`Pmm@uw^R`PT@onqBW%?A{|rJA7|;F_MVN}8v9f4 z>gQamA>L808U}`;!K4}uJw-!$S`nHaLa%J}F`vXNc70-_h8Q}qV3pWwoKk#>d@9D% zXuLf`WVX1zVColG)-ZnlHP4>Q(FxfdH1Xn{kmfFFQ+CCh(lkU?O1!lVJF ziz(bljA0FOc!UxMOnn@G-|m#5s(e~?K^y$GTf>oqQwJ+teZJVb1Qg13W$q+7BZpRc*A+tjFiI571#e=GykMT_el>g3*>`P4DR!} z3gFC`J~!py&g{CBMHV*3#{3Az5q!n1)JmA1kG~BCgH`t|URC!sO*7Z9) zCPBeKUfm+&_^dch=I^Mswsw}32PboQToe{H#EwNq^qt76Y#DK4_6>24iaLwt*#})1 zBp^ZIb@H_Su;)+-5 z>MKFPK|mr^JKM+rpfx^N|5;_bCtQG(i;7)iGIdo+omOpmEt!pYt*6!x^G~eRH2y?p z{?qF1S`_A=7_a#v%m2X`$gNx^*QgLAA{M&Hr&Dm$TVzoKU8U5VV+Qy*Ero@%SSIRX zlMJ}?;l|IBDkli5kzmT@bbw_N6)?jH!RE}ZwtVdJxl#JoQ*W(Xoq0-_uA0x5>sVgX 
zjmu&)QfY60OcWH!9!BzP@+nHp(FXA>71Y26+@{ON9oy(lbf>5HtewwKVkjTFa(~SF zW6XvZSNx^v5{g?;{v>R;VVGyn(7NJM7?8RGO{e${I}r7v_MRWuw0&og6#=alX53Bn7O0sOY`7XO6Ne zlL+D)aY&nQu{+rixpR3}I};oCAxguu($Z6I#B~M)om_o%9k89IGK&)a=39Oa(g+2UQk-OuTro)tMO?;}Qp_>ZdtROiJPrrYyr^ z=TeG#YLQ{^|NVNa{tb^07ZV~xrgjw;U&hdJl6rq<-jfjP-P=NEmNtDe!wwG(Q#axs zV-b!Rhob{PR6udS7%(W3pzbz*~le2*2V@5eBj7X|yn)bBN74 zHQJLN+N{Ec(Elvo5YpEWYF6#?*AQVnxWGmm&*k2_9HHa*U{=7O`$ zERBjn&6OY=^C|OiMJYb|(&cBCD=QrP5;^4;yP#PD>c@#Icl56Gi=a-hY5 zEE6aQ`wiyGC^z#FOaV&2Qt%x4)G3 zubf-bPyu5=odHxS3~)_@U|LjJnPAm#S5$B|OK#}T#^fP^fcU>K8|vj>;x4fz6H@jE zNPcp>ekYSPH3&IF1*4FmJJS)lw<5NYeX~F83x_jO2Jie{5eOt)uHAgp8LYqq= zZqFkJ7|L~|B!?!_voPO^i(YlOh{n7+Tw;E6IcA0fIFe$D63-P_)RdnZ;&t?V`FG8; z^r-gi?kAs%^u)MH0AA!LSs%QmDif(wZ$lLLy{WH%ll4o9p+hBur*+fci%pemr zzAe@+jIjEiZ{Cl7n|hLFu^9YA%_kOfB8I!E^?Lcdza5urVqy)uZW!W+%dN?cy3U70 zMHQ*gv9e7rL9#&6agBc}Ltt>1_NF2{AAx#5ViMZrg)qt3S;CmK$Vb?e_`8(b#5ZPO zQW8fFdXqxBKlakBR2QVC{#ZeMZ_#!i1>>&9%(!&i+v0I5l|J|870Wq@d=DU(mKBUW zu?@5cP^#&5AP>CK#87!v!>39VC>R{)jqsBSv`tqFmTK2e?I)q;u^`I6B zKIN&d1O;AKL;G55RPBT7p^#Tc^Bv(^e4GK?8cUDqsNy1^gy=$ruB#8~+=e9kn3X7F zX`vX~)aU^W+gp&*kMvkMIo^bOsW*t0*R^KrE@pfJM5{P)unMNnnq!;l5}Ozb2&h3L zGJn<8==Usnc#iAWr?M|i@B(OeUakh_Xf2Ztz+?9Xe0tgebb^$^={N6aA%mS@uH8?? 
zi4v-}-OBVh&VS()(;ufKK;rKmn?W?Pgk(0Mv7=0H0V|G7wLyl)tkl?^0weGrm^c(Qp@SWpsX?*$& zycb!&RFOo`Es^)0wHpvE)|P*lP5aDH^J=?qSBPj$TBnIv;~ADH*q z-DAcj@cC?vwR>pT)P(+y2 zKUI1ZlGhg;_DJnWj0>s%IMalme!k*|{-?V2qayUrPx>9qeX?*DO8canUy{v5FfV2t z-x}h{8C1d$aX~;eoIJ_mi6W##gdJf63@rHV7bszphO4dXewcvCh@3?yPA~nJ zWr$>ocHX#8Ftgviu`1S12Pz;n43v;5LU>tQqf$?B zM3MI|)ajpga~d!&s1-xnd;e|tx;6I0W0VIS29d* zmdh!e-tgZweFopfl>LN|ajmpv%9}oBM>y=)JZgCak_s(HPMv`etNvXR0pP_=rneO< z+B4E*dO{wVEee;SZ7rHt<4{AKC)x`w!H23h zXw$6iwHlRABwLB0x-cMd+Jxr;2lc~mC5iROMoRh%3Y0%5@kZ_`i7U}lL8?=4YZhyD z_Dx}AIsc>A*7MH979F=ulT$7Vq4lIX^ta8KZn=siWoB__gb}`L=hAe zTa>T2NOj;(KCf5J4V>6~+pca~?N0svu0pmb-;%L%<{(<`KEzux0#Xh{+@i|HOc1#fMf-mZZlQ7tA|D2YKO$>VX?#RT9Fjwep0hw|iGa!7NuusxD zV0NEJlfQGk&r1p_N5`phz;qx^S7Ryr)(E}tZP$^%7eZTz^LB`B0dw?@_zGQZB!Zy7l{%*U>7(OJ=&D?k;obTcFH|}Hp6Y>& zC=lPSE838`Np;oHFXMq3)|t1(zA-v)h<81*!q#Kl8DYVhZ=v8wH2FYBW}iMv@b8)$ zyWpNs7v1BW0q^Zrm|kK2{gknneS@~f0Y^)y)cZ}7f!Tc8=J#DyCB*Xaw0mho0YTdy zia#jkmCt!F4w+5uZiCmJSf@3_DIO<)vo7yA?tdKm_y86)mkyZ_?2dTi`2O4LBooCv=YtX_d{UQl{Bh^T7DmA6Y*lCI) zI07h&Q1&e1X5bYKsT&fR2C^K+9l7ukvXh!j8PSFlHJ+KrRJS!3QpdFJQeg%|;~HED z7R(id&&la_#dMd|cY^Dz7?!|b3sgQBj+ z^oH+Ejl9dK&+p^&Pi+nUcqdU<*?Fj)!(FgUvJSH-UuC=p12FhD{q7L~=3bR;JG0F@ zcYJ^S;)K_gRQ+cUIK%OlHP}e&BSAEJta^H`=pX3kzb*w7TBpsY(c8WhIrl|PiXzJC zb3jWzjuyJJp(&YwEQ7)^^@ge6_?zod*?c=BQ^4UEZ^o zcD%^bW40{Z9BBx2u8H3>y>xG-7G>v7=U#>wqNt!L+_|W*|IO1d(f*G9rxGxXrtl9( zA^gkc&bJn;Mc1H)U7wOEV%)tjUIt2uFi|vtjN(bz!}QPTm)Ib=h&5Lf+t?S=E|emow|j0r>~gOysC>^Pph!obwjhrqwP2eV*2IocK-4 zfP%Qy2StW+(~!n&?*l}ztAMPS0T7frLJg%sDs7ZU27D`xOGAGZjW~}NRdf?9Cq$R^ zx868})_=46(5u7l9T$NL?zU16`*||@cV67o%I;u^gSFj^83c@h zP$S`4grx@@zw1gxx)rJS;)$@(G+Q&=HKe9I#HYe55=o1Q1Ix&0AwE6pO&t>NwE_zZ z+XbcAHa5Rze6*W~I{?!?gH#Jb2FcT`_w5NAZZ|YL5ifSrj#H_LEh#OZCD%j|D~-vR z`+a@mPt}JNh1FRUbtxfHgZOgd9%=s7|Hh8|QW$iGO#vf_S3^`xx024P2^xPq@uA~H z>7$ag%ueF#1}X);atj3AtI#Wu>=Hp`#ChiX*3Lo`m%=xGI5_#tMt==>O^cGwfKLPX z<%4xZb##bvPOg*51x2Zp(1=pZn3p~#m5e}w!kQb}*vaBqHkl&DA8lfNAzpMcMoon2 
zW778RlQI8FPIlgs-I{?&Ept(e^dJEjENcDaO+x)%hJ5<4%z1Ja1gM`w+FD+p=%-wG zk&DYPlq%Z5M!USZgepOrDyNaPsutlqdRV`u1NOWusGE z(;W)$XXAV3MzgmsCO0215$ql2-?iF!pSy1=;3JZLBF^*>N8a#fH-fsRqR+=?X?Zaw z0~C!NMjI|qsUEzyUz-^qbtXuaecR5i?tC2e!pEtJ*%1WZ`z`o5-3R2JTYz(5swl_G zdjvUmx=w;R%>)0wdZ&5!h^|2JgcIWZhp?bD@|fe%^4*axelE+B#+lu*hR_k{5N>J)3_j+7vbVRe0#T^f!lT4@z1t zw)$n&J?So?be=;nYc2%cZJeUVt0gE|hQKbz5iTvtJS#JAg>}-!+LK9pZUVzi(lwAV z0r@~D{kukV#&R%Cb2fF{F9}HLZ3V8Q)(A`rBCg5g7Z}2#@Z|*xGAPeUGqREe1pk20 z5#2`vMA79&yoOmc9|XKC960mhYk7L>{lzQ%!?w7)Ft~HM2#9%hpRCWTtUhzxTp9Y= zN!2|UxE-+-lnmO_Q{t|Lpdggf(qJ%yLyeol>44fOou&sJRF-}cZm4(7G@8~r%t`hV zI6@pA###*C`pye$TzCIZ%AeCV02%Ib#$v%FPGSqCA%}Um6B{E$5fQJeLNEEvw^)X^! zH7vYrx!jO(+*dX(E9yQ*O?-H%pp+s?_}H9Q<+(lFeyfX9QVoi}(4j1?D#3?^*5wp~ zvl5wmf%b0cyHDC~hUti0$H9{@Gi+*7bZiAy5qgui7Nr(pb)rEY=eajtKShCmW&Rf zD%THYwH&r@#KUBY>xx8@L z;QFdet{1r8yIx)%gh?aOphjZREZttpEO6lU~j3BmLqz}h`fblK6(E{-0LJb zcW~XR&G;F44>g4oD2VaQS7GPtJY%2K{NNEj-QAt2MT=6ZuFWT>w%FjoXVbOv^XqSU zYhG7ErqjN1gkuwaT|0P0eq7U#lF1aY61NUSD0CsPzlzV6I+7kht2h;N`Vg~>%)ZNP zi33v8Fg#4+A5P8%Kd2i;;g%mj{WiaUD_&F>xP0%G1Mw!EYiuMrY90q}4z`6v!C!R6uKLPB@?ziZ6JnDWNi(72G!u^V5$o*LYf z9AoA?mbRcMjnn|ji-CBCh6FYYWjXCD-4i#T&DAo8{@{u-$_4;WV`ah&Qy&THtqkhBKeGO`zPo_Z%F}K1P@_eAhUPHy-IAiz9U(x%w3lVe&%NGiSe% ztD1n(JRXV=Y=9PLFq@)Zm<7t&d4(3 zdQg|lqq-*c=;A8OiPU?}jX`k4#mtIrzE7R;rNBFbpMP;`#b{#m6Y@_4zU@K9UD-#) zxuSeLjJO*f2DF+0auk2iN)!plp`^M+IeR4ITuDGDBFy7f$n`R(2f=2ge-h=MmC@H% znQxvq*jB&We~+S9#-`zdwIq!1T+6U|dMY&NY3^1QMD!v;j!g~Dl1FlpLr5!Khk?{#jI|k6c7nFL=lll_K%d}V@!0E-V~P*6nL*1 zWODwUs_{|Xi+;`xU8+E8UC2hk(;%)f@s>^g%2^<47qPXz%-jh6Ue`eb=LJ7YnV%e! 
zA|`~7XL{DbIH$L(_wNh?7p7jqRii^1_8h;L*!cWeAEDT9I)8#QI z_@;LQ4fOmQ_M|Z1>wiJRVklc1cP5-2qF*nWpJju64g*zM6UMzY?V@pCycn+CEcGu0 zG{57QG;1T|NXW76!-X-nA08x&iV~|}4p~zV%FD}XNRCg@g~I5HcLU#IqEXJS&pfBB zk!%wm0OV%tLA?dp3>?#E!EEE57}G_o;!yxjE&-(I1t*;l`m9)DUfHFmcmm_{MtxSaIvh@CYND{}+9 zR-=LB=_HKdlJW68`O^7{j3pSyShzb_WG@^z*ofWuGR(w0|HwtE-w`!_9R1ZyroYN4 zI-~y6L~8VC7xPI+dplK%?CUKa*d6q{<@GP$_y41L^y4OM)W&5(o<^<-z-)=3M@?;; zQtT42Kg@=ZhL4CeRN%wt09PwEA;{L{mELHaZ<+9R_S%67x zJv&TTaTcOF@gWddib=ssH}{aOE#&-YM2bJE0g|!(*hnpIVr<`6(scvjhj4=EKitA( zUc!mceP$&Ad53K?k0vhsn77->;(xnBNB`KcXy0ib#TVXK{F}Vrb zL$8++(rmjrtT#7cx8C2qyYdQEU>p{c9|9v_4Nyf6bcJb=!(l{7jJV8X3ANIH&bL+zEha zfsx}7@r2 zEDIG;S@RA8Q}dPx+nSa-b!y7gsVi?OB6$H)5LVt&B=dqMh@^(52B;*8T=qNt{?$xx)YnA zOK82FZiFZrEQP=HZCC!xSNiNu!?DZU1q;?|W?)wV3rL=MjPRW}*;Zn1VD4LR=wS@T z-uv;yr6jIC+JBuNJsAre?8S5&!1MBhq4X$O_WN3f>wFIN5ZjEebC1m%$B}FyiinUV zAje*9g9`573b8rTjIPU$Ml8}ZU)n`aYGD$&v2dNni_{wMM#Vu7ib}QPK z10ww7uKqw_%90ZRw6lzaEW5j;cXL<~EscCifXH8FL5=d02S?iMz@cc4K@=weT0iaC z!1Nw+Y1W{KCo2 zF_VsO%diUJbNzE_O8bDe(PyN80j&%Gfecf^ARUJ~-~GZR8vk#%C^(r8*k2`1wIQvx zRFNbzRe-DEs+c=ixY2|F#Rm_Bc@-e z*(q1v~a04Gj`B4%;+kH?~}A)C&O*~vkq_!V#`Fp7_VtIMa*XEKSz6lSOp=^+h9ldf@a_Oi*^TB6w78}sh+n7IgkmV93nac zSBU@s29#h6^-Fg`^GIJjw0ux_zO~V&=`z_O4oFa>LN3iiK*w&?LeZ+mdE@Af z@JXG7WxZd#HJ6vQsgxs@_>qIrWJ8Wv>4y=}If&94K8ea^cxlQlXNpr5m(c#QaC~e+ zFp18mdoCy~T1csQIN{$?*vBNM99TKe$94Vi{FBn^b@KdwJK#stSRb<4cG(V=TG3M-a%W(=N`es9NuB@*gJ2Un{a0!_8 z>zxt=xsSE#EbrN+!Ana@%nWBBu z!-_n7o))<)i{_;VQQ_6!@f7d#;{%*MCtv!{J`Etb{ z=3uozqhW

0%}NpL`h@8Fz&)?NlFxD^7Ne?|k)E>xVit* zXd|0wQDv=ilLWj7C%6Z@9f$zdFoKjMy;XL)mcddhWfREj$nUc+bG(zi zWbaglBzB9a2a$Qqb}KdU4f~$qw{`~UxZ`sM-tG~vsE4KgyHc>mDjSJ^q(G7SosuO< z`$YN808uktC6iDVMK@U;uiTL2wD-w=@K99QULpmCfD3Jc`(}J!M+(dUb!DQ!7ANWr z-Kf=0iKv8b?5=c%RP0W zcKuE=>GW7(NYQVNwGnNbcVg02p{{?&oc3}OAfVMVLM+yZM|e{&+Tna;6+SP{q`&bw zG3h(X41457up&lL0fSbbVwIE!<}?9EXKwESDJ6fkN>oEs`Ga`0-+H=;0XhmmS^BtT zN)OmKpD*zJ_t=>=_rkAcvp`S_%UmBfx6XVCZ~tgo+%85a9JzwK30wzx{Qf&DnRt z{M9{+`eTt~?#aV-Jjs{TFNM}g<`+|r7FIEfi`aE41Uf%Sb;T{_wJM3eN+06N3CSvU z(i6N1OzlE9TqWYJ^DLbm()Iq^@o=jU9cf^*4)!X_ge#myYt5O0@LYO2f9;4B-erqg zgFT>*;8{NUv#ILG)yDqnf7zhA<6LqZ)lXL4uy^PNNwK(3XFaNVJBu?q4@2wO6V`NJ zXC&L!_Obxbt=0iwvlt(VR8?NibBLppCRaOEe5db=dS-!+NiXk!ucwZ*%G*9_6SY+l zQ4ex%=A`B%7)Hjto*ES=4^9c|9yQ5p*yX{n(($e`J$r%l%Nq1m z3iHgW>eAS^i!uA>gL#px(qi^fF1-kRG^%O=p+FHUN0)uPup3V1HRFky*TWupV#a1H z%o?9OZ}IFXEpZ_mq4{JZAVLYvnk7sP_ulD!;GjKC^nG>A-LEXwmo~j``<+qQ*)T8> zj4dSmiz+1JO!dmPd!r~**#UI#cQZmv{xbgfS~d78zS=_Uypb=D@YHpT!~7m#ABF$W z&^sO!bDDJtaZzy=#=NlxhkrK%zJ2ums9SHhlFZXjm`;n5HAnnCpa7P1Npz>B8NWB> z)AMWJPi|1|ZKuz)U_blGyTT)e5=Ac>ny$oFu3P`*if-g|B&Q2`E3S}L-}}zGKhy9A z6LuBn<-T_%j<9=R^7|zogPBTy?ijijWY|e{h(UMcS7-8nNrGn{i0!0oZ0#RB_wi2^3OlTilYvn}JQzLKcFo%KNq!1ldY|4o@4nfLF^vrB|`k{^102B&2nNyX*AcjvnX^M(``4feOi5ByR z6CfX0&6l@hOSaY4lZ052;O?BR#Dr}6pzL?PTZWNFaxWuxQE6@LADOo-MGz%6o?_bT z$=hb4@55C@o&Ey zzcuzjm9~*au}$D;Yik0-B7xd3ro`9OWl&IB3dbz>GkDe?G_Jt@`oU0g7Q>)q(iuZ^3YX!ODD^1h`(XV6aE!e-pgK!J_B%b2lA)Os6by{X`Bl zaMV3FAoh=$;A|4mJM+M-sL~*2A%kk}l=CV;GKB$-j9#?d=fB9vetY8GB3RHl^{mq4`EOz!$bo^&wnj-YeWw~~Z4H>xTT z9CqE;`6`jl7$-Ar)=o}^Rhs6%j1U?ZVxTb&S6Z!j*e|Io1nJ*oOomI@U#e5Lf;?iX;@+8!R-ixPJ_V{Aaa$9 zv1R{^QNL<#R+n1c%D5%45nSQupJ!VSeO$EeTf=;$wmGb>zl5d=J@sQHX_95e20gx_ zX(aa>#yu#laaDD# zTZ5W+<=y;_^=>oiO4S??5>woHmWf%?6K$Pin1^SkFvM$E%SY23nmr#_9YnLBkeEEQ zmN^Y1_}}6WF@L|1X4s?ucvE+O%b6`gQ0~9sMqjs{zUn2+V+4H4u&Q!NHb)KIKoN!; z6o^$^c6PF3*X1Jxx0vZj@3ZKLXuUb_5K`jNB6I-=9$(-CvHYeG(FL1v9gTHPdt>Q0 zx)}2GNv6-qYb`nNTg;q^dv=!-hw`z14vu-ugkjbdlJzOLcuDphRka&`c*3&G8&X{H 
zet{ZY09ub0^&sHprzPN;n!s)w>ikvR7F_bvdUrP;hZz=|r;?nwyDl}3G>b}E+tx&* zbRJeZc})M1g&i;YBzI>bol?}bF{O;YxHJQi;?JOT6ch0#tv@gaz-# zK$_}6O38may4~6rf1y~iUdceAgRlC8+!Pw#YB6TXdOf*S$`lbbVv0-0(=t&vy(qml znR@`Ml`il}bnW(IXIlc@KW|+(x*`JDC>#XhRtXy51^p4S8@<=-5qr!I9hyJYt}&@T z?pe8?2(O|By(HeM?@!p7_5K8vO>Ya&lAR1Rp%)sEjM69k3zA;2&8@kNExLd1L`&WB zqn8F|-GmDKtDx?hw#%HOas9imE1Sl=PV@=r9su~q&SaJ=?rh2DWLZ@^a4vI{gTcYU z)5B3GkdDRAm91P8rYNc;5=*@O$3O?&VJ0#%NE*B0^3zwqxG405e|*Og?6YoyAgAeS4VlW#uf`?3soSU*&8*yKYIJ&T0jN{V>BNAH2?hp z3wU!_q%sT$8xqVKC6US?9|6+4AAO9!Foipl7-!JfFE;DpQfnfGMpisv$w@62hcLjf zYXnOPS=O1A*?D((7V8Il8INKz-GE;Pygm|vJe?;94L|9(tISa%7}%6xIX_>1zxpK* zXF@?M`r}KIA~k}(bwJPN`7h2+w;?+YeRnK1_LuliA--J1r65l-Qm=&TaG9Ap-x)M)J(`NIxSnO(`;3ESd@+ zF9J4(dYf&%<>(a?b_QowChm^uG`6+xtF?%6PD+-v4MhTc0b3$bCTS#W8wy9+t3m~* zV0%RE#9V5w!)C$2Tux70x#VkY_|No0GI(xModA0C4%-`deQw{@MYTcd{1%!#kKMbZ zOZ18RIVXOwrQ4X(0Vv8f0(!h=39v2%O5b_0TWm>8L4Fa2B==T(uWMwg6~JYq2$-L8 zpeggVUxiST{>F!XpiZi)Ce-%G&cp`?CJAHem!DoAyOlY*5Qr{H2qD$~b#~SebxMs2 zQ=qr@k=3bN!hh6Oudqf`=7hCz)`t1-@)IreK)#n1@TRTLL$M#NWKGHm7<%E->$9r! 
zW-MX%*$APUBmrjG{(lW7wndUp=R6(n_&C1foAka#JdF;+I$bMeMJVxsxA+WD6RURK zpW2?q987uFUI${5H~y}-H*gdrmC&wyXuNv0*j`AG3@JN(_io*LzTI3sYpwIH5VpMS z@^+$=R&_#xO050`$}{=MJb9WI^=OWk_I_c)(TFvwL+Mu z^8R&(WzMO8tF+m3T2K<_J6pCztPfPen{yUh?8JI`F2k=7uQ`2)J?aqE8Q^fe(2L;X z%~n|rIQy);EG%eUEKMT$ zjR5`ekxB_8{kg?C`ltH*79!@6v2+c0J~8vsm#4qeKAY5yzx+$X*9S4zuFiJ^s-GGb zM~oYuo=(OvcC&Y7&(DrBjcdx~m4cUrVu0 zoat6Dye=Pp=sANQ_D-DK9!oD7e&R)=n&wH>R=v-b1H|{OiBX%^>W9=4D<-v$=qt%LFo@EGSEkx)9Zn3Zd~unJ4Ml z=2b*rUI_%{rKh7B6yQG)m_R;6qYR1?2d%zk7LQUz93I83n0P*@Wk~Su&V!_6>jq?K z_%%AXDuMSk34}j_l?xi4x2&Bz;~+5l#Y3|Dza49ks66i^Xn17cypWEOftpL@ggWOO zM_vxl)z#bhZQ6I=iz?)$F>wKNz`;xJ3Hv+gONBfulWUdbD-r)pgnn#$IaO$Mg>VlP(4(x_3N|qY|9~?vG7; zq>9}Gc+vxr#L>%h`Z+oGo(2X;bpKbZ<5rGYh=>GLfAZExd+vUTa4U4r*U!-o1Lf~q zy=|tJbrZU^4>;@X*^O))lBe88BOC-4y(R`1IpJc^wCHPfY7TMY>=Zkm4en(;ln=53 zgle~$CLfFj^M7v8&!AsWWhPeRJx$0KAj3O}H3a3etOu zORBYlW7vPnKofjk9vNoP#hl#DI^QheENJCPyuh~+=q8|W6U~LdMwh>gctXpQjvYfP z&s=dG6q&VoF=RlFLEmEe3pQi0mbbEBI*eNdT&4dg2=XoFB}2q{_66X*6aYYaq;>`B z&L55hVzzF7u!vCir?s8z`E%=@U%=Y#$?aH6g(6UA<$Cl4*#4sBN0K2t;LEQ8j}-&8 zbVv6-#+9cnJ@!a3`rnRPSRV_PpKyt^Xx5e*I|B#9(Lo03(QohI@8rm2xqVOae(vYv zKZl?@guBy#I)|Ji- z*3z4neCdGklk}xSIGPelTnMVk5ffQX)?Lzu?aNSa1E0$<~Mx|Q9)B* zO;>+DROBm@v(X#E+}IF$p&a(KZ2fTTdt=n+)=~TTM{qAD<7pFUj*%A-5fsCd#C!qi zWbeK??v0B+W(JT8maWwB8$rEdi>hT_((_R?8eAMm5`K-4kqj;8I&xse5j?78t1b37 zJGo9er}Gjoya;Sw*^q_l779mg|9R7&ASt;IuCxq8v(kH7q!DO7^uWWv|L ze)w=Tc#`Eby1=?*iulIofUkIHt#2lDVR{HmM{U|GgHfAhm!_Cq43v&j$o>iaziP(6 z%Utlto8Ch1RP|@$?hVqMzR)u2%zWW@f0x}vN!D@TjMvaV=JJ{O2 z9AcCPY!xT9Hj8G1R>+B^%;R7euj?4jAJPr*iBqK2{cc=dEzr~Ou+|Iy(Ta~`dZ~YN z5JDHjOhq^2aya*kABA{?yeD;h1a#5C(lulRIKRZCvRjj}@4p+P*j2b#viJjX+aD}S z)+k)L1zOxl@|cO*43ME!ug4WHy#{JARTZ5Y(JxCJZ-a2{sa48-JU4aj@x!sQmG|qW zWYgEp@K{&4L&;3k-z2?%TYyFrScGw}#?>KjcBQ-|-6GWYR-223x8=FApKbHRDdaTn4PaA2!^nn(FOjlnAK8k1u- zMTmiY>3a?P0fyJ#`KSpMKPs+-yr*!96Rmf5bcp+|8{I4erlN&6^pu@>uIMQ^6y;Y z=x#1)EcaCtVktR>ZEn3U+9~^Sb`u-BC_1lO`BYwWmYq&fpd>P)(yIg_KUBV%1}zN$ 
zxotG&5gHm{4F1bTHfk<#41WyzbJ_XBR?b{Q?Y(E(-A}7f6u#cXgcn~2%V2P(fpRpf zk*K&ZjVmi@iA&S>%lZHIslPMaB~5~eb8f7aJnH;OM~TOt07teDVCx3LHTMoqc`!>HF88>qk>bUs;f)C7(~XVf=G(y-H$&2iU58LZLf5%%;6SzrM0^4pD+I zfk=TM3XRo#&Hl&vZ12s!pQ&eGqBhab2atul5Sn!3U7_o555%E=N1RL!uV5ClR6&d1 zu6cHg4*au~`)2rP3T5u^5}yY5#uSw6p3#Y3%do(C=C3$aN!gsw&=^D}Y*jO0$@Vc@ zCl!}-<`DU@Os9WR4*0E6vh9z+RW?p!esi^xmFNtUGj-%vZZdv7GI}255L$_a0#HF; zn8Rkj+QQDVf3X|DGQD=@wZjEIkPwa{4W3o*lUg6)uR7Gc@|xFd3VeW2xiO!*El?c= zjG#|6#kemOf9j_rdi7enPi8xx|1n&3dqmAS8ZBVv1j5P(!1CY5XVlgnkC?B@KeF@g z)P;wa%@34LFlsORJne-A)n2R^HySf=I{)ne@ueuu1c2t` z%5iKNH2v@v_%UMtS>HqDYZrg|XPN(dwa<-(Y~Kpw_PD#Pe^4hY&UsZ^+%)q2X7S_-d-qq7~MAR>V zq&H$iF#Vospx)xm%9#mN;M)nEk+5iWm6dp6IQj1(j!AzB<00~c&(1l@`=ZsTId@ux zDkMb;hIu9Y@G=%1P|AJER&K5xG~*n-gKS69+}Z~iNlV9b(epqNRRCu3`nWqlQbEv; zPgbp7lO5eM-b#CRZ0}gm@V~4B^v?B}F`v96Mwi~x&H9}Sl&x2WSi$jE!m=Sse-`79 zvm7UKU&>Ye!Q;p~z6W4rBi+@pyJ-W$t3Dwf&W-BPRwYh$|L0WuFR#|wE^nBeeZD_u zbN;l`Ndtr0wD=7lh=!NIW7>(n&>q>kg@Zmqmbq5cBjsrNxDgXoy5WiFBs<8gtO5NW z#D-#t$ni-QWUaYn?%wLjU(^RoYJ0mLBD10vWp;*E;!O|^d^K$Di`I3B{wjsXvs`e%d1NuS@MrADCW8q5C6gB@o)%sj`Qi*q3 zN;rzZY;5E*kmv}|TiCDJDwalAXJGuL0DkDhx)>T|kwwH>SLViPBqDP(Vgm_$ZYroe zd{Vcp?x9X`l+IR(Kf=mI0Om{2N!ra#6no2OHLAx^`xxC$a-)T$w$$@Wli_@z@M3(F zmmE{Frzp>BpZ$f0a8Ksv9kV;0)YK?U`bGTE%W3OHJQH!qHvptQST@V_j z7Nx(Q7k;oPNp>LF4`)-TNvIWs>ql0Mvyy&^QSA?uRv92n0rVk;#lArG!?rea-|Qsi@1aYc_cVN=f=8KHw^<@pyif?gL% z_2tQlr5mr9gtdL1YX`a(dqww)kp4+4C7wt01W zgp~DB<|#Tz+KlO%VUPI|HoTa8ek=Qn@kKf{BBG%5qXTZRsf8j{s0H}QplRNFXHV4b zN&o$!aBoH3x@n>1ih^3OMd950{rrTp~{KfrMCC= zUB%ZnB6Xh#d>350`yj}YJ)ieg9gU-NVoB?R= z^>9Yw8a^e_*wwrx z#1L9${!~p$BUsIW4%(9*IOTsBh(*_*Gl(E$icUyswy$5#+UFaF;8GC3a^=U48uC46STQo_0K}EfXtSJwJLSe-)LN@tA zA_eviGLft`9J{^I=@Gyz`;u3Oa1`CH`Y^URqI7duZIqzNumq~?!Uxrd$qm+TVw;!c zv+#2=-~;&Dz0saI+U`G5*7ulh^`|6ozMwh+T1u2Ql_xb7NQIr`sqQcSC~>$}M@bJ` z=*B8?FI%_wiW@bL+)9bJQB-_ja{B9J4Dz1;n!aGy#dLFUJz7pVFOdVdF%EX+;r_w} z$PyjLWvEyPVZ#Tqu4kO00z6x?F+v2?G&t#tar>DZ|jUHDLirc2+pO+Raj+r|I$tw;a}1#PMx;R?18x61YWm^e~DOKT1sR|_BsWA 
z*z?))w|pecVUzZK4Bhs*;u`WyaA<0w2cyIzvON9;*qlcA3rs~s^o4mcqMNz)22Oz z;Tj(L2fi&I#und_n*>hE2x$ocIfQ4PiS+=M{hrwLe{>y*RSOCBZn8CWnXs%FP-KN? z1uPVLsO-R5LWP3p%#zNOs8ggKoX_w7VkKr)D3`h7#WWQDq5v&KcVr+3Gjt&|O%S3~ z$yqC!i`eWnyk9n)k9BsyMkO>MiMoZ)FjY@8y^LdygdwEOViizL3xnHH3sGS$1vJ)z z)*956B72kGUS99mE;NM!9JhowP?t_eq3+b-NUgO42F$J^NGj>Um|II87534+Fsp%R z;y`r5^R>F>-`AX7uiia+>c1V%li8IB)ftj4VSb1TzgQ^$$??kIn#3~_4TxBSz?5Gm z+IqGs@2}eXGXLS$3b2Pc=w)W1NK=C8P7<|ZRQcy6pzE&5N1@SVYB4aZIM~j@9nj~U z>=N~B1@e=^$e_VPOaAzY@`aord$MNFBl5lUBilrv*JN!q)UH&ueV&(Lq6*}`k?jw4 zZ}c+WJ>rFpGfwt8(Fl-t)bM!hS>vVu z@sHR)bYD7m06%3>8C-o zhXlvOLOG74-O#&tFS2E9#^69;D*H}rZ#s783qTQBF|y{a_sbpDF#ZkA>RY#>;%}$h ztJ$UqGnkdd+knDzMj6bHE~W*$Y_`Xl3HV{0ouL;Nqi z1a|z7%one!_x|ts?AM*M|@uX z7W>aXyimR!GfU?-3}$83Y$Au1vXc*1*^pAp!$9l)@ge6)7co?;l=$eCfH9xjchjx9 zuKS5mi(1bDEl=w@GNy=K(;?L_5%V?mmd(Ye20QOhycR<}+FrDP%FZ!UHj- z)hM(`a)F+Z{(7n$c!m(=uEuK)r43F?tf2ERIXa?QC0k{teXwd%bL%|H}D(spCe@-z8m9_6C+DFE7@hH-lbn zdEAE$#(sj*(P+NaUF;`DqSl}$GB&x>QWTwAT~L?lOO&<~7!l|h_!W^Y21*$MJBI_a zyWKux8KV%pC^BG1q81}O#*ff6kNVsrfAb}3s)RJq<-&GZuTH+(dDl6jMjf_A*q7!0 zvNk44Q>%c6_>`S80>7|X!*61+xF@%}L}oMz+e)SnR+IT^-MhWoy;SDtZM&jA{GP<8 zop6^>CndIB|1o~Nr7g%7&NX(kpZtpjH&c!P_q_G96raE6&myuSybnmn$k9G_ABsgr zXe;#yqxEtC`gLE^E&pTpdK3_OihmRj=a8CJVybePJAy)Vnve1NP@S+8pYeaEPd)38p^RKGS0GkNAY$^Jvq%E^*ZTd(9zSWTI|zk2*B zkJ<6<`&nqtdlmksQa@fJQ>Xp6SKqT zpRTR66TlN%r|O6IIn;iKrxhsep2BWV z&novallX%!hV3xK?h)CH^LYc~iI!Ivd~Cx@CbJ9q%5`&02+7Wj5fp(2O$>jq5h$!C zn*Dqa0&sM%-pPW0f>B961mtAlE@*Lhrtg{Xh(aB7#*-wt`jHS)+w8faCn7# zyZ-PuNCh``BAIxN&;vroLp(_Oo+nEUiDIz1m$Flh01ZYM8LaPfS-|FW#buZ%_Ln#kdTqBTMb2^w*OvEo_?jZ;fPfp~Eie;1W<*YuvWxgYSa33h&<~3n5d93;7yv8!3=s zlyh=ngv|ODYUWD!7iSZph1)Z*6HhB=o|RhZd9i#1;O8awL=IR=FS%~6k_(saSh4Q* zI4Ec>qL5TtC%OS%8g%$A=!T|6pj{G_GTB5Bk@tgYfvJ-H7e?4ahUzG zv=F%WP?^H@CeG2cB+Y^{s6FJ?_*-6t_f&H`i8G;cj{8B}yQi}a4Rc+WK(%s*BA_y> zRk;>5BllmZUJ5why@?!}3=I?Xza_bMM&uH*;0pkagjEE!*Pr&wH(TmlPpjRcyvU16 zu*7*)zxMjPg4%xHw2hvvPvX>;9G2?GH|2;WLbVgLxEy63@WbBXS=cRNU-Hb?W8yT# 
z8FDPfn*XJ#e-USWHhL4xG$Mr^#8{9+jUn~wmajc_kwN4UAPFsKQ~6CS}DLfH#S0ldyrnC zV9X3JREgxH!~W!-(r@^zLULn8VH4G_TL%u(99TcSoRFxvf`x?gbL zcC2mh9)_1TtJ8-=4f2RqcL6?KTRN{}-uhthQX5hat*uJ6=S($vqaFvPqWAoPKAKBk zh&gmCMt6&DMuNx~fvR)9C03va8k7;(SK0B$_z$pl(YM)77e{7UupheuP&reF!ZQtL zi&Uh&!dA3@q=qjc>8veZ7fpvEJNZ{eg`w^`<)}oezkNjBxiWXC5+0jifgBWooQKZb zv|bRMp4ig3)%X(aXmCt10T)`GW4?w=r^efzX^#a8Vlz^@Sa?jrH5KMy)B>di2=ZF^$6Hkqsep`u92NelQgv@&o2Pp;tD#-o2hbbqx-iN z`9BrKVV<;ko*-ERezdHsgGLvs$Ze^l;N?FQL62z#)m|s*a^fy=|K0@zV-v>gTNfnp zgqvl}OATZZ+=?Fc8RSVY(t@ra)6O;%k{8VAgN>rZQanhAmxPFt&%TVi>X(gfX7dyu z0=(XBeD>&%b7-8WiH0n`>#69g(&%%Z%0W!dD^7nnBvH#CSnDD5APa6KUBB7Q@AUoM+C7xXf$_67ZVzrsG|NOg=ZDitY!0i8(W zkO0ZUMoK$wZzhIlVkIMqp;x<;&z-%UMdo|8Pn0=LP;XKTb^ThT2&;2N@k*3F0B?bb z#sW!?nz2w(KONXlx>Dk??v#6eLRbvvK!qmH?W!QM;%X4!1s0pr3~CK?bsgpPolPBz7ajZ1A8?nsv#ZvHqV6Pe|nY;Wnw|_~?0{ z`c;wi33uv>R6o7(ozq8Y%v@|{Awo|S`fVz=1E_+O@c5=&DjqGeB_xqNlS#obvIOz@ zS0U~!xEDnyH5}aBnnkdb$J!#LT1!z9g)j68v(xi~_0h|S()w|WMHK3<|90Ht6ca%j z?z%F&i9t6f>_Rt%&n;h8tSr)-vxTyVQPoNP*YNzk@RYx%R_^?S3OFz4j76YLPfesm zPIHfb-_$~T%|9EiEKQ;<_(wzriDhcbS+9pgzj1q5UD@{HzS@D#Wq61407R0@?2B;# zDEasOYJ1v5a^=atE;MgdD*u=~V6`In5)-rxc^U~OU@#*AU918sw#X1Fwr9rXDbFa^ zzI*`PtVq3{o*B``Y|8^6(%S{rAegTyt^8wNUn#2D{&dZQ@G8&Zntw zsy+kodf9=L#rZ?PFgbj5s&`g+p?fX88a}#Ea#mWG2La~1ShfvF^TM7<_SDm^v4T{F z7ss4|?C5|!=@J&*JevOLC2s^BVc{EL%WXPY)HTdi+t|8#I%Qqm5!FMO>YtVM@7wKB zQo_&7nH%1xsYL8cdH!-GcKfmtWsKTnr!Z;+Z#6O|sHz7=uY&dGBKQ5aRkY6&Qfv0q zWVaJ4#@*3QS(eO0HN{c|aCxHqe5J`bQr}1?z-{}p(j}845TIGPp`Tg>P9=XzJx+fOWb7ewvFyky{>s(1F`Pf3gP_uX zUgig69S0`~eVbb7uEbIKO(JXQTP6J7Rt1{Ia#wnDR*L!a`SNTDUzSWAu^j<+U-UOfVs*Du+i{W_=8)MbN4bn+$Y>w-P+f;xhs;a5ohz~F%yWnbr2c$aGd?7 z8nNnU{k2PPW{V1mLyy%yjfA<&a_zrl0lP+xiae$Xj3xuB1O~I%nR7VvEKGcz2hH3WR#XZcH)9=%SL3CKU>%~u; z80s!?c;wHaF8DEfcH3Ao}eCmHUn!g0LBUgGuNrE|(&b ztSw>o7C6G*sU>#r)s(!fEfCe1Xr0+>G`RZ9-?Q)2p?Swa(&OpOH`Q$jv*%2Dr7|6o z6c>Pfl{_pKI5sIeZzT&pa`0Mr&Fu}BY(>?Yeb6WtI=%E}x*ZwTDnk0kv$c&s&ImVC z=!H}5S8x#fB*a9TBX3UUwgQDb^Q}Pr63iXYEJ2|H(u}AD`GqTfk=PUOq8mGS@Nc} 
zz>pc;_f@YC8Lqi8Uo*c{-07=0xyGPVnYh}8guZG;CIWeuY$z+5pTVeGW2*oHXih{u zsVKjxko?knf&R?crOWVo4h z^(r7vvrSR|kd|(}%3e)rB`sFI*SY4HNbdP{J3eKZZ+k@Z2_OPju##I{>^1H(=ohIi zDq9O)8~IAUj-jsEgqm{6GF$$oE{rmUJbNs@$V=G;Tuou6aeTKA{yAdfRmg*aQyk7{ zL`ado%ac-7gCw2VsS$9fp0{hKTzmH0K+lTg;&*-oTNKgZ?3i>~kH-Xcp7WEWuZ8gA5%=q{n{qI!S4mu&L|Q$q#s9vDMNX)H zUVRH@|N181Z1{d^3pu&Si%mwn8ZBL>-$btWcZChNTo|TmugfBXEr6Vz(_L@C$E;x` zw#g~=AJ!q3@9_ljSbsyvm5KKd^HBqIb}*W|T za7+n-6$=ne+p#p2q>r(ZbjWG->t1Qp?h_p@O;=<4U2wEdh3q)KzA#Fz=0>52qe`Ud zSq2TQEGf&16H->AA2|k_y4&)M2FrPRFUMv$5z!qv{tjOB{FE3Tq~NF{@`x-Fl=i?B zQ9W3nom7R1C-dx+X(dmjwVj4td~I1G+W+*ybCO>8$NS4|8K{g?6yNtAHzB_DdVq@H zEn*B&^QcXa3&Ctv7AY8JItezrtco^`G%nEA;xjqXWxvs(g-7y$cc!eY8X7qXdhS>j z!03>zl^rJ40-6w#ZX@j)nTz{{U|u5S9T#{3b4L*flaMq7=!<^$tujdXGUvVz@A*H2( zclp<~iMFhWBFDT#A|}8Dl1&{N7g?MRzoxiQOHi1`DliwB)S{hTv{$OH-aN~P2-nXG z;cg_=$h4q=nO;WyG^rTyz1Mx&t~3EXa)T&N6j-AQQwHa4;sNMb#bDk8Q|TajQ2FIR zgb;}ca|!cTe0vvut29(~fgXf;#zT{}!Ww1$=77Ac7Bxg&ByC+QfP=ysfPaU zTEmxBrR7V!THo1EQGy84tzy>%+YUl7v*8TX| z_I8SVx+9-WO)uSTBAWs0r!DZ^b_mYt;I7ze+0XrwKZdk*UGdAT5svJzrk`*=D=Y)8 zCEBVKTqMhoX9FHZwBz$!tSOPB3yqcb>>pMVrO#vO)GEWp7xg;hVKFJNb+J(tX;TMAG`8A zEnq2NETW}o9<&&?;y zwv}vCGQu)ZTjs=1X0pa8NK?*CYktPax97w25gD^N2fdCTa>*25t++0mBo1}KsUp@q zTGjTyFBG0?4~yons0GAR5!C%Q^bi8E?U>W28M%7oXb1CXl|Y#4vjuJeJbaC&fFyxD z$p$Fl#ICE(czQPR&E{6B#ml9ZR2n;B%teBgM~;XlOvW0Q`D z5(iDOWU5CAVoobtnIe6x_uWE)=;8;2a!GSjuQ<@TdM)(#a7VcAy+*?MmmvX8C8(S2 zw$ERIpTf?ITK3Ph3k>Np7o3`8TeeAIKLDh}3oJr$>1k~FwO6-SrXzcs1q^RY3}-S8 zY&1Vc6~cC2^!|%&$uj&$>%*Ic&lmi_FF!a>cB({fej|IDhpwJ5e$=eA0`Lj(O`Bmd zc!sH&QVKYM>p>3cUIjA#YnUs{iHtJ(twt)KX4 z4uy}u{AY8^pVP{#R!&Umpj&+D=cu+;jJUJ#6a8bZy>0ThZPiTLz2*8%mt3h+@IKIr z=?g2BwRDT&Qcx+51gjB+O1t}CYc1=2TG!ROZtG;|ogS_6!>ubh-)=Pt8%wtm)J$s| zr{-QFv%AGXBiOEW^H;f?8#f(CTWI@f6jk8;9S-TY#k-S>*{@1J_(xYUB7VgF0)Trv&}5*7dl|y${1zT=R_1{v%ISc+y=6=Ls}CB z>BZn0aymrX0pDAIn_ajCI4W{djIalmvQjix72Xz*5ytrH+ff| zlG?v0aax>Tr5QH60+&peRoq3~=5o0&$obAW#RWWmJ_3^83Uj~#{t>7#jM=NZ`{SKygMTb=f2+}_KtacGFFVgJZB2>A-j_o%!&Vv;h=~YD@wv7>YQYIRW*r3D^6C7MZ 
zvs)Q-jT|yRWD}%#q22yI@ssfOCWff8a!y%FZLcuc`*Nf6@l78F_Op6rp_wOGxDjUn z6S(33B5oP{!AdqR)v*paKQ!84MOwG6A+- z4XbTh%*4dA5Mr|oM0D`XjCh=ttT5}dMdkE}Q5+p#GmSG&ON(*h!)wvxc-ag?XzYHq z$hHk>oaiOQdnrL#I5n1M zo^y@!00Nn1zufG&*e12M%m}WdFzTA3=TlF*%j!mQpM8>oi}f&46A<__uz~#V3xdUy znsT#*Dw5+RnbE%wz)u+K)SSI%r0yJ;bTzM=P#;HF>q?jMVoU7F8@ZW!)HlLvSRs^v z^tO1Sr)BT7$+HZTpQI7Z&vi-7jGtBz!BmbMs)q>XBeFjBe1Q6e2 z?9*(J?>o+O7LwxN?j>og6lw$O<2hpUFtBea!L;<6ybLEO)|OE+^)Xh&>JnKeC9qL) z&2Br6Ux&_}pp21z!Nr6fLZpRlCask|bamY_k=wI(c9e2jeUT83CSTyjv1%xCE^Dt2 zVN(xyh?-ajPhBgX9g$VtnbWSY!k(I}UL=g@g9LRF5O)@ddDgXS}ZF|aV zwraJhi#K3MfrqUJm?Rm+sC??;LXN$!H3CRg`UgWxIj*roNu%X9MYAZStCFu$K5%=0b z?Liz^O9hq_x9!tQQ(vr(+%)^8)mB4rmu1CQ{Z6x-7Y?p$Y9EelzPZM;kGQQ*F9Mf3 z(+Xcmj1VJ6k=Ai-!cy&*6lK_>WTN-Oa6_o!g+GCeWn(4t(Eq-uEaEe#d&V|!+gGwz zsd-c8qaR0?U7dB>91!GQ7_}LtvK=qB$-6Nm>{=$20nHoOL&1b0Zh58Il-!IZ-Mx#n z^5BSK%|UW-1A;|J?5A0cN>$A*A|G@ss;nxA+Z8pt%n#v6XJ|ZG)@w(HhF0YbpDu1H z8dHYj0-5>{sNRFvX-CVVAp$Z_`hj!q2z0%CDik(Jo>P-7*&ve#)&N-{!|bqP8z?-W znB(`ErwU%G*-mUVe_T4=y~td-96cSV5w(65S#)e+cgY5so*0b@}w z^Or8YqI9mCHJ5?&foWk^*hv%c3$oh@EM*C4eP+h8Cu|?~UyAI6RaO7H?!{*Bs;6HXm3U*GoQg1 z-~I>#ls5|?`v7JCd5j9w*<0$GuzorAI~OWd@d3BOuQ^i5OT){&AjL1IJm^^aw!sLQQZ#C|U=$*K(F^sXd7M-Gl8RI$lB5P_On6j{#a&rFRxaQ%fU=&Z~w|G0ee0!wpRY_UEi zg!I^UzZoL$5ML!A2G6b*W07Ay zSyeSDS-tx63wY19se%J=pC5j4e!rp0Mzq_aP%^ySt32`^rc~Z~%nF=rvrvG}`^34o zS>gT`sbgi-+cDnwAAbu8yQ8q2Kd1zGuZVItan#D^ym{0ldpFEaeM{Vl?gEMjlDV`k zHNgMY%LNqDd{$T$qc^LWjF-wZwng^=p0FwsV!)T$LKXL)fn=8@a%xM08F3S~oLkh*DDfSk+vk{I+qlK1FJp28( zTp!LHBF6bN%Q)-T8zdVR=(0w_w0cp{sKB(0Q){{pJDi;!pvXPMPC72;RI)kYs%T*w z>s(~B&x+WJ@SaBn>jUmp49vqmWj=CXQ}k#AwaG&KLA%%*$0HvR+7Ph)0Eyd9n2U-O z88-63E)C`3#lxbeU*r@x-`Nn_dc!Gl+Z6~?w`~u9&%2g^cwhy@f-wdI?$J5dYWown zcl?5F6QY)yIo7$k)JS{j#qT(u?nw5)uIik*kP{@rTgU2Q`(FiZVe8fBK(HHhxKl++ z;%xaQ1rzR(qxOks*(7-H^L9LT4ADKzap-TMzUO%r9ULan{Y6rkvaBqJX(TcdirAvK^=;$xQA-}WF<_wtCZFQ@NV8b8{V(5i zg{N0UiwaU#HC$y1-!alCgjI)r!Y?gF#J=nEjJI(>8g6$q3Q`K(SO`9V?|+pM5`&pz|j!_vWBg4{aF#BNsPO_z#I$mqkIyW(CMi 
z!<6a@1H>CCxi(0^S*UvzQyV2>3`c=0J|VmqbO^!K(qM?@O9ritj7kO(kR6YR!whJOvQm0w zosbKTuHE#_;!U?(3+^m~Z2}`m05l|}?8w4kT+%vrNYVk;&s+w}X3Nj;f=XDU~+=Y3E56r=#!njBdYug6Py zg(2L0L^_6sWdl6N(QsuMa9uZqBy1prd7e$K&PZhH<_mQE4CiLlZR?m*Xsy4^fu?{_p-Yrf)|MHa1IMUS7 zJ=}rJvTCrMcV^8y&4CiwP1+&7QHbsOY5c<8zT4fA{AV#Xpy0S*Dm+33?wl0q5Zkhh zv0~NuRpqqbze6Y$M!AIh@wJp!>ET6eRF#$kMGe0^hKdSdsrs}gQ_7T=i{nKontg>p zvPQciZTf3&w+?^Bl!LKgGRuELBjUe*JLcfxSZTL#pe#JBq-%3d2yCLkT8mY7Kdq{( zFdlNaRyfqMcx-_@hw#49a`t8au%x}nOE5$RJ1sE+OAhHhxd$YO1z$bJr+^+dp8KKW zmC9w3R)+3EIFgNg=9KR71m<|7gn;y-5b6|H<9^9}_IbBa4h89ECxmgo50PG@MHbcu zRYehog}VkAgN1-G7FHJbv-#-+Ze1f8(n6bBlD}-n+#I&wF`0JnOvN|Yf7KxOg=r{g zN8dc#aztetX!Ap%*pZ_pfNL-$u@|Yh?_va3wxeDS?$F=9^M0s5%??`8WV88xEICbt`sW-&p=ty}_olrHGKjG(}Is|9R!WXvpz1?tja_FE`9t#qq2m1UJ}bv)zk zTg?gl6HGL+i3_Qne^4vSFS0%TaoOn>`pz|{XJ$x?_w$+cMppeSGo-wQDrI0trU7&c zGQINME%qt0c+lwV zW~IR3;?TQpCYbIH9GZ_Rh_B38v6Gag@SN^ori2>wNB{9>IR{Gc-5JMGGoIxhZRik= zH$X{2LqIFr3beL&-Hhdwh^dk$gqsud2~BvTwX?7}dItBEU;%g=bNlG={Pk~^8(}Kn zAQ3KwQJq%?2jd3LkwPd99jO`xd9G;H{vJ+{(7lG7elihGyozAo6oNoWi!4e+^{@y2 zp;W*>V?G~Tf*OUU@p`HKZPA@QY6qEG4zM@)uUiX5HSLAg)mPVRGxg2Lbe zu05pl%eO9h*1uAirP4s%&75^=i!A@wLcmlM72+n8uy(fTfb*@d2MYFud8fL5Wxs3T zWeY0b6U2Xss5iy4DasMc!u6@rujrpF#_@BIkn3{f#~jHUyh|SBw+AIDgT^RBWxQlj zi&;sp>=u@HzBR9G8yHRAr`6lL4L-SJi?nkZb>7kMxA@h#jXx7{hFbkJ%gvlsx27De zQ;b7!PkKg6&*gHK#){|HJg|AcS^y*s!PPJzLIO`)x+=At!S!P%FumoeuSW#?gg$)7 zcJp%G-RjdFyolSoUqDkd73m%A)V6q5do?sB)xNA9fVFj!NSJ@GM~?fRwWhn?cifLGz_#a2ka1UH<*~*vP6RTrc8VgyCsDIlMP2-1zXGQ)Hcj4~i zXZ++*t)V+RT}JS>eya8x@9=&hiYfKj<$5ZbI8o)3T#Z?(;_jS z-^s$Z;=;aD|Lv5Uj8v}eb2hTO?+gINeM*# z_-&oSq_6y{yBT51lUIjyK>Gz?{%t!Mkja~D@iwk>2H+Z83Lw5rRo=CAWIRbdJ?j45 zq+fE4q9EXR3)*UY!pVNVNz8+CA08x3^!6c<-5!WhL6+XV{!qJSQ+_Y!vk`lD5JsWQ zk!a0JIo3Fqk8N!T%_SK8ojHKN{y#>uj-)kpSMJL?yZ{JJsyMCh!mUTNTd0l{PJau0 zVDC!HF%+&`hdf7>4haM2o1(uDJb3OR>0aHg(i9=WQOuX&ht=z@Z`Ho6PyPo=-t~n4 zd|XvrT{o=xv|4OF+nG!G0x`!mVM-_CVyWLt-gRWnG|C}4 z#$FFB6lQ&>et;nIAS-SGjka;Mt-ZRWHUHza&T}PgXQ>Sbt%L?D2ntw7`NVvzGwVcR 
z4Tq-&*|_~mn%i1kuQD(#eSEvqyUr%I##)NWJQuq$TGu?Fxbs^4y#h5gfrcEQgnhS2 zXV~b|vzkagvmt(JD;`gK-qujF3~sl;dv&#qt*5~+i4~9DF896AvTvh%_8r3x2lAs zx4G|@>jMI~kItr^{az7;6RSb}TSehujm9C2u>EOwdNwN#mv{HT#vI#MW@rLQoE+Uq zZ76J!76zuQOesNpYAPP$EoUtkZsGp!iW4ag8{|+h4-XyTsD1Y6{NR3QEPQ|Znk>6( zBH4)x14cvd-Bz+lt;tH@th=jkY4;?Ss@3aCu5YTytIV82K2{9n(YWXXcKYKz*6K_K|3liss3+!ZQ;e)Q&v=_0-#7wfaC zb1b$-O_)bDmS4HX4n*w zGTR%=U>H%){F!;|s3zZ|G@cM%?o}8}vr;DOH1%q(&xFd+QdxGd({dild~RT(5!9Vy zOBJaou-0H8Kv{}`P#Ii^3pjUf1SZqoy2%KHV;kc;37kHSt}oB%HUFfD@}#k{X|Xcq z;gDEzsXQYFexd}<^LG$?6`eHz7oa5qD^h5(a)8&YnC}*w>|IJ)Xi9?I9YG@Nk+xq0 z;Rai`cryj8+U(1MtMW>qf%74hcd^^l>;{A~X+jLFX!UTx0@eE3p{07$!bY=uZ&AW3 z!OPRlMACVVmAZLgZqN&b9!dT@@&J1*c{AI`d<=df{B|-WxHdlp<6vT&($Ahx$5}hH zG)l)yPWU7d<2YGV*ce(6OBbyqu3R{&+gt!!gH}{d2|aWcZlUrk!&QtLSpM!G!-K0U zVj~NyPfV^}81e%d4e@v<#$>P zcYf9IP(Rd!iBO&B2qQ{@Un6h4ko&0K$hu;t)=goW^)tYVirXg(8OI-T7=Jd(#X{@j zA1VPS%ixUEB)TJHw3qD8gBHvcR>x@NQV}hYS9%|7@>C)5YiD+?ZZ;VoL-O~(wrJA- zbU4`c)r=D6i8&UJS_ebp@W>dR7rZ?3UGmvOLQiT(tItzpibr(+63!OTg2AMMZ1}$~ zc7vJTL_H}?cSn_N$Y*3-X(;U$X=AT%h{ct0tgTZ`c&#|Cq8k~b<+M(K(>^Nf$l36a zX3?oc_H5fj(vh}|V6d^bc9N3jB>T07F;uOi0u^q$lo%U;J#({p)E;7vKYksh8(8G| zefv%p{r`e~>vm`edUN+yPM(fvzBGMmL`O2|yJu)L$Ql(v3kokD&L$-JfKs8V-5t7a zgmhXd9Rb>c?!~W4Y$taq zvbbIvz5Imy?LwpWZOCTqX9=vSvEJ+Gi2`9kFHHP}rc8Zi+Sc+s-?mX5Ym;?|Gutr^ z^6RhU+Oatqw0N-SM@v)-T3H<_V8EHqu4gjz%Q9^r+rB!Z?ef(A>_I03`jh6XE)7_U zlGFaS>;~JSxX}Ob(dJxJF63wFhkOxiqPpx_cB3nMczC$^t;9|ucFfZV)U!o`=~XrJ zC++6{z5w6%vZlqRU5V{;u6n1dyH2XPHlN!Na|Mzg>ycuO|Ie@-%L6XhqbbZMjhc9g zB>7!ChIn-Vz4?wVTSv@eRst8im(N@yrMYavSDWr3p!r5m0!A+K4Nug>@Ro)N!!gj3 zu2}aGV-suOtN^(!bIqT)=!4#yw-sfWshDep8*UDeao5Q7SPMzZ0?9F9kcDG;M+b4U z){h-zP$^l}X-1b6*&h_2emBkWGY_RcrNOz7vmZ`b2s;& z=U-ZcCpqpvy=2~yyFW|jsVoJPhzw-la&!+a|DF0*#pS7{`xlq|uW4$g-e4l2JOqm2 z(B|v{_$*MuTM&$tSc8i+f5tX%*6N)pBeTLAimRLpQ|M(?XAOmZ3luJlpAY2&8aD6kH}-`40};b6n7Bxf|$T{`k**%4%>`^-g3k$I87En#q7yL5IGps1Vy z+jNdh-gVIEDzm_|9GT1Q@O*jOiIy|p?~<*V%-ex7R5KU#I{Jke!?rwAYnuf5Ns72m z_nwI-r)?=WtpN*`kTX|R7r0?3o)t!jOue_qb 
zC>PteodUuZRLm5B->o>bP`N0O`3hV=Qg`mjg{g=FrZ)>COPNI|?1OAYSm6L7pO=0k=_mg;qoFT0{3q$hwKZvzJl!8Q7&MkcJf zPuVV4UM{KN1>I~iqn}|!<0m{F9Av%zhEC&g8xkF@#G|#eXJ!BUv~t#f$Cv8BFKhUf z;WGoY@u_m3{SIfrPJg$l3*AddwPK)QA?&d52du^L1bb1+Cbv;(+>RNf2d2Wt!g}ld zUVKW!sP+EcFBhfZdYC#JxIGBTg32J|)tYC`%vpY}aVhKjGiJ%?YiIg zPkKprxx#!6e}G#2XI)wP&DpOLiTuG(i#CPtvp&6cAh0JbR8fwFh9H(;>n@WnC)=+5 z0g%vV>?ClF<5l0}u?EkVprpk2wZ5DfNpg?uySxs<9Xp=wdRtSQcdsJ~I@t{GhB*R6>f zpX?mSZbY5Uz2Ze^=N0UkZ6?OElq%G6+9rt7hnWtk+Xj`v+l`Cy5$%B(-QSx;2 zdsoIk8^NNJv$LN)Dd`wAgJJBvq6EV5sHhO`YK3`(q9e;4Z~5)%fID?feN0NdMY%uQ zv81x6vu!EiVVz&Phvw%>&)~&E#hrlB4xmN~ymS`97=5*1ndF|_8+v#o0SM;)F*Cf=pe>gmom(CSJ10x8*05@`xLbE7zh% zWlc_+x~?ySwWB6q+f1!J=4~35JYJ@UJ=Euqfb2`-!n0U|t>PZD#8d4+RRHruv8c^h zXVqEk_Z;jGXxAi_7uJJOoXfu?qjytqbq49&6VAOqH$t>W_`Cp8H3VuzhID&I&JJF^Rsu;2Jl) zOBpW=50U(=u>8>V`nC6OG8E6D}^oU)#C}A&h2o!`PoH5xCbj>Tk(QmEU$9 zSB2`LTe;;%ung&P~nbi%m+h*!xk!Lks$lNX)T$jbL!DHP9_RIr#${E!sc5Grz%t z-l4MX?Egz}JGNOlIxP0Cq(h=!#ejXIpet-h2*r~Zw)M0fcE7Dwyr@}@=ka(>oOzWk zzjWTmhLzaj46xRAoRX*~-ThB9SUVLjgc+fJlQNO-TH`Z;g$?K+ggjiUYm_d^=z#R;Dm zMNW>kIe5=DJQ|XS^Rm!I%8lKb*O&Y%kQqAH-|i-nUHo!S0fDEFagvo17bhSc8KioF znj)rjE9K*7$Tu|e_2M`Yx-z;}bLh#rivf$*6e%;Igl_*S=0RIxkEZTL2z&t<*FdBl z3R@{jY2*r;n}(QH#1<~f-gk5uRtzo&n3usbD0T)w%{t9Ri9m)=KqLwyK!UKX?Umzu zR00aZ)eLnw>wo2T@`S+0U>GU{tDc6@0pkjItJ8d>G7uS!tT0&Jy7z&1^Pq0`N!$~% zsd!l!!tL^qX>Xa5D+)S_}Eu%lMO$ zZ%-3J#N&36}vqcWNg;*U+|A24I_Caj_#3vZYC^RC%K@DJONox zig+d5ikB>_8<;&_&!%+Jqwa;NES~PDhx9E<;BPJwZsuB++DSPmjd*1WSS!$itDL#n z$J9b0sfYeUZO_;8P6%+z zT%m`+Xue?hEVqnij~`+?m#Y2b8o`yBU5V$8rZP5SrCTadr_UDJX{J};AQ3Lmi+6O4 zo90hj=N<>{9bR5Jq(D=PBVfcz5Y8co>4MmK>nO8XTFXE=Ekszoqz=yHd%}v`n09lZ znyscr!6Jnec)ME$oH;tz+`>~IOh~{b$I;3km7fny<`QfO5qV(9{W+dACj-|!AZw|o zZdLBHxqJ!Ww~czmMkwz=-zMD&rIZpLe4r{=|Ae!(!@w-D70@s-Lu_KVAEyIe(153= za3r_s48+c{=!_{1YCbr*^MW3wLjD!7lN!P{Ixh|dVp!?{gE_#6wi7XsIb4iu$jsqgp)t;tzp3Jr-XjA- z+AgU1R9&_##Z$ynK`C8RXM5kPK3@s(?iIsRN+`_XQh{!Z~^opUJLy1ZM-M9Y0e$M ztk`dXhFUc(5ARvzZ+ zz`o=kA~AR`7>cw-K#Rz<`O!7~_A15gHG7+>q95^y^yQpq5IQT41-!sTFI%nHMqZ5Y 
zf}0u8E=)PK;WI{>;i9DE{T39r6^L&#xNrA7I9z4bvk8NSbVb}~O)thWstc>Y17ku$ zi;JOf6z(DWQ(LC}d>fZ!ZBQe7xc^M;V&NjDPU1)Xv2>ZLlWyj3F;`aZf`*$Y^S!oA^#(%V5XC5ZVz<3 z8$^%ZG;&z=wVI5jZQ^MyaRKYfxeR09Ky&+bunt27?k{hFU=&px}& zR|QOq%<$&L8u%NsC2L3Vc`_ySn3n|f9qOg#*|9G^J>ikVl1GJzn^|YsuYJScB>U^f z!NfW_!ZYut#_eijdf?q!BzdFAdshoh3=K2Kb&z2YkNcs=>1#X5kMPI*lFT_`hk-O| z{jBonz+4-9D`N5;t&~EusqCYaW%~LT^tGI^cs9lIO+z)=&op!GEgoC~jV8PhyHWy= ze9uX@VaY*#A74gJYdM>oD=D({% zFCHw_Fr+bWR;0m;Xy6sbv69k(<5wI3E~7*u1Ku8ze0gutqwBQLO^sf|b{In9yS)4` zmI~w8b)WqCz~y*Wr3H3(9cimP<7cOp z_*Y<@Q2xoHCqy;|LT_1MQF*>OnjWiurcGf#qa?1NSNCq#FKai8aWPALeE}Aq&{do! zFmoPI>0p=AB7zM)^Qxiw$HV2bs&{Y5>zT4LZV&Vs@OyOq}L|W|ZM5ZS)YMBwQtN64XsSal(Ct#jg4Bni8gsNcYtTYY-Qx zfr&=cGRc*|Y$19f*A-=o7ai^)s_XLC{kid<{p{ZIdU_4n6=afaUn2>pGoJDXqW>L{ z%p3w!EuyT*XUp0#cTXIVL15fBwwk%}T&jBPf~;eG%zK+Vcl-cxb4Bbm)_C7{w^}FJm&`GZz#gJ62{?NEf$j7M`BtO*I&I z9`gQ70eVxki2>LX@dYkQ;>@BK>|PKwKG{tznDI-*?G2uw^vyk47mDLhi&xs?jv zHv8{lJmS`Nte@j_j{>k1@~@95mRVBP90(UiWS2!uHn(EQ({3iymdmFy>(m!lkHi_) zbt6=~pq=ky8#^qMO|!K9$(Zt(;I1w8Mo3GR3{(t=MG2YHI=;W+zN6mxVQF&^_1QPQ zE=D+?U0bAELQ6-gt+&bo=O%~SZ}EFgzM*tAUn8GxLOS~GRPT?;eSx@oH65wTGDgz2 zfC3E`2?#FbUc#`atGf5E`b=$R7^eiL9&ou*E1WA&p~osAv4~_E`YL+=?)=x|J=cG$ zyHK4TdBD2=(qg&#vdG18%(HT5nh;d&o9?YH@_6N0r0avmtqWVHU0%)DGWIZ-*~}bI z;Z`sr{^3xENv`|ezAAMz9kXL85pH?k^$WIbWAn$jpG|`ozn6mDnP-t6v^|W@{zo6T zYpwL2)*@>FtbCJK?&H2zA4@=eAlOFvx20iJQ7!|caG!Dy5sme+#}dz|64H=%(gu=w z>Y?w!*!2zw&$h35{CzU&_bKTuoy2WlsiABL@s0W~vHUoe+@h>>Q1TWfsVR~b2(H@I zb{cl7cf!w@Wz+l9xeGtP9!&BI49pFT{UoEozVV~857M>z}PC5$wEjBr0oqu_uTq8+B4Ktti^Baxq5K; z)($bZ^)k6x79@-M3x7tBig2%+x!G&pC*LN18adf>kvP3_;5fwEjx4IlA|CaLmu-X| zu1RAIz*@}P#2%eFDBuz`dnDDGhqTzOtg8{oPus0^G$i2tb~}bD^DU6%;`+oIA!e$|Lc^GF<7^~ zE1O-5RNf1_j^vs5W;m(+{Zjo0l+=M1o*;|>lxT+*w+ZJI2CMiJ=V6jPH~+8VvH|<4 zm85Sg6$q&?r&a^A9ldm)Dgu34JeAB*$4{5%MH(rCgln8j2$ktOuLGXI(faA8fA#CW zpDp~eWAb{_mcOi7ey|m(wJ0*Omlk}FEI3B`6+gYW1dgrPR|XiZiyFEd);e&R!dF6!Y_o*~kop4f4Y+#&J*ufq zd(Yfvmiq~Do!TK@bgs;Dtjk#uCABSH=5_7k=GQnWF2`LBi)|iQ_h~P^S%{h>wc|P{ 
z%CGYk^t8%vkDXQh^!lgQjZk6&>4HyVDSga;c(P?{{5^^&2Vcm-*r! z&o=O_MAjlV_cep=pA&ABN9RbJEK2=o+0R?YY%G*1ZMl|&tt?avco}D+U{Kf#GuqlN zR`gSlv#fca7lE?Vdl<|65f0d*zO=0d@)0##?P0f45>wn~i2i*lBh$2JQ-E}?G^5>M zCnuzTJkL4%k~m;$;{x+A?+Tk%mX)RmKB*5i57vOJUgMSzy<*^3lpv?_nu#EOMinqH zkFaW)^miCkjhs9_8wo9>_A?>4((xBRQ-+11KB9%pE=>~?DcMd+xis$T*%X49@J}ZW z7!AZR1@J%LaC*#cw0avO4QWFhTz#-v0V@X*Y>*=5-rbI(5o4RupI6)K5z~tX>v|{3 zk8__D#ezsET$$HS!;N?f5An!j2P@y(ndTfgqS7gL9G#@}^kGc?YW|yZEK^bjg=U3y zYTqv2lCWw-lJ2dO8LyL(=+QIPZyPx%aP~?hwt9i`*JoR~vIwnAAtzYTeeo{oID}a1 zd!^~cWd5ybNkAbPt~vv4?(iy277}YVtj*#WfO=a}_79tuNcd1PTn z`+;iBYzAO)|3mVE25$XH`|j#Q+_$Kygz#Cvoy-ZIZ>JH@ zi&Bx{bq2e;1tF;_t;%w4%U~?r5~h9sP6k?`%Qy4q^@9J?8FE?|1WcJr26OAK)G)jU zc$#3A55I*q6j#;s)h#$oJnzx#77<8kGWcSwjk?`vUOo%BEvUk*AT4P8H?UuzWEhAU zy`yctO)*i8@ST{9S-lyqPSb8#;M$v-t-X~%CXJAq*2!jnHNreR}Ewxz$mq_uM+S`73_oBd;# zN_b)~k$kB6|0y~bcec;|kMDW5cF(5TwVfU))>5`Qt&T0quj`zuskL1~1X+s^5ozlX zhu~Q|S`@>`$gtL-B?%>`N8DXaQt-tO$1E`3F&NuBC{_89 z@1*&~e;0c|vry&YVAw5Kq0^K{`D1z!`!sjZEahGYh@Iem2i2i)31g+1bo4v+@#m)8 zGUurXe_42}l)Ro=V%#xkhSh@&vqPM)RhRYrSA|b^3qpz<{P(0(N@jF->cNi#v1A7J zbA!emx;={p0;q6lYSTzw3P+oai?)@zzFmyXr3V}roE)&N_Oe$xs&I}DREtRviy=GH zD_uTtxH&sFk)cUM=BC~bvn5#?fOc4PUGolhpuNKEi+4c|L>rUQ)K&R~aXWjTaB?8c z#r{Uy5=m`XND5b&al{78K6DFO^5MVstKQ+iks5Hv8{;RrgTQ^$6&|<&WN_0^9A_Nu zW4V%c`?_mbY7m_h_;uJV6URIWux7 zy&-S~*K2flD>UN_H^-Z6hwE;$va`M8)<-ltky7sA>j)BemYmf>*^0;dVlk!4;@f*u z=I+n@S!~v=z?t&+S>(wFXshDt&=idAkP@%j1VG`HhrKyRMOMq}51Vx43oqBGM$c)H zzjTe^oKCu+B?oRSg|b;lMbqT(pHyrlx_f@RT|&;9GR(EEpj#>eH$WH6 zh7UM>-nx^%AF1nGDGgaUE@yW5)b1?CT|K^_Pi_DxX@byZiTzTao0W18&`sr0Zp{Ya zWnF3izPFyn)9i6IFP{GUECWNX4NBX7J3GZ;xi4};b(n?XfKc7`{kI!YwksvPzin<` zh{=Fyr~96_v`(G3nlurV&1c>Hx7$z7P%}Y{8MKaT8_}U-%w!zz7I7n+qoSrRjGS3W z_kS~jsu0Kz{~DEXTcFR0*ooVK;&aa~ha3_;z1lL3IW<)I^&@#Qh81cY9*M)kh%zBH zXmyiD+v@fn`Dr^pXUClHzuUc$Gq^p9$@$DCacz>Dx$h=QA{c*n^$DuB>DFhYM6Vii z!RTTnua}dW{pkb3_sbg6QG@77vGIWPQ2(zP-|T*s6F*svcM=}Sbyvx|(ijBE-%c{8 z^@qB%4<^C3R#Cf5JKK5N(f<9?onjR&d;H6?B5`qVw%FdtW92}bZyypwBH}~#xjp|@ 
z$9ZO&S}P`O8hiBaKa1Nl^d%O_tbg*9soAW!vVpk(a2u&fUw1SyC^pZtZFJwpu~-X8N?%^plz#o}ttBVLUc2Y?}Jr zNmRxYhv8-_D7K@0$qfAER$C2dLZI&I1G)FjF!VSgqm=_w4Ew_>aA2aaPD+!$FEvRo zosBDCd`kRLSM>1w!-V{z-YWxUt1gwBKlIN8&1qvk{DcbEVP>oczvRzCzRLJ-@o0Kl zY{YWYWRIBJegxdk6_xU5DG#5$%DA%LK@fVZApNlV?^3NiTk#V0^`0-c%X{n=YO@iZz`I*hNId=xZ{3XT?EaF{|`Ay1Wx7wYxN)^wV;CpYQH- zN6J6`iug3tDdn-F_aN-l+79hs)4vNj7U5Vcjil3uv>C8)B|Ko(_g>GA!c&yI#{4~Z zLY;@1#g6K2f#*=ekdf!Ffwrn$vo~v|hqulNj(WZ_9lm$pjgM+o2&vn$Gp?D05?zq2 z+SL$1(XH`f5q1tf1Aja95}I-|_ykc>bnUkpu3IXK&&an=i?GH~P1SPHoj6di?6%`S#BrH`Yi`wrn&`OWLtj zEN+YI#?}zXh>zsC*2b-4bQx!Og{fH|)ftV()LN(AWjsSKd+M+1<0TfTE4R0$>&jnz zzSNxrOrQe8!8_{(V+539Z#iN>^l*9XJGZbHXz@=IwsJMZR9}uo$&b`?f!wKE)@Au~ zcxYbF0fNnPdoL$DbDEKDcXllC0f)wVzcUf;U7OqH{PQXDn(O<|fZW01iV zVvg6TQJ_h1c)E<04UpW+rT@(_=f|aDe1UM~4I*n-PYuW{)9maqI2A2NAp`_A?_{SC(ldDz8kdWv($MpxM{6fa;}lPS}Ncp&yrStJW2a8{?fz$ z$Tug(OZ`$bAMl-NWBjyhEmy5Bd4ALMW;<-zw?OgliYOiF&w0gvblHwH99egBW`5|5 zg_YIs--`ufikr4*x@~8N^V;TpEE!{1Nv~&@dB%sFY4EPw-s`6Cs%X(1X7)y2YQ;HvYx9iG?KQGE-7 zrL9b9YPci$A~~QD8a#Q07YFjLF6upda|A&T`B!tjKiqlcRy6qFv6ik@8u$8lp2JvY z5#Md>5;6!QTuuxW(0+S<-rPS=#kCy&Lkj9m*?)j5MqImnA#1O`?i&4OMzRx8wI!=e zGXT6y8NBD0y7}RE3WTtA;47}5KdYx8E*+m;zoFz}I|jA>RF_=-X=`YON_Nqcm2dyP zh3mdN`Pbkl0%>;b?LDU(!^AKgF&Q*!(I7o7Z}bQ@pKTtUW8yQ@D%;_apxKwn1(nWyb{9r9usaC=DrkSBvsE*tv;(C2iG7mLQfTM$ z`t9UBvZIA25D!1dOSJafG5xo#Te3zd3RSw1yI_s$w!+@|LHDcVLoKt(t?nUNzHo26 ziQ?Q|f5XN31*ejiPEq!)lASeDSrH5%LfdaUvk{6i!;W=;K>FUY`keRR8C=Saxwj9|; zY?9X6JzI=Tj;0KF4z9x=WEC86s~8UJL7Ncq8>Ee!0URuR*|LP;kbJfS614bU#u56# z7J9!`1h!ECYbf4K1vkSFjd=)xWZAuyIR%g%)N3;z{`CNXfLA*?v}=0gan?8+qDSqn zOTBmo9Euo*6+?mMrjkiL&4r$R(tSE~kmpmfz4al%Yu$6(>RFGqIQ0|WD8*FQ>eklO zG{?LQ|L3EGbJle+IT2~a1`a0RNQIMPiUYQ9?gyVEKi7?fuYAAM5+%3EYjLV!DxEdNG+w3b3I;xMBauB%RIn|F-j6G)kIh2H-Q?twuzRL`$K-CZsHt}1P`QY<} zAHbcgB)r&SmH`nu#)%*LeWq{Tma}26q?vsXmK6n$8LyFkElR44VaacfYRW8kU7Dt^N z^NRGlviQZbo(R@1#l@xLRbG`y8{er#nbtLnZ1?QLI(b^|8_65@WfIO_wWy&!4AbQp z=079!u~uiF$c&t6Nx&kyYJdU!9{Zc=M=e^L^2#dG`%sBr_J#-<@Zcl|QJYo}I}=Qe 
z)TGuQ3BcaC;RHJWrK!^G`r*hi!sudG#V~dVVBrH>JHX!X`;{+y+&RJgwTkbvn!|kQ zZkR#q_*wLU(@6~#Npn6}rCn02z6a@qar$h=*lfV(n1l`_A|d;OuD6PXsV0)&R`f+` zXW=nTyhL5)>%bNA*$@s9n-skAZ()uk_HSzF595Tn@MqG)ZaAhJwL}$GrMe%RpBfKy z)0&zK83+{$+LChB^t>?W%;I=95)t1%`5rGC3YR_N>wTHruZXt2<591|<&a}&nzbT= zxG&&XuXWiuXMKfp{}2rebQdr{<(u&NPvPl<*$nzkpeQsK=HGu5-kF4}Wv;NX*W*`In3p-+Zz3 zpr|bcajM zF2%P=APG!&_PqkDbGyD>ECc2$Mq?RRPv@+5ig)~yyWi1`)NS{CB}_KQTu{$%m_~Gv zgFa`9i=*|q8BG6JWH;c^$yVi~4yhb38=5F3_%NNHf$!UHc+~4%ey*!=s*Jhh=vQ)v z=gd?C82|mzh+;^8sB{F0SYuSx%(aA~mDe!;eDu+Rh26`gD+paAC9;XTz7a|ckEHhI zqJW0$1OPvRLQ9!_k#+eAnq3n5pk`_;g+AeCXB-LvypMICQU?*Y2hz8R20nx#SrtU6 zbuH7As!1xSz6ZZzpic~pN7{3@jMaEg*=pS2*EpA*DA9{^G2zr&E z%~YkvWNp5{xyp17z6|e2*jWZ?F>UpnEzz29=FF-!a$K@aQp=oa&Yj}q%D`~Pt=5Ik z%sEM9UAf#g_t-u=1;)pS7>?M+_2Mn1m!G=4N;p^wqR(E9sFL{CWew${%t14-6U}wj zlS0!}bA&4o=b{`YP~x>YhH-Kjm&GXTg^0XI-ouJf`8#jD!B?~Jn~*pD;PJMqp-?j| z^L@Tz*8xKb6GPH--yWN2cU9?=nm5NimOBlqoc~>3R&om;VN4H@Tj?Kc>K{ae%h&;_ zO`r}Db}U~ah%E_};C}D9rpI#BNO>n%1YX?*369$L+hrx`d&b)~*5U%r^P<#QlwZ`W z!}OXqeSpt9Ify3?mIu3bP)5o2#f%1hGis_;d7{;z0j9BWe0n?a+q|y`}G3q1M{~9`|DEy>hSH#hq3b2PUc(iE)l@ME8BTq zYTnFA^=U$V#X@+&jYS+?_JdpE19{Q6F<6|f8ry@HgSnSb?e=5z4rJwcf=r3$pw}le zadr`qpa9r)pRaT+t(Z)QKa`(q>~lNUjJ)o)9cQt%m*tIGZhmE&{)PsM%8N#=5-L%8 zrfBJ3I>CIJi;((O>z7OkVM;b_c2=snr5kohf>v2$8R6|~aIhrad?8C1*9AOd7w*{= z(4g<6?`^BtU7hOs_}M7fcWhTT0#lAa+lZwyp;!(EHl&4;0w5=_(T2T3cQZt2m1~ip z(A*fnm`kL+&jK>MzTzQCDU~?dAijA~69Dew9pS=4*#V0D3KJ9bXuM>I`#{`8B3!z+zAexBb>1#%dq>|lI%7r^Vp>;Aw#|&&bzEB z^LjoL)h+c!S&P>m<YLi^9eFni=J`MVgH5wi;9CriWGqO46h)(@*OCBW;70 zslSaI`yjlGvZ$CJS+@n+gDLeqv{jf%Lu?rUE-CSN1&lhgurRVe{mOI00Y>40(ukJ^ z{`URdcNH-w7q?{y_a%^&Ns!@_WB?YVbeClk3l_P)GiRKFUV0~Er~?giVmu~AY`l)h zs5wxAf&X3OyH@r>nSr;Y&YTzKv)~X>YK8GP>5{_(qqFjlP<^ zs|VYyCK@F@fnt?$r7Bd3?|Ke)w0*_J?I(h_IAzkrX`h+d-S00u$h)Uj-j%_Eh{+j- zn44JnF?|+7GhkTrGVBY+9_uswwQ&U=8Fd0Kbi2i-=W~or(i@R9PSqeRq znfzN#a2N;iCL4$Ee$SN4DUsv|Id0Ee(_ez!f1Z9Z&7v1XLAk6F{F7KYFQwR2U(c2H zL}Y<|`|6fJ1Q__p$-P{+m4=Jk4Y{br&!xlJX7I@1%geRw=R=D|b}s1Uz@g4S8EM-* 
z2a1ZM@KUq2K@&;uw_Qe7Aabfm*!}L>@i7(pPjVKwI78 z%9>1SeL!m5r1G4nF5y(G^F!D-{Xxxj!ZYKVnyFRGpLu%?|02Y6JQ1fZ9a*@H`L_O8 zmTPCBmACSyS*|@fC{du~giR87({SJX?txy#XJg#c51VT9ZtsL5zg-#xrkLMZiuQl6 zu->6(mZW=rpy`y$<+B0H1BLH2u4`i&hVf;|g~;=Vtyix-DR+;B<-3<|NGz`DbCAC) zqMa8(T`Vp{W-93?;+4C5+!Qv|GuR`5Y!dABJ2W&we{YDa{!Xa1=W9=}mUwTPavP&e z(<^!v$EHIc-@0E^NSJ7oFLIh?Nx|Ht*DUG0?~vhZ9h~%2OR!~G%kZs;@unQ~Sx5TJ zgFpVPqDEaQTrx?0S9Gmx1?IYGEMBzu>pZx(A=g`9Kkn~*SG)l#%UPRBIj)Ly1UKq? zM}2<#?Ee{1mFQDamDknvjOYJ#v8>JketI|c&{gfVQG!$B z+&+JC>!@>toBF;dCwI>fvuJf&mGIu!{abcMN@NquUAeuJOFlnAr=EkaFjU*OUew$v zPW^_Ox|2)7cDL0r*Ru!=xtaR{L^&^08Jwou|H>)XH$JvF#5X<|7ezglA@yio)Hat+ z=9E6Mq^_)8xDxs!`XX}u{U2BV`KWQo{{z>~v-i*5Yh?isGY+T~seXCfk@64@)7S9~ zh||bIRfZE|$UbBcZtz@t+#tIaTbw zOp;rH7i(p}9v}YWOKXW4n56}jHRPXmV0|~FxKNjj1?OfNz>Ip<`o>x&)QtqR^@J`_ z*VYmZ$|5?$-)$hz+b(Xpv8zXH)mfJE7RXS`He3^FC!SNSg4B+$RA_6EH$DNjO&b{> zSS9Z*_vm`eKMfpBay)RbJlzaZXM)IsAFhnfu!2E1W!2#j4m1=mWM2LI;fh_n!i5;b z8f~Lz>9$%9z%AZfW|f~d^(p{ZK44vV+`jD!ZV*L``sET`>5t{ZQf==|BYsOvzFmgB z4G||8gX}8DO3ABjDm}LkrH69+sWy+kOK|I6upp(Ws>3U%#1w&}+k?-?S7dK~a1N-I1BOd{sXiitOFBYUjS>v$M% z598=`QI=?O`;nkH+eZB)EUp%g*hW^3VNuZHo(-X?2(}!0+ttoi%srPIw_Th1rMNx1 zd0}+$ksBxDW=2A1fQ?A*QN3PT%_IkGc2n6ZZkm+fC@~yj1-8?lJZIux`s@F<D=|8L#<1)<&d6OAFK4bP>&w6%hQmDez;OUptS zRR8m984k(R;qB2Lc_~KC5-Q=$4GJ3Oz8V>Z{*YH+&6eORK89F>P-nt`&C+q6s!mxQ z6C0HrRWaS&R+`)r&ywtk=}Gn`hVs1}o+t2Jg3Q2-mao#>QB^EuD=U9mfvECJn4!`u zgC_Y94swQo0@Qj|1P+N$ozo5xN@0z9oT?#;)Xl-ugoAF$ zlwSc)9o~k0IN@f5Ib_Y^J}yUgz8bV7+Adj_DB@pNq>K-Z`70N!4K^_8RB5pjqxhKk z(AIH^W0L92H#nzuQE12=aOtuxcT{!**(WL40j)s$?gJ-#{<}=b@3Dx@^wgLwT`qtU(TJ6V#b>2n|!JSWcrx|EJ!MM>EK<=l67A*}Sl=C$G!Gkq+Fxyaph$ zZsjyqBrL8@rqMLsE;DD&1&*b>U-qF|bc2e_I{TVifBlJPqrED~j%LvlIRk|}$rBTL zqLfZ*hDCLAx&zL>d{}ZZ==i2LGc8k)6Z4V|Kv3+j3Dp48R7n(o`dUU8l>B*FzFBp@ z14bK;#baD+#w*>}oY2g!$jLvl*GfthyUGV(G{dV~UeG7O!UVSv$KVzpWA~3IyLQuSV7IBv!jOI#zz2uXb+#oy>qT8A#pJ z?3Mk0u|H8R|B7D!0?>9M+b9k=M*3oVqCx@6dR98)CWTz*v4eO5MBUwB~Rn5xL^<7*%q(QOXBsKt6=$;zA&A z`FapnXQH+|yO!UFMjaiarru3yVsDy7F^SH9v5Z#fKG9AJ`36x3XKqoE(dh?&SP3Y= 
zVtI7$ljgMp>%U>vPesRe(ZXh>H2VaetI!H?BSTfFRZ%rlG$TG}u;i}b3aILdW^PZp z5lF(C8revDuzh@&6O7&@Fr5RTiONtuYVF&txWZmu+^OVKOG^%s;CY8T9ygcOV0(Vk z@_0JpuTWwp-RBT5?hJdW0JQ8e%K*>Zl*-nWpOT)o z0*6n3xtV{fKzTwE)QGO3)ES>`o@Yrb1w;hZ!Tv%14qCo@{ERl52}Q(jbsbE`#>UF5 zJcbzeEokf1Gw++3d6$O~sgd8s*95R(Px8_R|L28P$48^|Jz6_|#@{n8l-@7(c7t%Q zNIP`%1}9-1L*8QEvobGn*Y+~g+$BE8#jm@|rSo&tH=DaItk@T}$`ArM1I+E2?R*Av zi<3}#ug7Gr=lH}9;5E4KiIw>|E9TZ8Jq8YFK6;Di5M5$Y?pMUmBPQByz)8uWA&KMs z4D8kHh8K?!lM&(Yxo@ZJG0oBevw+yoHCPV3x>1jJaoS)xPEEYrN}iDiaUK4~ehgtm zo(n}7J8oX?E;kC!9&H^Sm$@cK1`62@o&Er{O%iB*@#Zj7Mh2w6^2fUNYC1irk-el^ zaj5{348V>koE~kJ)K(F){HB=t zfX-!h8%7aOcpOI7bu7rqT}H3kgjQjyA_an0ML3NhWTjpqJ?n92h?_e0NhhgIf0U27 zsTRqT!i?Q-U&DRg3-^?_j>`Ib@V^gr^b2^7^s;%2kHil2r!zq{>!+woNl8{>HJkNB zW-55x=lbiwt{a^|5siwM@ZmiCCXZxFJ_xJ#x>32*_F*Aw(5Gf?{0-Gwa@lgvTqYn4 zpy~rw;tDhjlD^&X*b|9O5W~{(P)uxT2%O~tu?kXk4zeMmtlz23Gt`G|!R?rR36{%c zTTU-kV{Hdc|CzsV32O);su&W!l`D4VB0ot$1MEKnIjkyky)tf^5Al6&adiCswv9rJ zJ|=c_Fi6aQ0d*96yMqGZ8&_w7B1#R7tq>G5MgF?_zp>Zm&7=EV;B8IR$be5mmDsQY zAqp4&4!|v~QXsI%X{f;_VfYgH8_)UOmv3e{FuFfjzpIr%#zcW_sId#Gt&rK%YnDYH z&P*tF^^}0NVnb#u~MH;jn9KN@fDEgmYI_6N{}9<%g4{vxwWqLlPRtP~-PBoegbJ#`@!N+nf&yV8*pz&R3sL@V>X4 zUf+9SeIv*Uwbtld%Q4#G?^;mz8s?I33S#oxVl|LL$a_a)m_|)!yuNty)-@kBZm}pLAhl#<3 zoxq#YlvOg@MEcOQd3|eT>xrQtDu+zX3EEiDcb<;U+82@TKHR^FSc&`Dd!JZR1#;8u zP2W3f#%CD?*43!h@KclbOL0Ro=D?n%N{~AvCC{K!7G4;XUl!~rzB5T*5T3ipfHJh+ zwS040lHrgDWdDSKSvC(xIQ_WdRCJWf9;1Js|6vAuIo=|EEx5?cjOx1`E93-Q=eiah zqulGn!rkc@Gk3XoE2Lqt>D`3k_~iWlpcZR9Y<~g_^Y5v1T`;@S<1E*l=m2$qwF0@3 z6Ujk@KW_1M-f)UVdvzgWSrEs{MRP3qLeXmXr@EUO<6{Q!q|XkjT9+*D^}}PEWrn3i z*lNI+W&NY{mrDxYJcoRdy0m;&W!v*r`q;Dex=J%h{Q@kM^_Jvl%|fi zs^BwMUu$cEa{-8{5rrM1vi@yG^?3ekx1lwLbm>V!yQaZpi*M4qiq?H@povj!w@h^ts zBZ8QIn|w>6>n7eMDT12ygRZP`3ep~idD)ASKn%Y=4iw^48W`LPOJ<+;fkrClB?%g> zyU5oh{eTw4H7!nz8;&J-?ru1~O~!Z>VXU=NLF;+w=qVON#S(CuOEq9f$V;uq-Kb{s0!X@kHmbRtiQ%_B*zS?53W|jlcF=*NoCzKd)1_h$~={o5Tg=0WXN^_P};{KUsHz0g`{U+%0QgE%kdrLl(l<|6;lk!2ezWl5 
z0uge&Z-A&8u&&FIb3kn;kC!HVz92l&vqgt?c{RExS66Hw_d?}^eq`XYhnz~i+Yb;$-l=fNp>{nYQI9$e% zkP7X}&P2CWttH|k?`_t9#Fc&`CBB*#JK9uUURd1I)!w-J8dW+{gVSxSs$xOuyjt~w z_l$w*?b-PfN9Whyg@waf{)98`;W98Y4yME3IEWgNPVW@w$x*USdaiZ^{1By;4^3)I z>GthC=;67hCN|GsO2F?8Mx|ixto|=2@J4%dib5HsV^xD}UoW+uj0>uOFfnJmkh~7A z_~@#NUGfx>nCpTVmT?CU_@Q8W;B$P&d&Yl?TyXu`%~qJx1DhL1*qt$*B*$YcVCyFq z*u-DjQYE)9yjjj9M#AS8!Jh2A(C)u-23b&?E0VI%b?tGO>_2@AM)qpvQAg?RQC#H4Uqu$64?W! zNBUxx>dk#2H2FJ^pe43wZ4}KL(V(eU;`>$tZZ8rp;ZkSJe1DO88%Lw#qHi<{o2L*7 z`=cCHvKz;@iq_N0Py1lfWqcG&D#N?|FgiQxwrBT96LRb1lTqvOHtRz8>_kJny}DGj z2^xPrF|st)5~!U5#41S@1z>hTOT7`2pHy&<5T2WVI;(arlHVy_vS_I3?~DNk0jfJQi% z6O}ieYn-l)>gXCvB{xpEW;;G}iKC?7vWb^LwC>NDKWG3%T^myj2Q`o?0LdfQR?MuE zsJa>qy4>97(D>zvHoAO;GfC~unX#n1_>bF@8sa5*`IIs%?JY1P9+?Xrm8X+~SMq2J z?jJ91u-J*`#e$at{_CkJO(c`d5kO(90NDxD!Bw?>_bUCHD+(tkrJJ^lcVlc^K#DXI z?q}1MCVlgale=RsB)UCbm$mSpBv;?PVNHGH`5K6XnEi>1U69SB2}#qs7&!(&;P@*J zNz|h+|1LVe{M}Sp20(ydSG1 z%+)K{w<-E)}M7Clq(+)unLSl&s#O3fCK@P+C zhzUk80wE)jD#x!Z?(;K4W35*OWA?@CX=T;bVAfV1a+0Wvf#RyF&NaV!%N&nNZ~b6B z_MZ1RGW!867JeYa^YB*{I%tE|!->y;SLdvynFjp4m2BeidX5%s zcc%oMA#6W+7qpVux=4BW`3a+Pq=VzS+>7I-@ZjJ&zi$a9UGuU$g!ZxO!hJIL!1sM> z&DXpe$N1}aw}j%A&i*m`a@TvU!cC_3VDgF1&grnXtk4yN?&3cmJ;Ux=XKs9Urtbd1 zN^jz2$5+z8N!|?`wam7PY!<-Uq#_hwv6*5bgiTxCN(&9f9gBoSr;LGIM3BYO`+hjD z&|m9e@2efb@xy;mMCkI~7ax4;`$cv6$i-D^jCjwlkEi^{>~Cf_XPBgsgRBKKY>BUP z2uU6O!}gQ|1g>YUiBiT9%=RReUC_eIhM6balkMr3wdYrmy^w1gVKmXvljq$39G@FI z@k}u8_emiKA7t7}vva28x>h9VlLFA~L3jsAVRPw-(=kQVIlCaQKw5^ZCVdtP)Blw3 z4wIV>SS4-05fa-X7wVY%NUHw=G#0a%=FD4XJYW>Bb7Y&^57@%09z$@fadA8IQ=PvrY|39V*6y z=hzpw>3A4h0l}|R*1bH)HvQ3^6H2y0=6c5)8OG`ih&qv|iyovYe_6i4g zT<6q@bM_m?wzWsnIHW}Tu>dC`#{l0N3t&`YF6-BvjJK@wN1jFP9LHVkz}Zku`Zn&W za~GQt?GDisWihh#uyE;{#9R(mFy>LcK_>4pg*u&}${i5rC8JK69~_{yqAIaVLK;kz z8+w|Apq|Y6W?mX~=gf>$7xL8fVturEUooS5pw>h|dHM8up+4i^E_O!-y|cU-yIY=t z=u_3|9Qr>5gW&8srn(-fE zNwQL#kSThCfHFF$CUkmK@lJp}waJ;DxPxc5s}9eP0~Fzx)SRldxg138sYi;|_oasN zLMGG4-JVGFkUl`)P#}iN=821z)i;i@1f!ZwB?sG?o8~W7y8P(#^-BVl`Zibg#VaJb 
ziWrJ~d(fzfjDUPj24LJIX-`Z!Sjk9yXAI_IUGfY95o{d#++hDC*z2w116q0f0Vw9J z*uUhEAczHVBVO5yl$TaN`flz9sTE{I?N`stOi21QHhq{{Ja(7UXVTObU@)&zg|fh zxSWsa-#p^6FCxeq`9x+5R@vO&(W#?FdO915+rw2qg#6uTX3`HsM$+BW2QD;++6z8)(gt{ z*FRUeguRIGk7HJcH-Ddf*85J*}F!{c|9n4AZ>gJNu-oJ0y`IFWD zL*yvJY1^;If2+4!j7uHWH!D6gOT`~54|*kDui}{#4#&;>hwDJqMYdBkH8Zwc!#I#! z<4#BGV}9mVtKn9+aYB`05#AF?h3I(85C7sF?6G_m@hOeXeq1h3T%xEpeYzSH8RZc6 z=`@X8iv^cUWOXRUQ5;n=FddrjTE9P=6rfAJ(I9M?Pg~y`(5$5Fg&fo5<*Yp~vLwDC zbUIZq0Kiptj0U@Nw`W(ezu6~a3leXw$-LD^EN<9r${h7OhX4H)U{BrgtTV4 zesE*^{(3TfSI(7!y8VYC<1L@Y9kz4ZPQmyy-6-aX)A{&Iwi;Q(-JI3bI_2TG9xHL@$9H__&1CPUC5;A_Giu)#9g?e<>iDFs zd~kf;D9<%=%lT$@V`uO^Z*R(5vgjEPx5p=NFbK{e?1ms0t#hjX#+jVUf1<8EA-&S) zgxYG{p2q=P1|19j0S?Yi0IYBLNbZc~=dbh;14>_ zYAK&VO!dqDTefI*#lod~MK$K?aD5cxkWR%Wbw=1@Z+u!|6&WGm#Dsy?s|_@^R9({b zXe(ga!7Eak^C7T@R<{>ukF?=^yH!$Ijh{XS-caLP-LoN=o2PnO-c5mH)zxli&R%R> z8M$hO7)0-o0F+$DE}+K9z7K7bQiQCOLgnfxm~ho}q&~W-eK^E|z}?!4ARZ!~k2E*S zFZs;Qd6$Ud1Y=L>uz1_Lqv9RQw0PmHeO9PC9tIi9z?d#VE7bbjC8BL{&%uP6oh`dN zu40-)9?tEZ;x`YTc^#5!S!;%n_I;vs&?ueZ?%-9zSA(Mj%nUNR1^siI*XQHR7`Gl@ zOez!6c|LkDT&wPZhD}4kA;yf*D$7A(85Iw80pBBjkMuE_ln=9S_1(ar2u+N9|B95V z(I7Jy=Pe3V)i&7()8j+kj{8g6q&&b>LEu(HdDOas)s8A>+h0(3`b~+Yv-wi2UvmM@ z!o376Vo@mM(dslxazDr&Fe+n;dHHl#rG*Z)Lh*)Urm-H+VVMsr-12K!)OELVG`F%` zeG0*kjfKa6FG4;<#=~z=<}xFBw;XLok3BYs$Ltdw>C|Jh!T)^ZS8{3=ZG9^~CKBW= zB&tXX0F0aBW=OrO>34qA75EmWau#<>9l#j}Ba_Vy&0QZF1p@BF(W_>ftyVH1PnTgb z+kV-9gD_U4DIb-++d%mI^O3ycYrrfDWuD!$W17PiUeSXInL%yxZ~!X;vGlLgsgj=~@w!5d40V18i;0V?*Nk0QX;93?`@%u#Py z2gK{w=fivDL50y% z;PRCI$N5n;KNhgJJ+l;nTNPtjtxkMm?}`Ww2tJU^dww0fS0HS$|D0HJX*-2 z(-%r*1VSclM85h}_J5s(F}K+O>tZm(8y&Pxd<^K&f@1s$$+$A?<@=)CFLcVYjLxnJ zC(SVff8{_e@PeP{6doxXUI- z`U&2;%(~CU+up53W^O`9)mi-PE5XG_napx=EPCIeq^%cXDJpEM+I^9vE`yy)Tcqx9 z)W!Yf2`L4A4EGlI0%J)TFRadlNr$@+zNWRl5CV4*q55FrJz(>KFdACpqg=$^XLmP{ z(`z8GtoF_k@3P6kbp{u0%_!Q~OE2ua!1uCq5;Af&mh5T`eCD$H3GTh`{sq$qkxu1> zADowpYXblIh|tldiYZ`3##{V+}!iRf!D#h)d$`_c~tHWww(oXUl!6yT<01%aY17<6oz%!U}7 
zaZXd-y9XEX`|1vZ_gxH|a|hJJIN*R*-Y;VgZaKTFVo`0){rtZUmc@%rAF~SIgj5AH zxva`|k_v?Uhu!iBFQjKWXhBx?S@%&e*yfln0h#AQnqa=P|K9V?2WV|yUnMsS=V*hf z1OJ$@<+F*{W9`+eOciuWh?qHO1Fa|ysLo+3;z zs>EusS9WKBb%kpg4a6eC-K79ZL8`2xq__F*e)`jgZf3Leow(y%7TK-%PTBtK7WlPD zTCX-Eu_H;3*ajK|0+pfo`pI}|c5Di9SzUDeo*2K2wabv*0CBOu`QkBC1cnilS4%gr zn@sS4$?N($x{>U;iuFj%P9h@RrTB}&#pbFn|E9+L6qQcuRoJZTXe*ud;*Ts=(;0=^ zF|l328AsW$cu@ljCkJnO6PZf(qJxsqDv&s=g?x;GJPMh1IEhVMYS%c{xQ!=0V8+M9 zqPMES#+`$gmxE^PuA3eox<$4t`_TWg-}2bAl`&$p_=H{qk7fbr^!-(n2AP^bc?qWV z*y3tlTBp5pdOjBAAxc^B657Ze9sfk6rPboNJjWpGu6zj1nF3Y|8XO68LbEDmJ{MP= zO#YfqzbR=&NF|bBYi0fQTQ}stdn)^JJLBsP(NpTcd?&qzK}p*?NsI8%7)JLE z9DC83kx&;?9yr7yf`Ekscx1NkiV9PtAM1sO9nVM7lv5aKO7ZsnT>`54bF(zP#RU?~ z4T?J%BV+qIVFP=NS<5|@+vFLqGfw`*$SQGmnxxm=82<_175w+EVr7}U*f6scXGf)@ z8%pafWQSXnP9t|sL2^N3Q~o|U>l^zNS^p?CZo=Vp4Q?uk8FHF>AJ$m_kWg^Ki%>(j zQ}sgczkYty*m{;<$!L5~j<(sJpnPbbKFm#X@I zmd?a2>9l?OJCR&)~rBJ=)Zd$M*wjOSCHs? zw5kT{s&y6esR(L8V-PP6&#t`>915HQ&Vde)+2b=e!wL-p+8tx3?QUsg3SOFlNG)w& ztSLSQ@L`TK=2s}u3Uf@|;UeD$HUjwWis$R1-etrQN9WWs+3#HVyrljupRt9A@}wEd z25MrXv9#vb*u&kD^vwCzQ3FwNXgzU>v^z6=jpc>CA}G!6qm#~74>VfU@%d7?EnsHA zS>Wqfe?JE@4kl8Tyo_Mc^8AghP@W#TlOcvWIrtkC>59x^e}6Ud#&{@V%|o7SHwlC3 zq0T%?kN+kf1H4xBRAytUkEFrN^KQ$m@g9Jz zQ&3pstWd&0)oS-P${Gu)Y3+b|p4xW7$5d0C8{TVXpzP*HgPBsjk*SSDd^%eq1q_2U zKz(`(R-$zvUb8O+dQ*>08=1belXd#VYgX&Eq~|$yzyk+jl*EW2fclHwrY+CQIC6)@ zv#s}|IN^(HyZd*p4{r3~mjf(SSWbV>ME5ZX0yGI)D9(7ZUQw;FG8W7~u<82r$|*Le zQD?_9$Ov3=%faTrfj~iIVlIAhbHk>^#~=fdoq7gMFRa0hH*#M4jZo$mH;87x*2HW; zk4DxEA4m~^5E6$}g?|ok0i6Y1RJ#HdqU$yCdY8&WQ|v}aPt=~5ehE+uc)k>PAQI-o zGXGX-(}vr(zMD@m#U{rDYfyPqM4#&Pyr#F6hZ3>oP{u0sX9(;TEPHU|U4Lra(-S^R zhw8oUGsYW5%26WX>!R_vgvIvn`HcEnF>#WMWOIOKHw8dvjL)aac6wZU&!9s#kC&6Z zVe^XoPH36+wRO|-Bx$?vW^}=r_kj=mpK|7V#a;5jcE)rFjPo=kKPnbvF>KPCrN&BO ziqxmR#_2!XhKr>EDd$Av#2MY=G${{z9ZGcnXpQy3GS!Tg3Y!?_Rv>&^Fr0L?`(UKr zK5ZvTdIMsXnyn1$dbg9mWv696^kRiFnVa~yzGreC?3~pt=62VvR9g$Mr5)t zyzf0xKuZ&SnQfF7I7I-UOYk4pTfeP{XPRQeBuAcv9C%hHCZQedQ`3@1oW1-{t{69+ 
zq&Zp#Rqpn4^^h)?UXet*`w72U)cDz4YLugc3RCDEQpdS3wSGQwQvWCO=CUM4+b{}x z2B}MXu@&;yxtsBr$(||qy%60&?1^4*3UDSrwBi%yYcZWgg7RVIe_o)Sw3e>Ao48a( z$W!@wO|k9{*gv^27!o9Nf?>Os=9tp@W|6Idg9N?ydlDA@-IRY~#ncBOo>D4l_+OFi zu?FU2L35#hl)oZkGq$}P%^23aEa|+V`ca0y%sU`+(TUE1vO!AiCN?h-OSA=g0<|sa zer05wFnKl{yHz>MmThXG_{6h&5laPKO~sD8Lh%^ZZtUYV*6Z702{}6c#zh-&=T*hb zING8x5r49Wy;g9qY~8wD6Q)+W9_*9rhe<{B`t5dFwRArgS|hCkwt@bx-6fzit+rn& zHxj;iFsHocoQ`flD1d}1g*6VOoQ_wUkY6}i zX5|&Gwr%Z&$1S(-#q)IRdC?8EL!ktJpTd;BB1UDP?a!w?EtJd^CiYC=0y1r^cJs+3 zxlR8&G)r4h?vBCMnyp=mv~L(&{&}L7W@rxXbQxF;qY7Y}vUGQ1-=)Fw z#nr9#J3jXNE#E8W;*U%gqbqV`fbe6#P$ru+p;~*$;YXnm#C*ZeTtV`c=;{mqS4OO~?0N%6F8GAnqdUQRD=pRkXy}3c3~2lPHeG$_MA3U_AZYmSNnQJ3EMZ|s=r_?k;hiN zhn*&!QZa@$T9aDsdxxHnN@v+6>K~xt`$(i)x7$QgDd&OyW4<&b@9-Mdo~)C$)SA6c z?dwdNr#_VqBWAh7EO<)E*g$mw!BnTNex@ifV_T~RQt?4xE+&s@nJIDlO6?TFK0-{g z@q&fNFeo{~N|}5>gT8q=7qv+K^K&@auRr>(JOyV#3unpwCjN(}$YyCdg%U{9Wx#nf znY)%;ou2}|J$@}i+OcNe6BA>4wuX3~FS3agp6Na`(6a(z?<4|*mIY8v>P}>Qu8RA1 z_0LkNz=$Z}#c40B{rHwT6}{zMNQa%8SOGyJKByy@++dN6%YJFo_(KIj`GhIURX9Wo z;43wM=Lu~>fWatte4EeNDpv0u_gjYl5f>53ddMIR4coI4X=Lkd92i z;Gee7J9^nw3~$z!$EakM5X@c}Qw*Q{tSZvT?0LYncEL?jt$%MYQR2uLYwMe;^dC~r z-mDBvZM?!^6gjMtU1a4=nr!%J0$=!63a)%&v@?E}7VNV;1z2BoWA_@DE=bIQVaUCF z#`d8x)Bw?a_k6;n+bH|Qf=+BhO>vm4x4OPA*;BLB6dpKbdNhO%P-aPaqs5EeO*t8Le zilcDF%Fr^&;~+AqKcS;u6a z=xyaj^T2X!|0&tOLwVy{l-4*(7h`!`^7Hl=jUEHc!kD8NmYam%+FTSSU4cdmKH6R$ zvakWHp%0Sx5v-;Ms_Y8VtT&UjEM5xsn;;HfXU_XZg46`enSx278|P0YvHg=N-x+Hr z4B%XwNvF)j)4_SspI39fCZf$@x>RsNp5QB%-<=(LVt8hz%(vat^sJJvf*fyYf=PWs z`s;dyyDV*1TIN6Nam}lD%Fo4C#aO$>zeMN%HhkF2&@9%rEb`U-&R~P(W1fzMtcK?6 zh}UB-E=L$F3L44>Ne2TRos2|fPsK;(b#`p(}=eh7e%3e!--(jFr@GW2%r_TvCbD;2OBlAr6%9+bGdEINAF5(b}K-E+B4(pp`Q5mdX zUvIv5Fo_T5ooKvF7q-j~=C=k?Dye!cLoii<=wN9H)bDsG?!( zEXwH1(mvu>IXH_@nHnbs`~VfQVwMblK^enkel`zWmbCfBy&SDyuGRN?Rpe)>d74vb zfY;Ov1vGklGYY7T$-S9E^Re~TN|JhKd0{PGFsIC6TG+USAN_2LAb1@&p50U)d2aD- z_9>*AtTsCOpwL%_#exx22PKv{Tmbf=GvOLvl@4fQ78lyZXFC7gbKlZ% z(3W5)S$q4cktGf({@QO&tS@^j4CF9M1}~Q>^Tw+T(sXFDROC}z#6B3YYV~X}J)?~K)G;-khF)qD 
znv1gr+OmPc)YN;*tb~Iu+bHni&_R@}04@2Lxs5f;i+uHI^Z4Cg6#{H}so<6zJ6XLg zz@filOy0I#o&x11gapQrzrEX#%Nr*r@#=d^N646zH#^Jg+e>>#4mPY=k_Wihkbg7- zFAMdVjgomCC4n}c%q@QXe5Pm1Zks5-yQn}g`E;%*;1*q0N5RDwK?gx!+F#pzn*Mu< zMvf+u&uiv})?F|E#`6*45rQ4SlEpzsXeq0SD-oR#l~77fIZ=b~Uw6JX;0O%`+s#t{ zPR+Wb^-b7`8j3GZ`Z0RtL>{Lz6bT0G{6wDnwZ*>s+m;&dSSm?$IsWBEYI;@VB@;UL z({>4Pu;M5;EqmFB#08zH+FYN--Mq?KO@>(e>+(bgY_1t8XCUnT|{N3`=Qe~9(;-n(wq#}h@(8Qhi|vz|f=+DAs(Rq?q! zS*LX(YmzygxI)?p*S`>!`}Ovvfo+Gf_VMgf(KSQS&szt|u=Yx567Qq?Ug%f7-O<&< z2&U!Z{%VF8urq507E3zT$TxP!w{9tVUuXPcX||wk`cix<2;*;`Hks17Lgo`FJY;JJ z8&l<-QB{?6=YrS~_0NXx=aw)Jx_%=k+bQ=Ylm%rBfU*h3L-~0gn@3agpQ%nhcWtj4z?juP5;?tu$?nuhGUezcRK9f|Q>p8HGhB-v^$1*cE3be-$ z{PNo2Hr1pl=56@B$2J$bg*4FpT}a+Nc)E^bueSQM=2n?%Grx%d{zFr9yppar!!U)~ za@WEzWPO*w&`O!PR&;pnVBeC%SYX}DS@^v!!r{ym=d;SGg0OnojJn$<0 z3d?WpF8{eNHx0a(*Btl)B<77k@z@H@l`QHgJi}NnAh3O>|khQS|y@ zcX7>XRXqC<5p!w)hua;9IU&^+M?{l;0cuf5OIyn^@jYA#BW=$|YsfF((6^9p^^!erY{sL{UvY{Q4SMv>`9Y%R06`ngy8 zSEUGq^H;NKwJuZ{R7L0)9NQ_0vU0ImwlTHItI3iAj1m3QuKqe=JX`%o%vq@|HGwh$IR3KCOl0a_|B(PgQ=&iZ_H40}g7TQ| zll0g?-No)~L#%a2(b~KMc!$<|!E=t^ z0FACK7<`l^cT=w33@&%aCkG#56-?Dxb(W6MhB{*5GKq_!TlGL);lzfOSr*UeA=Owi zanF(lY4*BUybQZ_{6w952m`W#_4KqXI0_?=rr#<4V$l;m&5F*h@HV>Qjj`Gw%J_`| zsl}%qyNC|ud8HqCRy_<4fD4A59SVIY{|_Cz_lY#6&2}-`+eGF{;~_x&2E}9qN<#%yMK&#K5y*TPo7an}xV9K5fVfgB6x# zfR2nKyL>ifZl#I9=0#3StS65@y$U9aMZzb$Gf_g1*(e70ctehZSA3XX*lQ&4uX*%0 zdbHX<<_lA51in9uDs~=LYL<;ty`qa+@$ea?MCCi@P*Iyh`I~!ekb|%M0L2^K3Sm`{ zn45env?6R7CihlA>R&62#_h~@X&pt2+v>qbEneYYmV3eBL#|M6;u)Bu;t;Ph^y zFdA0m5#n!eLrkd_24Gk0u?Sz*iTiQsUr(nUWUN;OmkBWY^1N;^(!;U?GRE9q7wb|d zdydA`{Fj9UgN>!ZrbL&^@y$=4Cp%_}UPK0IASrMM7Bn1Ci2!l4JjySf;Q}{Hb{oMFgWP4qt zU@=pbCDL!>+P^B^f5USB^^Aqs?HYx($-ImWLeVfO7pU)taW6ixNY7vXM*4Ag3Y{jc znGwf8P86;nq%|iyr_vANO{%Neq|gmY^iT)DMTC0;5m$TL*w#E+GVV4M_ny3^gAmeu zcfc|=uF=)d>aD`QKU9(%lSjaiv2GnU?% z)ywKt{HI&*=tN4)?TLA__TkrLOkRmhmHD^rXWmoGJ{JX_bRu(dmnBHlv3gtIlm@4o%!lFTpXh5Ya`At0y1xpveQ7gK z?kJ#N$(gNArphAwCO)L!c1-;1KX-whc`WFQ*PD&6F(-yU|6XyHdj3<`_1mx;%Kbn@#Eq$b6WIISz|M8 
z4%66R;*pUGpU^OWZCNdd>Lt2tJAg>>EX{LSbWv0LtZ36_5p0#3jN#!$ zBeBiTh&QahERN}@EEpy_crDL7+7Oc}hf>XM1|T=mu8;iQ;Le@|VWjFVT5|}_Ss1r) zWcF4qd#pvjgNS@n2eHk_$L&-ZdPVc)o?WeM*^~_h5!r|(aJm4a<}*qoJ&b~k{yW9} z9%ak<(ru5*r7zGo&1*lTC2w&kRZOiFiz9 zW46bD1>T>!w`#q2t1Th%OsC*p2khTH67kYG-KoFoH^^!_0qF;nJBkrtE2-Jw&QMJ~ zhGM;sPP4in*iZ$*kii=L&eo9x22jc3Ihtnz6p{XAMe~w@w88qG?|ZDe_a&FfjG}l` z?yS^nDR3?z+ePdi7*IBO#D%gj)V?N9Kmf{mZuZm(Zg*ueaY$P%vgUwg)8Z z!GViEv{0iKR{}1G_qPbqYw!$;nIwfoGh)~skSSj`BU{7nmF@IxK9i} z5T7v0S~^(TAW>Pn-=>^gO2{AG(VMi*NWvj}SoG$hiqP9*r!`NRj|BVRD17O1BV?CW z7Aj>m?HVMd|DCA6KYA57$qr!PT@>eLd*)_HguO_iMR8j)nQQI;9?P{GXvGP%h{ zmXzxHfHylpKXrXvolg(T! z20=6{#)1kCG1Y-{*3XrWOgnwe7>-&{mWiah`+Fva42mUST09(=ZRAiEnm4*t576*T zax-H{us2Z13EkW7)QBmi5lSQ+y&lJt?rIN#s1_-N9IM$6T3BGAFf11(537j`dRF34 zp-iO0peQd(k_4EdC+Ma+5}|R`n5pcr`+A+4>iO~QU;P68WKfJg52N!NRrPp0uSnK9 zAaMn8=97i#Vz(jJ=y+tTpK&8gq}TH-13x;jJxz(bQG>O{oArYF29Rk{oaUld34Tt7 z>zV(YJ#mG}7zB{!-I-kw?ww=Y7Q01=Lvv31`1=IsuI0OfehB|y)Pm3d_K=6tM%C7;goq*9`BCU%`eFd@6GJTjxvcHy;ln__?^>+pZOyz7`WxiJ@77 zFbK9_UWsvFOYKdk2bCPiD0}DYBT@hE89k68*gkJ~CMt1lsndKAs2^Cbg1n+5KIvjS zC86nItEoLXpzgsp4{Vh#J^fcOnXgGl=onuPFz(3RP{*JRR`Y1`S`27wwW0#m8RNaA zf4N9qADP4+UHiXr;l~`o)r-PEHvRzS#04WF<`-vSnZIMt#1?on^yfhWNs16S;>JoO z)u~p)f9vTkf)O&j@)P!D=RQ)xBsOg*h6O`QdtkE3zp3o0T<|oU)+o4zp8S=1dFz)h zUbuN(8gsG%g4%{bzq8d7Co2HG-QRo47mMdMZiI^NA0FL%&8|7>2Dl7AKOAn(BbOuQKn-sfmq5588<}#ztkk zzWO$9caW8_D_6`FCDC(c9DJp!bdvK2#*m+6Mzc6oinxgdnVk(v|7T0}0O^Qyo%r!L zW)kiTuV?Z|`O)1W1n)tq^e81P!xdZKXzeDy-p1!%r&{uI49)I+KP!QF9)>Q@mhaBS z!DHou$UmX6NOMnB`T?^A;Hdy*Wcbt@j4J`=9MbZ|{x*fxHsGUT=!qAkx#VzScI_?nD(P3ce!ediSP5< zCEU7d2`W;6v&c=YdD|Mw;rMHW_zm+=4NO^%w;ZTO_L~A))1rU8N$mQgagnGrA&}9U zu=&%&53BQWa5sA0jAU-Jm|BJWZ3T;uiaxRY6DOaKI;I(hRanjEMs`VIcBqAG#YXGs zvA%D6g2rgUZxYr_ZH2u>(OJV}5#zZ&NhGbwYPFa)k~LG}Fq1afTb;Epx`blUMap^Z z;%Bn`ExJ&K=J$M@Nnq+-$m@wzup=Z8!;Lbg-!MPFCohcPQ$R4yu znITs{1uld2UVX3=^J!ixdEjKAKn1v9bc+j*Pc`a%sP2*@_|#;hn2qd)u+SAPup6Oy z>CwjYzg~B~%7DVWlzTh#QkjsHY(7-&O(l4OZyh&maDIxEo?;?qstjj+q&wFkE5 
z2d{Aowu44n@D`5eR8;C-gaBEr&NS(w**2*tB$a{(wmBZKh!6k>BzjEM29Ne-)6IJ<)ykqDOuQP^k;M*&U05Uafr)l0Ln_TYx|3|-c!iQkSYtn>5XH;=Ot zJ=cWdp$f@*Z4=s7rzMlBzc1-UyN4+Lte%(MQ|z$}l>qR3t{C zac$-fO563U3%h798@=qRqJ=iY57}@Bg*6{-j8Zh2u|v=rl!Q;JrdteR{)cI~^Af-y z)>>LVZ}l_B#+9r;cK?WI<0hjz*v}(I09Xnjw;wjxO>TY2?_}47$Mf{4a`_JYbHc#c zP3_+)h}LBT#`?KM>IK#y=$ct<(>WVGncfJ)OrXU>rCW|f)H%&5dWN0sztwf*NhWWi zja^37w#(M#7Hw8g@c2=w&Z(NYRjA&k55nItM_=w9TV2y&YHgE^Q3s6lC}~I-RcH)381py?tNeKtjox`CD|NE6M^YU_4uttrE)b@rUg& z;fcZNMs~~XiTK@2oeK??Pjw(RH{LdwFg}ff3iRhhHyNv*mP^gaJ?r^^D?!DVvyey{ zk;xJbWEEX$$v&KRc+qFI6vMT$iiCIN;#9dmApJfG;jXn--Rh)avqpEL@7canL=RPW zGnp4LzEV$IY41B4>s>o3~;}bi1t!WD2{L!x($=Rdnk z)eP=WpTvM7wSR_d)Yvd=VL|qOHQa=R9q2yXe*2GvoVJy`xXasMd#Z>JlBDe!0 z-24{rd>y7`PK1O!yj6HJaGmL~FAutIAh z?9NGN!Uve{L2J8W*;zFh7&hQnLGqk4#hl@l&w@)UqS(AgHhw zs(;?m@A%Xosmc7YNxba$?$z#O$h}gSRBsA6fkTU$X};Zhmg|XQ%7eQH{enW7!?3-l zZTBM2U4axZZELoCDqvwbu6`=2J$aXBn;e)qYVO{x%B=}dv-T9(B;Wrm?WtMCP>X}Gt0;fzXSV+>0KC1Q zkl9eeB2@hT4^6RdG=1J0_3vvYzvqf~vg67=H$tf^2uzDcegcCKIrPX@FZvpOo;qx3Fsdi=}fsps*pDUe>>D4JV%7z8Cs{blS0%G8vkmD1&E0mMU*Y1+f;PgIngI z5r249*%q3^Y(+!#K<2cw#AkG@&)AXo`|1a{u8kMI>|ZID$kR=6~q# zHkzRtT6J%|oj}cRWN!(0f-4%Inwg##58qOppDRj6qQe%%s(J`2Ea+3Q?c(r*IQwRx zhW|jT>JB%iPDLJ&c7~b5AqEI)^T!CuK%)7hJgP@EkDh1>(VOqp85~G4c$tpfHYTEr z#NJBYG`BlkBe(D)yw7UB$eGS>Fxo0ZJH7^0@sBfEO2e+J% zE>~C_dPCuYAS ziNpt1gg52T3Pq@^e;2&kKu(EJDEXMaAfWu7=xjz=F3jU!$}0xbnz*jr-S zV4VL?Wv1gT;Ro6Jf4kHog>TZE>S|UYG{js?UH;aFQ79r57K#*_yydsr5SmMAd;8R( zG5DLK;dHJTTUVnwCdSw!nuA!c2|+uZO*Cz)#SkId5DQ$s0FRQ@?cFaLohm4MA4v=T zgLZtd=+T+jzG^wtc5pZ_WUhV#Gr7^eM*N+&FQ$u4o`gR?(XsILr_Ehz;<*3ZXi{jz zWR3!whv)F=qNS_=KzZ z^S49S1<$v2mq;b$Ntqo9X?o&Cq2lfEa)1)t(5@BP<+w?YZCs(^0qYeov;~*egU9qc zS~aJ&o$p5hrJlRnezl}eXxYSu6&nJyjZ{hUib%BWAevCEYNvMRr^2G8F6+5vkV-(G zDN4BWMVHVUPlY;@tK4m$cF!B53^Di^zC0vh#iVZY8iu{*@RIaqJM_qs;=!Ex+}vtd zQ2B<`tc`2bo3?7nDS*#(lrKcsr}|7iZ=IR>*U$9$}_3eGS|y z&ywGEue&ehzd`-z=pKGJ!-k*^CCP{W@QXL4CNPPkWranfj2_*Vrs9ASGhzz66ea6E zA)mG((TBNuXO$DPRv>uft~Q9(lRkK!apWV+XTv9W#cm^eC0e$PXf9kW$xMVZo{!Et 
ze~Z2)m>j2b8}UbHzf_s)DP+`!?xO@ls7^Yafg{Jxzgz{a63?SV9n&wt1~hn5%If|` z^#pF}9@o00ssSwlTP@WzHm;D3u@toXi0b@LbrWY)`SY|&WMGvPY0qW5LON!ByEeP> z%4F;M?YBC+Gxf%XUbHe_w;<39WGs{S{C6T2@({U&D2 z`$x-64sUOA^3K(*D-o7-(<%@(kzvbL%8J9#M*bQJ-2nJBlZ=?$@JYi_Om1F#-g8J2y+p*NLj!APcs^_~ z-^WLM&yCqy?{y@;#jXPxsp(6a{V2C2F=m-PxL+9UIPC`0XxPrwP#BJk_2H7c>{wyX zaig;o)3~Cxvkq4g4?-BSDL2|=gNHmAI%T2+%wKDJne5W6YFA@Pkwtf3T0 zo*55#<@bV>SBgG7H&DY@*buC`S$6kecA*~>iBFbrZKqOPu;v-eaV7kt53^`EtRxbo zxANm+nl718FV-tJDsb$rr>(PEfR$Zt_WP9*Q!k{Y)CX%byMTw!2FOnP(MPw`@559F zlPDd#zvc|Yf1NkR72-yv-*1=*!JmiEH`z$$G#eu2!PqbyoBc9UTSWl}?zcO&C77~l zDp(6u%#z8dNVgoFcxlXXo3sqL+6RDjM@%S1Yc`hNPFIWcy>&YYAAk0`2kkn#42Cy) zjg%w17o3OZB|@O$kmDmqWC(Pi3DmGlesFQ;Y5CKkP7Z^SfHiLnjF!j)OV~XRVBsre z%v|1vFYp7H47u6F9GpsNIRI;*GZhsiZtj@qs=ZN@osJhMpB`Q$-`V!Yy`()I#GK`3 z4A2BaZQZO%#s-#UD?34EgBYC9CX(m3`_mGgnf>_}$CG0oPVHa#K`pAND6g1uo?6?# zd#W1=!TJ0Mi7xnrwBI&ZdjRyv72c@+f2Pxp75}sDg&h3cQwujYypriv&mhzlX>Jrv z`lP%~dnbe#MGZ8SSn^nQ#D=skvC8+1ywSF^AuuvOz?c&M(Ge5BJXdsDi#tDrlQj|X z>6UzHQU0YjgRv01ZTc#z6q8&T<9y5RLM8eiTJX7s+^es|bDyCOaB+04-FjtLOv_UO zXQ0L$b79!l^HIv*vd+0&9S`Fr- z8RiS9%C9uzR2d79K~{hzDC77$fDLz6MMVTXdymXSoZcF(QYgSzY zjTR$Xi7A4W^yEYyGT!XjgAj`KEL%n0E4XjRFH^Qs*?TyI9+G!I6^ZeQbebH;l9YRSM}VCul^)n-sqzt z&Hi#sa1P&*6^lbDd>G)LpsZu7dzKro4eOaRmktvueZ{3i>i5fYlCbj}dNJB)XG~hn zl!xWVr6E#UZPI6ULos&$?m1-U=sKq6`L6FWXc${ij?xLM_~VTHHB;@{*~gMlKM5=o zg9T011wNP_LO}4HDYh2^F20g>tAY_&q~9Y1iSNc#J>~~&~Ea#$WPK}K8}z4PzEYx=tcCQR~_osznSxM`)tN*@elS7}a+ zr|lH9B9yfh#@lEJHnw-Y^37rGp^i=L)2EYsal%sS3tL1;PK1w75MY)_`1M`#@GV7$ zz@LOt2hc!BMSl$)aa!bEtV7NI2rUWE_ezPUr2PCToNQMKt5{siS#9~kHTPnsC({m7({!f%uf`(( zARj_-7TCp+mZ+xgIv}fMZG+XZ5W~Ye^&b=;c-;~fQ0=9OT~><6#@+uLgYg)4FTTbCVaY@sJS49_dE@3#BBhrapzr)2oCmF|(sm{BMrlT{03gVO%|g zV($I*u&g6f91Mb#t2AYVA_sb!3w^O4mpr&gNaQe!R8l=6U*DXLqflr4Ip;8`8ZE-M z{+`Ld`z@69d|HAuqzNg|f7%SU4MjyC;glY{@}&OriN3i}&FLXRWe@as%YuH2ua|G1 zse`2F{q)mYwq_rYpAqE^UX63HA62D`iT4c$I1cG!z%4tiy4jOnioQj@y^44}hS&S% zwcmBj6%FHKcCnAY3ORq;*Y<38_rmwG5u_sNHtN0|>OH8c)7E{uc6-&8j+ym8mUHU1 
zYs4|HTr*jES%8tW?0{Vm&N8eri*u~_ME~m5q}i9=&uai^_+IT~rgvtPm02Tj5`?iL zxbq8?*!f+}^6K?jH`bQ^L$mFmGUP>8v=SAlXu-~FgcOznvM#eQ0d(goVW2OJkG8jW zJu`CXNu=3X{BoDLAju(c02Z0ao5a|RWYXJYm0IclfV6JjX;V<1nD)csp#$yqsxjGz z$lVLzypaD_{3A3*$DXL^-l%SbIl$8SJ+Gd3^3g`*56aw5U5f?yl4jxIcAS<1*cB@A zjtO>(@8flvUf(|dE$QDqoIkaP#(zFuptvm_&GY(6O_D@aWr%I`)t}h>`R_XZ( z`SIyH&r_#Ti}gRJ_DWL()vX*9i)K$^GowGI9ohD)XMi+b$wNKoWADbic4B3W52^LR z5#wZjj;o}Gn=Sydw}75#Z646L?@u8mHkmoMw9t7{$#t)o#E4IJy2+xCm( zj8&|4r|Q)-`Q|jEHL7K(B8>R1JC3|J+*o0)G)>&9*c#Zdb~&0Zr!B6YO4cwc&!{_o zXs)Hn{#M;aCTqxlR^b!py7($#60<8uZ9kauUUKNvR^0a;_XiBjxR&2VKKW54i*>8s z4V33NZQJs;ZmnG0UzabKKFu?EX7Pk>c~}2%y!Th;O`A5mhFkWTj*eis6TQ*Suc#J% znHBek&t2#jS|OyX_kyGUYhYxa>`LxBBzD}de!mIoIquU~fECIi7U0sAew>rneqx>r z%PVa}({}q6*6zpZ_%+l^5u5k&QT}!X;YY3gWY&@y?ZN$hDqC#7@{JGd z(+tlUQ_JDyJ5$g95qMoa14l>?J$x(_@40|62o3cMm(Iw3L{U2<_rMCRN zlOUZpY6!g1K3Kk#JC_bt%{8#m9K+1h!*<%$aBQHRmpjJs43kq#L7GQ(Ro-oFH{S96 zch9lVnLxeupvn)@q}!Hzl1hH_h}ySCK7(ra{;^%-eL~)|L8FJ#nhV;72iop=e!1Ke zRjVn^AM?%(+}rw!e06XeHI_Y#GoYJ#A5O6yYFcQNS06l-u!I&z@-PlE zrWW2Aiymf5I_ya-wc@;(pl9-ch)W&)4ay8xzwuC%g&V-n^eR>c076>!m=AMVy{GYg zt=KpInS|-lO0}2FdeIiJ0g*vw%J2UFxugDoYqy?0ZV+FAbg7-STk8pnZ!w-7&8V>1 z+Bp|=Qeb%Oy&*p8f$>|J?gN7E`$xyPX-k%EgyerLYEvLvhmvx~Ld&!ZgxzEMduEEi zY$Xm~CF+zDWd+BX%b{WW&Du_1jXiS1$S`a3W>?9PG@F2w^P*AC(~1?^6k?KH5b94G zj|j=?^0wqcpD$@4%CK_PRd&5b(4W+=pXjkt z{&&!xTKFy{32clM`_RP}fGd6%_gm_(0dFibosP;04n8e8_PrmP>?#Fph1MUl+@CJp z9{tZ3S36J#chv&9rB#UcNkceW-d3u&mv-)*c>EtsHqpZX|6lWl!i$4PZgh~AOSLZf z{f}3EZPB|8UyG~R#@pXr(DWIy@t%`@s|tWKH?Mi2PdsigwRtEsHKDp23DftQ5nQ_0 zv~zC7~qpq+d3@tJAy-DM{Z3+ zrnlBdj768R{j!9$+Wet8rq8d{CKl>|0s>B=@-gT8`g<>9p9$Tw&1e>Rc1T2ug*XIy z<)2r7NeTmhE}R%GJ?{ayT?Oa=^r$jepNvxM*Vu-RI%lQnlf4hCYgSp_TV)Og{`}Yf z-7C*QeJ<1@x^S&-pStAurr(-ZwpS329Q?SHneiFbd*t?AkcraQDdPM88lY_f%tij?lKy@A zcXabFIxmp^!uO~Y|D72Pb}<%BSxIf)B5qZyN%zBC)tX5atbV7Xm>Y6M(l=1%kVe{j zF|Eo`LZCkx0Qpy>*A_iOqpUgSEQss~jx~aropU(XG1WNpV}$*$URKkp#IN|ox9{q^ zC%cbgb*v$*v$So~<^u8*Jf>}tYf>F|qaZ&KrW!!N7uIzaLgS~g`^tUy$%V}I|Glz= 
zBJb`6HTVq4$(YMp&kJ&T()UAtym4j?=hv2hj?Do5+eK30;s0L$mvU?s$?1}!ZC%k_ z{N)h#pVq|++W1++ORtU{8eCO7e9h;vX2kp-OpiF3Ag=Y()aeriS1ip44~0+vLZ|;2 zwe6FLQ&lR`wdo@CVk0g`5QaN*-cPzln|QIUFxg$%Aw|w{tSjG)y)|eSJGxw-t`4w4 zT}M>7p*2Ee)>tV!dYX%sCSG7OvB;K4h6@RDjhaD=sHmAeS01MdY_Da_=?$wNUu1&| z4*2ePkmA*}bV2^jqT+$$$!AFmSBp2_{k%{U@2nO`qfb1rN~K|uaqENRN2W$)P4yg; zhaDw$EacI=@@n*OHQKI$6m2q;%HwHwhsOKmQyiS#8G zrP0baV)}A~cI+K9>uFP8FB21xzEort^C$)~*-%s%g{o!fq_mz?q^SYUX zNfpZwxL?R?&h3G;4mpl&Krpbn2y;DfUdLASekAFD=vunoiOkQs$ClSFBd8TZ*jbuQ zY;=ytxP&iF2h318T)@VyHQ7FB5?Db^cG7sp^*e+cw^i>fEv7ssUjMtt*Kt6(VuUj% z^FQ?R0KSptb9d~b{_`s)71~W>1vHcXMDQ#=d-|w^U0Z}MzyIilFFX7HKZKzHNkwPB zK6`9y?1%EDGv>%q&Y%Z#vF`q8vpV*a@rH3x{{iiD^hurjet!Bt-+MrfKpk>YkKmp- zS2eA7LTIiWZDF(V+s-ss>$VsE6F1zR8hRN}_CF|p@7R5u@3FU-*LKT|67yn2dJe7I z;2w@w(VSi4>$egMF^tuex8q@XT%dgGBuq({w*{<{ERL=IJqw_^!iKvlaX9AOn%Ys{ z+hfk_DuQ{bKWP%K1#=?+uR}f$SBO|kLv-G#FlImcbv^UYYD?UU$y)IIP)2xiUhhlAp}QGA1_poad%T2n zSf7zXMGKCm6|?fFOOh@Yp=^a3&|@PlB`{N`wV0M`l(_6;$}OIFJ@d`35q)Vg^M3sO z6oRk&P|ZTr_PJoH3wDQM`?R|0*Ecr1fg|KNr?5|H{u-qO3O}sUjk(Xw)Aq&?QK;bF zNw-ubvuW(pXw`-0SAButw)3au?)fp}o${13Brz*s*K&B?CU-AgT=G?QibFFYZr$QX zo{h08+7y5QW7Gk4)>%{INms0fS)(0@O= zNy|V7<`jUg!FHrEdiw0c@O=#O%^qws9O*eT8@Vz3J@axC(CKI&A;)_G+oRR0ds*oN zpQaH;dj{hTE^-P=6J}kiN75S=uBBTALz!RQgU32=f0w>&T+prjX>70g!49Xdmyj8C zx4vY6U#Y$@ipQQ%P<{`l`z9Hi2&?ypU;`KF~^xEtz4(nnsS?5 zb3f&{W#+ViA}LcU3It@iujkaLm7zx*F=5mxmr52>Srib@g?Yq@}mrQ$Zf z=ls6^0kXV(p8I~iuB&LeJxN-+5#k@qLg=G16bk7d|B38P%ubQ40L3)FaIag=m7FWW z{rccOAw(Qb0Y(rHYD2{M*qjG-!{kK#w-)v#Jiafo)VF$OZik)vsix(~uZAuxg9jjO1ex9_ zxJX))OkLELU&-T3xJ3cG)}`zB+9(^Txd-~?k=#Oy1F-q%QU|THgo9O%9X~aXiPRf8N1B8;%ZE27QK_fz=f&TnY6;~K1N#l@jtD$8*lB3 zIH+3H6-8glYUoZS&LL^X$B>El2(Cfd4hP7Z{oXlWr_uTcUbANv=IV~m!25eV0)O4h zFXm~~3H5N&!@oZnVla&7QC5WdRaZ?3_}#XF^jgOW^^Vf838wFXyS&UAdB4N-nN^xg zq|MRnb9J_!Ltt?F|E2(Hr^)IU*`Ala!|a-@lrfEO1|3B0ekP7-WuYD$IG6sAe`>mf z?axaUHeHaSPFLc;%6YvRaK{AwI_uNX*RO1weOeB55|WqxB%1b_$ZOB&g`I@Nj9Z5n zxjb~T8cVKimLfWhqq=|F7j^o$R((lrf;bO_@rBj)E)^7Ev9KsXep3yIZ0jU;vV|U+ 
zv|jF-LC;&O!PIV$r9ZbIz!0bs4{VNl=~e{;3g<%8X4{-wf!u_#-24aLS^0V4&{ODU z%x-l6;$}o9JVi;uXKI%6(BqB&?>IWJVKdduXsioE)yNiJ%rS696x2jVK9}A)`eCxa zPcIOQ-ZyDla>TZ^^X~shf2Z!}7Uk#_cP>X;+%b0#Q*XibginU~-#-hCTic&}xAXrJ zNU+CHJL9u)pDzzY1dV_2DCktgyck+AH^~lWBn$UbM4YIzO6%fZU`CVaRtP7os(dDw zn#UKW<)AFFbhGF?-Bk`NUlcgp#U1~XUhfT+UrtV~`n;|Sn z_y7dkKQKg34*9Ng|C5Y2XQd>OgwQU{uKc`C-p*a*-t+Q+CKCVth;#X}X#2wQ zu4eu6@@)B&v>w&Z>5-_#k4yR7&ZB=e8~Sfq!euY^!_U<5$A0+jT+`>;7}@@{NJO5O z_1iZ%e|A|6wmv|Z2#8*tsYYxYTt@=H#U&ug2~#CyvCtAy&{z%12O5D+--cl)K+QE| zXxlco$w=bXQT+1|$)7y&s5P(ADnC@ns;Tk^TQ!Af=B3D7)}i(>egb(qF>kf>gmph9 z6?_TisBTG()|r?i*0vTW&hE=y`~QzSri3gz`gY24QAhlQ@3$~G%J}o?4pb<(a|f@o ztTSO>jCk|WxPMrBo+yWXhR#|2RuR`c_pQop+j4z99Q&ks3{Cbb*R#6OGccT9_|Bmp z>`KPvH3?&soqDzNtGlwxlJ#Bl6@z~v|Ni7Wq#$(F)-vJ{l2gKn)lTnBrl7}MZJrJhp~2u%3$Mwh>98x;6yf- zS-J^V(_thJ_uqj%c29uwuYZ=JBi+xZVgvTDr-y_6mHnF27kaYz4ShBks-goB-tqg; zk&Ok{eazB2&#qXP=eyVeL-hmDrQ~u--#Mp#KW6(M0Lu3Xk^}(e_DGJpEURC&Z}*5i zGK`-@~v(cnVRLpjFveVOq`8MJ9m%C;GAT<^~<3wfX%v`UfuMuS}34 z;@V*cgfQcYEIFRfOLe6Ax(NY~nmVJ=KD{`)0as@fA^kfH&B=a5DHHKNVHOk8?)4kDTBbH1(m)0SbaYOV14NHN?ss7 z7lVJ2z^2ZIJiT(F?}++-*vUEqq;}Q#;2L8gyCZ|e_^AJVX9hn5-&1@yBy+DDl)a=+ z?M%;(PikrjG%f$B&p>t+QZ!Un3t${=l$-1s>})0a{srtU&+SMUFWANgEnw>mg*?U>v(`?iO#+| zyKGJcuLq}yfWP{cz19MUhsMwJygpYFJbQv!vaRBSL@8wk3W@7DS~{g~aru4^Vk5|Q zf2Ov6)U%sz0!#i~vE8Fq3sLrRoUp#B@tL;Q(XCrBXINB;_=}|sb&MzaoTj8w% zEIsmGZO3w89)9@mPofI~Gf$NrBZj&|tyvL~?vZLz05M3~cmIFclb$Bp{U)YO2tAnI zS35vP($@+?o|P>wCZP^FSdDieBVe@&?_ies$ynjEdiB5n{$wp9rfYeDm_`j;{%pxs?x1``zMAma9gSK}Qc#kxt65Fk zw0`Q9S6z4@z1{n<(MXl>(>%A!?fbJY%P+i_*N82*e!k^&`ZHp1TWYZVTt{kK7E3ln z#l0EvvUH`_zoMqaOnk`wX5Z$TS;$2d{ zBy18K`(;-c<|?urTK-*B)LAJUItUaWP{awD5l}*Y-MIY$?B3q|{Yjei)!xs~pR*1T z;;>{EbHU6*!CP2c` zg^5~PDQJPsuoeCNa!k|c_3hLg_L_`cmt}HMECU~27`Ih9uUa@-d>v!{UXq}I2^P}t z3)^a;*i1sm>E+6q`LxWlI|G9+^ju}H{akwH$Vz{F1#kgdfoy@j1gq-2%n%&d{zb|v z{3=;*3t^y)^1o3%wtlg5|AC7^19BuWBwuUfU*tgcM-immLyoU0< z@2f)&y#zD(9+oU&v*YfyAsUdp*k#A)*S7fE%5rLMl=evVtohBMCa$xk*NHEl_pNi> 
zDzwbxXV0>Nm#Yi1@XoU9B#sPQSvDd(Hbx#3U?3&-M-Y?&yhn0HPL%Qr#snLOyrO@p zE5+D6FQ)Fw`{xQobh^iHY#Su!K+3wF6~XenaFgqwu`VY(#k{=woT5lbworP`Gv`^nI-ATd}zS`Tl!*U+UF22wLV=x%kw*0${j)&!n1sA_Jg}s zsB@j*o^@PmW&-$9&DF#{k>(D>|F(U6!eAeoa}l#=g5ud{(H}E87CFYJ+;hV=M~vZ) zaZ7#$0uNvn9Xpg;cr0{S+Wq{^s|kXur?VilzA$>|BZyS%YsNwHVsIA~A zkTS#>Xy~wEs=kRt*`yo%3%}GjKtAUbjfOVw%-Kz#_}xVTGC6XU6l^HK(wa0|4RV9~ z8AnU%UeVI{J1jcRXnt}R<8EIi<_mM}Pf0b*Qh)$EqK??^2D+pMmsNnprBn?c+ll#>dcGy*?(Sb;wBE@HX9kq08v0;QU-oNlnC5UJNt$}* z05BUz5ia$n3|e#=lkU{Qq60A|q0XtRcx^1cI_XBARpLuk<~o`O0nEM5;J6io7m@4# z8gp>oh@9OYL7uevoY}@A2XmZGA9<;A>=7Q(6kzOU6c!KVl_wsy_Lf*ZE-Rxzx!jP@ zMy`Sit1We$y`MVoH?j0@u70O;v(j$b5lxDNBvXE?zCaF|%C|TfQ&JX=Zs0r|>JjR> zzK7LBR$qAD&KVNCn%Y7!l7yMhXeF#Y(-D$|4a$=Jn8~g5Y6lXp3|3Jy{`!5#K7ROA z^`W$aNw@AovjdQk@EdV!k?gB6l9&_#gsrC6n}Zq#`bDs4^Od|A z2|Wu`hVRKbKYySXkEun6y_=&u>L^#qK~f=kRfqshw5Xw1Rwe3j7TZP-Uh+zI>!xmO zO#Wo78NDg*7hZmH$XhSN{@L3@4?JEK+iv^F3};3~`%}q!Vb=!whh(8vDatEC6CBKo zmrI#}KtTKKVzeB?k$KdXSI4&!k00!P@`f4uSv5~fkI_+P6qZ5q6egO+gEW|h_ZHJ& zoeg7i64z9AqTh^L{@uQjaUz4x3!rP2_!H%CR;6T;;~8Z3yueL zK&WNiOq}!mR?xlX{9J4#O$Ji7 z40zDm8V^M1P4L1tau`?DMO{z5IOnr}XYLc2F8e0Uj0RjMS!^=C48kzE<=DgHq-QqJ)&WHKc`2l=05YVGi}K%hu%?VYizM(#_bO_l2A&SP zt>-|uXslc3hxy*0aZ49ot89<%VI0esO#BLk`Hp6nkX_?I8WpQL!I)V!EXVp02{us* zU@Xm;tFNlIM*cFily%{0z~-5GQeQlVJ9iiw2T%8!g(4Ihc!gmM%E+6pZP3V?FIPFO zl-cO;PFF4ecr7GGmBNcUmEtqjfEfs%9hPoWLs%(l znankKznR?m=E_m3DSHJm?uQfEX&GWdES7G%3(u0nI48C{T$e~4-CMqg#;>G>hqo_p zlZ$`TII~(pp3$$zwiN}%?Cf+u?mYI8y)dhjXdQbx+TwO5d)qQz0{SF6?J!x}$DDdH ze^Qhr-KX%F8?m+R5F|`Ph~ij6s-uP&d=z9~NXhX%2YmF69G;|wckV;E4#5KVQyI>w zZjRBmISA78GgN)RuL2FZnE$<;S>*bSHxKL%Hk91H6`9%pO37}JZe+mf88q1!Gs2jc z2KP(Jw``o;C#e*tHg2J!r35Ie5)4c+T7gGND^{xK}n5U z!3pGU0YQJgz-Pp$_`qhC$8Wn>*2l>P5f3NPLTaFd9Hb67P`PKtV7&Gs(;E@IyvCV& zRj@hEmPYo}eHJd*)82|H+wzb%mV1>(_Tfz_@jK&*z+&K5!4A;M_v9k8FCm&b+J-*1 zW*wC5#x~EJ`-p4U?3K@MT}gL+{gN7E-0Pf~o1VzKp2=}!C|Dm4o^?O(U~o3A@VLzw zzMWL$hRxtn)os7coZ?v?>!~)OdqI+83<^k_lf#35<mgTkUkKVn&Fy%7j7P?G00Lx>^M*MbqfA 
zQMo+kb(rc?_f~a?O@9p6j4&7Gl3>j4QTB`GV5=LVV-;-QiWH7AZk#oUYC*i$UfDRB z`F?i@R|G6^J29!SVj1Z!nrDPyQ9+tw?&Ok&qg)=;QH$8noe!l-s;Q&IZvb6btnT8 zVY_47M?C0su4?RsGVA7&{D_SX`}ZfIh9F>5zPlNv>Wwoe5433Zj2RcQxvN7R6E>)`5v zn;NW$H>*q8Sz5ZRHu#NY9T*aj&I$hh%DH(J4>?ZVU0SE?(0MuNne_AKWpNjxU`gAX zn9T%)C&n-4(oPNQcvk52fc>K_kC$u2;F(2;oCSh{V2BhUok2`6sz%m09YfofTTnqq zVRgvcCR97+S=~tRfmghgnIcH>0HPv>oUj`Q+H&>J>gLMnakI52nYKb391>Z)Qz_(N zz*uEU2&r#kw@@yE9X3&GLg>Ra!KqpQVW_xNPKuhBgI^=_EX(4`!k0hJpc9%#gwaaO3s!oA zSE4@n>2LmYbV!x=sXtGJ@(T}wu zh33uI>)9TJHyB*2{#ZfS1@HnxQ&? zJP`-?%rI7r0~unq|1Zm);>#qPyVS-HiLKGi8)J96(D-Lgw>zcwMY1BJ4a z3W3Kb%nlH~Ud=k$zmeW-re0D5^cDUyCf1E`{iB}e-=BP&F6Ba|%P;hyta>p7&ai zXhZC=R8Zg6P80)mp$jf>&@qg_eFA6=odUI@E?`gAcU_c4DyNp?s<*FED z6K1m&19X_ubi-yc)VGIh>T_gdnmNLngTC}C=6cBw2ymkFch zmwz4m`;^N?C^1ZmQNcfcTmVp)#MJlMfd;+!yqpuuN>S#Ei(jmK!LeHQ!_r>Pvl|+Q z;Oes4IL>W<8Qm!egatH+3Q9x46tvOm`S?cR#Z}GG+X}q#L~Pr*knOMV8n$tAmEt{I z62#>xs5x6xE^d;gtpE;ejIXBd6tgtO?gC4R>86Oy+@6vM; z79x8m>QtR(R&&vIk`FvlNJ!N|-=^t1{cS984NRC`Jp{7STRnJ(RN-uH>dZ=FTQvOf z6R3JjJC;p__p83{r3m-su;%*3Y=C>cTA3E!#w^L9wFdkqOY zw5tt}dpbPqaK7G8olz6QE#HPc@H+68a6W7!*vk+tF}5iGytiOV>@y(UhlABd*h`bD z`NhC}EtuCJw^*y;_kCQ`URZpAj@HjL8h5E2U3uk0(Vw!tm9tWdGV_5Qij0F9b||;# zS2Rs4pY|A=$u=5LmaQH&1NRcwA|!h_Sk#n1R2RNZp={BQEpM|%{PGf$Q7rd)#c*n_ zJ~@(lU9pvfhqqn6=)({TPNd~t9L=)2NAHmdC{(yiNcWp`1!V)+BkfpBDf&edZ3_m2 zRMhN(<8}4EG|6iC^6Vok*@qim8U9lXppv^*HfmVA72_=D1gARcLO^v*2lZAW21F|d zI#v5oTza>lu-@-U#FeoI<*QqXPnj!G&OqZV1HQGyzF9Z$`K+&VQg*F28nF}YDYB0a z{Mm$B41TddZb(gk&@a@>qJE>&r6N~dIZHlqx~A?MgGVZpzrK}vpT!hsQG)UHn+w#0 z`jPvio3}azr-o@+r?J`3JY6Q|%Rjw16x&vDhz=h>SrSi87tMhy27(kS2Obj`tlZ)X zb9ohQ?3kvV^;9u!5@f8G_egu{ch=-kZEm*3W;f~(zt0eyF4-;N{gDMd5z*a?4E*+c zdlKGt-Zhpq2NB>a%Gk|R@U&{-Xm->JmtOU+?409l@pZc?EoC5$DTxqcXR*(j)v2Hd zY9X3Y%T$h>WvR!6SO0^WMU6y&uv!lmP;3yBzt+|kCoel}pK$n&o?kYB_{RWl=?UEI zLd^=g`}%VclKuQdW2uD_5(m*rZ#F9B&ZU>t&p4eAW0)&@_4n=Eck9S^**afoF4o0D zi)Y(pf@`|_A`|CahjIx2(AMd+%*n9ADe?71OY$0gD>L+>s#CINlj=hAd_+_NOcVP> zlSYSJt;GsrY_MpcwJx$_WxJglq0JWUv3Cw@Q3OcUHlfMKmL8YzXXAFJ(g~JXO`tx{ 
zkgc9P_;oXF>VhY#2Dd;}&iAho%712tx9TpgXP6}keArSKvh_9fol!A{m2*ike; zYa)zT4;r>#YT21PFat%DQG@`5_6)u;X7)j45CQ8;{&T*x(`BAXHS8(hD$8TIs-Ym} z76CHQZCHsbf=o?AE}fMoE6%);D8CV zYY4h2M|1b3g>CuzEjOo2sx=xvQwo*S#orcvhsE5zFuP;IJ}NHw&V&u z>Xuh0%ifAVJiEpnJ-UT#Zi-Z)IugnkIfeK{OGmkFPRdNNP&TC2N|XU{pbEhG>+es# zz_>>_}KW5ZN2DocpiE z=@m$EesVihGYJ28;^gOCv9ZiEs)1v3U3g=l#dhRHv$GJ~G_}L75XSuo+x-X!r+3Oe zcsVUas>4dwXQNK{)TCyb#&>m$<@RLsZ}hzT=kj=Qy=$qSKOMDB{J8`aQ30;Bgfd$c zP&@!G169?os*^CT=*vzIb>nU0SG`WwT^ZdY{_)S?=!L?kSv?8zTQ8=$j7a z-Bc5Ul8$RvRV{&#%oTJ~>*LalSl$f`WAmT<$F(9r776Yu}}~Py?q$Y@?E^ zaUfN32mnr9qZ*n6xxpqk8;?yLr|s^WTSI}esGIBqAQ`YaB#b)FZGBf4R0XmPsAj>V z-T@QznwIOF4WRia$xaY8K?fPt|JcmvfP4McW{E-PA4p&p9@fJA+n&j4| z+qKoh>m;SqHa1Y+)mg-@>ewV2*g2C)$He2q=o*Shv9{rCvT1A^-8wr-qOMiPWx~cZOZzVt_4n#ebQqqVqT1n1LJG5?>tSM_ zA^W(05<$T}`J{ zkTCg|BHpE2fTcMd!>|cEjeR ziTmM#qz4TR&lzIbYpPQKO~@J0#VVrqTYR>-Z2}5vnzBj4_YqSt0jEy){& z%hLaLeCNCtc2{7+E(jZnOL)v#UI=14?yiC@`pE7T$sCOTFsof&R-=Q`(gL@fI-^an zW?nAwn5+83bzP|kmb=%|jb1HfiL^VamY!xnfKO+;*Xk&jCT45u(3_zkVt1J2`7}Hf zLxsFn%jP1LE(-51EQ_+ZM%ZGv#wDo7YIde_)D3=<)p-&h1CQ{6>-DYN;Tf_v8yzBi z%JglfUh%Uk2{Y>w>A^c~Z7fIwA{IF$*TbdAYnf>i8e)K`IOEDhxUkUMT$j{rxIi%+ zmaLa4d=?fmG$H=L^EKNztK5_aitTcwwjP|ju`U0`S#nA$`Sq(_Yj~HM0{oIOvLptg zE4watE8si0NZP--lZl-?=wR9LI<`e@#bz}@@(rQ?&ViyjQs`!#*aZ1?0tYWN=qkSd z6I#Y#O@!qwHDH`v311`TR#aD_K#2}pFfjT9^R{a|Cez&t8Ic-@2a(!o_Ivn1Qkd-3 zM3VByj%6hk?AtoEf7L`qhfMOQLXk@LOEd7t2om5kn0sDrMoM?fh-n=R99JB|*8=bt z(e>Xv_y`p=`XfnHu*41(6ed3Xu~(I`U3|VG%e`&`=Si)EhP1oRzu4@&FtYeDD83Dd z<`uXaCa>gT!F_ zI}&v4W?i+bS^$SFFBN)uGEmR3xs$&l`6+ObU!>UBo4uQKU&uCr;vIH=PGn+^fZ#BJ zmBoFZW4p2NS7O>0q`Irh3V#rxMD#LHx6hL zSAM<{Mc&+DWVLQn*t(MPSs;s%(NxY^jKUSfr2o6Swu@QbZ`YJ`Ad&v|g07zZrtXN=RZ@jjW()PCU+U72J=cO0uukLKS$L!QR=_&ML+gOz2vCgEk z?A;h_k?i1fD-^jk(CmEE<(F3TL#&ZW0AbNjw(dG^CQ<1tBihHMrRQ3LlbY3dM+IHX zu#~Ttv3rIc`X$s$bz!otFByXwQ45Vvs2Qs8r|o~F)89<~{mHEr&*)L-r9){Zm#c!3 z)26blyY!4f3C?e(r^BHF=$F@Hpv7p-?!lzuJ0726)|9S*=Z*@O<>jr0x#ZERWfoV7 z2i0Tj22H|Pq^mBA1=3lt7$l=-Q{E|y>dLWVFBe7!KJ_>I{jsNx6ykzR*ydGOBBNeY 
zBh&xW2c!!DrR#~?)aSC@d-zEkGdyg?h=O^dz}R6DT_kkJY{oYVUCC72CV#!3X8#IJGhs&BFh(L3nIm{tgX|tW(`hVx!cHANAI_ZJZLp z&{i2V-@B>LvWHE3&R8P(M>Pl|BhT#`tZ;stCL9%H`G;*fT=KfZ-OjfuP5Ixgbu zn8oKoC`m*>`wpGSWK!{RsmK!7H;>x_3v^Jky*LulbPl3A^~e$X?+K`)@=? z!@P*mer*89ay$ka7Xf|=q?ulY!ax)qAT#g7*@$=%zv- zpA*I<>?Y)3hq2c&l4tjy8P?HjctZ&vLt>X0bDK-hU*3!k^HX^f^z}}92#nMEI!0_& z9nwT-v|ZG&j+5}?%m>YZp4pXxA4H z@#$a2osNC8+4V97Eax>HfG8{~06!b3LloG|w*#r)qIWG7mbcA>v|r+%oJdo z9b|?icf%N{j?EOcad6RG`=1_}CtIg%MX?0*TUMH3C~#txy0~g8Sd;Y6opbjJX()uz z)q((wUiR7SYs#^BlCL8&PoSC&62+yPj$CP=0;*1Yu@Fk&_t*U6y|Ecz(%F5o)ATiL z3^qOv8|bsjjVA|Lu1?9F0{TbBf%EqaxGB>u9VG|uCmDRZV4SU_)NJV6C&{0>cFAgb z1d+Xttg?@Pe^UL;m=)Mkb0AOn7Ij&)Zlmw)=&)@S8}evKRu?6E#7o-#YVRKx*MuVTr@iEl<22sbD=fE4TXM0v2i(m#cwS>;}&_+QVb%PQ5eWzQ- zn-$NmVs8h3{PAN*BP%z-kEh<-9Bd)ltv@BEWp&tb9Q)ScxRJC##7dH^`ZvDy%88$| zFKSO3uC-HDus5BFxF)8f^WKqAr!yT$S-&!=(DVjFp>n&Hh8B$wXKtcwZjNzs8X@sm z8oh4Fo-)gI7a4@IheO+|34EJLOkp~0irdN&b8IuAhCA;bHpm4is|fjZN(CH~vqAz< zc2_Q&UjLq~Bs3HzB!@(>?7q)FbSJ4uEZVFZKv~cJ;vZ616TID~jpNIXL2l zcpNF`tlQEjpvW9Kp^Er|Jb3!F|;rPxxJA#2(Z5blriT7*F7m4F%eTph@6+tS+Of()xAk8_<7W& zPBKe2>}X{{?io)eKynhGVH4O6J{Gjk!G`CLKg6fRzcgb4W4QvMfUrPcAD!a=F2hIZ zecMfg723HiUP3+Zz1D&lzv91&xZ+k}RcQRzE}iTxPt8_0X9G-|=Q`LG35q!F|B9t??V zttiJ=e4lPJ9cV>~i5dV}MnYRwPo#MHYLi}s&~g-+v`vQ^HIwcA8_f_ z_hvBVRFhn$|1Dg{!oInhB5xY~h&qnI0cfMGhJnH1B)cy=GcMm)kO`jMvisw^el`Zf z!ka8M!_|aTSNYws@YzzdE;H*DWfm@3H~*UBcEIO&`J%D9syyCBP`nrZggo#5s3>TR zaYw6cKvPOjes?S%{uk6 zCtVdb88;JSsoQBYJTx2}TD&G8Ez+g2nu&oR2@(I`!v zeCUxDE7N=S4oKKC;h?1Gk1RzuR)picu1?yyb#bc9C>IO14s?c_sBzBndg91ME~k~l zD!EoY14Ux#vnOa(kDBchoGY7mWM+njzg zJ(JH?fMN%FY{N)?t~_9*zKvS|AJNd?p_qyQlM35uIrsCZ78iciHQG&V1u2YA=WOt- zp@vU`MVQC9t=uvNW7C>Va+(1NMd`<<>hP_zFAl6g%V<%7SXwa8pVM?l1c%jt|DM+c zu{N_#0X!JxmXmF@b_nfYiZ(w8cZaV2#Uaz2qC(R)>;GI4K6P2OvBjiLQ_o12X{9-{ z2iM*H5Lpq9Xz0F1y>I)H1Ab570&4gW(}?@qK9tm#7Y93B<%z0*x+YZc*-#|VuJ4+P zLSAIx3uteL78~um*-e>A)pO+Qm`?*O9`?&gVO%+=2&$a_qmJT5KCdg>tWCcpe*d9e z-Coxb8hKW;cq?ybPt9_1!N;IWU^nL>D6x0Bo0EvWIL3XvnOtEw0HybZ< 
z4d&RQ1Y5ITIW1y0&z-8c;&nS4zf~JYW;GwGt4LAr8lhx$tfKs^FDKr$Ef1rIA{*Dr z2#fc5sW@b1Ru9Uxxku%wBXH2oR2<{j9uJ0QeeArER+g?@z>{SgM^D9$RDl`xG5rV6|+{^)yPHsthESsWbAEc5b8m+U$;qw${YyOBn7 z43sxASz$S97gJJ0h%E~CO`&Sl(2_`tG6*I4!iw)bfyDwhN;H#K>zHt&ql38>Uv`v| zh(FRWP?d>O90PMfdCcxA6unXSg8N0e;j|NczA)zL3pUk^65>@F>L^-E8{@arO-pH5 zt}g{I1kD?9z=JmYL1(#|zMJtx6uVT{KCMgsGLtEcG+5M4s`TD_c!V-w^EoM`<|wrB ze%`ef7P2IkRpom!(vvPo679SZ!vPwV8@%+k@Sh`#%nE(=sK@o<#6;IN7g^sC-F3nF zSy_|j@~#oyJr#?kUz~F_7WeS_zanz9d;mz%oPLAn>GCq z8>9=6!T~e~{eu+8PMNFWT--hQ2b6|BBk~e!yY86TL8eaVW6q8 zVwT?Nl(Pxs!LGA(IpW*Mjbq`&FoU_x^PHIUWP@rG#JGPFyVM-jnA7huM5z_>0G&474oKXuhPGY3oMfXHormC_*W z>$zxi0dshQ#oB^=AlJX~8ColLQWJ4e5@o8;lmf{eZB(FaeCd6cAMD4+35O*q=Vc1* z2gy?V$XE44uS8c-_{juYTR;kY^r}9@WdzpO5ctJhMQ-Ym-rc50a2SLPQqrv@Wo3dW zmO*}&xt()VU{|COr+%#ck6&0b;_k`5{VB~@t?blvRCCaPGLm+E*v5XT3_|9bT>%Sj zMAv_IT3)cF&y{j^GdnRfCiy8KDFOMg)U~9~TG3H?b$0+g45vE#@)BlOzFYn%8oW1I zc~$-KM;9LT-jUl=P&`|v3_dzX(j3)pvr8xg`5^us2pBv7bL?MS7ihQ`<68C1)5hDD z6`r9CL$adRs6XrWCn4kW)!Tl{+}Wu~ANqv)_=hd3XJ(}b_5#lOz5LVBnzUPBz4B%L2TyPJMuBv#knoMo!B{6cw#_Wos2TT9 z&=0+@J9LV<6+)VqhT)=-A9y2R=2X=$Ihk`b>o{2bPF-j(=oyKW2O5i98pb1ASTm#8 zy^g`VrL0Ft8I_l+S!-ac9NQF@YtGBG99P7Fv{Lx%k8b4MxKO+3xGGVAg33io0pCGzh%LbmmeBDn>cXX@Jb0hWB zsK)B>p9ZvxHkVKMoQFqYaJcgDmJw)slH=vcX&td`RoZRvCjQb!G98f?{&VKRC#%>f z2OqgH>;)aq8$=yOfmlLn7H#+hGn3?sw{KG$yuC6qA1RyTl%kDqzE4#NSmSh)K;vd* z;B1!A{LK_)R#ySvjl?z5jox*BRh|M0B4;-PSOT#NX=}e!p3^VFn9&@AcvvfV#4Isj z)vFA4SaGCpt87+ahpyVUoWpKh-tG53`vQ~cDHOMiTrGmX-zHHOw`w2?pcO3&14lwR z54}wtvp+8SBg!W#juy3vO18Pag|7U7$wWS1$$0Vv(WU#e%Xg24x#LayNWna71bmsl zKVjJ=sT@-Xj6*R6I~CFa8Lv$6p@gwifqAg<&7Ju8hQF{^UH*{n+j9g}LkjkqC9wNA zTg|vAc?$68A#QH)Iy=4*9F~Vn?O_q@LsPqq_MxC8AH(I2wJa*eQKmR;rDvKbZ%I$A zebCgf{hF;7Tzjd6Jlh>?npgg?qEBFOFbT4*@cdWcOxzwfN{8V7BpYy>ZHK6V&c3!o zJ1qmcvJ~l1+%~*mHnEFiVxw4VPHpC~7~ABToRx1L5ZA*hEdHBgx1~S$oV1!Qxy@iZgY9|kfx-T>@+$I<9oKcxFctFWdfNqpdM5?94E;<7>HWo@}<$J;SbhdbXcSD#qZ=II}U9_+Yl53-p;Of7+kA2N*i!-UGG#7WLG6+7=gZ&~#cuYdK|5&AqH2*_T($05> 
zm-k9!p)+}-GV98sYHJ#|xzDX~Hu$E{89P1D)XR#IHT6(;MYt#3z6H_W4Sz)JLq&Uy zIQ-DUEwNu>dIDssB-j>biE`eEMUcd0d{(QRlSDAVjY}>gy2_1S5Tp@|sPMh4LODA-je;T8giIwQU`2&xBW`)6_0{dF{=^AN7vgKh{FsAB8j?CkK?UHS*c?oNIl@2ILKEl;zb6 zO--XNyv(vzHmhp&e5n0Y7#SXsLK~vpfM+$e1Y6N>45wFjH4LevM7&!14CA?sQQ5F@ zdq@&-F0xzj#QCQDCDqi%>cKD;M)Ic41`MBQ!NOdn6>ziP@}kixq3u^7RdjszhhfjT zxR%l1Gt7NcmjlCxG>1p3&z4@2JSK(osXqqK-iD|1%d|15t3?MFYvPD5Fq8F^#o;sH z1jn+g{`dB$Pd0o2HT%?%-9(G{2rDWebA$ zamfdtzKkndZ%v--uZ%wt%-I(C zFSL*1lWRU;km|>48IkYqq)FG#gdIRf8nOX;I1IuXPAYtPjMvQ1iz`y8FF91s1hb8> z!*aJJD`2~pXhE^Pv%01CZjO1HDwOiqYH;!n1t&@dn=55Ij^mM4IHLoj|8xC-puP<+ zPgTYtPG(2$=IW<$MW*QgD7`Bgk20UiTb5vUQziV?qz!436vG=pIA}Djh_VVaqkJX) zoRlB-{kAuo2xDRW%Q0wB__%}v6gpz|(6->3Mi+}L5F*xq`YOJyJM!djjA3r(*iq#= z`c)>Ho3QljuxGM+?sr+XD_xu3MNN~{_YV|P-w?EYlXuBc*XG@iLW!Q3)+v-#Yratl zTjWfzbk2l?FGVaD{Dm;dI?W$Rq6$M64D>OQ14d07k3GL2MNDy6ct${tsmV257U9UVrI_w*_hkq8a%fkM+bBSXQ7GOQ zohBA6e3Y_u!&)R8WrPGIQDFHia>YtO7?sYgZro1TVBsgcO@dd@e&#h39IY+Gfl=Sa zj|d;6EP`u+I4>4SbSd7bCHj|C2oQf>`YXp^h!y-1B8lOLo|SH$UDHbTSQlQ-BfB4l z#pF+27wsL=D0bWUY+X6Jxi5P;>@A_2ps9K0D|ZH+>tVY~!9xs7;-Dk}hFvlqO98<) zW!5eDplBV4B&y;cO24tGfz(BNZQ8I*;E;ND7#xKW+$}qQ^YvRF)3lKnuQwVjj5az0 zo0?lnBUt5IcF8vHF#pSYx3F*bC*Y0}y(t}12hB7R@3cVxakn3M{1u-kfQ|%$#w1Q? 
z@)7R`5BmG5J4<%(o77w?mL_Mb8B@_al{x#fe$cWIheQ(!mn=^&!n53KI$0bwbk(^k zs#y_iJ@`glJ`4P>Fm@wJ4oNFE$#PvT?{2FD*yRgP?Q5|#x3p+g=cZ}OjZpm2F}t#O zUTX;h4CwRlC;2@H>l#3j53@^q{$~7%#U=6|wKci-y0x0}ZaRaQA)IOd@cS`COfF*B zBU5zizvfn~HcGe0R^VFt(i7?4A(s|dgPOP&%yC^RD}$cOK``CQjT@wh36oc!XZZ}7 zuXZ=j2kX=4@6z#ge2lN!CR=#@d7v@V?&&#IEIFzuffMA&|_3#tUbmVLuO6P5mYCDh(XDx>4Bhev`y|yN1jTfruT9-gQMr_V6M@ zD(aztXiZ`__5P!|kL^;59~cgH$QK+-ke!1X8xWZ7PJS!aqhM#?!P+Ke{7iJVTdOAP z84pRZczGDh4{1rEzh=x0NcMkZEFQzWF4aqPGf(%);E z$RwBq&gN2yuB(<{RrIdu;Xo|r%$1TGD7LzvR<_7Jjz~ogr*NSjkN;%y+CsvX2_$O6 z{DJV@!=tvJ3D7JMl18RcLfh16v@NKWa9~D!t)UG8O`O` zsAbi>>V=V0+8O+j9*JZE3bVx9gG?imoJ@47sE7eE)|?daf|tQc>VNm41cz6kjdc!p z8=VhQLPqIJ&KQKxYFN=B`?~!8G%?9DnRrUReiNJ$ov|~t%-Tk`{Y^M3u})-83eKv0 z-0@;016`W(y4bQ^(a~7(*GA1`3S}b7@*I1)<6n4Nk(eoZhcQOW`c?0>!G@!WyXvyo z3d;I*c?|?VDI2Z+es^zz!1A?!dv&kFVxvCV=)i$JbsMQKF7)kG?@iqoXja+J>K4q^ z%M;|l509T759O)PvLu-3AKC=Nk2%lTD}nbugnHB8BJ8EF_$>mwV@R;&?J(}ZkdEMt zG4*YWi&N}!1we|jH!Gx|3-Mht;@b^>x;l#j!_}sJ&`t9>BQiW2mM-FA(ykIn*rtZY zheMs5l$rmpqBHSJGHu&%?@UcIY1yWhmQJ}fu9Iu#Hf1ho=46QiqEoJjNbZ>1JJYD8 zsUs#Prskdkk_(W4`c7^sl8S-}f{9DHpo06__k90={(d6QbKlo>p2s0Dz%b1mbg#v+ zk*kT|P*6=cd~uo-Q%yyOym?WPjD&so-%5I#(VEav;i*TWi_~jFA@enC;0F;L*5My) zRQ!mgp-X->5dv?Fzxsi<+XkUZ0z(r;8WbKWmHa;C9Hh6b&If|z+mtk^7yF<1w4|hj zHS^3{T+qzC+)ucW5|Rta_loivN<@5CKa0uqZly}WnQz=Gx00B<398#a595RIN_-B? 
zNi(4mQUlIq;?UZT2q0kB5--*0Ry(s28QNw&3P_Q4QrmjU7NEXwsEAtqxKr#XdBEvQ zlXf%8Rtqb#Dm}N0qOIZx>9v-+xoLDgE1jk$gw~MnrduK$LNarYT{~ZmpbNXInTf%H zX07FOu@OlX9*9bW{v=*lhUb`PPePb2UeQL+mSCM@CoLTfv3K-X}sE{+5XK3>|mGJbLCHlL-z|_FpHm-JoKHDTI+dM=v z=?xTT+3Ku&lYWbg&!y?M7Y{m{q;{!6$lB$4>l^VkW3U0++VF{BU^4T}6Ms#DVm@0^ zvQ&-I^Pv?pP_Y(6U$2jL?iR-Hp++ifHX(s+VQuGJGj}RxCmp-N^F>@J@J7} z&mefqN~}1huyWU_P!&ihkW|ec9Vkn`JdI{eucSZU(;gH#Auo%@F-4APvzFE9!hkeq zo*7=I=sk)W2OhhCT~HC+QkrqSAj=>zxE8}*hu^#XmWy-VGYY~fpS(WS6EmAq-4)sJ zzR_?t4KTe-1J;*FXClX|)K%em9wwSgT%6ErtD6WE@;7H-+Eb`|eZM}B$sHQsvgq(T z`3f0lt1z5s7aS9jlu`rjtueVpG8>6BV_qfpxA{D8m$8|>8VKudKLecO*2ja1Nx!Gs zTg8(`hwR1h;QPZcHvqwAt@rsUN@oaq&1;coV7k(C!C0U38v|#l=?yrP@*D zoL@IGc3;o!x=iXGJ5K{es7s;UgXBEA%h$vhv3rJ+t!8@1Df8NztxR`h<_5RA9gOSp zPYuGEQDF(-W^@xEzYLo&D%6Z1W>Jc+Pl!{xid{h?Ywi~&zHX?*6tlVcUBmNNd!imS zIkliQaw~Q{tLV!z|1AIgRY&5m;}G|bH^a^Rcr3|;bQ%@G7ZnNSaSmf!9?XlUd%sK1)ys+miRsGNw8mJr-?3BF<<0YO6 z6{#@kZWeB^s!Zn*4Vnz97u!Mw<9`Cs71WOn1ZMytYw?oe-*I5Gve9lv&NdZ-r99QF zo1=HX9dgd@25e$k;PZS?J1;&Z#y_V&5Eio6s|~?h>o}5L%{vt)k`v}gC7bleHNr)b zgUy+SL9=YqYvj(r-xTyu-dpLeLxJyrV_5e4dA(+y|-*Q3t}#7gH+CqB`)VbNDT<& zv$RkcyZ1X`6MQ>`=;dQ$J=ODA00Ftx-ia?rg)B2~9HgssRQK#k8pbFi91x(2QZGDS z*FO>B?Vm<}8}F;E_!DU4(Ot$JuvdNw&dtCBL<_9WvWWTx?!q%CCxTFuBxN^dI}hN6 zZ4lOX4<~D;Q092^bxo#Mdt}?T{_&)l18E#!1Ep<-5vuos=$u4WQ&Y7k_69PPk<0vrn2N`w8)8fVI6(Rjycw&LSviST)UllA>XpkFd`1N?y`eSs0uEJ zFTS+ws?KXV$SnrmKyjId*0-cL0L6;yenE}UD0NvM*HxV(k<*B8M;1cLgqm2VV|(li zx(uMRsu-6QeLVOc9cFZXLHc#4vp`9Tj8iU&S$mA#js#7!Gxn zJb_v6I8a8Xd*LPAk|%1UI@j=_uM=Y%u~g}P7XF-d(X(nM6ad`Z`_$A==G%+5Lu@CgnQ%Bx?%2poJ!TWeKWfC<&A9*LrS+%~Q~ zfK&tJwn93nLbjy5ujcsw1Ge#rJKf`g0^?L3RfHWJZ{>twu3dd4LT&=;2Jjtw^7JFVG+5dtg9HVmmnh&r+U#ZeGK)w?st68b%+92=)CTjJ}GytVpK()V!vOE zRw@exewMxFo%T4S3(G{m#OFD&zdepcn6gX#W$LF=-(n*j!JBc&*chqP7`k8XZivv_ zH4STv&bq*WRQMSImq0tr-vAYcR6(4j&Dgs-ze z4%2W3#i$(T-Wd?z<^b4!M9J*VfnIFGTbP|B#2;7$dqeH)dIJHUA3-ysvB{LT8y6lb zOG>lsjf6*%W0%-)8W+`;Ef}M4t4oL+xc}sl;?A9^h2)?Aa;*)MNG9omv-s3C0RCX= 
zA?Tr6DsQ5+XR;v4L8!h4$~&g&WRmuQtDXD;c$12C4gCWTfN@d-uz@9tIV{f{mT)#c zt0h(<&k&3cI48^-`C;w6?G2#B|At5&7H)n^*nFh4*nl+91&;~$(ncBIb=3>7tH4W+U`id&pHc~$?KFhG`(da1wF>6-O=|Y6W zbAKx3ERs@47>OaLs;Lbodd7baiB?dUm$@shPZWyxBmUr!_!Ip?b?n<73bo1p1UJhc zYcrvr*gZcV zlJM(*g0*g`9zL>>H|m;ik-$JEK{I^k5nqQXHG~G8%w&y3b1Codl3!Og^6R|W5wAZr z)2I3Ts_bQxm_DJvM*^l5R%V9fS}c7#G*=_4_ucI*F<6;+O1=d`qv}G7!ow;?tSy7B zPw{(>5iIg;C5@onY9ND>?}bG1`$-j_tJPjfNBxA4>7OCi`^ou?SZq;MotAEo>XUZ-TcW1DuQ-%*dC#9 zU_H2$$3m#iz(?9zjvYh2H$1j6_7zK6qi)2y2*Q?%9zCzdk54lV%*#+=^b-VB;wY)c zF;^?E19hy)NiIHW36MFo>m^RaCwuO4g_yifyy9}IKR5Aat`?iHNTH<~j8TF@c$QxM zt15t_WY}m~BM=NYXMn!djID0s4e8N8^(UJ+&DR!Zhoh-n=54xv&j#wo1Evft4FdD~ z8bo9IDXZ*4T4J97V_+m!!;hX>P}58ArBV<_K2zur`2yfiwH#;u)lg%#ywGb_(2~3% zVU@EQuD#SgNN)lu@Emmr;4#*3a!qp_hmtQrpoo=u8hEJpaBPB#x36;{TKS-m zCN&(h^<(x;#8V0P%oLtcPcN8vxC}qwh2h@2g40&I3CFo3k@_g*SEAc-uBw`PyWN0@ zog||Q$I1VVF4pVv>ix_rJ*NxT_xkDe!rcXf!-N?5M?#CGX?TBQd`6p3iC+QLVZ2`e zd&XRmTWCAVGkcf#TVqn%4=rc~%1p-uw0V$VDgY~`f>l$gX+ZuFa6Prb3A0{zDFjU51k%IWb|y{TX;pByjz6S^v_n#`A1cAks8bUhl$! 
z_AC`*-aBhdE4Yxknk>P$lqz9*JURZ22O{Z_u97_S0?pGM5fx5J8*>$S$21W*+uc|Z zt^I@FcrOJwSpdHpw6c5$Kp?EV^WLJ}2p+PA)Y>O}-tA@Dsg>>L)g{lAm_syH4Gkt& z!5~{y8~lqfZ^m;8ZLL8>;`O~~kF|4Z`k|u!I& ziZw+|TzT-TcflRgC9VK~OBmck=9?vFg0!kRQHJ+h$o|h()c+>o;fAaCIkvs!zQ}%h zTLIeeQ}eD85+mXX`g3WCG~`U(fs|QTfCS(XFKoY;Ds;3OxeU*8!i(;$?DO&-nPtKR zvBLnep_>8*Fv{T6*H{OEs%d7#s&2E{-1X7am?HOMJ~tkS&;1@cx`X;yT*IE+3O9fR zc#gNDA%!H>&`#Eun?sVIY!GQW+huesNc8hoyV#qlBUVtrpk112t#k%=J>`spF@Gyr z%(JX6k>bsP3D}G-TV*L5QI<0c&dqO8?SEDN;@-Uk_(bdrXlyej0wJ2yN%(Z8Qs8G| zHMxjSdFs(+Z4a<1Ejl)KC?j#u@@<|7rrWrb`ydtABQe(ns>a<-#fH~HLd!u5fc%ACc6NOD0-MG#z20Ug%wi@LQv=U^bm6qr4k&%|Y4 zoAg{g&!iw|;H4sBe2Fo)K3e^-5F+q}3TPX4PtC6I1@81THx8*lgr8;Sdp=lI-EDig zMqkV+5L5#AOwA%-(zNAok@Pa$tc^WE<7 z=%YYlngpw(GRVbif8A}e3I2U#??0avPNsxmo=j?&p=$1ADL*ycO)_}cBAd$j_$R5R z<(;1{ViTm*vvd7m$qWp!(DPECViL{BRT};?lgt0g}*}&v5gVPO6D1d7$DywMUHAOyiMgm*7+cBI4 zgTSiEs02w}Yo5pOjVL&Eu3{wNc8QN?+1)+eamm@bAI{j+{*R#u8YLQ0<+=d1tYwh@0T03iMK|r@1Dq+gsZy`2R?Dj zaPt1rDic1~#(N?P0bNOuP^Z7m-$IHCgg$m7V4^xaLU+8<&90Is?lcJ~op%}frA+)a z{~UCIi7~-6DjcE-28f^il&-P)xE-LaB0PvQ{3Ye?Hb#G6kd*NyXq6}i~+e8m>e zcbHjtL;a^ItOI{&PtY!(-7Yn89bU6h@0bYTJP;<3wjzHl%W$Bj_hW6J~Wt}(dLWgks!UfkepFhnk+@&8)-E_;+_nWYbr!>!GxGFh$aaMUs0 z0Z;7uk}UBb=GoM-^^J{*oOmSlA`394S^DG6q1{`qQk~9ZT?W%N>8P4o`dyga3Ao$Y zIAbg4!Cu{72H=20&bVoYF#@*zvno;C;klh7Y3Wg7ztcMJzXml(m9mloQ=&-T@atcj ze0*R)J48R1`gK{!U1LA?E6@s&Erl$cag$O?)9cOPN$XG5Rz0$9R{BN38(6^yjuvx8 z&ry*4v4Mso0FR%7rM=e1oz&Z^J51GZp1ao%B(CA8yH{Y6;r8=U)8P7^xJr!67bsnN z^!ZxKkC+5E%pS0%Tc{bgN4$acAT?Q$R2(Oxbbs$e>8mHShxfok=p9&PsE3b1slgw9*5Pvu70HmE<17TUX+)BQd*FTgTM zB>|a_8gIxi&{*%XP^10m*45i2Mi|P+`<3;KrS)ygX0C8G9-X8*(}o+eF$~FXepVw@ z^_5zrE(Ikyb}S}eS_!Xvn^X>CH$r=KY78478V6#({?H5q%z12^1Sjd zVv^FpE@)~5)(Q49NZGK$1Y{MK1^HdA*ACfh*P`}nxTyfKpL_E=e(C-=E97xC=XIrq zv&QYtLL9eAoeHnAw1tg-kfOWfBoOX|O7xdfp6?H~67}n59D0uVa1j*wblaN^cF2wsO~hge}%7 z_s3HI^n6fQg)W+v?R8@vJ%RQ|KAiN$AXapGlmhqjY0dGBZK4jXp{?#0H@{m{U92O8 zQY9!&qzcI$(yR8`L%C;}<^*a`F4$cN4EJ-F zydiBt0BRJ)N)1VA>dRMgho9lhp`!ulM&0&5bq1=z@RjZA^llZqj7nx~#Jr)2gTy3T}GhM&T?OLZvSS 
zJ&K(RCVCdjKZooUPw1Dn`3b4>v&}iD5m4XSM6Gs6D)Zir7-Dzdw?ktxV9lfIbRmo= zfO-PrG6>Y%PaK-#x#HM>Mn{O*>OIM-4ugB%gK)s`5^1NmNQiE9(g4Znvj84!raG?S zLwmvuy#L+gN`&Xs`H*IqUr}^^F`qIFUn&A4NUCmMbDT%A^QBJzt2L{p#0TIun;u^C z5dSEdr7!~#s{MgLA*SrxArF?S^Wb(7Es?mO@DoPIU8+P_`Mi}HwrpzPF)!Ocfg^=c zr6N;oQuQoISz=;V`f_OdDplA(UbZkXkQ6$H(LXN=)_9xs|Bf*fy9KYFPt!w{-jJq} zpRNH@LZ(epR8maxznA=)`tpE#l)g#`vum5#jq00-@k|a9cxOF6kRQ|W%(%uuJ`Zg&dP}CMc87bAY@^d z&+iD<{Aa&>31VLNp)o#}zdh95sNXli2!zhzj^%s5={4nXl7<;5Z4zf+mFpLW&#x+P zQRwtj$D~XyZ@`P&)7D?mdF!gMOyP;0D_`1x=-M93_+JfO39w!SbhIcuAXxW|2enxy zTFRmoqqC91$9f5w=1mBkD?lX)M84dkAb8IFpUMF9Auy#GCtMz_+tc2%NFl3whcVCi zYv9Q?;XQ`L8oRPKG7s3$?i;n!`}Hw%f+KJ?d=rLSso*l1l_DWO96=+o#u}f@Ta? z+K~iIzrZ4T;X37iVUwPhevi1W@Fa$X&baRPOuLyI?On#ficGVAdVnxe6900Z*)Duf zkoC+OE*qPp3t~K^)%hzuozT=qZHjr>52f)NMknA~Ntc)*+)CL2SDhcOhzrcE@YC`~ z5r>v!Q-N6bJA(%*Ld=Pa4*(X!zU$3l(gQlz40W^55oehX72*-vyY}&RrU`K%Ejn!p zYD1v6d^>a+W71=LY9*Oc_kL3$rfPw0K;AI*3zEw7^C06+rXm;Gvfbd)LjcKScy8~j zzbD!xbTT!P^t?b*kYALw2BwtIwSpwEA`-!6~D9sRyhC+kdQqISXn){>&>@O+J*(Sp*zX7 z@#ZwK1FZNaH_$t!fmvITh9r?RPZN^o(KsL@2}9~-MbS; z4uQ4aNItg$7%NgA&HYa4qlBI zqoSfNSU|(aVglhFqK^sZ0zC0!ud^zG(3h`GjZZMQ!ItMe6Z?cK2T#f{?WnYdmC2Z) z;|$o`^%Jzy|5AlzK_Q#B8Q}qig$0|>``NNR2BCwTT1%$58RRvik($Ic(Hvncl0mQt zW(Ji@QcT_qCtpSm$djWUOtwa%ZqzG*SN0`qs^J>o>~mfcgA0!&MUhR$%~({frxy#_ z$y*~3gm<>h$Upe_*yjB~c*AcZQ7AH?69xAXxCkC(oAdjr!{OL#W01DBd~zM8qg#~- zR4h8LCsmgnuWOaOz&|gDvs=)RNdgM1cdZtkT9SKN|+&a&GVM-f2tMgCQx63Kj6Ur_3XN;<>u>-)Ib z_RJu;K~D`f*`85iK^a^t_U}+XQMU4^4e z_~&|7kCS}U#Q+K7KYP%jb@!73jESqtz(xR;b)vz_UF&(26EZ^UdET~N;BIiCs&hjl z@EyV?3YgTec0~*WM&~V~W6(-lDV?ybVp-R%n%eodgwB0{zvUcCFm8;8HTDr#ZL!i#Q~sJl27pSgOkFpDU$lFm7Z)=qD9PP%K^(;P5o`^AA#>u0~koh zkOvU$B4F^&%uPmI0vhkYacj$d(!Cp|e%cH;2q(g{E8f6_=poa6)buJ(HFKcZuscvT zB|yq0&pNJZq`5O0g`o!xD<^oPo!L7{C6wy%P-W`sw?n~Og*b(`A4sMVpjcLf#&3OX ziKV*RCD0xd3mlPvx#oV{ZKQlSv~Q!`Rdb~D{>0d5YoW;t(U<=utawe8R3YAcx*Vm+N_Zg56U%W-^PWT~ndeCKCxXHIGk;*7d~WYi&j`Ax-YqiV 
zM7w?e;a=6;fhKqTIP?lCd4T@o)an#gjM*T@N;YGgpL6s>DZpdzt+eC8~Y(B2I+bq?H(0PdTD2Y~iUpR^j!zFgNsuiqkfi%eIv_7AX$3UA_npXk-X<s{_uwb#s|EzSdDbCt$jJAPsF{Rb|ujA9ZTHxeLvb3wuaH?J`h z=INGc$?k=((oj&v7P`v|pR%W^beC0YU-_@8G~U3XaQ6eLBB%wN7l#x1&S2e$b#>f9 zjQ@tu$~upX61%#en)iB!B=y=>Y`C{xu$z}K87&3To2Pw1s1|>?$mIj;GWFAFQyXwI z%hVic`+3XXgXs*NBwrr6^zD#iU|Wc1fS*}xk38<%A;#(@!S06uKXG1^_e%TIZgODo zB+Ksjc&OVO*aWn-bK~@Hw82l!97o=eer``B>p1lj&qfqo`q(@#eCb?J%k1UB4eTp2 z^4cz+WKf;icdJfw3D=}J-U=ZatB>KAF@&KI18W^q+ z$m5b9{yPI`vu8n<>m)?gWw|XZM}BxTs&y#^JhL=ZA=mB=$V!8ADBzs zMlxcB2enf;+C5TI zS~a(soHUZ90Wtw#cOOJP9A|;QXPRT+sv`Ir_i@hYXW@`frv%1L@c^GVf!Q@)2XKFfhK#I1M(QqU+|M=# zE~}DX4BI}k4~O*=bP69bURbu zd_(%NQ9lB7AxoWgffJ5UaSRjsGXc1F_>kHgqaGHXui&Gn-ieUODdBO03J&AB+axWN0u)YmoJNknz_vjSLdQ`*r2MSv}38h;JDMU=@l(RG? zJ9^*mHWOB>mFdyIHCU=ivmsrGbVA4{2gp(UgJ3v>dHpBPWVq0vf8sR7&7|W%f$NV< zS!BrlJuDIUdi^Du_y`2p;sxN4u|gm_mhTB+;>O=L^6K+rU+8!k@&IR>ge&Y_ki~IB z`ZBRzW-&>s&Rgkb^)ysU<%D+#H6^e(g`&+No9=UrNyF}ea^c>d0B?R4L@5M-r_h-7 z8Ol=0#OcX)TG~X1vEXY2Y+-bi$KyZl)p{>Lylpb*YC$5JCY(roy@OCkHWv%#F-o}ek za0`^qkujiaTusX3YLD=p@Tkq}BzEh5*9+U9)0X%bRm7U_vf-mFInV~ClK*xoum9b? 
zXt`C5E6@av4(*c?sGz6h0>@gX8lXzvPf)+^>IqXYo+0rq@fiMj4HPfzts~n6Rp?L4 zrIJN~L)H$j=daslh8<`k)@luj0!RALqQ>x-QR@L?D_pi-EBh#$(gw~(iyYhhmQ@0q z>Oz!v?xbq+{hpS!H8JyJ*v6_tZJFFcCK#4!In_c1*7AWd>SK6>VrAevPoaFCo>`!L zlUjW2li$7sFjHYplzE1Q?iyWJ(B#&AgV z)q2fbd<-ymBeubkK5aA?yBfS|{QB^hws4ez+OC1pW~_iCfDG4KwMGAqx<2r}O|D6r z{wLEq!0%-IL%F9=Rp(16wmeSeq#4=>`aZr7P;dvxc-P{>^TLguwOyK10xp64fi!Eq zn3}OqzM-c=aXS~gnmR6aBI%PTQnjZLrLc?#9!*S~H`fal6AMjA+p~D%vv6?5wJGjL zJkKDWlKJcp+Q=n@`G>|H-~r;Ap2U(>iZJa zmL}Rr?1x$rm$!wbnH&gIp1GxY=E~l9TelTe&UWww4mGRce1YplBbW4!e+Ht&7z)FZ zY@OvHn?JC!xh}TLuTJv}3=~|*H3vR0l%}y!KfA><@JVC_LbTx`;Y){*F~hzKx3D93 zi*aAvrqmS^?-tjrbWMv+(UF-f%k@BkoIz`c@seze1QqXBV4ZP}aDs{bK|_9W_|Z<4 zfzAb%oBxe;FHWLeVAZ%IjZV!8M#ue-H`5Pj3Zk4MA}2y&AuyzAZjTAbj1eB%1gx}v zL@ixjStpj@ninE;&h+5+tx5b3i`U zGBm|OH@9j71WGP`_hYvD`XJz_bc>&68T2f9#m<7kIcS{lrO*93artZuHqONzzLa0| zIL>PoMRG6*$MpQYXCam#JFeG-+8XeP;fD2w^^}*SwL;<)jRw%GH5>Jk=fzg3yVkLU z6Fh8?3{nv!TQua?fOif2A=bslai?QFmj^;w zjNaS|goXVVM~X1yEpTquLr1rtWL`}{J>o}ebebDr2>YcMkW?!&9vO?!$W0#Dh%Z9w z=h6yPlNQ>}pv;A4=b09$H&EBwP+hhrUw$H-r@4x|hO<;gFL7a!%~+$op9r@d1C2sB zRqDk>?se_18%=lm1mnHKAfL&P#glri4tqMUF=ienRXIA(Nw%FF zE0jAI<9;5=uG@OtEkG47jw+va3KcNlZJG;#ijGh|lFgX$E0~pW2`ta6XJ?duT(yi2 z-9+d#LMN&gB^7N;uuLzf|A;r5R&<^M2l``}ICmLo%}xQaA$r7o2U+&#v#Ah|-{#xf z1yl_cNrqPU*(ZYJe!Os^N8won#=`HfQP2U4d#S_#)`7b_<_CNy2Tp@D`&wd8{nL6? 
zmewuq?^3M4NH6q%MaiQGgbOEXOb)c#Nn`$>MOoz-*1vqpY)zHZ#rcL2*xfXU?I0&5 z#Hm_Qv?#E5O3?3TQTTybnfdViRH+VpB)a|7xNySROsAQn+48KuyQlwB?(e-v3oe2E zm~r9g<+~2n_Q@aBBKGZTd-q(Wc2Aj?ru0b4?!0iV-Q!slrdNT&?mihAc&Vd)Sl!|G z28pNV`?@96P>9kf;*BLd=&L3)+Tx#E=3yD)KG9?uK;iiQU`8=VdW9c(tvq9`<_`Xg zPo1bF!1XhFXDr`{X<36|Ob$HVdr)`&Fg>ngLE_TKv0yVj7=#ahnSYQKR?OPb$-^l>BwDOQbv-z^7>ZB*TbZPtw{iIaUKLZtbT{?} zI}2DZCv77JmPYqU!XL^ef%eqaY8Uj3Qrp|NqrS=wbWi7cAbpyICCaIJK50D3h^*P2 zFgxpI7VlE+dD9~<7O*FVD`EE_^VDBIg_IscCu<>~(U06K_e(}yhq0g|6h`a%+T}0H z@j1p0ttT0cbssv=LcuI>HZ&Vu`Is8_8iRIi7+zaQD*%+CO%(4b+%*lVoW`lu^M&On zIKfd?CMk+%y$T^laRY0!G~sZ=FGf9k&zDo}PFZ`#z5WzB>}Mn}3n_8U{P*OlMo2x@ zy6iyxs`fvieM!flf7P&cOyth119nf$9#-p@LN>rdd{Xh~=9zN9wh z_g*1eDF5hmM$c&WdYM$?^>Cu4QD@5Y6639;I^$1_J72S7zf98pZu>bAMCDk!RYC_& zluhnKC7XziMSrZ7PK=<2bd-bQ8pOp@_m480hx7Nq36PP$LWOvgo%Vo**{iwF4JQnF zH8!m${C&8izimxPQO z{^u*6s>rmIV+f1(=CqfFQ$q9V@bD$F|E-M4na1y~{Dd>}deC9@gTbsxo1`yx$RX|_ zr>01JHX4i*<$gP)wW&e={ufs{xwg!8tI{9$?6wlbTCBRRmahu+b z|Kx7H(FkiDbV9_TL&hIkDlBV&FL|HAUm+()&(Y%(Rls@N=-9`8twh@5^Mj!;kB_Xh zOP}w=%w!R%t0C8qj3Bh=O&$8U&q|Ox0Y9U6iTx+IqPBF9@DDg zU;m^CD+Dd>Ei=cU^Nj0xE50MDEB4L3J*&8+*&u|tf!T~~SA}e4-lMhCR#Ho<&<4c| zzdiV;_ek4F$VM}=L9q1C*q4~IUgmP5$|?iDRQ3Txv7rZxDH_^&+0|+lVm5kGC$*}y!{RcmcCmT) z9UAQwjr$mV<>`;gp^b0k>n{5JXFRTcZ|^2B4nHu$;DH0g4)Jjkmm||nay`$=LvrDj&jnpn2oz*q&kORy(cAx<{3D0bes1gg_oK>j ze<_^Y%Gf52$DZ%6Jij@A-`6f<}j=XZHUbQe$*0Ev( zw$O%@HyC?|=Ww?pPrmZ-ujtlZr&TTMmjX|-^&6VhZC+-zw;IXzT7egxv?-+9@^36O zaKOfjZj?F!&5=T=P3Tx`wikQt=W)_^8gvvZHp}KIwWrVkyp|>@fCUA zwcYq({Dj2hL?$7=weLb7WaaM8W^L=R-U7cy~ z03N1TZ*4@M`5#kU0mr$Kr99TWaR0fUGbtxO9?dk4ZJf=}`&d3cU1s3Pr@2OLm=<(I z*>~Xs|IVttV{kW!)!pmv0 zvcxJQMGtn1M!jBn>arC~)Ipv7uI{^Y(^*R9@OI;zqNv1?yHG9xjN!lHphY!|W#yE1 za&qN<)#~2RNiIcQZqJ@WagCp9DcXH3heT7Lw>-G7#z0?sRLW~!T5q2Xm#=zs9;WEz zy7i3o2@|;hPcXQ?4g+5mB6hm|^5YBbV-o*iooyI@_Wc`J;jvOCQIQr9l%eN>+I7(@ zk4Uj(<_)jC0HEAmqcRa2sE1y5$eF6#a7?t;qkR#N2$yxRwNEAxdf4;)u|fw6MFg&{7L0gcJbN#aFdHF_XSZtK6?n{UOTB0#n zbZhH$sWK{tFw50TrKFre}b~!-^vP2J@)72<-E;O 
zGmj^o2ih%ua+s0<-;x0^_itRmt7QZxtG{OTMfjy(r3w)9EZtD^ZuWwVu74kc9T>Di>5z2hO#m*&Txeh9ei#<+j&zJqr)kLmDPXmNieQBSPbY74WmV1o=?h-t>lkp_Pk zC3gPzS5H|*AQ-qh7TSN8F)Tm9Q(cH{bG_AidU#L;WaKWpnrzlOGn#gZMjcrqMK3Fe zpuhvxro7`3JFvP(pE~~_;};7B0}pVPCk^Frf44$$q2$CUGft}Tc_~*)=m83%Zoy!l zcva{5zezqd%jZ<(g^ycU8^w`BcMiiwGYNtBxyX7?* zn=dZ9=putHNK#vrIO6!X6aX{|LnfBHD%mZ}`t;DGTC>299Co{1l9 zqc4yn%c`Cm*msp&D)1VlRC0r8d>M>Hd7j3aZ-f*^hZF+0pl`(T$9G+w-4* zPO&(gFlhr;#Ht?uet`1s$HTpPP(i<6vhBu~@TAQo0HU9v_g6$F8JSeDP1z2KbBfIA z9LP}9+KT&Z2JmxmoTSW~e_n3NRk`x=f6{WsITec_bn7MIZufSP4nF1INu#GfGg|Pn z`LXDv%W?}Li{aDhNt~iyPH+tRYDVw5KZ=-5xN#ExO682!&G3Ei(#|Yk!a2O`<+XBu zCCI=DoN*0ppKJ5PJ+vT-_0z{;4OVDPV#Z>L|BEYv^Gc)kD+dHLajU|Mb9}m2Y<2EP zD|f&DAG9l+b?rHeqBSmPeCJaiyEnirf6j}ldNKUsVlfSx^d?hcSC~HOcLG>&6DB{Q zn0Ba&0r5o_%sOoUXbh=yP{3#FcD2H`N|!+gv~UeCZ|>dLy`w8S|JyNoW?*J>P2{AU zwVs*|X$0l_|0ni?eArn)es{GuN``HZmXv2;>efd_?iar@^Los8nTW|}KvafZ^It+q z91L<&M3ZVbH_{-@Y_Ho1K-`Kc(0s=%xsO}!1CHpGPlx;=tFyX?ED{N{v@M1-r>pHp zsm;KD*COP4;-rv0J?)Ap&4Fl~Ic^1X#!(dXX?A=Odh2(X^%Doh>Lc|0iUBgY(yqU_ zd@+i-zfr+ySB4EsTv|9op zc+(yCF1^-=$IUsenXa%8#DI5>|G;3+ypxLWQ{_|UjFUb?9x)&h4vOpN_5Ih=Buu}9 zJoVL-gr$cY+a-b(0t{}qYvfjNYk2ABB42<4r%gHejKu%Z|Cs2;ZHeR0kAqsH+*cf* z#t0q7^RBEXKpEiu#0Q!($C&kKQw&6AmWu=K|2FihM`p-{R~%8Cd?>C^VH9C9>5*+M9w^+={GE-#L=WD)@T|-PED-ppkfX2|ON^TK$!?HQdPw zN!H6#=))9Ec9Bz=PD| zTPD&)hgQ}{bIjb+l}}lX&4Dk& Date: Thu, 11 Aug 2022 08:47:07 +0000 Subject: [PATCH 65/94] rm jpg --- .../detection/yolov6/python/000000014439.jpg | Bin 195229 -> 0 bytes .../yolov6/python/visualized_result.jpg | Bin 205296 -> 0 bytes 2 files changed, 0 insertions(+), 0 deletions(-) delete mode 100644 examples/vision/detection/yolov6/python/000000014439.jpg delete mode 100644 examples/vision/detection/yolov6/python/visualized_result.jpg diff --git a/examples/vision/detection/yolov6/python/000000014439.jpg b/examples/vision/detection/yolov6/python/000000014439.jpg deleted file mode 100644 index 0abbdab06eb5950b93908cc91adfa640e8a3ac78..0000000000000000000000000000000000000000 GIT 
binary patch literal 0 HcmV?d00001 literal 195229 zcmbTd1ymf(_UPNg;O+r}Gq?nIcXtmyIKdr)ySuwPgkZrXI0Tmj3mTk2a0u}D{%5Uo z&bw>f`=n-8uixz2)q8hUb@lG5nwRe{n*gS~w45{m0)YV8*AMWrjjAgrAz`elt|Bd` zDD|2F0I0HxHV%$3m;hk!;O3$(BTlZXr%#SB1;7AE06f430H#n^CnXJOP2e@jNlB2q zzAF90|4BDX!0VX+V4hi4m7M%P^8Z(a1$A<90|1cft2T$Zg)8)xLtpuWr<>D1^7B{5 zHM9GNVPOAZmsba0ncyF``WG|(kIuhX^dC02cQAj|`NwA`b9?iDc<_}YJ=`r`8Larq z;T|>?p0B*{%8(E4_BOA4_sY2T7N)KM0E_yM+|2@N{mQJbjOwDPF7e8O0Dy>U^B-sn_;|@>Ej&M1xVbT_nnLYNUChZP9POP<9lQbH zU)TIkDFFMQx+Q=#U{ip1I zWRArEzzkBED>B(Ya z0cH88LjRrq9~J(s`M-z%vd8j|y?@soxuk`qsrv^v@_#B7>iEIY-G$uM$rNfq&iuc2 z;{V?T|I4la@`Fj;!qURU!r|4F_G>S*aj<%AZU=K4HycL>avO*LT?_yJEcRbM{Dc4M z*ROz6`V8P2vjCXWxB&Qc5`e`)0l)_NuQ{N9x0@o8Ht^4#r$e^?uYUi^ulfIT{J$Mw z5?+78xY}5e|09-A*CdC!yLkM=ulK}12RHy3zyNRoLVy&Y0_Xr{fF0lg1OQP$3Xlg> z08Ky-Fan@}HDC|80G_}{AP5KtVt_;-4af!xfKs3es0Tg+Z9o^$4-5knzznbmtOA?B z9&ikt12@3q>p+G8LIdG|2ti~ZY7irc9mER~21$VwLFynqkO{~N#t_B|#u>&3 zCKM(fCKIL@rWU3ZrXOYkW)Wr+<{0K041iI<_+Sb!Gnf}F0agL)gDt@>V1IBlI2~LJ zt_OF3N5J#oP4Ef$9u^)J2bLU`8CC#R7FG)u3hNB(4;u@c4O4*Lv; z21g9X2*(d62d4{X1?LGD4wnvB4%Y%V1h)vc2X_Mx504K|3(o^D3$F)n3-1db3!ewy z0N)2c3%>(@jevkah`@*-grI_8ir|J2j*x{=i_nWOhp>lmhlqkmhRBX6ji`_4fEbLJ zj#!P@gE)t{kNAj$iA0UWkEDWRj^vG$fK-aqfi#V@hxCYyg-nYqgsh2diyVZUiCm97 zguI4)iGqSciNc4XhGK&fh?0r&31t*z3*{aa8#;66~+oJ2+q* zDjW$MC{74YDb5hi9xgmCEv_7{Ep7~M9qttFIUY71C!RK*7hVotH{K>b7@r1T4*vsw z0{&w7yrdyzgr{|@&qtBzCVSr)aVz6e&VVGuwVdQ4CVa#KkWrAbkV{%|B zW?E)OWfo)hV6I`_V!>xoWC>x>Vd8hv_>D|b?XI3uO53HrEt8Cb8@@&Cu9c+Kt z8Q7uhIqVA@XdKcU0UTdAE;t!EEjaTzzjNVmDRD(`4RAei^KiRx*K;58KzK}ea(R|{ zad}mFV|hpU;P}M(0{FW4?)ka--S|KA{}x~tuoI{jI1r>2v=A&6+!3M_G8HNk+7u=i zHW4lq-V~t_F%>Bm*%qY|H5V-x-4~-5vlXiqI}>LWcM)$9zm?#Z@RR711WU?DMoUgf zVM}RAWlL>HQ%YM&*GQksaLM?}^vS}>D##|uF3XX~LFKCC&gFUJ{p1G~P!%*3vK6)! 
z85CUd;!mI^FujM#3iB=E7Fiw!rq*PQ|Xm?&X8d zhX#8@dlUP12W$sBhe1bDM=!@&Cq}1mr%h*m=QQVY7X_DcSCFfr>lZiN*I;VWoz6YX zeal1GBgf;;Q`@u23(L#FYtoy+JIZ_CN7|>%7uFZ*JMfX>W6;MfKQX@|e~`bK|3Cm$ zKxn{TpiE$85OR=R(6?aL;N;+|5S@^YP?FHV(48=uu$pl6aF_7q2!V)#NZ3f5$f+of zsH~{xX!Gdt7}l8dn7^^m*zq{FxXiehc+2>y1nz|VMEFF9#HA#Wq{?KhWS``n6s45b zRPxm5)T=b(w6S!~^nwiJ4EK!9OvTJES&*#6tiRdT*^4>iISskQxly^ddFFX@`J(yt z1tbMA1rLQbh2M*0idu_liqlKrOFT+`m1>ucl<}5Tl@pc6ls{ECRBTqNR}NP3R8>_I zS0~iK)VS9i)*95#)JfKTsb{V)YQS%Z`2>7&|8&%7(zx_l;qyQfZ&Q6Ub#rbDPD^wv zsMWjmyv@38^NZe>*>?H%fewL=mQI$=iY|(-oNm1CgdXIc(4LoGpWf>}r@oVZtNxt< zi4)3XOJ+@sG8S^NzPoa8I;Oa!s~;-DTua%*y1V|#T6x^ukiy8Cx8^cUK%^nJ4Zssr|eu0!d= znWOhdd&f@4k0;@$Sf{yXbZ5=KMSoA8>z?mjxLmyaiM=GethnO58n{-y-ng;9dAyCj zBfP7;=e-|!(0SN@^!$tPH}i@9spDDUdF{pi<>h4+kO06iuMbuj@N0s7{lX!_!NJ19 zAtNBbBcdXsqM{(9prD~+W1^vBp`)N+;$vdr;Ns!op<=utz{e%P#>K<^=OiHTYZ+KL zBse%ETr?Ck-2cz%r4PVD1f{|RfFBC1$r3-NM2(axYzLEKYXd`0fY!3 zy=Kc{0x&S3e>{hQ1;fL^02m+`Ff0}vHU$9BhC?ZafUCxCiik($EFP4E&!Jw_cn&e^ zpWSk~K%$n=2!6xqnp{jk!^JK2k0sFmY6%8-y{K1Pi2q{=3jo2uz8Zmr`^Ov@gam@Y z0>Bh(u-Ia1aHh^UltD>F=kSgFv$*WyTNema>Ud@@9Kp%>kmAn+a~jZX#5dFuoFOSM zD*!4O^m>6{EIu#Krj@)(m9pISk2 z^gFppY!zCt$}}n4B41(sEHxag;`>wdonB)u5>S-5gD-+hY)Vky*nW}ZCp0rI%Q?ow zD_?^%q=pfw)*R4Uz zCl9T+S+e1a>2TbeaWxo+2xNyNisqAV)IV;E@#~_JjbjvD8P&Z_o_xAo{YI}o;PtC1 zB_-&T4HO&P1-;y_o`+PKc1n6b&xpIgxzvomdyqMw4$z#ploN<9rVhuadbHKH82x}@ z$2}(l7Ac^uriCp_=fFAg52j%x_t)5a9O$e#M0lh+Xs!8)%jpd(O_OMIDQA<4?n+iI zEApg!u|OLd>^m>hu*BGaHF~Ex?AmZspWxZjnLZV@@y54AZ@xzSR6%*BT*|`(Ik1WE z55aTdNXQ6m2r;{2g!+D2n0|w@M|2Y#N#B@OUzCvixK#Q}sbbQfGk-GjgMKg&XTtcExN%uPAKY6pXNsn=q4tJA`EUWrL|jqQEai zzF@Z_pOgpbmQ`bi_|CfBFloWc@4I9ndEW^ymfR*4QzJ6Lk4uQ9D{ra~^So6 zm*}XZD>9GGX1`J2`0Wx5?R#p!JCfs_#GSL)!bNUfUOI<^pkImKhUZ(awpqy+OXvDw zzW|QrQ?-^ZjrHe+d}c@^o38oItd}r-ii@6wOg7ft9?HdSZY?v`Uj@Fu7cS+hmTa(E z*m3^4OKk5rf`AbJsqs&5`wG)jvBjIhsCA95h6T<_?mYdqqoHKqa9jq#0M+o7imTj< zVXvcdG9X!thYxJ6H_90u@%X+K5y+Mb5DMIMXUclH-byOZmWpjl2&PjWyT zWvK8zKRplih6JK{l`i}1wh+L2@cx_NbT^$qA_GB+K8uuc=D2#ES#a~yCyGtxisBYH 
z8+1T6VGKIw6Lju7_vzdIG0#TvoH+D>a_g3!>0;W{Gz|}5OVwMNB7{}s&&X*mdb@WF zlQeYGjIs#*n8whx?YF9~oeEmDhv~a7bYpRW!aMyhK-b-oAp5V0JZ5V>Cg}=^B{(}b zOpn;FcP=US1@o@j`f4wL_y(1>s=n;5RuZ2uyk}y-&2htBvh5zUX+p1(C`omzryP~3 z4AlO2hx?g+Q`R@5Z6~@zhgLa}ec+AhbK{Cr@Au2g=R9{2WJUe+OT~ewEtdx=gCPZB zGUF9$k+3*yW}{o@F=p*HA_*hx9H`r^K)wVxnR^#{JDuyUuswEZfkc-qOUqV}fLc3t+Sj^01=IpRI>nWRK4#HE1 zxiNcYd--5dWBp%%UZc)%eJyHOEK}l+5kDy%uPNcuqbr`zBl=-E?{Fv6f2|GSBK9MN z1Bsqc@_SlAlTByNNem&_{~qv( z?~l)(Gr<;%?TiEW1~iRvisdez+ZU<^aq;=p63*mh*UlBP1v3}HE!SsKz5@&B zuqq8hg#_q6ltkgPxsC2|VivOk+3Ht)MnqJVv%O3wUQn8{r)dy*th?4nb^TNv%DMpm zdojjNuR0qddqW8YuU+l%h4)FjHcw?;^cLDl?T4juv+DOAPvVmX;^2v8hKsJUVx;OZbMc zXKcE3Ijp__M$OdUrr+@eA#fGXSI^34Dv){NMVXs@UZG$|f8&f;8kFpPH_R$~OlOu_ zN50X3-CbA0QX+UU=7ankjjFp~ex)~kK*KIH9W*v2B1l>}HG%};m!lPfN3<@njdS6n z)Q`)gmKGfk9R5Z!Es_hb+=3+Q{} zrhSbd%~bya2?}KOBH_hl#u%PY4IAN1TM0e;rG!>T2wrq*qJ54 zO#`?f+3V;cGcmCd3ig$1XckVe?LNym>#v+mkgtK{SQVVT6+o^`42w)8*c0WYGKGMG zoi5f~k%EMB=SF@el&UZTvBS*k-~KY(vcy$E!WkRO3D;>3}PnN{T~4d0`BiH(hfFf%Np&ZyrK$5JN!{l-!qrDFuoZza}_rl&1sCw6ILHpzTmc96uH>969hFN3f+mTm4yOi47SHzbgiwA*a zO@1p37Zn>m1g#j4yN^HAHxwx}$e~rWv@YUKR8E#<(_2JNq)G}e*oZz!JX25kc*T?P zmlcUt+QWz~B<;L0WH7EuvAgiJ%&YR0ZmhKt@Kc!fqsc}*!HWu9lVf-R<`8U?2EwW0 ziwN$K)gY^>*kIju7RA#YjfHiL@z+PoYRAgURwd zg3ub{$h1_U4r}*AY?P4e2zMcz4*wEAmU+%&p^6$$Tz00mt0(pPRQAvEyOS3aMjctI z^0Gj3k=L-3gvY@ol8N7+5H3O@2EUqhl3QkGqSlO(zK0P9*+u_f!i(_%t!#sK+ZM8X ziNRBT66q!=uG1R+327esCa(~~>^1^BwK0R{!&``QAf07 z(n4qTf{js=4Bt;mhA~pYg}@&#KssL3gEQRJv$pgq|p_%i7QSgGx7_y6rXpDU`vOP!G5x#}M6>EQ{V z)u4ac+dLy+CyH(sZ>MSh-G+A3EH;|Wv=yRfrxF`gU#B8VR*4WfA zJwNZ=e9wCNQP-e&_gK~MOb+31Uy?#jwAFT)n;pP>M}+E^Zzdby{6`M4Co>083*8aU zsNR10WPRK4W1i+cWAFU6qy;EJq&60vmly-qYD8nN2Id0ih#bwj-&3@xYHq%?hy>SX ziCM3u%T}Az(!^9X#3Y`*aJSWC-)RjO5=>_jg?dqRBgKr*xQt#Vp}OoQdXSXOV^(8R zVEYh8qzj1&em`yn3w+EkVm8xK3&_e^98bc%EQzMd#0kbLkbLy{wjcajF+$(gQjVAMT^@w|xbzti zKVu=Kdn=pE0lb*WI$DdYEkJ&oFnkXh44r_Pj zRQI~?x@}M=xCJ_O7iqRlwp}PZHCzM+VrU-a7-`Z9K9iJwYgO&s91S9Ex1&rPO^jnp 
znhAb%8o4=7C95>@AO7n41L`M{fLh*8tuAuy!iAI8l)vLL^Se%F3vU&5Yn619`*#z( zP16icHsttjp_SP3vFw%fy2u)^>b4ma%SMEZx>z&SZev;}%K(mm{Ab*-K=6b8mq#xL zpN=fikS>lyScE#;8S8A8Jct(MBBVK|7Rhch2K8&wm(#0#SZq!3p7NROkty|=J^_&o zXJ|M-k$nuwZ0X@Nb25{*AM)HcH9Yp@P8q{=NlU{rtTu44*VK<-(CuY7h76mDP&xNeHz=bKHQ($ae;c@$3ewLtRBN90l-ropGf zA1*U-pVA$4dtue8;$$tY%8~2+N|O@BrV)g6Gnma63mV2&C>Z3>bKE$u!kG^C)y^hX z6p11}=)}xpl|8v?a9-6_;)VE}67qfkvcx(?NE{UTEO-_2DxaHUQ(?0PvA+hmUq!9! zG}{@S5^m3RJ|6mKR2k6dX#E~1MR);HM%lt{Utc}@!2QQu1XYDe-(RN{x;`Med`{2K zIB3w9?1RBwAtCc)d@8iH0Tl_so(|LIy*F-7{-g@Zk2Cp_Dag znqgz{m`-nUHb=ph+aAjJvN+}cO+tPge% z2JAG#1X08Z`RsyZmtXaHbN4V#QsVlEDs@90UZLK_x9fuL_6gbd_vu{ zxw0AAx{@&)2a55TQqK0F=80azK77WJ>GGDve_J9aTaYUg*sg*f5f0~WnV!3=>%^(E zCIlD@*%@1BR9}Z|(341f4I_bhDW2BmUIt;Tp|6@nXO)6DHlAIUK9}WYDj)BZH&wwy zem#q=#W)UHtezZ-vgsT7>vS7SJ+tK0#-z89*+y-?7a%eqV-xszE#}%+w=Ez!VSVZm ziXs#}Tgn3)!%^hM*yOh1$ouUZrrGO+F>rZ%8GwT*&o06jdJ*xtp4x zXt`DAM^ufeLBeFN5$?hH@q0;?1(jOE*$vEZMmZ!fT``{yulvU| zp5LSbx=^t73P8#`t(AZSGF+iu;z+GZc>#{ReXC+SznHXD%#{SF`KClt2U#XXhhd(V z!YQIl!+%W4y+2}$SNu~t!8Kp2Cx#)rz&utuw(2)TjP)bFmQB0?V$SByZlTX4EVi02 zH%dTawWatqWQbY5QelaHdx2C}WbY=rZ~n?&k;%OVg4b`rmB(8Y7ZF!qtIRem>^yDc zQ#B!G)qZRJ8GGxLaAb~Ud8YR1{ckkkD^(a{i0o7*|$vQE&kUgVh@2XRdZ8va&H2Ui^|w3=36zIiqT}`RH^)d;l=*x`AeuTW>V6bCEsho%ojkjAl5IKosqVc7q`2RIHaA!q>BA=ZG$AjcNc&M~J!7=`Smw7;PNu3kT&_N}Ab=)+f%TV&;ymrH6-_IIZl<{mw ze%;j2<^s=<@lAAiWfdXOT=Rt)`iBA5Y8&}jcq;~#R*DCrjw z%U?$$Q%%vT%%ERM<>W?8+CzV12lr!7my9+yzNv@yWPegL*>~X4ixs0?z-%g+_dP18!UYbj<9ip(|6v953*rC|Y#R++=BqodR zz=+hdm7~eZIMBe^bCLdO73+Fejo7PZI~(QUSDt~Kj*8sq={ed*?1CV&w`_MF+cOPY zCh$w`3XoDw)-IpzQMr!MIgdB<@~y_MfdpzLQmD!ired>;(B525>%9PPO4yRG|Nd0@ z#Zrczd8HtsO)!`N32k(EYYpi^$0a6pQX%o)%*gl?cpfb~$07W;g6kWTd`nTCn0=wN zo{#`bR~TcwhlL977}YuGxOLMnGgnoFTjQ}$F}X1CGo2#BHJK>CLliQK+)-Ek`1#Nw z-VHhi=}F4i{c+=+gKZ;Hb}=2|!x9Hu)VR2QW0IfR{I6%7;tj0cRq@Y*<`SoIG2l@$ zgg*fU$5mNuEGtVe@M)ot@mlC7?EFIKU|tqX&j#8V@^2v>^j<4mRUgi>NAm!k1@P0FL(iFf zk-thO6>)yGEFoZKMh%gsXNlr+_`7iRl_c8Iw$~?-Xj4{BikuSkM{a}+cz*Jr!o~|x 
zTx}7_3@pU~-;#YWm~-?vt*Y{2Gi<+U*)XW0bzy~U0&Hh+a+{=eCeFzfQIYbwfft0Z z9D`ET$wt3zQ%ye^?lqE_)t#(&D0+*Oy5`1ulh{>8x!`{0q@BYChvUaQR}?b(O>*Yy zdh3~FsK$4mp_agDqy+Mi!R}l23%OM0<7g3yaq799ee14}5YEyRyU~Gg5jGTQuC))w zvktbwb+hiTX=2Ka=ruVfFBxPHW|_5Jzis=_e{~;c$hTQ$WZ8mS-MeIAAgM#ORtdjG zvFSQSTZfJjEq`HFe-*B;Ao+VH{#)D6UIoioHDC9V7G{Na94A2hhU`CYW>o}wx`?$Du9KtieLq-=DX+0FXd*}|bs$47Rbj=ZaRn&!^dR@@1S zk-KQh49ET-1yptD)Ug@Csby=Wyd>6w-zeQCzDXyTeLuCam@O(QJ50aoNGu*gEPqa| zA{YgTV{=_x^W}E(yTk~>Ci7MmU6eNJ{qA)oW8~AMV>!B>Gmf7+wUuNP7dw*~iQ>Mv zdDeOCC^BWORU@o!+S9HKjqQgK9?$R^<9_|dM?0Ya!QNu}X8gesFN2mDR!cblyHWpd zL%U}G!hY`f?Ce@I1uPksk+Cb|fVfg@Z7H*&u3(P1dPwO7Dm$AOPA0AB@2Q+PvGWL7 zbpr;f-beytxrYW9CADO`HanfH)A`Cz2;&5ohr9JBMH3iyIElcq!q$D&potzRGjV$6 z$#jgo`CVJ5^zVhK$tA8w?Vzd^BEtFR$Wb-O4HX8zgIaE2?cn7ZQ;DFvLXikvJMt--e_og_`8 z4!jtHigN1I$yrg(3uyd7+6!($E*WSD9**{h^zRL4-I1Y0_CM?>1)5l^n9Ot+a?SHT zv;Y3^2~mVZB6eE?Q;kc~GvxPsvXbj?g&)NNjUTM@@>YFLHx7H%Cpla6+{&`Y6XVcu zuZ}9p+4aurzyDFdiFaMRKP(tzQ!m}huYzr~^i6(0{zQ%(jQ#7P;EDEkw~X!_WQLQ3 z-o{h&c5i{vAekPHnY|^j4ssHp0q5ZsFYJlkBYNx^Z8{s0nQKG|Ed=OnW#@-?4}HHj zX0ABCX(;VzExPJFn)bNKL*HI1t*VkhZ&Sa@_`#&OCFv>{CLeA;n6h(qSs-NQ%tFh9 z6_^(Lm8mogH@7sXL*)|tlO!TNr^4cQ=f+0D~PokooD%(1O@GKsnkh%*X6e=fuCy&q%^q2AEw30IJywOsn%l+66&}*jE5$T#HK;S7-x2yuZxuGjkfv` zH*?#;Y0xs8&i+d5=7q;vW8v4SwV&N}^@l$3jjCFYY)nC%3^YR)DDCEYcz83|p=So9 z=ollqcPf!=e#Hj7P9k(~ZGoS%Tu7mwsj~aJ7cyoyr^+wD@2Q`sHK^A%)9)HQy!ztB zqvNRTojVb%hn@F&oR`a;nN0gl7De8BCC`k3S>b7gYY$dfd#`EV&y`eEEL7Dl6_p>f zQ0dM!C&T-qChmWS`P#O^_9W4Fv8V{o{b8d|30GZ?Ha-7dls+bjH0xBPJX<>BM+bh2 zl~@@|k$SANioIJUY}hWS%+jB*6m`U|zzIx7+aNgfQ7$@*%Tgn>-PZacqj!AfUa7Ep zJ)v~ETtR)>*80@bL3ry*la7sMz5lE0K(Z(5{*zDWN3mGApa6`9(=zhaj#;(er?iKj z^M|H6s-i3TtpwDoI|=W5&zUB<)?%>lQIgAAuAuDf?Wf?L@qSP zACh@Z9{LI8Q9n2;sLpXIF^st0qK$@n?yOoeASV$-qO*O8`<4v4#dICfkJ+wL9c0l% zKAB@SvsVx@r6v@G=W2P-{B-29G|wEMC>n8k!W26)1}odMl`_@`B)bT@!}idWIrWaz z3NSLFtrrarhV5Ec-SSvzIo1gktkRW}2oL9RttRfUYW{#FN+^mlcfvMc|7HOK^;lrgjBFELKoxspQb7>X zB*U3h(hP5C)SRznLr`hhc>9oZ<=bR&BINI|KHst~pkJlCUc+%q(S*z?;p>oaI@2T0 
zGuqEbR#B5IOsDu`Of=nAv_lCAY98_@a-tSfCP_rM;feo2V0C@$YQODW3(Kxu2IpQA z^SE|sA{Bc|Vu5uZ>SJ_<$TsYL4R5^=zv6Wv`DeLGbov<){Q=Rlzzwd1&X7Zos8xQ) zb?%Znq8Gpw21mF|U;dF)Qymer27W~sMj8wa=9o2krXmgG>GeVA~;t z;QnmwY4B6ck^X%gy<&m<8pI5htLU}J)+*^EQCjGsh*uj!8mR}p0BoOw*&#@|L3gnjdBN@0v1k*${SwJ)W1;}^-VtK_i#oieicWJTsp;WDgDAl}we^MTNozUh+ zq&2Dql7n26WVa#?N}9X42y;Y540`cRr&|~L=6D5-Gd7?mi&1@JedF^)-B#kqyF^>I zW?00XS3a;L#msX?nXF!$%wF7LF(#Z%%r??AdVn*++T=1JJ|{ex0Ory^55H8QrpZZ#ATWs4=3qRzGVk?-cH?za|6#nttN%>=d!ky=y?r`A&9J@y1xer$z@JU;0M;q+-%#dH?3zoUedAoPaRerqOi<0GPpwO z&MWc}+CPc@y`G)U`7X;*j(%Wg&|VRKZkJ76Nv-MYebpekNk zDKaEX6ce*#ryhYUE{EH$N)z7>G)_JWnzU7#Dn~ZNkK3k-D{?xRYpAp3e5>gjY2}12 z8~~jk?{lWFtzu6%){ayhQ^{cUFQ&Xc`(9ROy<91e$&{uK@v4(xmF{2hi}i+e;kZA< ziP}n;t5y7*ZZ@KZnidwUZh}#+qVc6CLyWO6*o3HV)Z>CJE)abXv_r*r8T;40Gnz)4 zEE2aWi~Nq^EoO#6#v2j3#H!K}X+k)8%ED2T{!#Ux2rAg*rFHUXM_A;EA3W0qg*j!0K0H8jFu~!#NVI0P`yVz z*f40cR*Zm`f%6NH1HKqZ$nW|#Q*4J6$*R_1`Wf25Q~>AN{Kq-?<7Ps9t_5ue`3}}I zo#RtaM4^p_^qdOwn0*Ev2r&{$5g^zjGSQ%L$1W|`RG|wX#662MeB=8??psH3>Ug;5 zzwiL9mXarYtM=$stKdKQzHA){o1rA+i4(!I*_$?BMP3Du_ryhUU<-l`8;HiF4eLG zq|~%PYuCdAc%D8@3#*JBk$;nrC4S(P*I24v&?BwIhsLs>#u4iB4X^(oc6b5u5rp}} zG|DP2>5D~At!-ZbZ)%EzL8ie_>_V1h+OaWp*L>cc{qhWUJr7OsGOSV^cY7D2o82$g z1{ILQLpe{^FKN=OdaTki1fx^JKHV$Li(w2-C~sJ(g%ccW(djf z^TQua0vdEmw`U=iYPVZ3d(y$2rzkopvf<2JS2?(54KPe*3l=(HLh3+1s$>~bpFaJ) zNi|)K&Y^)FS(;z%Ce>YtA0zoMiE@8IRC)F=$JuzJt19}B|(G-}p0@yrBy)$8BPYU$Pyhf+Oh}dQiX#~Cq zp&E*(CQJJ494zItJW7R4kG(o*Gj*~RWi*1GJ) zK$31b*M^2F=S%g;Pph5z@Tv^FvJ}zGb-Z&WRbd*N_UG3Ss73Dxj+u4)IpL|>F}kD| zVttRa6T7>we;)oS!t*FYZ+fdPu#HaXqz6+uh7hH55ziLiF^NyrxNS7F>+cI?7D`K*K|C`&N)j^UNChfhK1bg0EkxTSf_VU#_t_gsFJAz6H( zp()~wI>o+g`rFFbf%c>1n!-zppzLZpwowZKqu1MpyLg#-Q=!g?ce@&RlVIY+5 zM+rk>UXq3KRcf$TvL=FZ!fO1=s{0!+fb3QOQr%i6XH$v2K@~Uh{z%fW$n@oMc2;KD z8+sPLfl2hnbielt*S=&L-eM!gkKo$TGw`QHXQ5r0Njw9GR@UOh+1b)Vg&uG2Np zyf5rg#W9rIpR~i`DTQQRe3!N<$eOni%0OB*MxON46+lF$Gd|gU{Wzc z@RaEm@1zP+?je|7Xe(1E^ZSWOMp`v=R7C6A-gYpXCTVT4%-*pL>nh^YJOZ=c`;Ul& 
zPD4n3pTXfW#z`qBxikj3)j1JDLyN8aSBIsddNQ2W1p^j)1;%If>z0}IC1uU$%TZ~5 zIKyy=?T#@Jx?;sEi z;_i@Y@?>f=ANniMwH&x@Aa{M*c0JgLGH&*LY;Qg$8NzSjd=O5ua3?4Wke+dO?O^U>i}Pft{6NZ) zFsw3UW=VmairLSg%f#*=cWTUp_E zWAl}r=bcV??A~Ef$Q3CZyCq6zbb-uDm)Lu^5)^`!PZk>1V!5oXM+hc^bRL@GCYpD1 zt687E+>aP03?8X}%Jdd}d-_?F$TcKmy(%W*{IK}o!jEa@MDYm!I!A{@$b<;Oo;fSJ zdw~mG-!J&Rgz#2>F%A;44(OiFyO{2U?m=tSA2&{wkjcuqXwoU55@%7B`pNWQ4jIE8 z$*e#@MoH-F8kY*eF%*vc-Gz)?^3SQd1+r4fLNPghf@V+GgKUr9Yp!hR7`+7n?`oU$ zC+j#Z^Ekw@00uQf={aSCb6FCfWyCv?2xt%KS2GJdOTu(lRbHCdC1)3V-EG>7`#XPX z4g@YLI(nq>Y3+8JlPl}O05r|3t)D(08hq$rNxE*zvWJ)0bn~6+u+;rlq30E3mHQ2X zcd1OKpMkSa^p((Swv}@@wdfVdpNvm(W+CP;X1B|nCSX?kzeMf3Kp#X zZT{ji{UFV65g>X=NhXjaJ6D|Dk(!prMaWjwl#V!L~s8^W*S}!j|b5|g^4+l z{k!|L^->4~&0msh)1Byc=4+Wom5^%9jkRBZD2MY0ts1>K&fhl(dNWXO#)Sm+IqRdg z4d!T-de!r2-D-}lNCNv+p9kNqg3!xN2qQ^|IKC)L(Z=qH818y=EYkNh2B>_dCgX>? zGUC!+y5Z)8?aQP3#3jnpv%5uKW94-1OdEb2W`4Xef>onWa1<(eQLj6-%&+I1QL*og z^q`DyEP>;1)0gNlOLCC=q(7#I*jfQGDm9ma{DSMNaM#`ogj|L=D%}*`h~!O5hB5YQ ztitf3m&=^(UN$zGdt+iuDto|cif@-vwh43Qede`t##-~$QgmLolmlil-$`G;SyIOL zxwId-#8%)Dk@z0V90B(9(!xd`jnigZ7%<|uN2qRe#=@!g?%Zc_>^>>O-9LSF1F^9Q ztHiF$Efj6 zSQ(s^spZ`=6MJ25VJ<818AsAds15~F=A#B%eL76CebgC*WIxG*eLiOqglL8w-bBWZ zdBn=7EkNddOOCYLJ!FRq*3d9kCva-Noi5bKcJ}ofA5z@y2VE`~mEf`Xlc9u3{1Ro^ zc`92SrX=T&zbNfp_=aqULF|)k-qb0>gh47Jh(8hw8r>N^=UB#O^=gLT`^Uj$2@I6{ z_CsgWov2c?2zS)Oo7H49(#$RQ5+S~S>PVv>vJUYJMX=7+rabkNn1RbfDp@*YjJXK6 zF<()+sxbWf*P`!!H~?J3&icLG%mz^;qrPx3*i2K(lw|)WKiAohj;xT(1nQMdVTCjsR^FUtav|iZ&RN7(L)3hb zgLw!iFucJ6;Z7n02qQ+N=8CG|W?@Sj_3$LkPHLaL=RcfWaWsz)M_vh7h}>ud7e%4N~8H{%x}s#F=20TLa@(QV>Ct<9V4e`qtL-L zUr#dSbiQ{sNt3(0KipB}%L0KO#5I*ZG6WJED%hZK&A3sX6~@%&zKRXv_@Vj@rIq}m z3jQBxXbdXu>U;#IdF$h7k!%yF`+fT)GE@=8>cMdxgTp3Lq+QR>8JWMOu8rtxZ>=1U z-njd5yYVCsWSY^fwh~t~(GMgfmReP@-H8m7!~HJ0?Sf5VVyY054X|WhQ4aEyZm^K? 
zzDXfuRQerGTsFW%B;ZFYln(-m%u(# zK*Fb;55k#cWM4OfF&Z(7I==`L;U&osGS6H%c6pvoQ)@if9f`|f5;ug z<2^7Fkc_YRaIj*OLbf1;Iagw6H+(6^n$9Ir;J?5pg6t&-&An!M7qZ3Xs>S8{wXz$% zny&!2V_ON2Hl$PL*Z%&j0+1z32P0WL5&P{^ z#VNYg7)9!{I3r2>cfXv%@I=oJ6()*CrWFnr+|AZS_P;yCcM$01Qqs1UcX;*vQ{jAc z3X|Gj2|v?3-%r8?N}kI(U66=HD#z8Ey$C6)*B6Z-zI|BpYr!5W`YaBFT9^x6PuHA& zkuF;KQ=@M)-P-fMa{_aGBgIb*g60Xi?TOIG*H9qdfdjp?Ybg8$=)fEvmpy~SneAyb zn|D;pQIFSNuF;zB>MBu6+9QCyAV{Ho+;xv`zoXR8H|9-LQu#%h-^-iduF~AXkTxq^ zB*-;bf11Dg$(+_Uq($+|-IE^S^ON0{^A`g|YHF_UShvwBS~NE9mZYF(Cj^g;d77E+ z1cfs~fnZ*Xs|_2@l!{Xd_&0WJ%t=#QujBpuE(!wYm2b)G+k5%XY|=i7^5Ud9fv8%K zHDa>@InovIO?5{;a^VYgMC$X;r|9D0mS0)sSP@co?~XR;<;2kSSxjBpCod!)k_yoR zjBA90gyJ-b>_}qpi}dSzG7&?AcZn75D9?+qeD`fjgU@hP>?QZkP;Xj3XCoXqeHqxL}?i%qv1ot zz|q|?fBXLazW2WFcJ4XPd7kGTrx)fF!L5F}n)yXa_5`e&RFRJtP74)We%|ln=O&l zUaI=4FXHFLWWIh&FW}Bk`YQg=t(dV;@(P&mGdZM{AO`C#ndqqR(HKl@Z~d6|ZMWcA zrN8n=Drp(4#cjQvfJBBEgr`wKPiIf zrXm!)3udJ@n4#{%`PGA6W%?1DEZ-T#yx=5Wbz2g|Ao0byvTiu!4AcX-4?R4K|8Vt(3t$D1okhe?kaQP@ken;Jo zux#-P2oCzwJ0v2{Lkn_BZ&au3EXd7FEcd(k`2)_-*Qo6v1#**kom8}E)Eh%VZMPXV zs+xjS=(8*Go^W_96=F33AGBs*WJn}=J;Pvrm)`oMQTx591jg&k535Rqp+{GX#-=`| zeABy6dGWqUbr9(B2BACkV(Zs$#`)00Z$UR5_Hh}Q_hRf6E0M{b>U3LuZDv}H5&i23 z&RD7`=KObk8oEpKoH;5u)Xm?n!ANmZ-PR~tQMG?BI*f){IKk>4j)Vy4vVK+|L%o)@ z57+^gj|yov*M=yM$rS&z3(OrcE~epAm-KE)XWETuvx&3meoOIw!v|@=QcRRa9@S`i zD`ZVr*cJB=P;$XidJb&eoU(5*k2($cRX(}*>6A@6L&l!BJ8)rCmf-%TB~j5q-5}Pb zStaU6R8Z)7XzxA~Z{XR!gmDUQH$O`2T@|&~P%N)TcixCsrW6+(#^k-F)iw#xH!?Tj zf5WPNFPwJ&@CH%jGJo+)_s7GZ9l&P;Sd#X@pr4?0}Zc6H7GVfkbS&xg|l+HXi2pLy#W{fj%GM%)cl(5Pi zWZyv7=sah)=Od9Z!V%7&mXFw`<38v%(&wJ5{cQJJ?WKU6pU!%2lxAR+%UJi*+S0H& zGP^y#NH|vY?TobI#}5%1V_%-OR&WZ+gsBBRwt8395F&UWSnls~0gkW5L2az7E}8th z!!{4Bxkd1!pFU{{9KtaJ`$>CQi7m4J45^(_vhtd2Dt+92c&`(=?Ot>C#Vku{pUGe8 z;EAQoOUJg_(zvzyMv##3K3>u|OV=qpJYQijGsi&TFm0st7eI*b!2mHqUg)v;4K0^V zZ_#_$a$cSPyYxT!Cbx1n8j8gC;_TwnQXIHX#)>UXWIBt;pq!_CIel27Rz5T+|N5^s zNFzjbD}%&bEi}tzhnTsTe7im$Z*-G@h|xP#7irj)PC&0IXN*vxd_WDke|r6u<)gt* 
zF#KCa#bVRh!AR=L)Sh_^{MYaILh@>qik>2yMi1X`aUAC-dfU@5uj5{u7Q)gvf)}Cs zx!-J2)Nl3!l=9nV;rvO`N?j?v{n)efc$@cZg|B43WW452?wIW2RvnezwYMo>yRxxo zPfj((FLFkYsCf2hSaN|CaxZ49b?-Ul>gGDNU&hJQZI{+)wsV&vwSyj82K_a3!p+N4 z)*@{{!^N427~T1&k*R&5 zXAYm_ezQG$8kB;W;)q~Ydp$r<)4{>1muyTBan?|~@sZNUE546|DmjK{R_Qzu(+>XS zr|Pu#DQof%>j#M_)H}`m`!2>y_pHI&-w0as<+A+Gw4 zOGO;4PYWSgKb0sR>y#byXq`bE!h+IhhY;rNS|#s&w(HzfB>_$U0Fplocu=w@4<8bv z(5KS`y;ng{P5|&0HS+)xM{paj28K9|d|1Ajc&S@=U%8|?*UatrH>*_pK^)b6<$JM4 zl26U4JR+=bYXN{z9pS{MYn~HFpR)u>>5{iTwn}u)Q>97z&p|#bAj~@|{hr?o9oUWC zK5FH0$)ujZ^^+^jQ%CssV^fss4WiCThyXBDQ=O`BuQga(*OkhA{j@la5r#JrOT*)e&OB~1OirG%46AP`aY%%C{e>`6vX*nscX`KFAgY zW;-{kILEW@t;TMx%4%PfkAs&EpyjCLXYC#Pq%J+pe*_Gukzm6hwJY9Hn!bD$T7)E% zgRSFnwC)$dN3j~I$p=QBe&Yw|1Gaj^3w?!LvuoArD*qur)O+NM(s_$W2PWR6?G`yr zrLSoglqw{wCCNcegH30(W^+dm)px_zzQaA9W!qZ|rFPM>KT`{FS$=H6W@JNF_CVo= zNPy09{P1gT@IQdT;qIV;s)4h>K+DTdTkn7GFxCsV&9{qo5iKN;oo~Z~G)j(QQuwuo z^hGv3SB{@MH`^D6q~Go6kqW;1D{*%7Blm1YXHxfb(|Xn7EJy0^d&oB@(;5}H&bSo3 zZJ9|;RHVY90*~KY9&;r7F!-9o6fRrB|B0*VdUp9nrJ|22*8Jcc;Qz*RMUu6n&_xdk zsar5NR~xTVav+~5${4;iSLQ#08VF0PiHjT3I6N@@&|9%S&S{ksrQ>M|b>n9^AXVR< z0qy@SRqE6wy?Mo4yzD*a%+^xAmBSKayAb5vM9~fOfIFgV-5sg=SvCfulA+x!cSY_q z&Xs21cTO(lJ_5wWw*hCp4ESNt01FnyxtCRSWGVzrHO1ZYlGAOx=F@(RD zW{r)VsI_d$d3B=86HCxY>~jz*sq`P=`VFOGA4R+FxtmOtFgP z5{p3A**es>CXb)Vu``cB*TG}|;UpV5r*XO!d8f55zhJemr(fDtmM_&@>=~DEONinZ zJdI;)dfYLYA1Xbww&)R$KITsOBE(tzgPYfP%(vRbvs=VLfN9ep!GHxz{xYU8kla{@U1B*xiA~`)Hu=>A3sy6JBs$kA>a07Q3Ek6DRsl zVdPRM2skM1nDD=@%OPIBaA^hT-n5rU&XDlJcC>lP0pVSOSJ&~BiWSE?NX(+hRfi}4R+GZ(+xk#Xr1&jGx;MC&Wr&B7zERY@1Y#LuhNJ0X*?`+;qsK0FDxqOlDBdd z$ER6D|D$RXFdjU+piX``j=9CvqO&hy2#F_<<}3xZM#JYk@z=MGx=gLr&V$B%_4B#( z_jo_s#RBL}`UART6z%rhRG%SQ=6QCu?qQVRYg1`0k#4+ zXUyxbesgDb{3YP#1gCAe?hWbt)~cgjZyRny5jW87@dPl>?VrOE{(8tiz+^+t;-9Zf zeODsTSs4B4@$I>*W*=S&f5egEKLB}!>iLJ54_%fdkQG_4aT3!+WHs0j`R>Z6XeXZY z>#f2)hv+_I!qvd>c$wkRJE0gJ^=Q>Qv8~TS8Rs(mq-bY6`MbO8Q`FrLk(`m=O2e1Y z84^z7f@|njko+H;wM8KddVitoQ^f7@7>UmdlWlVSr+gGjcVNE=9ga*sarT8~QOzXC 
zFetx-^ozo{fR{fE1()JTB<$=M+aYbu0SHwfd|Di^i+l8_TCmu;j-lS|kD}b~ zZz(qo2%>(ksZ14l9uoMB$Ui`hw6->w_leVuAGXu@Z5BH*`ZmQbFl{dz0Lce~j>uS-s{q})7It^TR?UeVq7zbK& zCIm0KJ=?&QkV(Jt76L9VmQ%mGV)j@FaH2H9*1I3*aMPx1#81+*ZbI7X(Mp|KVqX)j1N z?xq(LwxPD9Um9*Ll}Ybw3WvT55jZEOL}KVIvz#S-{)Qg4tAFYJJRj1@R^h%fu^1t~ z9tEO1hIyn+VsfXsM!NkbApV~+>1XP1f!Opw|v2> zP%eF07%kiwJHt4DMjj1xGke{hS^F?xMuGpkN_C&JkoIDB+(p`r`x-Xz4*r~&1(AwtDt0~JO@`P%)CNN7aO`Yc`!uuI%Rc30)0BA5`6Mr*b*ARDr{@Yip{)8U&uS7 zBa02w!u86=AcvT;eNNy^@Y={`o-O*UyqSZ%TroQGNWDR5>Hg?033|vR;VfjPin@rN zuihnS9nT+3QFC93M-$@hn8Cvy$`grLg(I-{v~Dl6`LDYigi3DY{;Oi{E>y8b-4@$e zbP&tBBVh~0`xsG2Fti!s@@w_w4bj;?O~geDb!!G|DIJm}0jwfC&S=b15-Duy*_53I z)NFTr&s`|^U0ii#4h14gz8)MrMqO@E$HDoV4tprRSsv3C-9OwTpKmpQF*P zj>%87o?BRQ6AwM-pA*B;-Vzod31hu`JL+x?X5RY$$!m_^Z7z|8>vsS zydXHO8d6md_&J^7?F7-URgJ+H1r$-5dN<1d%^M>uGeXG4DlNp7DRWhbMXv=?ePED= zogWw-^CkhiZ1A*JSF=Dzy5Gki8os2ZMN$SJNb}V{z~_WeX+_`h8x&l^<=0x~4dg!` z_T_R9lfDA}sGm7DqTSDb+ComZ+XMdr{)SvB(SM3I!M!fmXYeAIcZOUA=bhpU{}7FX*Nrp=Tl-Z#6I#15nXh6=c6Qy9;uSP~9(T8{BRc7K3LUvUigMISQavkf zUP~VmD_{o;`*kDtMD8dSitb*m-Hb@MGzE|f7RklGF$~dQ0w{Y;6{DC2yFWmVDGVN? zxj;Bil^|&Uy0>3Q;gDF&r~)s3d$-3~Rx_Pz2lhC(J)Ip#8D*4@FDDWoEybM7m!e}L>jIUY!#Vs(?VwqE;0^ssv=Wy^aGrq z1bp;M(a4MGk4TOi?f))8sU%{gO-k%;)T|CHg>=?89SWxyqDV{$*E(KFFXmS1;;TgX z!WPO#!)|3hH;^nY!GdJqbu>}FCkX*qHb9Pc-3y5Q+Ib>OZ)~k6TcF+%b}yBG^$UwM zxGmMM;{7AyO6ay4^u^E`hGY&t=M~P*vE_B(`39XQl5()7^16SqZ;Jg*t1dtT)H6wX zpEi(^LoDJss`|acHeVE#9}&vw&iT$!^7|Gzg-Rb+EzyKI-mv~>V47+?(L!NvY%{^x z>L;>r^hU;>eiQ(KQ*>>WpuOmzH=3Z~eTMAn;5+x%aZ+JT+XSs+g5@Z4ZX$R3_ey-) zjMOe!=Iklye3D9F0?N2YR#Z>zR`I4Ie+z$7SBHN+yHznyj;7^dzO%l!-HNrjeB8$G zvea(!x{38tSFVa|k%Hjnr)?5|S-LnDAwddne5Nl2rS$dZc|w7!fNq?Feke&e85tSj z*+op3SYvJ826khyK|hkX5|lsM2i~tXNxBvfk;iVSI*sjr$!mEm6WJc{b`d8d_L<^KpezGI9j9wK6Z5Pyv5J;XeOu(h@qJU0D z`|tP2$QkepMX#@<`Jzn2-4C@Rl~D7Bqz~*oulv{R@4VHB(Aa-~RDB}u! 
zzdSvk);S7<<=eFTrhAkUHYNI-t^w5pI=y{4+4Ayul7E0_Tvo)mv>vxfe7S#4;+czk z&IWuq30{~&o4Oh0+EJ0nakla3@505$<|I)`wF5mGy@JgdN*=zSsf4LLhfv%NTe#}I zUbp)Fg;foedHK^6WJ5mh8_imO!59Fgx)dGiaOSJ9hQ=V^&E=)j%x|v`>q7eV3EhNQ zZ519ibXj@_QtVbAyx0-|+VUmg$go8W7mkiGSrr@4KNi#J0VL> zYYtsLfG}dPf!qH8Q`6PtAE4>NEVFe#27SzGz?6OTg4@m=u>V2sQD&-?>i+QxR%4S}nc0jx-nD>!{0%4yH;SRj zecVctQRMT3A}a=>qlRY&UYF{h=2A__cUjIDW|6+cw#-WqHW< zt?xtcI2)^;uKI|P2TvtxH^^nvu!!9;2O?98;+}i)qY(uH*=x{kQl=Ar#g+UJ+}%pI zEsWUN;GHJz5 z_Gxph(u#alHmO{stjILCnKE_HxBS;D5_(NKx1y9^1(^hxBS@>B!vqbeh>Hb(4z$4- zc^itvlm`UdE>b556RpCb9yiz}A)$OQ3CJr$)?2+BvN@R{CKmi;F|5lK>@z7V7EHV7 zDWoQw(XWPsso&iqb=3mQyTz*4IRsg{*VI9gSHSRmX}_p_5~n384uyW z^s~k|J~KDN_9*1hvL?z;Qm+|*-=U2cvF|uC7>9wB1dUty)4N(pX2s50F>+nySXDz z>xp(XW)#UdZN~|}eHLE7qZ z)A^^$e*Q_1w^ie&b)<}22S4rVn zuoy1C^b1#gGvW9~+P+WZHzz5E%rJstz~0K|YUk$^{d;bsK9!R6XTk#c-hm5UgGWC9 z06K-9ESAzrqnm7r`6euE37G<`c-K-$CT1%)!em2vHE&zc_}`mIiDRinX>h?!qR!H! z)1L~cU@A+?ALA!Tg7Vf>d)G`Nzt`Q2loF2WYF+X1S+9+rRkXLCI!zfrbnVzy7@fKQ zq0*9o4&YEcDe-DeImWHhq_(~3gfvoK=&6_=-%o^BK3MT#@X6c1UeDX)kbQF`PxbRV)lt!a- zTVR#eqPB`%xxC);iF-Y}n7!1Z<6RoNkCgRoQc?}+TpLA&J@h(;-3JBtJY}y{+rVJYr`qSoA00xO$|5C;=%rp_@>>uTMg*Onla^h~*s(lG|2ZZ8k z1d+JTg-24X&Pykg8dZwhUNfnJK+bI)+w8Dy{PC{ zct->RVg`_MqP;WH(ICEVLjr;@BFNcxFLA(iJ_jHggOogl&7Cz7yQfUb+C@-0V(5Uk zXPb&ogV}kZOO-5FAmE}SF4&^VM#e6qNDTYsKBr-D+nOMZIpF?AL7`C9+)0x$nhQ+4 z0o}gFQ6d-0M+8aG9rZR>yu||xCs0P{c0S^zH9A5bx{B!t!jbf1pRNjsS+*a~GRlQ!B<;E{ zik?9jVb_@Q5jN9ZW>eUrJJBf&a(TVoORNEV5oj?)FhA_g}$UZ_@g=J93 znqP7?F-Z~x4;<(xKAjZ+YaE@@@H*K*QnsvaOQbd9PH}`2=W_{@J@DdW^B<=C^q|>A z)t6I74~p?r%VLM%!W=OaERHCP@?Obc>8Z9;g7hZ0H~H;yb@Zl9!amd-V%jL_Cf^l* zT%7m3!PNP+dT(1)^So#)-K9mx7uq~qw670Cf*1j!c{Sab6ikh~On9$$YPCj1N#wg? 
z^M)oq+12zk4-e)IKf|H;5cxKJy$uPl!ZlN1px)Dfq~q@Jv`qGtD0)H)6r)vW5M{yG zSe*$!-cW)S^*;dg5~(RBgEJ7sgGHg}?m{7QB!;rS^(OQKT*>4RT;l|xn@g=CaHpt- zt^E|CouiJoTpS}Jh&qH_Gyfu5Ymu$|^}n5UW}k8YJRVxm2yB?S^I5vDWBq~ zr5zO7Ics9<{nZFuau-Il;e!ngQqU`|z^>~Ff@GN=5~Ynh%n+6Mes`dEhw47YMLNtR z4Y+?8Yg}GZ~w;J=rUh@ zXSPy|ykU@AtpV{SxK!R4vq%ZdZ)rHisHNoauhmNZ&e`gz=8*JOuWKg=xQHEJ9&JDS z2iUH2-(sj~tJyK!O@Y*|B`9aN7L-GZWTOFT3F4ASOSkwwvR9khc<{5!&IBtP#aBH~ ztW{ThFx#Xzu=$2d@|?%h4H|T3N9L~sV~fVK6g_nTn}%2HXu9L+gXi&7e>YzPo@X_? z9+`=HQ5$jUGv=PT1e#1dy79|vE5Fc5DfjCr5Ohx8Q6U`>H;pmopQs*8i+y3ywAShf z!%yP$G~DDC6rJ_Zv22a*575Fgz5FX~WEz2GvKfHU+ond+_fv48i6By7=_)ui32 zbP|TCWzIaI!Bp7dhk;H}Ln>2Q&bKw)ZjzrE5b!)#vjW~QeneOrtfBJ}A8UNhCS2qH zV{Lyp=tI)fOv}(rrG@i*!)$xr0SN*&%HNM3_jyLL!Z20TTqI6vDK8Bn>2pk5W;eu_ zu2nf-zNx*S%R@>-s@+3U>gHx|l{DIY)xQ0$yLU)Yyh$jj^g1$XXa^+)6&e0c1a)7X zMOj}ZY!vl%BIf$GbBy|D-zLQL0(>SNF^qQ{e~lcvSi_bM=>7p@??RVIOq;o{Myl}$ zxebu*M2IfnO(>*QH~&dedp{kHP%p3YpaIj3U3%t4X8p+8r+f1|$n%l{7s?T8Bb4(! z`9oG8FUdBne_|nLTRw?vYYtt?P%rzd5yWM_&jg5CeQh>j&x6zf>XN1?`jz4;2{ z4mce^LsG7;uuJ^6N{TsITm({dEXOIw_nl14ZiVQ-nc^7{6|CPEh5IQ`cb$_Zh9pjk zlqY!6{ej2^ZxRglm#`jXHgdUQ&igL^`MfEvOVtr%^J`5^ys1D&%{B>I`9D-33v(|G z_{FN)-%~*3iT9Hh6n;ET(=7IaKC1Maw@>t(tSuV*OY%+X*ilrvdOI;OPjB!l;_wue z4;NUqMfeK&OOlZXj>_Lf=t^xn#zqSZS3LeM%`8-6dkZ_f-gbE3)8Y5jKuMjy!>0lEqDrT^l} zU5Bl#D8dgi!}=n5lRQ94Jvxa5PcJO={>Qr=YKzKE^0CRwj+Jj5i^0Ko+=yzPl2IWj~G*if#>9cKUA`y;jd2Z~_=a zXCh_)*EE*APV`U2t5 zGR|OJkg%17l7NUawetucGUes@*v0I7?y>Oihf0}r8wfvDl%m%pUXg0GWc3s5qFDkT zzuq9dbyLhE*aC#^dV3UX1xZ(Qa%*4E%b=YwlplGBZ%1*6!X+6yqOe@0FxJK%C3%}5BT*a+4z~&e5DjCJK>yuBgnAwi$}}!j>^*tuQm;goHf-~X7^G~ezPua? 
zxMrtFmTrsb*Fs1%4IeHQmA!DZ<*v?5Ky+3wbXFdwIy(vtGQ~V-GYfMYB_=>X2eC`8 z0xrYpZj6$z4cAL)KhN6x8%2$=#d~qM^pT}=q`dR7rKf)$8Ge5=k`uQlUrN*o|L|5$ ziPD;&xCob~=L@KHYG3fCH;V;Ebe9J+tiz!=R?)E@~D<#~%( zfus8&a)SMMDzU>r38!}{(&QMuR`0dZag@h%wchDb;!I__`n?ELxuKoL$$3|!u-khGih+R-@ z*KDi4IP~Z>-eY?Y^s#Y{0v9GUr#2;*&SfYYlZvQcV3)kF4mbp9;w)JrM4tz#q}oTZ za?@DTV~+9uy9{kG8mk>!FxHmUe)b=g$~nytf|PiG4~~_++BJ#tYPRyhk5&K6#xe9D zuuaSa92p{&wY}o~573-@!&%T`oheOejF?Q7(E*s{SkS43`zIDu4w6x{)_^&VA3kSd z8PYIZlCP*(>Rsj_4|`*JMs=@_Z33}mDug|DTAFnxSX84sMS!K(Z+)e1dxauVK}Mfd zs@%@aXE^H3-szSyqHOS=roI#Ydfjtt)@FUYny|sas;u}!E>wG$P(N8B7CL_;Ub;YD zPNT0h3m;B< z!rRLk#;8a0B7!wg179#^YS(z&-Qt%{(OT`>>Wmo^RQsgf{@%dAqJL=M?zp5){nb+* z;@5&E4s~kCT*I~`s$Lgh+U?J`;Ka9`8t|6lQXA)U+K1fh78|Qf#g9@+*l`%E4Wd!NdyeZG;>6o>BxU@ZgOP+0}ZkgUp`v{;|*nnX;E78tjtztek3? zOJ=FN{GvO12uBchEKp^9ZZ3os28^6XC0X7 zRH6(c@`-(f{Z0=KcD(!H^J&<(hlIk34UG-&_ zwazmQl? zt4&s?VRI-;+pBoj)!XAwId6aa?D+jWJexJJRvF2tl{V5;TbJw$%Ov8IpOIRXcNpiH2lzt#rq{)LCt_mct^yfwq= zbKNBqt&2~7J3LFYOU5&n{z8;W3VaWq|nvBp{x?xKhY?N zW@ypU0W)@j*F|`Cj@_(RF^V=-q%I*fz=~WRxKLXQ?LD#AH0EcrtGuVEtr;8wo>RUo*mDO_Du)SXN#tG3|T}nNTv9kjh zW=L|dfWPf?dLGgo^z2>{?!igG`rv}`4t)lL{{xJ8&6wr!=B>R`i)>Pxp|d;;6PL51 zOEwGPd=NM=Fo?ar!zq82UYMy83VsxNriNR(-<#jB5hO z61MW|>nr&{98^Hy`tpk>v$t0Y?DR^G^;7C7&H|t3X{^=3{G?WW$q2h2o@$8#*C>FC z3jsU5MylD^rFM0tC$f$&(d61RE~y!KNo5L2kcG)hogNPWndth|Ihu&gN*Nq{ zW>#8ds=yP?ycu7d@?Vi*;*iuiB^=Y$eHn~}-(fIiyjdpVF?*AQ-8ITDeFPT`UM*KB z&VTlHKhij04^L7fBo|F88H)Ln9hev#yxJ~yNJSI6K7Ux-FjV->ftn^7*x3R@1KW_8 z$`tn6%8Db1X^v#@Osw6fdHh$8Iq6fvl=iJel$ZghC~O{OZ^fKaDE+#gQ`4IW$8v|? 
z+pNlh_|D>#R+#5@Qbb(ylHDvkS`)Y3b~5;I@CRicf$7%Kz;t0GDpCNa^~ZwQfnLaQXfX@ke{h+2WlH2nMa-k zR?N6eDN9c8^#BQHW9qqSv6ASzfD&D8gl9|BnQlxib+RozK_(d$__Xt1zp`X9Y zCm^fvkb1iT4GpU`y|d-rO(>V?@ZG9#i$+w`QYT%E9oY;WqYS5P(?N za7>a;AVj^!eCq(x1aWB|A_|ER^uWNl@1<}P( zpf%{b*!04;i5KW-N!L^;Fb#vazTP6cRvf1M^T4p}ao38+uGw50rzEq=>l42Oc;KZi z!Rd64Gc|Vvwv(GB!Z!1qce+~1(t!OLJ4bQ&y1lb7hK}e0doL1WM3`n#;7uFT-&FRO z!gxq<(GGbp@%$Re=(FK?oS!qPSPD40W_T`r&J-zraftTh0zuA>g3%R26EEsn-oSnR zIkRK~v=xlCqWa9NAcTzh>c{<(vd0tDl!eMuA1_OunU4H^-!R}e8#v4UFkWp`jRlS- z0Io>N`Iu2`#||j34HkB25|*;2KeK<*ct8Yf#Q-6j69Y}qP`zuimtdH?T5*Cg`v62^ zfPfpxOaTt^;&D?0@#yW%+kcG)xrrG2>Ao*mWG4)kLP9c5lM)I(D{`}?oJkW-s^Smp z5SyXCj;nM2HRwKFP1eN#a4kYDd!9L939XG5LoQ=yKp(rl7z_T!NkU+ER~I4KY1;h{ zFYbu3btBvDW=-Y_QybVA-}mo=-!brvey{xhO<$CLJaFBbo;_Ddcj1!lt$@=}$U>&8 z9hQz+6)AhB`q<{^KB73W$h1MGYnOn(BvXWveEgOI1>B){CA7o2PShwKoisJodjHd_ z=ePVw;t#pVO}{kUwC@N4_HbWXwbMA0|Czs>F@bncZL0h=1CrzuUytfYViMq(B-jxz zOV1t+qx-^CNp?g^|I>%(#6#g|J{WK(_)2QWZZ$EVQB9DE;lXB-{I}R^~^3VOdCDu1eTwgIQH0=tY68%^1=TYsUTbAP>~}vgVmX2(R=cp&IS;5qm>q zCPvDb2!s=fdUiaI%85~7wG{@v@%8v=!xc4XKA6q;D?dM_?w$u`Feye+-mpW~PmZaP z)p5KO6>~RldU(`cFY-o~3>$;rTTPXw*E_`TM@o6FGhbu9k=ncQWQXM8{%uoQ-jF;2 zD#>T8m@HW%<8YKjB&ME24qpw|^6yuhWwCC^6)3<+k;#VrJl#kj&$pzfeM!k?O1Wt9 z<0zyF=`tx3N!@EHeXF=}dhkJ@dxJ6XYMV@2p5_ws_`lvJzp45@nn<{m(WQ!F1-+um z0SSolO4hOPPUayr9mL6~%_b)0;?rR7b`<#3HjK2(Ry))F z%(~>f#O#Zg-O}>}{E&r4q68q=C#kXqk2TDR3U{+5DAg#s?KVS2zXK}BX`dSZ2CA@RqlK%NpvF2sNPSHVP zNIAdUh#m;1Nr7@pE8sNoq~9qb?*Uouk;Nh7l{d+WN}29aWqngQ(KLNA%yjW zr{Hz@QjDbaW2z}Bd^SyFvy~eWBZYq&rlRZt<=B6Zkf356lE+mHt_hT}f`pt_xH6Qt z?*&-Xkr8vTMVU~VVSRWVMfGB=%@-Lla)Kl?Wr41t<%_%SV^rC_`k_)xy2+{Np(n3E z+Mi5I<8a*mREvg2*LjNP!`$G%7sV{PJMkB)T4-}=R*T2k`{-R>|VTgl196H-t z8W1d3yWy!h-hUtPy!fN!u~MSU9mUV1u>Pns^`NQ9tg}MXVfvI^I3|;jv^3r@i9OMX zZ4|Q9|NXhTRKLy+mmsYuuaY+zV2gYC$h?aUM20HlQ~v0{3Q=B7cDcj95(Cxd9MrF1!kTV={Zn|7u0*Kzs1@(rzz{sh&u(+d|pRat7lNOd);0U?WilG+7OXQW{A{?15y<4?Ue 
z^%kIcBAk%C0X}{cyA_PkXuoZ6{Ra>d-&$$R;Me4hI(3*e;WSL<22B|4m`u+Da=Z46=w)y*v!t_EH%wkEJ^K(TFR7Vv~EKKf5}r_>-1%?FRiMEZPS`4bj_jH8Yj~2j?o;l7|YC@(mHo1zn?t6hO(lOTrS%M-kCAW*lc$t)ig) zjCGhzpDx`nnh_eyNR|)SE?p}d;YigjV0POi$k-=`yx%owNDbj_WDWT$u!C!67<6C6 zWO3n6>cZYz;t;uf6?>e07}w6bAp)K-zS-IjB-U_tPFabwZw;=SI`jZCk&@6l7=Y}Y z)PA$A@bP!1BqzTtKf#J0_!bvYpW(BymAgvRNdJnKgv z;>)l`{`|=>O)0Q23Uk*VRN}*)br_IDL@o?*B)~E8*!I5q;=ds*AkN=xKM{1IC{+OPJsBger+!ZBXCl-d&t-&I1 zebLmP>H@x9lqW~*4Td?(-Sajl_y7Givy79w&@-z)+8|3oaz!5Jr&oTD*Io9YCZl}C zrta!O%e>g|rUkrVFY>ecwz~_^o?O6CiRYbdr~kdU&U{b4YNP;ubFfr|NXUxStFC7D3URzAu*?FD4gC4 z;r#9@RD}X?B8k*q9(tH;^TwD+n^Q_3^y#!P($nVa@2rG+UZLmg-1I0tKd2zI58_~* zovQXFbfA@QEtiA8&&n}hbi?OF_5;p;V*)ft#3wuui_ z80)`2oizA*TI~TW)07?3&t^_+HE_JcBElu>s(T!yGUbr1?F` zT4R;V4q4A!Ee0=u*(5RT{!n9Cmq}KkC!w|f7I5w0Q`+$5fgthT7Xp`>iX`3 z&D+ZNCK$Z?+mk4L2ClDqR1*<>_k#=5>LW0+*_wDm{JF<*bg4o@B0k9guA0DLIck8p z7mwzF=3Rmi37eo`K@rOWyFfEq2vYI=YLsXrVfA`D6i+bAl0H?6~vq^u0H{ zlWX^YdA-LN9M#r~rKyB27)Fw&no$&Q+*~Q{^J`U-5kuOBNjSd0JKO242w$pGvx#i> z(^zIt6>VNKdEz~HH7IL~fpL{_lWizEDFr>4b4~NuPRuTwA+$?@0hyt47R_5@{?B`)fD42fA41~rhT_d^}+E{?|vkgnwt~x z2RCHj^BaAmQ6u&4J4X0XT!Mqo*2sP*Fq@i`v-5p7GPXBb{2C_uB7lflCvXs@pLCEs z@`_cdfyAZWfaL+m)l2@EGO0aj;jzI{q&{o+qc)J+o zgF^ns5;gkP9;tWgjU_mf4Cq1`VJG)1*+7b``2EPmcLuka=TKuveN2tSf5diSyhSmK z!T2K&gW=b^W8W?Lj^@;@cHf!CS@n%f;=Opce+JVEk=*|=aWA)TU-zYjXM5~Mv=KzP z%Zx;Fy>|>+WlTt!eH16EwH+bb89( zpT*Jj3MeaOk?q%KrZ?~lW0ue$6z0~K!{s$6wLVi_X^9tKNeT_WA1*#K2X;Yj5OW`Z z4n>T>(DPW5C#TLLkCh|7#=tnjC0zsh12?d;nB%D){Rz{!;EY(%*b!a9z^Gp2Dgt$J zjKUFNRP4Ctl%Do-sph`?5WV@Cf8-AfS!giS8hIb|xj|pIi69eHxc|loM6>p-zH8#y zJYV6`0GHHJ0IrVHopUG%-y5O&Ay6rb<8J4bB>pNR9p~)txuQd-s(8VWT0*~#JEO#v z$Sn4%#6gCco8Qs-`Xfz7wYDDaLOwt)RFY6|@VVfcVe-bitbL0AWNFL&%8G5;t9JI* z9-4v2DeJqq*aa`|e7%oIvmXJ~gn5%yZy^=6cGkDHrqYbjAi+h ziiQM&PP=h2P)t+rTWcD&Qi90t4Khc9)8i>bB#(|21WFVp($>|Alf+XS3PGDiqcd^< zsb574y9V+6t_c$`FyLG#NyewJUj-kjoQ`Ha?!_$s%ivJw$VEH)PADodI;JG7u3inS zULTK*B)4!=fj!f>IEpNh?k+yxeSD3LFPa26l5M#N_~%%#E1=sC#9cHvBh<7;q3-?L z85Nt^8VOd<<|2Y{QswyZb-D2VTLtO{ 
ziddVBVIRAZ_w!m(YB;Bk&Ac&5eQCw^MK|Flx`X_$tRo!(aG^4ZGF)%Ts-60$jLA2S z*vY!fGf?tJzmR6Hr|44{+zJl?k+mk{sb`_HNLi-x6)FeO!oDzk;zNPSAK&xwO>z{H$z$vF zg2+Ta7K=16hq8!~vChyo;2sC>@%^*V3^KsXp%2-Z`fV0Bo6|I)u<{cTbG44ZcMw73 zjE+V;^U(}3IzwdM2~9FYn8?gyjFwQ&;SjOpk{Ev8e1CqNVbqfFf+17r>ndfSEH>aS z@cZ&|Kps5f$LIIzLn+0+u!s;gucT;thMQuZpwwd1?ALfM8SX^_7@bOx<**mef_NOB ztV)!w)g6fMKk@9qOiD^i*`o*k6MmM(+mt)9>~_6-zjGFvEuG5}MuDBe5DLlz>;^?U zTO4)MarG+_sdB{_A2Uy+MIjx>-|ZfO{z7Hk+r5ziy-IqDq?}}y0dw=5;B)(9lTHyt zxcNn)Dm>- z@(ye{BcsLv+0=WFx^(Sg3sg0oN9-&8^lRuaNPQN}u5uSFql^-GXW*W?CCb9#d-A|b z(uemkrkrLc5{#Iok_Bohwt#xW{pDx2wd(%>VPCmoiW#Iy9VxSSTW~5V^Buu7nD7*bA>}#68(w%omePX?B5qqOZc<%agOZPI$vqko*BL{4c z>?0>0GFQ(*E?lYjg-n>5_G;yA{OK99q?sv7kY1wIc=T7-{?MDczMb2z6~xhORli#7 zHg!vBS{lSmQIz7^u6-mS-d6VTq>SE%YngasMqiBq<@3nhUnF(b@ z(&9+>WBd+eBzg0Jjz>ZGJT(HARCy!;;7NEs%$OdJ+qT9^6IGu83U-3;Q=9@?HzSS@ z9x?s8uBlQ|d!x_xF!?C$5({!cFRN5(v%9i0OkhIU$;ZA?k&&De$DchxF-uUd)9BDSSTKNLs~iGAVMzUN z_l{H*317F3VHC&GRkpJnC6bK z+Eg@YmHeJh_B><%08XM&AW*b1Ob<4jF@NYt13H1Z)pAOafKEn*09%r4R~>o$`{$BioJKoSuKr z&*!H~K{ODW7Z#BnTQQO!crBkJc>e$iX4-%2kH=>` z)vS7A#DNf$=0-TiG5T@`gVY5oU<5Nm^XY08B6U*L*^~!TjIGWv3I6;HdHwnx#$cmc zuY@^FC2K)CDw7R}fDN=SsHB7P!-g68&jfyYKFw)0_xFZq=t!o7#L}3{57iNLl01$N z1CO6M0|UoMB}s*p!(y~WiaB3PAEVI;fDe;_@ISEQp(t4;?eCNz40qLmrHund_OM81 zj0RN59)D0AV?R7+$3_EC-`|M(!?Xp;BYgNe?L7k zkXeC*5YR(tu*WM%!c4L*==T%@8P3Jz0(kNGALhnV&ve7&2C>STO2nor=@!))2(P!( zBn*-J{*nG?{Dbi71hsyi?G6$(5KGgr!a@>jQfJjwX@CSA0uO*jJ_`PSeuTc zV`3`k94|Xx-Jii3Cy?skl~2tKtDGSc~oJMO6__wyq2lB zSwE&BwuGvZ8;@}~!SkG-#yW*Kd_q!6^73Ln{{Xx*Lgk7}W!$|qRbjbU6~^V{ z<32d*(Lr06PmezsVj(fbVyN{6`sG57e&hfL#t!Tg#y>tfLQ0W!k2pX_C4g!YERT63 z0m{1t^<-8DC;Wfw@G+cxbs>}&Qopa(45qh(6HF7SRKCn`8weW?_&jaT!6%R3Bcl8^ zVM(7m`N3rbQUy&0%f~E{`n!W|1cUAeIVu6GY{Ohn7mk$o0x&00Ob90CSXV001YU z#U@;}%8@Lvm*!6|deS1pVWtctEV7qggt@PYX4pi(@NuCwk=mW*+B)Q(mb2SE!6Y^z zCwAS)6yu-L&l{Zi&+boExQ-THBN5u@K#Le=UKu3*U`Rhdyl-=96!y%GGP=eKIrQ0f zFN~9n00{Xx#~otK;pi>_`N8B59GkYuEGr^hd^oPv#_eLxc!@yLdL$T~pPMbClZKilI=v 
z4Zsf{-}|1Y{1^e9$G&iRL@_UDIYa*djjbVa09fu3CoRZ3TjbiLkDy=a}WmkeoTi2Fgedaj4H^YetzBuEH?h3B^pSiGDt_LpTb@_FHy6JsyosU%0jY;D(B6_I*fWo&^}NvNIF3H?11( z!76>jmFnjX;b)0W62fPSkegWPRl_vjP_caLNJ6z`@2m;|H!tAqZAu^^TdU&|^@l%ejUM z{$&+l93_yW)KV0MLy|BLkAd^oIWq|^Qi$;D8$~_bz-<=SYO_pXuFWf`1*8t~Y+Mb* zvq%RVa7GVMoH(*a%za`sxrFL9rJTltQx4Zz+6v$AP6CBHPD#KS@^QfY^&;dIYkp74 z@L6ca2GhGPSk)~wx1w;n%QbKgJ=qy&0QmfIk@*Lz!vNyv%lCoE1&j$#N|yA_$54U6 z3aKZmCf*3i{W&d+d}sdv(j8S#56q&=`up(ph1mcc!BTxB()IYG{$rxlybv*2Y{yii zoQw{q7+QaD`PK+!NbZQ1si9L@B9XOORaKNU?4mRO0G2W~k~4wielgIe2!xQLzk5P+ zpo#~0E{kL8mt;#bB=zNREXMY68v#3zPp5`lgU;S^qmiDjOG*+6dTU>^5d5OdrwLU( zv^U+f57Qpv*SpV8s`vYH>^2_unGBLgEp5?mX%XDNr~?AQh;6>14ac|=OuoLTWUH<@|yiYPn zn2-&k>T1nR78RoufFM|978JFzhg0KIc59xhH64nNw_N`T(2Ail8p^#GC(j-0D-x&f$$DN zA01Lw9e=1Wa4C{|h8w?{YtwA!M1es%HT#xYbHy=JR`h4|c@MuZQhw~6w zS`#fOEJW57?8hW+#BJ%w<$*llC>}QOG2_QVaA;qA65~oAr@f)F#|DAE;%l!k6H7dagB;A6*+ z1FDm7i3(t!Uw)7Q4q=AVbjue>_D3Kab{n`PwlEZL&U|?UWM}b~I2mP5-oL-zA0UGt zOQi!F3?1iK)yC^_%CVed2iu?Rj&cumi7EPub_i;@ z5;6T=+%W`TU@mi(&yI(&4Cb3K_u&tfpnWq>X0nofz1V{Du#XLsoCZ!g&H(_Pc+VXO zl>Y#y(i|oh+IB29=$)`cgZho`7#RIN-#O%rjy^iBtU1qbe<%`%FxI6ls+OazB3+(c z%IFrmHsCy`Jb4+wANq8cD1((l-;4^Aq$V1+B0y}&M2WZ$QWlB-03#SAAIBdjsE!{s zThNFe&I*PlqZF}6HEF`g6bR-egJ2v5WdrBOCyqSy!agb5kun9USLd2Z61o7wm{{YBz z1up1#Y@$A{vPAP~Zwor5yl2{&fs|JBPj}g?fU$w4iZ`tbe0f@i`Hn+V7xJ9 z2^=>*06+B~Jy+}~;U$K`kOd6_{{S_fMOBVg1^_c8LB>hJ$R{5Iz$YFLS}MmX!%ndM zqNBF8d0Hh#4tFSP6&agb%+Q+a#PQY{^ZH7Npo7^zK@K*rm8UFxYi}8*FCb0R5taj$H zYS3hr+E~dU;*}ADXU1|e#{dp|VDw7HOgBF$cQ89mb>a;SLwNy5&IWn*@Cn<;?bK#^ z$nJ{3lBg;N74CYbqoT#ZBRBvYjEo%TgW&lcN|lV+vf2+ZFh-lGEs2^_UBu=_40i;{ z-Q>2=NA~{M>fE{DmumHISTPdD^^5MYnt_=H=**G_$;rk%5BPyO!O6}u)zr^a%4N3Y z;y`wV3mUBPkd>9jRKKGHe2w3$fJgZpc=+m6%$%xa@&Xk}tPN$Nk5LT5HI)G+VF3vn zkKph|ehL2o+?GNX`ohvBDUCj*IcJJSxu{1jMMxMVWaMN@_#t>ao=+d1r$LkkU|m}e ztVX2~YeJ{h^JCMJjbSN4n&!chG=D4O1;_W>`RSL5N>!Cf53~D!NKX33L-ohAyNABg zc%YD?N&y35Z=6$`AS~5K=@Wgw zuIs&@*;+n>pmwZ`x<;8^rDbag5_?g1_$=8can5teJZGxoiK8$p2f%O6EzX!oJ&3GV 
zKAv`;dv+((e&p|+Utgr@Ek*=@10#l03kGZ`C=@8#yns(6^*$quE@%ZP%G9pf zeGEq;SdfB}T$-M`{Nqyp0B9{(VB%X2(f+(3Dpa4-w4P3WPmFl#kKsz0h`Q@S-I1GH}EJ`d0Q`jAi0RRIPJ5PrfBw-up|WQBdnB!rv;!9q#h`Tqcy z$5w)+fHKx3a&HCEl_ZTwwvRdAv(rTba08r$;1iHP)brG+SR#h729fjz$dhYih$#G+KNO|mC`>{{Vph0IIg1bf-k4EN-i|ZBu(V zCmXVeMhEUk{QC57^hw18mh(PT205hh1ce5B`!|nqc9&!KC)3W;?fn;Di%1&E*Hn2b zMkIp7Nt9V*bqZMR3JBomcID3}&ao0EVb_vkSo_pRVw_4Qj_H4UK{|(UOQV=YsPxo0 z+xGj)08^D-)bUe$P_nj&SLmkiB;?kq?c0|Xoqk>KR?s6R7S9pD*t0JY0Q zt$o6>IiI&9DSy(ssu!2^oz);p*LWvfd8<6*0i(amuVAe;sp6^wA`Mo{#s zsA_3IRBB^K@BMOx&fnBKPq+0nSU;D(nuZcVmNc$ojR5(`RnMIG!2UILA$6Mu@<8 z1NNg$qAG5~ULHtkX8Is60Dx@`XdICD)Qsu~(4+EnSoa+LE+w8L)7 zp>-f~J)eN5h0Zh1I&7(M6ZZN*yOxg|{Yw5cwRk&@?w{J#-$&Cb%Jqe)+`Wkma*!YT zfx>tLZaicY;~gxF478vqmp+k%ach{OMH3ls)L!7ydxx`hJJdBA_HNlP>%k}sJ!goG z%;*LcBr*+{JnekrZ#E?1PiS7zJlslDfCb~zf51ky9;ezp*fo36!!>EEAc#cP9inBE zE>%Fe`*1BbJBBIqQyD*kZ5?DW`St4Rn zoUU9xILW|12hLeLclqLJ`g}VsSh)eq6C^6kN6rZ#k-I)Kuxcj#n+6fUyAZGoHF+r2hb=u6mN$jWsQ9wfRLv&dom}O@u@J zZ`W$+{{Tlj>rr`T`l!w@b7S*!b&~;&cB1x)7ZP+nCdmjIzNxpOkuT9eC7>Z>?;F-5SFUA5jeodo`2J$Gr=STx4nRdFL>k{rFVB!k``%WAf!>UsTs}* z3I~z%=Od^FAudNPzi0yrv8+iIoWV1qle}UV;~Dqh@y0Qek~99PScL=K`5%5dK<1=& z&ZTZz@=q-BM+`u-O(NnT?{Daw0b4iBc&VOh44a2J4i$7d^oO8_enOYA5IzEO`i9;5#NM0mzm zqJJGd-KXkH5?9eR9XL2ys^w_Rr-H0)FB?eUlB6qRJYe-C&KV>nrII{;@bdAqkLCdS zN2PlkzdLX0PKf%}gDr=FXE51;+)Xuk!iH$sloi{C_OkHcWDa=bv6yj%n~;OP?mnL0 zHYH3<#O6#{q`M}ISXq(fOiQ&k&{+s1kT>s7YT%5q;Ah4$)iS=zyj*|V^Ktw{goc7* zNUR}|wP@l;L@l}mReT-^P!xVg2b^aksQ^K#SOcf=4B$h->BTRnUgbtsCD_Xt*iYvk z;u~-IpPc7~+DS=ZvG3~(GgcQBj@wA1o?GmemB27TRbq#o*$PQ;&V9U)2LnHzjT5jf zolCDE-*8L;UVubD^GF_WVR@Y#5Wma{n6d~sR11UW#&P)bo~fKnuhUV^(BV|GLS&v> zD<`31wP^j!t6S5~zzOGY+qCdMZ20L3!9%*u$)RQ@@a>jKqlj2{Q!pPr@(NOItKpPd97 zyf)Fr1d_um!z7H$=t5wYQb`%Zdyemt2=IC580gcL@~DT>=ii4|RD?q$ptMT$VToLT z9gJBd?ZE{}B(WT4`<|r<{VoTm(iNe-PgR3Y(x-$H7BzciLO~+xe%g5q`Jz{klSTMa;3I_%1YpamN|czQi?fF#Z|tJ%D{pc zDG$%;##v4Ve;q=+Fj;Z){$NPn36eSs3Qlzfk#_Gf&4SH=f>$Hkl0JFB@y|qXs3Fg< 
zxF0RbNhQmc{@X~$9ytF1T=nNbW~5MI3H$gMKLSF|9t15pni?-IJtT9rj^RFsX&xZsXY#&h}S zgU?nE5uEvipgiH!wHOt9mc)1|A+D{JECVw$6O*`}e{7GBjyj0r<_i}0tO`j~3^glS zlay62A?2e`{{REdJmVkz_{UK8lcx{n7>yxSX0#=Z7J-q%Z)L_uOlO`LasFTB(o&}&RV4ZSp{ijpYtH);DQw7#AP!FVjx&>y z=j0Rr0KY-nPa=p^ET$P6>#}Z9B%}^xSc`5ygPibx;r{@yQX+0Ct1OQQY^2l}b#-eM z@SkX{BP%#YiMKEZB>a41{{W7hD-=Ke(eg0NsA@jtwMOxhSW=@EC1jP>0T?F(+Qb}Z z$DW-E$0=E*%RW$&GM9LAe(!q~#FE%VK&>DtI)Ff8rNLG?8Q^0$>gbom78yxC_k$=} ztYO%$TCk$3(6e$x8#duLfOF#~$M^pD>O{)LuufNl?3UFq=AR_D^;-IMWr|DEx}riY z2X<}P84(iS2lIfZ@x;l(Vy3Mq2&r${-r5^3M^Q8tiI1$mR5g#LJ*5KnOcCljWn=#U z5$2q8iejcMDv;fn za7Q{Z{oeH2FpUrD$J8IKI;qpB++7DvP14!BUOf6LP7c=p0E;IIpp0iZ9G<%bszjQE zx##ci9KvSe@v6#J!&cMl&{OLkuKIEH2enCm1mP?~mRM;+qwxu!`+|o@arW7uHs_aM$)tn~{oj!MDknMh+S2i5XK|2>NRHCYckDk*EzhBZs&_3TMr48I zj3^T3B!se@=O-B8_2hW3hw#&uxspIlOHsA=aUW*kIE?d$kZ}riEU@%h{JbGbB5 zJrMTM^>)?!$dP(}+DU^&Km>hQ8AN|q9{Sjm})OvWI1#4d;2R4r2) zWz7;c(}R;8h!3Y_AIWmc7_b2TpBxVv>rPQ)Q56B%Jv3=EDz+7L*VEKR!{uSELQ9YYM7mlI3eo{FdCr4CE+% z%AlWNz;Dz6z~icOrDGHM<+BBlb5yQu1#r`hUS-)~mbB8$|q_L4M^M7;3LfZTKKz&?L- z(VQ+KYJ;$STAn(<=c5uH9(_wr(~;Htii#-ylQec@D+^?hjsF1V&N;!s83X5-j}FBm zfR6`(GZLGaFGQU?-msfSv~JRc9p24&<((l^AzTu<+mc7Oh9ux*1J$8{#3^|V$0P2$ zZwH(88$?o1qYD*R`NZ^QvlCo`T{cS)Sa`t-9yvWo$t8v`8_yj00*n?aSqn+av1cQl z;y%#_x=d#?_Sa$P9o-(W+x@{VjocK@B-AygYq&NJGV1K%rG4ZBlHb3ctjtC{_g65h zs87-&0$M9CyAqm^7xcTDCb#t_)#x>f$b(AzSW+7aCJb!I3vNxyI0xN;GoCt)60s7Q zNqCfk4OH>xAF01Zd-Uo*b*OeM*Sm5EWmeU+PfA$PV{Pv|W)|G4s;44F;BUwUbJtOa zE9`+#W3dP9-cTx;nQU#=HDBSI>R(~#UrO#)i&4~)=+zo)XHkx80@Tin{aF1tf=3>~ zlCn8rv6!4=tPUZA#Lt{o8p4mW`qy|NH92oVyls{H^S9Cku+}2DmShpto)#oZ$MmWO zT(H3LkCJ-MMh<#Me0uw&N)-CUi28}!GtQJ^vqHuAwxsj~6Ev{0vB)fncHxI7;Nv;Z zR{sDDumK=_V8Vv*b)N6hYEKMSwRm*fCD{T)K^2et^|0F-Iz^Xp*DIBzs97ameH2#(KzHiTRl!#*Yz|J)jrvcmqpWH7a%8 z6tAgP!Q4(U{XE8ZH#p8e%j2x|;kbz`2mo{WQ2in@OIkeh{7QXUzooTa+o`C{3ekYP z_L)PqiP^uW31$O6m?X9r894Gf^k0W~bjdP*5j^*e2T$MS7}&YTIF>3kk12O|b=uZ# zlzClYh;2w!$r;XaLGng^dGr0c>n1FkMRXtv#6Fp)Thwe!X~R6?2wu#hviqZnN4?wd 
z25^5l!0M`&l-$Ln%Tm86w0j%)CWlne^xa-**1Vc-uLN^bg*VABs|<{WM%p%j1CfL7 zJC_^|xb8WKmn|tNkTIl81&p7(`!zU%-?45$CAFgOjZ&Yh9!nS9(I$QpPo56 zc=A=5hl|oA?M!`RR(tj2Wv5Q4V9cc@kgy<}w%|ORjtgTyj(V><7!;F~0DNE1_l2F{ zW2}%Rg>=g69D#(=p^^5C9tVsNNXQ>PPf#Zof`aJ-d%&5S!_s#RN_C`?YFcy_6LD5p zWKuymRVNGy;l4=z-1L=j~k zS{TeQsC9tgnIouSRaOZLOwGW+7{FbmkBoT9@&{8V7KH~Odj9|o4i-(|#?{qP3f2&? z7)OdmCmG6+PsbR?i~*bz)N91Cc5fek?**P1lFPoH%#uja`hmn=w8Trd^#$AQCz5$T z*nIUeF#{uJ^)>Q_%po>49q=q6H;9<2WHJmDB!li81D-}e=Z~JGUNJdxec=^7Ad32= zaqNa#@;(46=_25Ku>giYJ_+;CryLR8Ao_mrLSqPS-n?Y1_TO!~EzAe65-vIRg$ScL zJe-rBMgTlb{;KK92b+B$yI4hj;?yL8+1eb;yIwI`DN5uIa3xL;_c#Qvo}2wyV(sKU z&^@Vp!!Y+Cr)8Q*A(h>6B8aQRtOB2DD!d;#&+5)PkvvQ|&0i=&X7Kc#*3~0n62_pG zc13bq1Y|0n4g%wVe4mbp@nAcM9=|S7`ObnqwXNphmcwr<3}j~jH=Vd6AP>$n!Oxzd zR}fxzCIqaK@$}l&DMbGOaSSpZBX7EzHzB~w5J2z`0|$~h`5hnnf~!wacs|xRy3Igh ziUnw97{hPgpaMTg+<&+I4+E#RChv8 z0AOU`3}@pc`02+O0C(>N+8E@PsX3Z;v0w>ZnZ&ZleNe!L$=&`VkbXICsZ8SX%8MHK zz~)oMTTRrVSjVlU+d;!b@-j)hR|ZjLZMn(vNeF?2%!d4V~j8-K5_BS`E(^I6v}-e&mj!m zQY<(C)<%#Zjzw+691d`Ke~bnH06!fgQrS#1!k&0t0G=4~Tr(e7#M}&SB$7G6_#eMQ zScXy_2Ae=m$R(B)nH4fgC3LFuBJ_NXD&) zW*P*o0)W6euWZ3Yt zl=~vr({)WkNHw|w;Y%Kwzn7mlR%VT3A%XTDBP3(a2Ua^QV$+&@u;ZiI}$oB~@(XDE94abvk=;#$2?4u!z(~sxJU( zy_u-$38rhhcW&!F){il18kU+ZuBB!WH_Ko#$pv3-08}n8Fh@j$&nab~UzxU$NeLDY z?(k$9_K~PtTGq7aXh&V28Wy*4ATi@=#u+34lN&}bizek`gN%@6S~F281I&B#f=bc_ zRT3Ki0De@}Z`L$BW<4u?)_5*yP(v%q!AWI?Rxb*xj5i1mIL1#*rKS6*1K*TH4sBu- zYdTF`FHW(jX$EGf%^%E&h<+7J&oAaT%{smWk`V3HYBF*~OBy=wY( zc&>J5Z0h7XRJ;*f$y&j{d$zFhi)Qm>R#x(x`cZMWxe`z>AIyB=XWyrFTtq2hsl7dBQ zA_rfsrA4M)rKk3V9UD{rlF^ewuUtTqA=-t*%QA*Y_}rLj2m!+JmR}IUB=?pFi2K7K zee9_*YV0aKtJ@k)r`F7oz_H6+DC&X@Qk2oNHphv>C!=kM0fgVv9~k2VoMr_?=9r#@ zkD+&jyh2h7e2>|oKU90uvu5l|o#EQLg!&ex5{UHeBEN4}qcYuRiDb2Dkv1s`j3e|& z1S@VWla90D!G*`;fRa*j0-`n87TL&-Uyktug~G5=%`ku|L8)`0AQ&I9d#Ss-v%7mx zpJ4T)d90*k2wpd6&M;3N%;4ko0oS5T1v3WW2JgsmKeDTeKy_{{{X6xKVhD= zFpEMv$9CxB>uAB^X_+xym@G1q+mL>a=fkXB{)Tt|0BN74`a^doWT$vo)2;o2OtK(!0+k;x8w{u*^~3yXsTe6~#?K_AkQfb1^w_^qtaLvPW5P#b5#A416dgWI 
zs4mot?|FX{T3>4EX&-EM?uTZ+(^57PbUhY6-tCUl zGV40VpHvZE#-QYg+-{NBTqgP>F&CF zB0C0B%$uD-7!0JG3}?;Z^TTNP#i1+O`Dg5SG&vV zUYGS6#-C65PxE?OL?%DDvoVL)Q=doDjO6i#Ld*tNZUDzVY4K+kk%JP9nf{kx^9R5 z*J<$Y6q}C3sS^`G9mDVZ#&#lpa1zZPQPLWzonKL~h8a+E`p?g`bMi6&06x2*0e~c> zG=%6E>QTN#ROYbA?u54hjB%gz=_G`j2t+ADu|1>Sj^%4~Ra&&_tvz^|1d7w6dj<}C zrr<}90Oz40D`cp&I4VKRG>=(WZd?fZ%hc3u?zN+86Kzt>>Q6H`9H@Pn0I&=IILPA!W2RTcJV2_8Zvp2} zesDdlZ3fo4Pg$O5-j$}U2#!jzK~`leh{R>3Ktcy9pkyxMJd@|9S02YO37JvIeLisg zrebLef)3r%mKzc1F)ejXvCSTwk|yQOaK4}C9R942Jo6Can5DL5pEe$+>jowZkpkY; zbiGL^=&z);?H^Ai>LqBHCgKY_?An2FS&xu&`;M1aXhjaz^vVI3(lXeDzEHZgDw5KKylwyGN;rYA5OQNiF(x^pf+J z-DxS2Gb^DE;r4@)7=!0H8R?hBxP*z07HOgOS|KVzhKyq<{uZjz?4jS3q!usfm_{mY@rs3wk@q(6Ty*MT9fu7n-Uh}>{b2f@fbeD!)x zK6;awf=9m?F*!e$2(7N^Ix9kgUu#R59i~+oCoD^z0V8$*1d=)F*NdLAu)1P4y4EmhHy7w^yAOeZj`jBQk9{#y0H09Chm)K4D%SQjhqLlw~O{N!By#7Q;!j zd&ng6GyY_Cq@rFN2-5z7Yc({^=#stn3kIQI?aN5Jl(=P)0X*SJO}WP#y2{0*tsspf zUjn41L>*(=A4NN2%hQ)hp`=>9WWfIbZw(lO`UJ9^xf}@a0B_%JIT-7e#^H`NIR#bj z@uNKnM50XqjgCK~9r+Y(ejd}kIatJwqiW7b7-8uQyB-L~2Ok8Sb&9SDiviMxjEuv0 zhm*GZvb9l1AdFEt=3;O^~%O=ut;26pG9DoiE8C(|?EhM=c(h|>R9NHk| z?|nKQJvCc3C84Lt%!zctiqa5p$|ZH!NWkDR{>Q9|yeAbYK}Z>gm&|+C7%UL_LMr#h zhSw+EH=|)=-784r)KH)W^#S30fHwi~IL}nh3l@`{r_?{KJs^e583%Rfv$BMkJiq`5 zl?KqGaXf-EjoEMV_&q>n;!6Q9W{N$bi`Rbg+ zVu$9Je|F#9VXFqFr9yJ;B-Qt;Y)6ifCN%_mOAjl?bAiW?*cCcr04hy+kAHY(FK8`p zP>=v7hycFn3VKC2<8I<`JRIZy0Em@|2v%a6^!Y-oygBO8k{4*~$8?|`^RdGZ-1|Vt z`Qym^b0oZOV$&s&*Vu!G(^#hUoy2g*CPLuIp<@k?E?bkyBhNifnaL_7d-qsphQ_-} znnxQd)w0ZXpN$sz`Pi?VR-jP0z>KGyS!j1$0z>) zFu~;GIpe_U*ith0QqAw(;!s0F4{DrWaV)kgLslq*&d(u0xhu{GC;;5YBz)r}08lA@ zzNC;X@5UIY2z9Jgjb?g3ZGAmp$!0m<(V2V)S3Ka3KsW?^1IJA{X+o3(7ChRNhNOJj zooJznXPyd+Bj;l}MeV^O1e}q_&+qSV=*gUwWx)!=l0Z1~<0t1I z9VJi|1~8mQVMyf;CPi$3L}B+5yq;GeV2}oWGv~nS1sj7w@87n9ctYQSZy1%Uw45t0 z(||GxC_mySl1TA`oOJ?U^Ef`Ro)lygBM8ztMgyNs3hi7I&w@sLgP$koqCh6i0}df& zRvbtH$E9I%W~?&*v1(=yD$Q ztqpq+Y39sqM#NdM?ik>F9z5`Q$;VEfA&i$*OeA%9MsXq>rS%$(%KImpStQE;07|TR zicABy8}Z|j`+oj=rBg;Xyl`5)T6(p0x0b#$w4tl$8;) 
z4fkR0ks@Z5(c)J}?k?ZfqpN7!lF5QCkm@qAY_3l+>;VRO@#7?ZdZ&xUaSR;XgphWn zEALn(tP9d4>Gvj^r`e9{T$Zhh#5%hAj^mN;BoM541m`|Hbqua8#4`m2m+<|ddqjZ{ zdTq|$(=PheBlK)WWLV{=9H`C*p%uO ztkbdS)}k6{A*-xRs~oM2um_Qb3V894o}*V8#v~V-uRoL)Pz$6n5Jsinde)~_K>DIb zeOWi!nf~o$KneX08DwB^RIuRkG0~1YjdxVl9~Ro`hpD z65OnoI=-J+B~Byq!@5^{JW#tyNm@vX?%K?V>z@I6WApyK1j)r$zgPs$AXXl`-BQTW z%UX(hD7hOa+=rjiSAs|R@PE&$qIGLS&EeHCM8XqVy$oSZO6iihf>c+5kY$R2#v74? z{-dZ&@V6wtx<~fc(EYHIw zU_L_rVUL%}G+*JbO?iHp=?kS)8iko;u_V?Ddmv*o5Fg;M_BJ`jMtb@|OG#NE(lg|x zW=a_~Zwtn+W|#7+?-(s&(HNS>%!LH|loB(NJfHr0G_mg(B_yOU@iFKB0OEBXUeTn* zppmQJNsMgDWGKqXkh#tj?d19Y03+3uEFc*l$W=B$OY>0zaw zjAOnG*nfUH3)k$^FdPem?F++4)r<*`!BZBqXL zEq?GNWj*MC`ahu9i?`oc)Kk;dHQC!xvaCrctWT#OJmFLG{{Rr{?hX%v7(5)gWL1Od zqpD7{?uSO&)XrOhh zl_q{#Y0lGuj{qX^@yYAdeh!%^8OTxZ)5^bCr8z|;5gugG<)vQGmFEB;44^JZK5z$K zppu;;dbLDJ9Y(!ve#x&xd~z_AJys)w{{Y|qeKA*hA1jK%(03~7Qd+Sj20Jk{b5}3{ zfZ>BEC*XTe`ksMcEhQ?1mjpC>BENOi*3fED?bGR6j87C+hpmw!`iPrzle9L_JpTZ% zUpV6>b{Lh)6=Ho1O6)5iO{&|APbw$3S^|h)(!w@IGF%^U{>8p|uj&-2$^@Deu0Up*_F3962kZFZ zMmWG6V?RAhC>e=ls@i*`2`!{Ods(72SgcPb9IR$K?j?a-lDM9rd3 zEn-RN`g6#+jhV__ykU`4pU&m>@xj6Uk5Hr)8dPuZ)Op0YsSU-hT(JYRFQ!H7or!Nx zvMhR_Dfk{h+l{$7Cm%gbsePK8T|o`+#6xz96^M9ygSoD1_G=`%)rk=Vf)>+ZjZov3 z2Vha;57nNoE3p#7)Dk>j)9TPU)}qyjtp>MleLU8$La6X+wv-`?GEhAZHqPPtd%B0C6N&d8OJ{T`&C)LK78XG^ulcUZ7?mvu=`$u z3pfRUh3WIrhTXrg*X%{`}P;jC%@&Bhv%-=N)@b0-rw-gGBs6a;oX{ zh5!pjHne4xhzi@4aztyAG4gT99FDqtrJfdH8VAyT+Oes3PKwgif3~qV;Hah0IhLS)W%RJsXn9_lPDqHB_;kQJHt$~r^i+e zCjEv%09kA9vdRhE##dir;DP#Ci9G&#wI}0=im8y66d}u#%TL-MQf3ATReD1m-~2)M zKXX?UX3(hWRc9WKqXqb)wGD{3mRRIRi)w%ZfJewDBa9NhGhE;Q0I~+)mj}=h?8A_~ z3{~__@Uu@&ioI{A{jH@}npotT37|5-l12iFVyh!f9_^pgRz%#O9zfv4uZxNi{{TfT z)l7W(QR~U!(wTsUq+VP2neTlzS*F%w(685=g|aH?3+qY&J9Cmt0Dail$Id@JJfDsg zHjBcctnNVdw~abM8B=A~Kd0{!_5T2heL~sN?CM>qpx9?JO9VEMfZLUJvNq4@kW7ap z6Nez+jF3MYJjPZTIY&RM5qF8ROalJ^KfFilpTxGaZtYsh^w+kQgV<=Lib$H*s;!N_ zA=J$qJF$(rl0*z{HbHxddl8Ttg7Y1aeP3=lX6PNgzrIeSPT$R*=Eo zFSNd+{U+`$F)!&Fjr~5vg%;hrtl^+22FX1I8xlDPaLN827!Lr_$NW8q#L6KwR)<01 
z&sY;QC|5|EZ7Ws#Dw1isvfHm<`q5jUraxIE1eT55D(%NOK0xv0b>dS1!s1sVYFMPU ztork-YXae^18G`Ea7gjilUCJc+QFxj(b?*NPGfD)vB%E!Ae`rpnrNXrzVHh<$Rs>>MM!Egxk=Z^=#&p@0&(ygpXov#j5?YnjEPbG~hSq#Uf<>-D!^7(iC=P=a*1w7q47WUy24((5#*6>k`1XsbwuGSEoTDD=^fy;}p4 zpnx)Z^Y4oAf9aD_i2wvFIR5}Ab|LOY)wDp8>T?>!yQ=^y(b!QQLlirXovYTj z->fUJ+Yaw< z!7a}`=RXJItvqH66@kn6sd--Cv}UA*S`0NphhMd2DJO^avHWwN$60x`%?BNhwDxe& zBN>YknMqvnjyOFwWP-%gm0_z8z00nrX7>H9TU+~D6wyp*u{$@fAO!3RS8*qfcmQWT zZgAWR_@*H^d^3}&It|Tj^N1F%%v3qkoOxrvKCAx#`o`8_o?SkzIgk|F3KJIG=PCv^ zpM&v$I`@1J4U@y9nR3g$Nu~YTC6E^Rl^%uq8>A$EPv}cdc-+}}CIV*0*DsBs7bQoq z9&x)QkBs=|#ZzC1s4XN00@nauv^V?3sp(=Ey(2(XjKs@+KT1Vxq=Cst0Q2Mnk^B60 z$C7Ae)*yZ1$*Dy&fKszWA!YlQNdqt(6Tv@>^ZojC6(|0vJiYxP$as%fXQO0P`?~&{ zyDVSUW_+G!VM?vgaXFC8&Tjr`zWmxULVw*&)02oLef=_ zPOOSc7>q2+{C|kEXZxJ|^#V8s7g-b(}s>7pGh(|SO>BL!@G^`YnpYZ#* z#tF_2dKIlx)N=!v^M5)*cq+c3rs{O!pVu#4hE{Ea72xs*F>r9n5TVd?rVQbyKh6(?N+%8>Qe5jJ&KGVnl3{;WIILcR6BxwaN5x{-sG_z z%_c2QiCWVvYa&WU(9%NV)Ih8_APoFx=`0Mz%NggmNwMecuNWAlqEbH(Aa{YNH4Jvx z^Bys!&-I5-?kFUZ*0HDRaDnZPPT+zo?&9J6~1M?Pyh`)1;p1h5$6W^3GI;QVZIj7z%Q68j^fuWb_z}N+d~HNh(to z9Q;3?R)s4KGHR0P2?eb0;^qAS-j5nhZ39ozDh)qRj*`U~WlwCmy-PyvjXc?wt&3i@ z`DDNeS{`r!;|zXr@CRPU@OQ#)Q!MVpT@edOJ08HWYC+f_f`5Fk` zH2Pq(1Vu&oTz-%|0s8^h(%?tQ^QIOKVqIg}!LT?44F3Fo@6@rL^k^;&FOD$B{JJp} z*UkxaN#l5AjU|p%DCr`HS5h!lf#8GSgVT`Vj6#x7prn=}w4)KIIuX24^=%HH9?&w+%XzAeKL>AKR-m;K4*xYFW2 zjrcR;Rb%q-&LqF?kIJiOt@JIbR`ME0+ z)ajw??Ddaq%v2(U^&_;;aCUa7ruwg9%|Q3zkJ8;Fk}9{cn6KczG4qbPt^>q|3mS>4 z03}S5um@4nm(nZD4jgG7IHOJ~(%727F+jfPm&qU!7!Tj`{f}Qkq}H%uP9U96U5oFL z>%km&jX-IA&Bu=)<J$Z z5OYwV)#Q>ou^bQUNirDTKo1+kfw*un{{Sx?WOkE52`8f$ZjEnm0T0kgBdCETuXz}N z+hY+nGx`&2frZD~c))r<+jz98y*@j2P z2S1LDG_U~b{{X!NGsgy?@dYLubt95MjAZ+tT4A0@8!!jJ4Um65Nt74LK`o(6dB5=n z3A9M+I;2_^a}vvJRCI85g2M!ILXdZ0o;W!F0GCwcrBjsYXXf6=SAtP$KovW7174Ti zz1>ICKBa4Vot0;dW7-A+90Txww~jisI~cN~lmyX;0JqoQoJJBa6S2*H-`vM?7^RJy zY6;?vqjBUgQIc`^IOK3T&odC5szLI18Z*)-N{OL{L)oW6AWE>p3*n4$#S; 
zpOMZEdNDL9Ma%i0pT7uBVOC|7LbcxQ^HLH$^$NrUhwwIFj1Dq7kt#_pw1OZ>F93ZlR?q`%@5i@qfs|h367sqymER{*N!8Z2t0n43`3H%I>tt13k zKCXInuUG=7i1|B{Q)>woLr(oiMv4gj-wKKjFs+@$XP$5~&pk@1Qp$h0;EsOB%)uo_ zF1?G|+Q-zs;j5wPJ+NO;m2%G{?kmF>AiR>lAtP5f&$-C~S0|3H@ZJ-O;uE_pF>!KD z02cmR`H(0Y5d-~g{sg|BKY83*+MU@oxhk=}ZD)ug+-zc4>+n%f@v!G0id%7$Th6Y0nBliJ;hq?-DahgrEaZv~mkdQn-8&$%8vkXPp! zBomN+I_xmGX;^e+0p9PdXU?3t$_i0#p6eI7A3^?{)XK%6Rnpi?>;2Va+{-2iEJL#v z$UO7N10$@iCBZPb_JxY$m4*qE zN1e`iLbE7}RwVI>a(@60ykCp_0mUV}+&YZW&im+pXqg;FOvPo+m+Id-dc||NJ5N&6 zb-Ai)+H+Bwvjv$o8nRY+qruyR0FQ%%`d9)(5!aLAWX{1&RGGk~L+8(tt&!j|`1#`}@_!X_reFa{ufA}cq_~fz)a+ieVr@wv*(`EItq5kH z4ZF}JHsia1SZraFf-}*{M5$`dW#$!dN%}X;h%Ciacs6r$TGk${QBNJ4c2C(-rzfc* z%E02abjwJ@bAnm82Q7|8)M5Vs7e6i*T~Fx_Lq9jxm*)&ierVdeSLm*X*?LSJ#q~0^ zeNET{PeE%4waegdW@Tj)G2k<9^cReJ?~Hp_0Qfdsp`REtFKUpUyPjQKh^;<&mK^){ zy}Fohx<0XMboBY#w2q0|HRU3?T^mbS4Oo02NggfoIU!fnWk8^uCpqiXF#H>b;M3fY z18Ty(T7YtM%+fG182RNTNn3Q&qlYE+U^Wq!_h)`OUXN4Lsonmg18%j8k#6;gJKYJ5 zM$`KK3X_bGLF?uZh^27#aXG)~(|e?o`DvNSIa^1!jC6Q;t5l?^X=AZ9@#o4R^;Ola zn{%e5E*Vi3Xuzongn%$z*?`Ug;FFvjj=5Q8SqTaZcXE9IiPcbR9(w*LbvtsuXK9*t zojrBA6#oFyC0Na2-Zb`r2RPa?f8x&>=hJ=ubKm}Ftj(iOHJ$VPkk`XJ2=Rgji32+#jeHt@J$VBgMotFoM*}FhbfUX_ho_Rf04@2J)*|2dTVhD6I{JoyIP4=A%iOU#{qd6KmA0G zGsb*@m6!yekUV|(dclQ&T)>H{>d&cON0Kw8T~NN2r)Z*uNW*)8&nGzm=N#uCbs{9l z$bgzDI_hh;?Fg+zouWHUyK2Pq)>f}lSeIlA6^B-ty}#2gQO?{C@iMm@dFtwuDJd&a zEO`oZYE}FqEXCRn{&>(MSGQiw+v1WLB=v(bBMg5<*baAX_h8`uzCYnGXm zA}xPHw3@w`oyR?$JX5EtY65!c3zABD_87$e;m@~-H#-WB(;T1x1cYJ3)PI}_ehRmF%huus6 z01;5k4-9C%!KqC(@3+t@$6P*RXzU?oT>k)!Z2&L;9CCbgIE*Bq5KPiZ8(XDqeW4~} zn?;JBal95LSyGjF0wTaESr6)b?GK{2?tA_f-!fRd{9mz(SmN?CPHz(#2NiZ*K5CvPGlFzrrj_W4-k)J+0VF`h{eIg<6#) zwDzXn8*0XbJAhp0-~++WUqgILg_e~ zmqH^Mkr+!NC?tEAlfljoem}QfI5>VSODIgG6>I^Fw4A}jM_BF{wEM49P}j709u(YS zI+Q6Y4nZ#-{0|=|kDiTuBN%|=4zPdXTh9Gy5$8(3DF!^>{73hTUG93Ce0M8a6yjg^ zHWFCOkCrDZkZ_@XetPylgYahwf?UO=l(l%U4fA-UQWrR==o{)|5s%S!4Kp7^J8zLV z$@AyHK05B4I~dUdl#35!eN5GR8p)qexh6z`mb{ET89qO#0Ll2rKOA+FkKx#EAt=hk 
zrN^k~bb@&F-_swemHw_{O_E5oZAu6pc8%4fgpt89mpieNJ*51BlkwvGXMvs$uGJ?r zwHDZoShq?M1C*#nhoW|)w39~Ove-reY`>(8%l`n8LCEv_=dMRnia+qNWo2$~OAjZXoafI}6^_b>%Twd&5+xO^W*=59Xt8VhJOfm#J>*L( zEL)7bLo{b4z}t)~j03?3AQ9JX!SL9`;g>X^gsUqXXnx{7Yx;RN3Y=`@ViW_VgYF1E zf+;_1q)xAc3}R3>?T{ic1bG?3^PY3#t(48hOBqs4ddI&jhVc2#F-b|YR1ZgiwQuC> zQb;13PETJ6wG3d$H!epWe1FTSu;&q()b#5a{8=~&cy{grt2S5gXwWCuHCnnpfSOlp zS=S|!qE968$kHDnJ-(W=a>Kzq5<%-z;qC>(c#5$HiO(|GOpxI>a^=B#QN3#)7V)HG znQM8uhoPu%i0LPy_aT9T5lF6a^d*{+M01zboy{pzJkm(b{ zUU%X_B87)*5I5w8ImUR-bN%|TC&^IEseqBl)%K$es?fy>5JZs7(lgqBxL}JLgePlw z&I0)(1oAQe04}1=n+iafTpDckr)yDFc^yVQvF#D5@HXcpc=*WaiiE`FdTU>QI8_yd zXmmAs*d1y}OFWUUs}B95CTtdOQyIdX;Ga1Bb!jIIx#&coa+6nYr|kxZ1QQ$Q)-7vF z?|p8QYdUQ^_H^ezYAisq8e?(U8Uv6vuHOSYv-8*5m^>`G5`?r4?HFl5&+ka(u~R!~ z-J<^hHEXR)w?Xrd9{&Is8pKz1{@0C9{vJ9LfTNID%rZ~^0HPQChB`c7Aj4qmh{AGS=;td0VMo&%^9eauKA()#+j6*OU^DK zjX|CVW!M15cx?Xuf6J?c>S8%TuXy%|vevXir|mJf6Nx z#7Hvni9?FjtV>HEis{Cma@ClHJnvICx`!@yU@H`+t`kCB2$2k<)TM0niW z2rmSJI2k<~MRs)`=b1H7F{@>(;RqlFcn#;gQ`=JxE9h&775ZSn^x1 zjsF0sUk9IuvFXKOl0=Eb<)`IVsp(f7pJ@9V;Z7@x;=D04F_OwACWXNnfU<+=e1UPJ z!4+Xu&}YN%8<7{{WXgc|xV-{{X5}_4JO#t4Q&;@dVZ>eN*ll z%@RQyT6-{hN$JPwN}-nvfJr3sPm`1L*Uu@VO68<;LY`8@H}TtfQ>X8d}VxuK%k<~^Y_=KESbFi#R!De zsD&D_HOS|ddv5|`i4%Xga4 z74hRZ{EvgvfRK#9)O)yr));|oiv+%!8bwW|pu=Qi0g2t@W5CV_Jbapp3PUf?`1?bL zdq;IiY-!~rq<4Q%22Hb0Llem!m}CC{eh)(=B4{a@^dB}5EPY$+Ld-KS6;x&HtTazAo&kHP2?5W^{{ zg4KCyu02ijU^6Q}4 zgF;H|{{ZPjoRP;F81tTzfl$l|e|rA_Fu=8A)L%&Ziigrp&=sk+i&p)y728v4fyC3l zrWxF@W!nrQSfN~=Si66VC+YtHh4?hQFAbDZ-y#~nv9P*~XS5+f7Y3WpniA1KfUiwD*| z!g^XINHmRaOpM#oG}@3!Ot8*cx-_!3H?H6ig;gO)#v89R_`iUsiQ!3X(^L6WNG0~L zq4~4M-V%0ncLO$z-W5KP=c()Jvd?5!g5(gvJ6E|KkZGk^2L)zl+Nj)qlZMVuJazM0 zICdm0QkH^}C|<*ndU?i=Y!n3V{`XfZy(<@f)p7)j`7v4)8zhy=? 
z7<7oZ1@qvQ3%>_Gn&Eo)4~M=Fk;2btzs!944Y`~0ic_(vN9FG;(0%>dGJf*zi@n9E zNvT}2Zrz?=>sg(0`S3BffUBGZ7*XT%*QF&XKnX}-EIs`0V+L%)Fu5*bzk9i>X?N;H z>nq4&c5wTG%bpk>3Be3`AEb@?7o9K+wgNR)2p zUgCMR4Hm3hFGP~Gp(Ax=VVn}o2yF3;9Ao3Iw=qd6YL1c9g@PPw6kn-*$sApasp;=o zpx#$>ak4AOqD5kIIL-!m`55Y4I+VG9X%z7*0$CBD{R5>QoAma+rw*S3HoJCvam4Sx z?PFFjD*MO}feB)9l27A~ywl>Ty{`}`b_hrs)$6A2!a3!uGMq-VH6fUvddRMz7D2Rs ze5k=6xfta2z!ViYA)+eBJxbw0kt+h@0>nf+{@57F9)5X09WyN1`Wg@BAFMlL?bTx* z!z+?~>dm_&Iobx_kJ$WllbD0c-R}bRg+Q4MlDNS+2nNvWgYM(c;~se9sLdFr3DO|1q%!v0AMDj?krbXC2u874k(iSstA+u98*)E> zd}HWLKf`!+p(K)yL-%+!ae0zEi2#V4?O&%KPAhgSou{owZlvie*wXdDO!lfxC<088 zfMa5+#xgb@cn1szi+mx3$6@<9NM?HK`}66vXRCn$z^bhFUxVBEly>T*}LAEqK#BlzIL zg$kI7`e)r*2WI_A?cGUWt28vV7_7CuO1jG&AxXv}A(g;5Vp&h=;B`9C6+>uJmQY+! zwfXyK^wz!nPj~OvNoI)Yw)?BMBX2A79i6H>9+b4{sGq1%18YRp$- z?hR58{80fJM0MDIQbb@$Dxj`aNh2BP1{}=qI0JHE*NRNA5<{IBd;Ozb{uy=M7fu?K zdOdk&hFPGcs@T*B4hy@A))AKmIE;lNbZmgn8)xyt{9VFPi@_X3g%Y_{ZPHj0Em3}) zpykb+SrZdOwOHs5?{2Z_8LB~Ec_KTce+-U1u+JIi#y`+?^Ww7t+eD8UZ`L2L)PA1m z_jG!dXP>%9+Q|cODpps-h1j2bu2gMR7*V*APXn)0_)p=v7`bO3j2V*s(Pv`Bo?8yF zQf$R6_Gs}w59iNLvEK?n?b|;d>+cDRISthSWIaozqWOE~|Y7P_@B=Pck^|i^A z!~}*sqfZSncxK9?8(nE0jQTGgjr9YtW3#MnsV0>awXI!*ud6F$$g)ZsDnN~uGlkvB z=j+wka9ofnb7Q_nlVQQ6m2HrbdKgIk*3Uet!VE+Jce_O-)m($HMNuU-anjJRG zwd1LpfH^NJqZa^SS+^2^=Kx^xan*SoMPd_}XqIAWe{w(w;W2 z(ll?TkZM|gb6xDN&e5%R4y|e%I(_Goir-X(viepdKviLpV|Fb07?1!?ao2yqLa`hp z6^=4fFr2v(Xv(XeZJ^qq$Di?Eh%x*}7b;A(c5V)0k)Y=Ef16Oo%U}3P`jJiQ%Li%Q z^kITHVWw_VF+{31BmUA}0~1XZS%y^ByuBJWu+S8J5yC?4Vn1NZ9B8Q{3S6T)zFv0dEin2^Qn zK@}C;&_-4t5+-9ON@55JEN`z~TEPDRe=PMr+GKjNdU*g9lV{4PApB!L>(`w2VJps( z;rWN&HlITrr32}(Op6V#O_80I-};5e?0C*|o_Oe%p)`V`JS_+~h>a6ipt_Ip&syr* zo>B_gunQMCIf!8H8P5RY_xTs6Ntk8!qzs^FK6`l9B7Go+b<(zNLlTBEi`k&TGjUrwtKRx!WS^$EZM7m{!| zBhOkK6Am$${5+Xu$y$6X?D7?~GS!hKXG-}(dzW2ndYy@xi#deOcMZ*!!2|Q2NBsKx zR#_TZpx9up(D4qiU0O85n*dICb=c<^}Ot7wJ)DN^i6opM7>^%wl z!heN6*oRV|xV5`fp3D&>R99#P*<6z{u1+~ogPt*g{wyCE_{)gP!r{{^J1IrSgmpql zRHkPH{D+nTl*Cw`}Ny!+;94h4pt<7un*tW<2*_r 
zSI4YxTK@pmTY5*(e%qf(o*6YumDf$0%0NVw1V~Z+$AZqQjQ-~wgN`mw43eLS=VB%% z$qG)-K>qdV7S{8SB?Ci!+iw%-BLdLqY8~mXB`Wc>)^0xCCBYIXwtp0F6Xzv;MHn1u#P0VkL8SVy|MV?wW>=~T4(%A&nUmKDIpa0XBJJfHLH z)KHKB05p7okHt*IPF(qiyrN84pinc%{{U~tQ-K)EFApI1C;D`888k5TYH}4pEPMbt zKjqOeh>qp}lUYgRk}Wv~p(-7Y==*ELe-AbYBz&p|Z`gG@BrR-NqrKo?1#wI+t+OKZ4q$21Z+^lGx~s$1YBAhXzi#_jz70D*eYmmnlQ*E@A3 zo71U}d*NRSa6g7pF*68K!2bZLF}zAehp#q>?%ib(_zbuF1Tu47%}KOb;>PZ5!V zkg~kW%bmNm8=W1j`ZefpaA+x|_Y6sL+O?@Sg;F6h`$<-Kz&n{&Yjcgizh4aExK9wk zNX>g_syLVwV5>i4)y4nYYEnyA^wh>HDruwsV;nP2r9ppam1Fe_%0U=ipr|;< z$Q%RDKoD|fNKsFxkM9S1L$#(ql$^aZGb5X6Y3hwXMl34Uq?3xPEY_$t{D7@%^#UcJn_UqBGVq6r^QhQGbU*^oH- zeaz*6AA^oh`E-sTtenF!J5jkX4doAzX%My&-iRjj*)HkL7T5@%CD@N~I1R=J4ZFsA zzJo6i*{HBnY}P#kJgEuloV9n z92RA7g#>+R!B76mNpMa?vt65Ax$PN~hHb#~`FxCJ9q-vX9ZPzCpQU$Rj?&bx6gqyJ zsX%ihLj{PS!v`VpB*B3(=P~1P!tLnOEmSPt{lA1{nUa!ng*emHbm{YeJ4d(dc20)c zRMySqhRnt%b|HN;wPOK){FXwNWCt7B&V9$JWFbv* zGTF8jfvpr<#f!Ily4HyzSkdd#(*#+OZCjOW&bc@wG=N1J za{UFdXXq-u-llr(-24IS^mud4plElWiF$r>Z5Em2Ru))5Vb_c4Z5WPC~!zPI1Zp z{s&BQ2*Xs*^QT|DR`m@`Pdi4DnW6)Hg&S%}<12tj_JBY?K68&9ah!MJ947;l+DL!}1j(?08ixIB zMlu(`0f_(;)h-=@#BnKtUL6GPO@}woduWiT0@}qpwEndAmv`%9OVcmR_oplN&}ub) z+}Jo6MPlVk`QeBpfCh8c?la+TBZZgRr~;9ErQ4e`w?~Gki$1y9l>6^ZxuI)149}<7 zlx>zt)BgYuCxnzZIgRpI1_1H65W^#^Yy|v95_)D}<_z{`HRR1vXAjY$M5Rvc9(wMW zG%myK_!mO%OLwbSl4#N7jbX7PBPk_gAF494F4+pIwtczxW7%+$akz(GGEhM_Voe)e zk0{%dH!Ql|GV2=s%~toQ=hH-e@9H>}-MJZdBL{+b00I5_=~0J56P7B?nx)b`IrKy7 zziwFVIJ8TW=(;weNzx4_wTLHZZU$#}kJU1}EAMQEVYe&?8RO2l@582Qm5D-$Bv#s5 z&7~QuxQII>+pSh8f_4@z<9L3nZpk zdyq&SKUdN!7Su$WG_CgjjYDb@p4nlIOD=y&ZankxpC{*zvcf^k>SFecpd6U|z+vn{3Mr&%or9^?&^v!!)vd~cRh<&ee^dI<(@M_G zF+uuTNhOUCfN~MB^ViTn(>x+kho6W^{NyFJYKnNhV>1+>rDYcb#l6r(uC3e_Da!-J z>Rh8al%(-kg@#OpixxO7ki%?fGQ95R$GIlXId8YR#zfLmSp7HmO&}T?{fnC1(3fI8 zLiyv`j2CYsoac6P{{WPD>D;h*qnw$AKG}pseNlp8tyZ2U5yO6zP8dPzGP*NHz&0|t z*tzkI!_QwY{{T`X_8ef%DWt@t=~fN&>SL(E7nTFXWo*jt6ms8~vN>}M6;)NT2_WPX zw*$xbJ$Vp0K`BV_F`*IG#blaBl4XECV+MIYr;GwX2OCJo$3Ah?$SWa$cC7_}coGD5 
zmAAE41&-V@1mkJ~i?{*{5uE&S$?@~kEg4inNgZq9v@nkjYn`nQqpfP1FJw^HHOe}4 zCd9JY)4;bBcvCA(kh7{NZUM>2+u-rn))Mn6hNdp#dn`FoO1pzoPd%#@?%nK-ce^E$ zJqt*=tjw{QnkotR4u}W^m@gr6!;%M(Iw9UHFBKF@L+J(TKTo@dP2COd-PWr#O|`_7 zr1qb|!uP>!~y?&%9S!m`Gs3@OP2^nu`K9b$2bi#REhJ3^9> z?he&&8!(Bbpd{r>0(FU9W3)9N;p^%GN2u0aSGRQtq=lNRv5wU@;#jl64xMlub>^7wg>uiEGY+7%xhY5|GZRY&r(G)nI1~U=Q>$suUwg-c zKZu=9-2VVpHU9v*j8tz@HAr@qAp}aPxE}{^k=LVqHiXIHQW7>V*D-jhhaqmEx%z(5 znaIuhq$V_C*=7gEKl^{LT`>`*$~_DGImI@Yp=w%{o77-}7K+7c4Su>kmRS@sNgFo* z07N5h4pgS~UUIlRA0NO>;v^#sf~8az1duc#t4)8}{bNFQ6;iPO0Ek%uG`Ska+kd9^t!;1|x6;VB;CbURWh= zLup^O@zN6w$zr^6BZ~H29FS1PeL^$j4C9~9d;|ORvMW^&I(z!Uf?67uBxRbMvsNm^ z#B!yS^S6u@KiKseSCK(L>+gS9c6db{C2Ln$E6{3{p<+@Ml(Jyp_&kx5@<)%)MK)<# z%)+N9lrvHchTCbnHmD#g3(HZelvv7SyDFY>&QP|0zk}DL{{T+%ua6+Etd)_}53|N! z6H!JG<+&hvn5p*#oZ0RTp6J4ew}bxx3Bbrae1FTYtCM7d9J{$tyT%Lk?ETMc>9X#~ zhH#)~Bix6dKY%)|i~j(l?-c(4s=fH1 zQv1G5bZNSFl`N1&UF&I4v~jSEZ46Yi9mHcNJmbLYX>xG5aG6*%WlBRZ_wUv!NKDj% zW?Yx_7V>_i$2E@RtE_z})3jNIiyEa3MP8d5B*g`H5}8m%z{|+U!171CoE&wXh?GRZ ziFz{;&F>l*U^_}(qfP!7dm*f966>=o7~n#&6XAwRgZ^$EhyMV(>%HI@aW4+6NBNc? 
z@fY(`5Bp=yzoO`cdvSUMsaNJ)O#HR;>Ms=%A zhUC}*sW1%mMkgB6X9~Iei9hWIg}=#O5X4t)>ruwKENNd(+qE@IP>~r=1XqKNW()pu9-wx*c6E(_2$5okfP$)Gf*C zSYh}Lz(xN6%7It*>nd#3&HU%UTgpCp;{05ypm8}X2mZngsI}uQP4P(mx09iZU?u+$qzNf5fdd9un zx;>>=wSG{rT&S_1P6gTnBDGcu#BCed_&=!mKg2vbUMmYX0J6YPQ~uz7N34Boz<8p# zjvh}CBQcme-|a7yfx9Sc`dT9Y0E-P7biI4Fo*HpgsU;~=6CT~ND#^Gu6rYD4eEI$Q z^3Q48-Q(%gTX34zg)+}7*mYlljRHLBy|<1fM}nz2v{vce?MkD zYp810q1515@6x)%9;Iin(Par6Sz{}^ap%dv=gv9#m|^7vs)e*5J$8FjqsEz}Ia0kh z)Fhg;GE!?1uyY)&*b9;rDL7&=oD30+^b1ndGOC~#0lvRe4M-`p`%*vEyfVqu(iVBD`qr} zPdQ_i&Q5%j`RZAPSwK=l?^o$|ZwO!ki3Qdq0mDiY(qXg!a)*vs4>`c&`yQ)yLrs0s z4a-Aqd5}h?ShukPF*H7pB>t0;x%v71&jYE70)q<*manhU7!vh{tY3P)g`ysXR)KUpV25VXy%F{0>HX zkurkiw6qeS_Nek|Lvw}0e@D4Y^NJ59x=l6sd z7#hU{)j*f-Y>3y~vc||VG6IpDkT~Z!cdxIcJ3*Bj?%F$v49-wDqr5upR^#*@0SDlMK>T%G zeaDta=VMXM{v%j%c#&ust4Pt;a2-825k-X{j!O?DWPbU_A3XvTu&N(lNOA;RQ$?Y# z_WuCWwAA~K-k2cNWw}NVxD^ViJ8WqJFgze;)vMMlK3X>)^KRJRLY?HCdw;Co6F&IcTSZnGw-QciTK z#5!3+DHVcf_hy`SG}M+!RDZl!!C=6As}Kjr93S7FrkBNKf!_uj@a}9PeLLc7tFoDRnm=uBYzt7*4LM52RXz3Vhw5b3q zm)eqWa}RlLM;kUO;L>Cy|Uddzjdmvp-W!D}Y^lYQwyn(R z^XUlHr?E^zt6J^2CSsx*j6#GM%!)TDbM6@*C5b9lky zmWe3|mX;d`I*hG$B!<)vQYhH}08wmaM1vDO5$nFY^EdcUJ@6K2gw z$$yFg+nUd8_WfN>I@%VeS{n;i>O7LInM;Dw&g`MaL!#k9RSE}yG3>t#VkJ(L1{PjY zfC8=FcOcT6h|ESDj7glUkkqpRNN{xzLmofg_3RzbwUM_6icrJ$A%W-i!T$iKUXC}7 zoT4}W*8U+ov$}gT{hPO`(yw}<7Axx$q-kPpgY?TQDc)6b0^pO7aky{m9#;s#;ieg~ zWrULCw?C&VMVUo`OGaBN{pP7^UClg)FD-|_EPQ02@BW=>!gvyWIpU8p1;c)X1oA&8 zKc6`N0A7v&h+<`ED_E2M+2Pa z&m{FtQgas;9uUB^efJ=}P}TAZC=6E{kC5R0k+}E+KLh)bK7lUB3*wl z*X8519o)M6_C}!=q4ueDq(_9+GMJR$hTpqs%7R!N4B(HDdasSeCkY`_h<0tYq*uXU zm5RiKyExeC5Yqig)Vr@l((mfYr_rwsyCo8pM+&M+DN`%R#h3bO-T}}0b@MmIylH%A z5R5ls2WBZ!Zr*;8(<=xtQwl&v?| zp~8g$>EYDD0`PPijp)^65m=R<06dTf-snM58nZE045MiO04$D@RHP`WDsOwYnI9*I zv;(Ba8|habdNI#rIb?aOMxN4r$7=g`56NNp=RHcfp(?$9~&&xk~Wc+6r4!P03Z#-07>Kz(}T}nT;O~2Ens~}gW^vrNkiO^Kz{M9 zX}?xAF5lE2RntC-_THOtsaLf^wTRe;IUwzS&NKV`b+!0OdCI~|`}b(&yJZB4V%{gF zvCBWkk91d;QHI}ZXr{3UBCg)LHDUn|?%lcf9(~>panY!WW}vX=`$GerIZe+m-|Au~ 
zO4oJ&0IRwv)if^7sj1w9T!IZkrkzFXpR|$|eb2m{4a)grj2*w{jWsgoW++8z*I(K$ z%EK;ENm-?9&W*l~V$0jVOunCYhO4VbeyiE;3O1&%ZmbfrDM)2#<9POa8FnIPX*kA6 z`RmQTEtAK1QcgPrq4;x30O`}c$nU)=6JT+Y$Px+fUiazt@{6{g^pCc5n-Nb}Nsg6@ z@4hMGmBi?G3@~i&$OD7_0At5y@$7ye6wI7d4Z&u%){S>JF*iPir&?u>%u(HSR%H8x zX&OEV$S3=pA0w=^;wL54gO<+Jey}>jSFh;ZnL;Z%)#TFjvnMtl5II~H*uGiDLjm*g z^Vdnh_(m^|;&RD3K>W&k55B@8ojnqf{o1{QIX)5Rj`r`*q#B2I>N>&EZ1-F!>Jhb8 zv!}~*+CeO^xe=n_Kmm&$JagB&aLxz*o}S8PFqvr-0U#IVOT~0dniok_>+4UIVkf#j zxM+ILl|sIO*d49fi>J=B($sW{2DYgTs~@$V;>gB9y9W%u?Y{>(-BM%( z(y+2>9h!spi&l^It{O>l?{D`_+PbH5)hVx1imY|T>kg;Y4yZjgjz(evDuBLlIbqar zOji%VpsNspo!k}#xVJ&m#xOYCo+FJ~aQJnVOKTFqKa-xm5v6wT(66a|soHghpWQvJ z+d$VCp)6?C z5e_eg&yb|ekhfSrr&hUdn5e7L@n7zK>+OvmSflPM*CG)*dX%+o7FcABA%970N~*z@ zOhtwZ$PT=MbJvbwIDR_~i87@qCRz#ae~>pVdW%MmNplW)sC5>m#8tlLJEi{s8@e?l zt2U$b`?s~<)TE2;NF#S{9BBYQZM#0R-n0(l2yBS zG+!zA=Wcg-hgs0+SB8h&$rW*Thnpt1W94WQmHTO(1r4-NR>h{Jizz z(`AGlp+NIvPkvEk7(G--k|V;bgaFARg#;1)tbRYws=@|D}wg7g?@QM#UKj;2aB@&2rEWW^W-WPbBh5Ovp7D4V4vxNKtA?m01c;-N#JL%K=>O0jh)5iPQijQ7g5tGa7T!6 zH{{Vz9oZZ@`&&1zp0SH}WszC68hUNVy)@F6FJy*xUD-eG)C`lK9)3Fe6i6W;nDb=f zT$yPv?~wPTWIs`A_Xl!z{knDZOESY)8o*rIWd$KV?;6-? 
z!?a42Sa|fWoMH)9)9JUYSEVHJR4DUVSefI6S+*%+I2mFT7V>!Vdghd+AtgXZStS8x zzV(3LTeYc5klU2Jae@~iPy_k)fyu!A`l^|(SjDNREb$|xX|}bS2}XjMuFlGlMx+ma zoSg6#RZ9|afC(gHtCFQmoV3Z5mr{*7#B!0HZEkxA_b<{O*KH@XG{`GYr%hd;j>v+A zk!n?6O$1_RQGh@Il>^CP*QrSZB4s1;pahD&zU9x1e8KU*5=K9Y%*5qVMG`Ym0Gd#4 zeSsPR)p*Vs7=Llquo^tn%nGrA?p^ybkw+M@ZdE2I1MSEJ0`A}7jAxd|-~d>G)biz- zg@kl`2Y_QfA;o?h3K$BhNAjd@euCxzxpKCI$t{T`zpCBLP{Ra&*0F*KiWu|kjBrj) z1~*8=;B~DoT4g6Opcg*weTEKW>Rc`tgZvf4)By=d4y7N=v(LuBP*7B~CNA2wt4|%x zd(xan!RzU8KdDZ$4-Dkv*N}h2!)pHkg?@UeHxyKonREvGpDj5L7mot?m*c!w8d9+M zXO$;wDP2yVGssdW%Rb`7lIN01&hE40%qT=DU0h)MPmY=df+LvQ&N4sbdS+;Q$C^Ki z?#_ZvG4NchQ$On>?vay)K8Qjnu%Q1wsjW81y};wnxSei=WR z1;<6-{{WJGVuSTJQJcK}i}psgvC7}cr=LN$B%T^YNWwqhZdlhp+piAbauLIL&NB!s zwpw~QaqAxUG?|G+!>IMY-Z9xg`*7QdaK|VHeVcQf{=|Qw>(NaGBC>AL^$U+y+%_PP ztdW(GSw=Y+BOm5E1D0SYR=*<>5Z8~ulgN>rP{ zmL>#$R>~dMi~(06Yof`!@y0XGasGM7&qR=s`G=qL3LI*SV!L{=EZ`C(R#nRu&T>8m z2lgQT-2f-^B(yt1XQ?dp+7UePpbXK=P2*Qg08Vla7o2Aw9CakF%pB?u$iJ)!6k{B% zr`T%sqx-XSWQ{5-4_G%CE4VMf$2iV&_{UN$K$4V(>GpzcW0A=`uRL;Gh+#_uA~&c8 zd0%MxJ~DHR^$t|10tLD;^ViY|q1YJA{{V236C&;cG*Pe|9G$Jg{s|oM(0*R&9{ukO z+QNiv%OPh)$GSo3;F2-UbI<1^@Hz?)e*L^63`Wn}dVQM7Et=JCGE5z1ng?gxcnk;U z;N)}uc1(W&%iwA_UKAGMgxa?F(JOA9RUs z>6IZcqEw(3C|S`0Dn_a|JGz0D1Ro`iI_<=ePGc1gP?rUh?%R_RmVWnDk(gHz4aB8j zu*^44Hg^25z~$+m2g#1Y~$p(3!m}IK@y2XVx9G%GCI;mJ>8A8?yJ@d-iq zM#KjnxbSd#AYe;~dcSa6g@+MYi+Ey;pQw)kv5by?w;d*i3nB|FMM3r9jD5r7{6yp^{{X4~0NbcmSQdhCW86KDdQE$=v|UDd zq_Ou^1kV~*M`n2Pk}<%^8B@oR`RnIyErDjWSJc>^7>~ zpp|!(wF}h`Gm@urqlXF`pC8|nG1h8k%a#fjl>?!67o{1`@vJcfmk?zdG~ks@7pqmH zC1i@6Xr?z-#y4){;O@v>oMSm19K?eyE10rL>*>GF1*=F3eJUTnR<(9n3P`xBP|CRH z1R+)A5HZQY93F;Ribhm{S~wN2kEI`7;h1Y0htnu)!KQbRv8w6TVUFcpB`HH9*sxt; zGfg8#IcUL|S1P0f8=b@tchi3Z;8nx$Mp}pFIk`|Z{B`?4@kFLnxpMUv>T7QZ=An8i zW_r~$``(mn!6p=CSR)|gZs0St^M(pIAQ9K7Az|{9SmahoER>e5Sht_mqcHlN1h8oj zC6uQek0KQ?7I_$Q0o-s1+tMMd}rk2_xJ3N0Z&?tn8 z)u~bzu;~FH63oTWuF_lJWM@Agj=oLe$kV?mx+$(ad`FRrBa{_rS?&k|sj~1NW6Kn7Nn#Xxq~0(F zK0o5{$D@`q_p;P$ew1%07;eS4o+hbtO_tKiqhrO7Ptybd2ygKyA0z|8>LjHj`-xK@ 
zuhbnI${pjtIz83{n^@jdepB4y&0sK&lGMq!28T{vv_#-xCDX7z!}F-{87bl@;(-uCSztU zk#{+&!=pwtyfneY4AJ>W8#9gdbEJA+ool2jv_@LtLaRdg8;Kt7JP$bMJRZJFn5kl* z*Pkst(l?mFZxxwxpK~GfUm;2Mof)3&&m7`@7CyZc-mkEA=?qxR8dgK8L|hcLvjBABh<4Jk@;X!qD^3NWK_12E0!^k z+mQGmd>{1Z`}GBlAja{#eKY#SKCRoG#h~{EntFlMnzZR2p&zRtmH@^nB6jygQB0}q zWgBH9uA2dgO6Grf)LWm>#!fvGdm?3@zk#^gJqxe>SkeBreI%)%eNcF2)U;?@PLEfX z&8N8xr9r7>S9w_-hE$mW7>+<#2}TWgSwTevvR(9~Rhy495HSP{;5N5lZOyz3ml5Fw zFD1Dk^wwJmxfx(x-VO;E@;LthPPtjC5}X|)Zd!_x6{KE%iQCt7{{X2Syt-$v7X3yq zR7M!4)9rHD_}a(k#(zC>zYs4Dcx?XE{*u+#M&|a52oFOZ+=gnzlFr9=LXZa5#yojA z1OEU{zI7pb4}Osi8WOt9yQ7ssX$DZlkGmMo0VnVP=O5drG68g(Z||HiEof+3S!_w^ zmcy_L$W9x8LC!FE{PbA}NPQup*sj=`(6Q>7B$0_@#z`IyF^`YO&rYTn7K9!F)~xE< z&utA2wOJ@>@iCPW1!Nwh3>RVHhyVajjDFbbx#0f*7I6**C$PabQb{h5Po3j88^fgI zSWhH@pd=2htYxRNblpDe(dvEZ$v7Yt*kNdaU-1wKBb@&Lk^JMX%zse0?Bsp=kPCy4I49!-b+?50zlh>UPBj1| z-Kcz^PYshNy(9B;tX2DC>BU>2Ba3xy+OpMV?nFnbAGUGnv8ONTq zF~1PYN=U;bt9PUFr`j|m34tK4@augL?f59oHD2ud{X#elu+xsHi|${Lsy7^_I45^F z1dooe#VETm;g2msCxf;#pTsUUYnJfq^NlGIdlD1}(n!-{LSSRVIqQ9&Je6v?U151~?d zcBvV`l1Ve{jd{uBk^-nC@oriS)7|m+spaDv{{RnsF~mL~#r_^c1PZBbM2>vgz&uA? 
znCT?cr5ftm?ZjEw1GjpDK%QbUBCehk5B`+}i#J+94rHie^3v z1Ht1u`pMY~+uy@(l}|&b+i9fg_oA~#RlmDbvl~j!>Uv^k!Vm%6=nmd84gvYQ;nZV} zAzVX-X|>4|Gmt)$l6=<$Z# zA;8EuZiMmj~wC0&@!c(m1rQ`FJ#**9j< z>&TU@>V~}5E7NBxyJV0I2FVyqhCb2cjN!||PoFQgDLDZSrf>n*$|h2RTbxDedY9Qs zi#5s?8*x;!h~ZxzPSb`5IN<(9c9+vKN%nQ zj;pD~L=Z0y2-wFTse<5dbqsa}oY147u2qdzz>sw@&5p~ zMv+z?u$~=JBykB%f{J5eU7doi4oM0@{`_<-31B_*FyzAInm3Cbp`H`z%*h;~SgeQBXUGUYAoIb;?f4%(HdY6hoGj7VD$gJFF*pq0?-`IG z1bD#kF`j?1=>-OAdc)obp`+>srHe3BO7x@{ZAQeA!32XK@iUOSfEmIPPvgkzPEHPd z^yUGR&VYIU05Bwh1T7sOPmr53%d0agmV%;$jYd2frd`K8aItyt*(0d}Fmgl8m{vp4 zG#-CCL*`MmGU1+qc2VGSiRx>yz^|o1IDEVOf6zryNM2{{UeDBb7b=#uWi#s8o*(MDX50f6KHxKqw~(NIb47TL)kR84uRmJv??`;FXqWq4 zP9EL2sEBJC73EZ(X&NOV%AJ8!ux3&T&d@+O85!W^Ag)@LhG0g|SJL`jn|&U@!_DkW z@;k22&u+E$#k4IRKH~ae+v^SeI<>t+SefUcN)uLNE5w^7dlDnEz_5ml9mhGBXvrjX z^hd#%9Bu+c)Z)?tR6!N1k!Kn+X-L4u%}6OqDofIYQtotbtU_7si&ke%Pq?kw)b&#s z-(tK?6mae!m4q^=jxn4Ksu{eN&yP`-#xYE_E4?zOr?wsu9cx1S;HxbfvDQKv(qLv* z9Dn&W;n@5J-T2_=tq$l63_xX-T_T=7&sgQk8G%i;HL5~=kT~4f1IY{R1Y`7$0OOP4 zB_ZLI2p#GVeh>}DhDb(Zk!m9l4&jLi;~%)-`5*kclr0gE_0kK_^ueg>73x*2H3=PL zDNUtxS<1sCo|$xIa2TK&QV%{r=d28EWK2%rk1*d@(!xth@e+cn96w9@-XZC0N4)!M zP1b96{CY97{X2CYIaadC85kAikQq6R*K~6dRP7+LxW~)i5ODc89FrwFLWvD_BVu%I zFU~y+h+(DS<^-U-hiyilx<@I`4Q4RZb%m>qn?e~8oxHOK2P`<_aL?ziOe~}YtY6=k zhe*k24$hk^Fm-8F_l6#c^w*Mj36MeZbCdr7r$|go+uxKZVnupWrrQdOBa^mw+TL=$ z2e|(LkQ^L%<+BAj0MZ+*9}H6zmfOJ5G-SBk#fuU=gN^_s`+tAOOipGZu;5Z5`?Et{ z^X;uqL$uO*x|Lb0MJp{PHg?JJHSQ zZI(AzXxMlmxm511nyk703Y?~jUdHgixgyq)N)IV zhYOs7IOFrvg$Nw5gr?48YycE{K^f=&0AG%i!f2VH0jj_a``ip};{XnE{#^;INE05< z`Wtvd^sl!IQZm>?o;c+Qo75W;@))o_2p`+~_2AzVIpTAY2oII~gjP`R7}6xvAhgk< z%>=QZ_bWPX#z(u{Msf~EoRP=K>yrs0jN8HxB-bEID6vym(H)3b=I5R=IV5Kp03I`p zbY$l_zJ?rHI+}%zHe{X$Fd&%=thO8xx1Mw3Z}<86=;adNgXGYN);xXnSGg_rkJT(` zdX$y*4H9D->Q$Tx7RxHhJ1c-PrQOtg72uCOeLcaapTkO;JEVv6{j{_!Ss0Z$6!+|H z5PF~2pQu{xa@TvqQO(e!_~ zgqjwx+Kua4t%>cSF-h1fkzbX+o2xdp!6;>vyZC(KF(oeKJP||qZ0;Fw%T{ReRZz+m3C?^5z!=Bz0B(vnRd8{%_zR2=^Xq})-w_@smRom6tMWE& 
z-VBf|BSNL5T&N`yl&r6ZSt3W_w&Ac2103!E{ti6z*CR4?wXp&EwY2H)NNPjkeOcv= zVJgw=$Vj9J%*Qw$4hBEWW1t9;sIN!&^@Q<){cQS0uJ^{Z2CJrMu|aiH)3RRD?VVI% z9FNEz-QErdS)H#LtN#EFUGXc*^J&q-K*&~= zaVPAge}DP)*Z%H=4a=Vn znEu{aV~wPRmvXimGC2T%SDc1XkDjaXKaQnh@N1JUQqr|GTnaw!{*g#wfS8iOglKwK zY1gI_R)UOjL`aH6qLI9L9_0s&1Na&8NA~iG*wxJLf}#!k#DllL7|;eKH0?QSM(q`P zu(9Svg+hUxgWz-X1C_<2dM) zq5uij1_zKyS~%4J{k}drMvBNmcy|y?&`By|aRpZmh|1@1`3>hI=Z>NZ2{!gRL&9gD0Ztllc$M74H!gsr&ET^*z@z^cJ!L`)ld$cP zKm#1HA3jGuI*w-$#H#ZTpV&h%h!3a@qjaOJrE{T@3WoyK-1}$Pkwl6+;Iq}ZV z-yLf3bkE|PLRKA1QAs`O{*hDm>O@AQk}{^|8~QEuGey#L3Ffs$ENhyDV@7M%aWh9F zDOs8<@fh_TkTUW_SN5c8 zR2~GgC>VDu{_Y`xjCfJcLcEw%W;f7p`+Uu}9_hun&kXo00Gt*H01^39tN#F!w^6w! z-bpk4xu$=y@H%O;NxE0{Y*>(olFWc$Je**VI0G0^Z~z@*%v$EFGRv6xhluf9PZq=+ zMlDXGrh||k$<`%h)uyy_JWxt=%OqhOkH%LY^y)BLG8B+Q3Lk#=kCqs$k|2CHpH4yn z{{SQ4^&lok4c(e2Bl*1oK2>a^WBk;frRf6Rl>s#3smljRLFhGV4v0M)~+bxHe1XWjn*L@8^UwViLb z1T%|I>s-=nPaHy5JeP)CGNUo%#=~hOkW`+$&*CqI4-itZI28mD!Ch<@Je-6rt6r`po3Xzxjs%TznZVlcyLY2qU* z&PS*>D-R5M`FD*<;=CIX*2xv|#ukgVCoFlx1K8QK%es_961?8rOP#d+vVn?9FGj+-O=~ZCI$fd^#?h zP{h?kHtq2!U<(WaJ*OT=J$AExE}Xd^hLeX%QbSmmA0hk0Dn!Zd6kgOCF?}nvWYxP} zWQY@}8RV6TFDV6y#!0~*Khzu_!Rv@#EQ9$Ov>yKecp#Q761}hoZ+9IVnwloNqQu_) z7R-2xYz%vyNIuif54DdC?Id+R7Zrz>H8Up33S#D@IQMOvSa3pO)!)6(ruR;x6#8a| zL$#rTFGNSCSBe2MZYO+-p3M0k(cpaXi+pQ{;dq4PoGgHZfZWeA%qgeND@&A6<^BEQ zxec^Tvo0nJ>By!)3S@#a$p?eR56_e1u5h56TH-AN1bz1{LR!9tJw6n(6(@H*fuDo4 z@#6=h04Ne~@02`AcJ`G&bl;9RL?)*2E#BJm#Z?%b$m|GS0VfQ@IKd-5Ucz8zVkQ7{ z0QD_ltN1|X0`YI}T{E%tU2+8NUg6Yr8YxykUQrx!&G#A00VWa^Tm`^WCpo|%b+^SZ zTq6rP2Msu=02~UJ>usz{7l?sUlvq+AAilqQ6@4#EkMHD?I`K(0bAEY?94aR9uoh$f z7?*A<95>>c z0mxhp*udoA>W_m6mzh)DLqv!2cCWj}h8KZKROCv;%`$?&Qj*9AOw3e3CXc3}oWwqh zcj=X_Dz9(NXIIQ_*JqN&dQ_*9!eS#4Gd2?n*x0ScG2}N^q;NhQQ;^JAPNTY-^|p9bo|SWRfNT5B$4H&mD0Xf9lMsGmM;kuv=8UIjEAP^$Bw99pjBh z!79Wjw3f1z==FVo6h0?P#$ViBsjTQxJlFJz<)#k7YM?U5kBse70XX@_IsA3%oIB!< z8^xuGn0c4b(2u1#9rlh}AMoBk0-}YZFx@rrYmX7*1ARHx4wK!~b;|Lz=ya(C>1)Ef z6_Gh*Vo4Za2t4@D2gg%maWYdObII<2ON!73q+{XGxif(Rqfbu{bcz;_PPUsYOB+fH 
zst-zO%-)(IEU~LN4AI6}wga34kL~hr5#kg0g39KTlu&f&>ZmljpLW2rL|}zO9Or1|v5+=^Pv^!uY~ZLdZl~Y9VS-F| z`F#3?Xi`V8%vIt7MGCGkPTwRSCxMU8QfDBB`ual3^@WO;$XijFCfvy*k&Fh)e))rXtPuHKNS9=4zEy}S^CXcAiC5l$c5DxNLqhrVpj9Gm9mkc;3 z9_{e&7nQ`Y$pH7z08rL}fgFwb7X~H)gjFf_Ua_auf$V9sKyBBK+#*QXOt4A2(~t~! zY!Wl%VEDo7qZpZMBqczxBI4rX!X!dTtV>z#X;N18UPX;Us)U4+fB60J(FM|(+q*l37emF73D^XOrL@fDU8RuUA%kOrL~ zjCdnlHZ@H{P_|NY_f{mc?vI~ymKngtaKn!tdiq-qluANS8h{TV&>2c$y0OhdB#=jv zRkDrUCP1CmUDXZ*<> zaeo%gOs*F&8>oQ(E`0zWI*gVAfW?<|u`6yQ3={bP z4;kykyCqKU%Sj}AwLgo*qEyy$lHESAZQU1J(ftYaN_X_=XK4jz)al7tC1HTvvMXQ~ z2cL{`M^R3~&5%N8%PBnR_Xd}NRJDK>Y+kaXPopwV1%V0lHR`bimw2)i5kEjl(7dnakH5`<*iefN(2@52JGQRIKasB;BIhE0HNcA!^oM#2I zoDVJmI=WBd_Wlte%sp8Q4Op zCnJzK=g0jzS({73FvU{VS))r4h@-;@S`DtRoHp-wAmH!_`SH~2RzMthzwH9=cyTE_ z^(TT`Yh*-Es2J=}cMZAP+0H&V{{Wvu+A#`9XZNUM1o}hN>p*2w1V3<3vuJj}K6p7F zllu(g&p=eOLr;GC!s!b!Ix&lE8-!}gpzhvC&I2D9=lA0$9Yo;wt9Vs`EKOoJtn+P# zCdTO-XdM1Q@(<^M`}HzXgP01RAAep@&R~|AB81HRvVz{EQQ-LfSOcH^bNT8vDpNr` z4N?e=NnI94jy5NIl2OKP67)|5viv4DiT4hBAW z2S4-a`zee~gfoa@!w2`cb_s{x?Yn@gNX9&J2>JI9@6!j<+4tcI%=!92Fu& zMl7tEW*}hXjNy3CAD_=o5)Cva1>h*&No*lB=Ycko$uUx?{)f-{@Oqell%DWC9tFuU zll?)S-+6sa?cUL^7&Wxenzf}F0p@ZARddIJ%GexX$@tG+`~5HAsGo>9>4C*>{wcry zN@2_hqm9R>hG8f135UBWCBNi;)NWw!8^lsYqEe??tY*Z&?y5kowWZj+v2XCN_?^!K zcgY|hsZfKIh2BS-RPglLsYPw{SNkntjjhA4_8~m>+{UDp4r9z{^(5 zwInc6@L)6m63h%;v~4a88j!7DdwYzMO)<=q$bVFlAC^Dz@Hic0Oqi*PDp{|;bdQy> z*xCF-da)RVI*+Fz%jRlp1zOY~(%=>?Sc;q;Jz3-khxP=Xf3WAKNG5_Q%8cda|4{)^?8b1MmkQ##K^;wC=Lrl`++q zRn!3xeVrV62^^QC1N^oi{{TlFSQth%FX{LCN1e-`1=6W^REpvi<9`fX)=~OK1ooWzyAQ32LtDZfb)-!9YTIA!4J@{H2uUQG?P&N zKX@(=;zzRYIyR}6{oo~M=ZPYszur6;OM9$=V?ZXXRaJlSbb8>J`d z7M(Y`exCh8dc#ZpYc8=~HZ@aEuWl=~17H@6l3_$7e~kKP!vZ?b<9NS@arjd!iY{8^ znlS{czyKBx{6|>1!ag1t$;BKm3<8wSQZf!jNAitt;3DJn`?B?p<&qs9)o|;+G!%M}?(_fxNh4Cx>mJ7b6T4Hg^oefwlxwfda8{s^%n`F-Y#1r)mf(Ao z57>F@lbta#RA!J^umDuPfENx)r5J8lJaF^yxC!%wo*OMqy33QfAWZ9hgt7>c%=RyZStbDRQz5`Qi8@yA_G3l3x+)^B-ytesdJsP3~;%o!jE(ncCw zq;^upH9DUQ0kHF|W&OweJ?WbEt#49`UDPz`r;((ZDByw7w*$KchCyumkGq_lbJkjT 
zRH2yXCjPY*2TNPQnj^+=`8c_~p&4DBkC|pQH@tU8{uw(c){;B-yN2AZ<=RBITyD>h zN1@O8{ysX_m4V@Fs?xXQq3KWOXs0uT%0(rBf909|2p)zj-Rt}?b{>_d-L($2-Y+6l zl5e%I_Hww1HkM)Sj20e1&pSx@>PeVh5hQ>L!K$nkK+mY&CcykYoLs>yDldbg};c+FmBK&*A?7w|l**RkQVHQMlEZ z%LI3H5d>%lBM4X`Ch;bJ{{UvNkx%(Ho;JL5xUYrr*icrfPB|$m z`F*3hukcOW!K4~Tb7&g9cLldxscNIM5^zaVD(45!?sx;HPY=Q30agTBo0}R zUMcZk9TLj;W*oT%l%ZqDRElfNR-ggB!|`NY_Um8bIg2m6mPfW}@%1ei0L&t{xrhNyc z+r7VcThnBC9=pdI*0K^wFpi#b`xo10Odd^!O;BYXe9pJjahh^Y7XtzG1Q53fhk+0 z{{V>r_j`lTmIFwT?q97PhxIRQoo>Z_JGztzms+={xUYIJ2=p}&q@H*QVMDtA0E}a+ z%2Fnb%A&X5{$F~X;*^Xe{uxXeCQtZgst@@}BnB6B5)-OU^({Z@UX|D}rM(YFl9K95 z7iIZU3g|~0*VB>YnE@@fO{l@3ItWe2_RRux0J_HP}t;AZif8#EF zA4;=F`E**6Xf=}|3e`;>_?+X*eokXlCI0zP@c;v7O& zE?JVbQ9#q*)-)t=+yW#jUL5z8s);9%t-2=lk5^Erk6tyYgB0LQWyS*r0f!%q9tU46 zVhNVM$eVuN_xVShEcR+S?i@El$Jh3YCYPs42#!eVG{HaJKFqj3klsi6c*rN@hrh5QYNNkv?wy3sQ{L$l0yy4HEVdonSzywS9m2yP5pfuGtF!GThOme z(rH@U+H}X-65MHM0yDsWWAIPp^z%4Wq8Sr1@;2wUqD){-;c`@S1d#shU*{D0{{V$< z?~hW9RI#L6jh7#DCGSEtCxh*H%MAIz-PKR@d_t9zGFx|VHzM658gC9#KPi6$@(=Tg z3I70wYkCavN3QplnH19xQW!M4b}mgObC4S`DvWWH;Ai{wZg&L3;j*U@h#8b95R#|# zZl^FB#oQMLpNj=aNy;uuCA{4DF@kiTr9V$A$dT&Z!Q7S#{<$HF7%NP91RqUgkg4Eq z3_J{edao$(1gj*~smJPW>+)>HpH^b@Bvv{k{3D70Ehz_d<}TddC4 zE}5X{x}{i9scX8W`GSv~%F+R!*o@=j9(1Ow3<;ful_g2JIhRhaW6JulYU$zr8^eMV zh(gk@_@ciz>uzys><+}zHORjD&(r?dZ8B8?A45uwo`3jEYT=71CzoxZK+afk)`tv4 z$-vLV;nESK16x%fl1MtJfD#60A>zLk;oLI^{fR}EF6_hpMIbR6BsnIG3|Mu4q8+oY z>Egbl^w+Z`4(3;mO&+b(-Ok*G0B{aCU(Oq<)6Obfz^pW(DAuV~Z<56~0X6>sD8drB zc4EY+XyY%9kE!Y-JhSwd=$@&s%}2fdjA-#{(W=C^DAQ+#R8;{spk-tLU;)Sj5J{`CP*8i^y~{$K!HSY5;Z_Pxg}z`h^oeOdgrDKtRQJ{FF3l=@ zdbZ|`&+ng5em*nE^VQj$cMwnh(qxc7{iMl>J!(Mm&0S)T;1)mXAy3I5{JyOb*8c#5 zZ=i%_yC1Y!M*#l-Ik$j6Y){uei1?R|N+c$B5B~r)fBD48;pkP-Gyeb|+9EA~3LnGu zGw$rJ*bU?q*6w7V*nPk`>lSatKNBRJio+;!U*2Gef72xkg^r>w9g+MseKpZ*y6OFk zH7z-dI@YgiRFxf$Ga@Id$fWU?z$YI#>O3zIer1nS22}nSyDX3Y0MRts;EW%;)h$>*=o4 zmH_befe6B<^Adaq)WH@%g?_`Dm540%hLb!J#>pjUmSQAU7%F$ojzJ@l@_M=d0H*lF zzwaK*DK!FG3%-o0vsfPj-0cPxhIRn>pP&&#wfq%*EUJA9{{Ta~X8MtnGZ>XqAK@4% 
z#Qy+)_3Na=OyhD>mTZMhP`bNGZQub%kpM=twD9$zmpd;kh0ixRy*#3)ss8{3orfb9 zPvQGSt}<}ZYW4zwpZ@?i6yvHmjCaH+1fz#|_PL4hN}e?a6M@YkOU4p*$(PiI^o;rb zhIVh$p5eW3O8P5#Y^+{_i1bZW;f-=P{{UWT-WdVFAZ)N zB&oBE`-+?#dqBw;I9z(=tdS@sNfa!x$*86*H7Pz3m;R=?>+BQ?LC~=e&$~|oBQEdn zsvXVsHfWn!vMw|ZUaBN(S_qVbl^|sMu)MJ(;CywNjK|EzkgP$qwQbtZ%-)wV%wjQ9 zCwD~!+ueP4-YRXU{k?*PPVpL*1Z4aj#ODNHap&WoJ!fZhx%c*p6%>a;$f1B~1G<5Y zxFihZkbZDakBolZMh>t~NO>)+L0w&$r3FZ=%G*a6$T<1_6!845b^E zAoJ(W4o4%7I)tRSXedXmSiDUc*b*hlR;AJf$QCNKF2z zD)}JtGyY%S=bz6=ep(u|G&8E)XRjR9Buh}X*wx7|v?&7tSDXRh=Y!w@(aTGzrno-$ z_Rydt*DF%9MjMhsnxjdPg*a^F$B&c3pFCtAj;HJtq^WX&$~tm*7Z7bb)Pl`&vwE>h z9>XDBrd1~(F$czQe_}Y#P$@G~do@#+zh)kng8u-Sr6IY@K>$Snuw`;`N3+IB=f}V! zs06hr_itYh9#BBEe6qEcB0HsJQVVava@hSKc=-+a0pj;#7ttV2|+U&mZ*YLchpD zkE_#~D1AamY!~$k+x`08h_73X}?%d;Q_s5|KP5*vbH1sy(3ZBk}q8zKcJ&A}3fAYl zg{1^7Jkn(Cjv%{_`GKBCl1KLHc~X^$T2U&hQ2-4|C7@p|OlChqs>^I&?>$C2Ce*dd z4{|7ThQoT1R*gyUb}gqTz*Sc8I{U-n4h)VP!x1h}{L~-uMIXgPo%D}C_|{e{h)TkI zKZ6f4Y0k5tNUNym1qq&~pw5|Q21bwUeF>O4+2 z1;fTk%K=~FDz7a(Nb!$R!(s3oDp8k!l$4}@h+*QZp9hTRxIc+L=+z~Y`DH2^EN78E z4a-0O02G5@KiN9s@gElk%Y~NT@^k2t2dRw!sfsS(4_`AG?yKH?$=%YeU0Yw(HF+h@ zDK&>>AFzoqKmK01{6`()Se0d$FsG7xzx^N>r!suGAgXfd%+PfvgWVwgFX;aOb=U=~ z_T`!c$Lf#1+fRCD8-S3hJo&&l`Nl_3$l-iTh)iOV=T%eG)--+@!jFq4O-Bfrg_%v6 zlo&mop88R=i(ZTL6H(RTkz2Sln%CAa&qL9$&l}_P$aa9gjldQB5)OLbk?@WoY0W-h z9UT3=Bh>J})9I6ByvgYc7ZsuN57nZT-d|4jzRI8f0LMCPwIFkZy$~;2WB&k>QOEfl zH|^Isj^MbA0L?f!+*Ek%c^_EgIF|s$@N(F+u+aXWKcN7{f74*4aIy^G`?5#lJ#)bX z+AzYBfl?o+i?`Dsr1m@h)7Y9xG~Eib1+{#{%y;m8IERLObCc~+%X|Zg#5jU*iO81B zeIwC+7WhhUAD!B+l7;He$UmuK+5%}Fq0l>LPwibXG|ewhO**(6o-(DHbKpqwU>O_# z05Jy}fKUemhM6^UR97&U=O0jT{u_khazvaO0#Y{K<=m3}OVPyP0Zu+j}XQ|`+V z@0wR?6xsROibt&D;BI^;AO39qPIx{ki8Cgt2{cVSP5l^oymh{+=1K2wB(QS{{Z8yDRZRa#%wym{{Z4aZ!1QC1wWWa+EtTb zY&!)D8*SV^ap$MQPVGX?7*d?CPF~s+^aK=vtXK5SLJR%JtX;7*aaFEirx}sd2FB-c zhT!r>dd?f`Ae0ClO|XtO3b3b@&ewG=DXTFG;dp}6B~6Z zT%HI_YzSmYSRp4k{X2T$ao^jXu!G%l$9`d>(@rlFHd1EfIfw*~WC!(vsOwSFDd}l7 zq?5-1-#_031J*j^75-(G@DYtYTtaZ~IPi@xw%fkbZ1L2Ww(HG|*tK74xF7IDsv94S 
zu=Cb@%u;wy{WXwRUg;tK0PZhtLOA)$ku;Png|?3W09e;_jYClybSrY!p?i=20BN{? z-XER^ALKlB$MHOG6vUuR#7$Y2t2&lbN){y7oM0|gkh75~9Do%1#H}dp>QXYn`cP%$ zV}(!dddt9Ysr*GsOp!!~Q8Y?Mc-_H z{{V$hak)X{vA{VP&t9XzIDE_^%=xMBvjE}7@3G}mdqtUl5@C2+%)mQIf`9_Rbw6mz zbnO#P(k3?{k;Ji~CP|_Xmd86!;CR3Q>Kt+sUB9SjBF1T z#_;v$!p$k=r-+KR&!z37++JOl1KYUF|&O+RMN9n7M)w-Ne8CH^O28$d~G@9 ze|&YtaZ*7OWh=x$Vt8zXx8;H|PNLKG3Gk{zW~3@F?&@sA^~ zH^*Tm;uPz=a2UP~hnX=`DjdBF^`U>9SeNP+qCu%>Y{Wy@e6@`40d zJTne~QhZO|IZNvD>2BbP?n&BODU3~@^&>y!)}IVi?jA{&6S{NpPTh2hGg z@hM5zl%JbJ);_w~^h9(fTFk)z09I)lMsJ=lyH6iD{{SrY(2g;UN(v>=?rMB&9Bxxx zYgo%tcnp>Q0Pb-qS0K&jQ{-YMZ>=}h31Pk5H72e`=_=czl19VNAjEUyu3^@VvDJOIs*vlCXRj#ATnYJ-=eXhWBddcLZk)@WC#g@6HAq89)P% z_cc4{{RvbPM$G~GvbUKlBD7B6Zt+xkN#o&%n3Jr_7SxeiF;R3 zNdW>nf6p3vu0hER8ZT3B2Ou%?&mCD4IAYc&Ck&+JF2!RjbM8Npil^gA$ptyd2V98? z>KS=o!UpbtuN~#w_TFk;#dkoXu@W68K#CeLMprwxOsdl{Cjh8q9?|E(zcb=2VNO&8 z%iAbs9EC5bF8u%z%+BK&OpPK#tNf-M<@$bKN^^oV&*ERb^uYvC_jS8*DlfLoFi*L* zjEu;Iyywdk$55U%#HF%C%)oAteBIxyU<_(X%9Siv!{kI#b>HKa^~?INZ}&EvM_4*- znJY!Ay+5n+K}i$>S0^K_oM#Khacm+c%#$qV0D=L`o4BD*S2=}fHIBcA zV(^J&6`6avbe}?bY1RR$^-sGsD@`Vuq*RkgBPpp(T3bl0061r*EUHE^;1Ap{T+03% zz{o)D-Oo1i3IP2hy^_2X#JHO0ksnxIjr>bo(EE;Roy$s<{WK_1Hilg!jj^1lu*1|C z$S6WEE63o}#r!3Om>&$HgU`G@EPP1bEl=S80E?0i8e=PC?vd+0((XM;G}=~lSl+0! 
z2$dY^Nga}{;IZ1o=kgeX@JQn-ENRi$oKjnoV_Ij0g-ZP4R(-k#OR?McaD>s8)DKZ6s5eZ}J)S6p_ zx88LX7=#t&)M8M718i&?pTP6gxft#)k(D!4P&9Io-}aQ2=z7FUnJ6Cd58(oz;ufH@ z6>56bIqy7+3~=f7?AV@GVii#URwR>v0601E)|9+&7MirCLO@v{2F&M>B}9&Zw<2N_ zh>|E(@5(bzss8}}5PsYGeXC8|mvvozbr{eajiG6g%J7q)=?0oWt0o3Z57UqU&m+9x z*giRo8Rtyw(&SR^C|fg-f7}@82C;Vuh`@zsnlm+qo-L;>Vu;`V7OfVF?4#P9|4K`_E&;J12`o!3bW&uUz$^icW$NvD|`a(OO{{X`C zulClde@b7bx^c5kSs{P9*JY5SuNzL<<0Kvl&OaS#VHGxPp)uSVj-N9bQ&)r}Wq(g% zBmSHJ0Qg6Buc)2Ft0k|ey`4%%h)B~$)=aXyHq_nExOpF*N9U{#G9+MS3Bp2DN#;44 zjbK02=E?y%)ZKq*(XD-WtMvy*eLi^a=#4TcmVGl>vtpwuIB4aDQdlTlmQ`ZBVCTnP zUBoddohXTEQWQf`WkIzBDb2MC715~|k&cmz9h~(PzMu>9W<6=+8Sb?uEf(@bZ0VZf zh@>%Grz`Dv*!vm5+q8k;epfjlbAYKLRT9b-7BxK!9+Vn9U{6YAETt+6<{%h?PZ?O7(Ce}A+mh}OQ)SaT2b$X&kW3(j+p4}*^f8NmIzpR)=b?|$~Q3$>v!!kKh>_9m86 z2%DKVdrm`tu1ETO^&wYwga+{PhG7Fuqb8$bH8z=CG2~0L4f=3b$B)NECz(ksnsn0l zh6$`1DvF`ESZAGoPW3q8kH^XUkK3p-l^Q0V{!mRFxU8_tG%&4;L@pcxfDNZ`0Qn!M z&Rg4`C!#`it&Xv(@5U-1GwuN*hQ#~ykjpoI{?{h_Z| z8E73&8BoYsG6;&n07wtCl^OAXG6C_;fk6}8g*dtBBXmsjxEO- z9^JV7?gdH69P&p-xvQ9a-hW>z!WM?6)8n&i*NZ{3EPyCC4$bVj!sWBU2ikubl|_KSB4LA zfN%&a=Q$n+7|whikGN7))qVZj_Jje1So@}#0-dT2<6Sr!vHvkR}bNSCZzw6_u*zZq%+7vNc zQ1xD5(aRV+gFBotCn0gpNdR&@bMw(EUQij2y8Gn{9g1l_+$<%;E_S$L4nY|NF!>lf zf(YdB4<%H){Jj|a(iP({`r~5ep`_@&pQp|vzui{Wrq>&q8puOSA;7>~;C|pRCyu={ z;2sbDt;GRI{H#1xuYz4i(k8>o6K5qVJ<(Ej1K*etW9@(C+8&VF)Xp^hI_!q7mkCnN zwPTGSf+(W^w30fYa;OO;ug5)o^CJ$42@<7>AOYYtC*RT>%R(lv7NoM~H&6%VKDv^)b%SZdfacM-I)w)3T>Lrcu6H;UKt`|!H|ILSmW9hbsWikp&5+@w63K6ElNK8 zR}lDL{c{*6@oA4KKbBfa1FF?`NiQ{wh9vv#Uj6I<1XZJstbG3XbAkQ(c|Qu_5I?-NDDQ86 zFh>n7Nda!Je7^BvKR|m=?OkipwAKMuNi1KL(SZK|3n(k?ZwCh;9FG|4@BW_Qpf_jg zufNhOV(^B1h02n$g%Lw`Jqv^6evrH0RrEikvO%KhosV-&yyyw7P)7p4=WNTp=O6jG z&sZ4g_q+$$ojhxtbFf3;Ep%Mu?qZtBQXY#nP-s+qEh^+v@;L!2iyJK z7xLS5t>~JJlO%9hwIVciC1Md~X%wqQR1(K3H)Cnwo`;9=tR606M71$GxFEicDd!ts z4*V;}z8>RPY+nz>i9`_UOQhv@DJd=#4YHC)D7NeReATsMrCrjWxTB6UVkzs;j%lKZ zxJ|Q08Fn!sfMX;!(n78eUC^2kkY*lFYL5NfppKlG@yQ z2;_aULU~Dk)xT_frK*nTPCwza<#^zD@!<6W=g63Xm9ZSh+AHEet6$TpNI(1JuaHE= 
z`ekVPe#O&+<$Etef-g*$xpPjY=?)0z(uaGo=Re_IIXK|*d}kAe;rLZgk%H}2uuTUw zQSyyg6VmY)1pcM`0r3_${XY)0N>Q?=p)!(mWT|w`uu%l)=8^Sk@KdasCvMx5O@gdP z?rcEO%&>16#AGPM@)BJ5*tu+ef!B=uJMiWs;w)*z;z3NHT}86FzEp;%{{U!VtbG(~18!%Cadq&SN>U`LVxTi^ zs}P}m0UF*tk=`F!`!D!^?9B^G(^@@VHGo4_D%PW9jy3-P^44Ws`fAKiziG=8-~va? zI35R%;)t0)4J@rP!>Elb`lw5nC6eUUtR6gzhw%Jo#d!I+iOb66)TVF=ut=ts4?sL4 z9_}CFlYcp zW-Gj@X_!S_nA2L-{{Ze0&f`7-orXzY48)#_qyGThDdvzPWYDDTUghnFyP@Bb_L^0d ze%{NeMqeqnhgJlEwYeGb&s?r26*C8fRD{h~DHIvl(WuRs)U`=XTs`B?F$aiYab1=v z0V@Yok2kR##?g@X&tSboSVG4ng}(CE_(R)?T>k*N4duOyoDrUQ$vo!Zh9Yddn1qK6 zr7-V;UWa-Bq7Acd@!fFlB)Prp%s(j6CADh`mUzVUS>jj{R<~9;Ej!wEd4>k|J69i& zfdTP@pPcmgfJEQzg2`Yhx2EyYqluzh=TP_ZjSK1j0Cm;u{bBFTXGoGm9@P7Yn!`lF zf!>%t)4&2LClA4SfXP0LL)IwHOoB3{$mT&Kp}2~7)NkTQ=#a@@ zP_+VB_OR1yI!7*8fV;+Wqurlz$r&L|IS%dHx~v~lAHaRbR9j57n^;~e$S;CN(?9G60CVoN_x zoP%?W3|A4Ii0+cZVL|)&#a1fmI5MP?EJaBQtQ2Dz#(oGG{Q3R*)KZFjq*vJTjI6}R z*6OWxo7Rn_+sOmk0M32c91Q$>=w5FGoIs<0XO{mB{_99-NEB^a4hT1)=Nkq-S~lt~fd4(HMB_J48S({G68 zwFIF=u`U(VfK}A_+R8(I*~iCUOkiH-?KO=`6B!NdyDF*?$AI2{eu(hQEWlAVQb<4TA86%I5cdZjH_ zR1?^(W;kb&VO5sY42RP)k&W$?Y_Z06f_ce4aq-S87l6Z~2`B{0nC?ITl9C7oyzKFm ziH8$2G{q7|^%0^&Qh8-qwurqY3VmWD%I(Nrc=3UO`(rul#OO`xYwY}C zIc6-ek6|yCJ90oc3xEfbd~x&eXh>V7H}}u?gjB1%4$ntifVZbHSligL0)&tUy9b6I zF_Fi|QUNy(rg2{%2o)UEFzszRjcWU$4$Bx^L&G|gf#i?XoDV)T^Nyoj*{fH3U3C6F zuz&&#%GZRx;PGucz=Sa>zB7@5pKttu&pkv5Cok9EKUg?sx&?#Pouox&3tTWIqb5)B zD&uepe3Op=l1Ek@ET9;lr{x+bQi1_av7Jw< zK~CFG5AQwN|B)7{fSCBt9i1Apt*tv#mdv&>!hs|FmgK5>kEgVfV7Ntl9ykjIdUg<&Phk7!qtwe30Q zr!63LAT3uYpVMex`jMjWJ;TFo;{}wSybB*KI7#y)k@-t_a@&+b zVy9XdG`jRwyk$G4 zcvIU$g|?A+`gJ5dhgR$>rwJ@iXie)fZY382BzzH&8!dt4k_o}<7l`79F<^x_1HsMW z0!oXR)Vq7XBh)TW6^bt{YZpCrmT1Dv6}N?5%fO5mIZ=?sRDXnZ$zuXhElEffF8b3> zPW~`}Q@M-HeMKrgAnIk)Vlz4=H~Vegq1<+mr0qY`AP{f>1J7Kq;yXpkx5$NtCsx&nlo)*Kjh?FEiD^vP`DP@rN?Ke7G3F~^RDAcZ(E z&m1@7hDSy|p>}Z5oyR;d$G4CMaxssOJvd2G&O|%3Gd7=Ab0egW8%C+VS>+*=9I+|} zF_Jz!=lAMFOjrOGZa{SR(j2wmS~VbwX;dV$2@4(5ZZ3rX0EI`D@D9>WGI8grCajS{ 
zDYH~=clPii3b3Y9V{{}ER!pla5^xADoR1&Bj-W|y^ZdS0@$yOPM7zOMEOWg93C>18 zZ`&UO@H$3>feIS24LYuuEXu^TVOvrL=Wzg_0FlqNbB=O90}=xw=3uuBYZKX7^!q^#vh>+OUhW13_YWhylFR*}f|#hymogM-I8 z&-orYOYKxzXTKPFM`WD9k^-?aNMsc-o-m5G?|n}C zo7`Hp4^3%Ojy(dc>47!lfVSH%m?53tbE<{Mwo+KG0qgA#=~n_)2MQ;YcgT0xs6UuU z0k9M^n{wF2k()=CO6Y;z8&onL@RhhJW1GiAWKkP@~g` zDu4#!ah|>IeCb$OCS>IVp*!2^Nj@edTGKANpEL3e(S%{0_w}b*?>hD>z~{I7e4UL7 zOcqJk;7us_KmKiSzx`c#j}`c)Vw7dzd!to;s_GBv@N>D+DE5k-`KUJce{_o%(y!r* zxIUzI-lM9)+j`yFh6&@-Drx#`th3nJlzCl|V_3KC8$)IJ@S`V^z;R9pg&Dk08h}F# zpp_{2ASj<28o7%}kWu}>hc4h7Z(q04Jt^DYLcXDQE|)`8r`Y6f z0kx+D00;qb@HihGdj9~bp9;wt6^JeXe6rL-@}PeR)0~AaVJHNHtu4*V!h3`1k9PKT zX0xpJzOANIh-K|bSV;tb9_b=3sy}wX>uUk=@55YCK@u^k1y&1rXQQkHzbxG%iitt* z0wNL;2qj98M({J9Jn(#eIUIG}7uE`}WqXsNeS01z)%5%GY7oolO=H!DXWD=yDIhY3 zjG_&JT|whK?mc?O3SKT`po~IDP^EUe)7NudW6L<-^t0m+ig7>cSR|%owInuaiAsY| zhBkJ4^3pD&x+4rg(KY8I{{YSDq4DGJ{B?ZcrgHO+Nq^BVj?!4*)BgbdP=2uVy4=na z(XBNWI3inqn2|qqiCOaB{_|)bA^dbw@T*@>AAVku=wAwd{62AvW+5|vBU)t07eW=D zV!1dOP!6P$b`T76#S!$@MqIPV?K2q~XLGsTnB==L&gSDkumE+ei6UB92>auQ@a#Ve z!pp&6<%CL>0#cEn9=!E|b&lrKyK&YnPg9|#J(UWP>N2ceux>d{-0D)?VT6-Y*OxdBkl>H zksyx%hf2{YFr4im@4@mHBXIFQjrdOvlm;`g?3AL+k_K~k$_Wp;sm;qoM;pd7CSC?w zWuNydpjN%r7uks}C__A2^e*KZc9*9;o1Uhnb|~exl0)qifrljYTj@&T<<5vOcmVh72Q%$tJeLn5YSL%+m)iidR)p&}n%}U(M1qy?o z)~Zyy{D_8Kh~$ix89YOY$iXD4Vyf0E34fFOXS8yhYvK$i29!W!Ge85E=1Z{j7NsNE zovW#6A5J?`9S1=F0P&Ej37Mll`DG_BEb@VcW^9rMNEiTM4!oG+r|}Y_EiMhnaYxlb zh!18i)~!!~&y|Vd__bp3C5pUXS-QmUHs~VKid&L-$y;gQ98*c;wg#f-WoSfkN0zk)2+*n}1?*9JpM3@q$ ztn!VAPmoUG%#&^m7?JH!oO_h`KaxQF^&*mlrr`VWroT84Oc5){(aQ58He1!s04z_* zIQI=Hai3d>&2kY(7i0~Ctl9izu zciZkMdYiz;OZCUF=y&I1>bL&dvTM3+b+=aNL+ThK++=`YfB_x@ZrVr2bJtz)zZRQ; z$09kfi6SlX$GKBWC^+`<6O=;crNlS)$cwGOB^@GD1Yk z21w3OO27V>9dRER$W9}J%vrh$R1Vc+=zSspGR{dH$Di@*9+`9>Xv<=}mW-3e1H~&w z?GP*+1ptCi4Y=fupmW#HNK~OlVk-XtNYbn|LfrQx7~dFawyRGErV5458-@guQ=DM< z<2*8@Au8^X=JkKr{Nd{bLuTFfpCMkvj${%vh$JuMqZe_Jw6{Ooo(LnA1b4mtzHlx{ zfyJ$ru(SKwKc?)wCUfC_OMLu}I*6s!QBR-o4h$)*Ag=8r8!(VUB8C8*j43!cC-MIP 
zKA0rt4MXqc1QkeJ)5TpQf))0_>=ZBsGvH-O!5KUcA2{*SMq+cDpML)Um`Wjf?0(yd3RS}Wm|c+tjdv@wY{M8_w$e|6#F9zpKQnw$ggAaL zh)!ePAw!~oAb3RbCTEri_Qj%Xtjg@kwj|hhEUEf|#uT?6MnCV*DhL!YJT5ITNYO~} zqB3P!WKymMamV~X5Jwo~^edQ93SZyS2nbyCWSS&)tfE@;H}wcR(;x=_0ONt3SX}kap|9~;9oHfz|D85Srjx`KtIwIYU0I-CQvx{N3z$T=hA z^(tm0cS*b0a`N(H0LBchW=i!XcH3>r?GwrkfQ0Zc82fq7Fmk_+qR#7LE*Y&)+6+h1 zG%{Hbt=5>hW??L3Au2JugTMs&KatNpMx5a;Qo@$`b%hKY(8+n>`tPTM1IlNi!_X5Q$-pQMSa2prY{E zz+eDCFmaBhGbS#%oQ?kgaqHs@(e|PhqKGUqrIaBi5`tmNF)qPIF_E_*k;p%ufy4K| zcNWqWsRGHV(R*;ztzKo6Gd!~-rK2QxC^nC17zZajany>J{h+;hx3TNt4XZ&7K9#0Y zlVZ4(TyHEL$MsbXiiUCPUr}OoJXO7&8dQ-YhErTs` z!w5M?VUVwkU~}?!W6n-`46JgLw{l&pxkr-lN&Qo2Y6P9=&@O3s9($Meli~ z^3m<#cCPb}+zfNaS@7=+M-$>QE_i06FA-f=gZ>;m#E)GtHiY>QSi!@1k1efzicJ zN>$lQAC`k=6n>89pQv}}U+~NI2l%$`X?13>Dd`Y6uXDJsz)g5D=Dk={k5$*^H6d60 zZ@A$6^YKT^V}M^!6U6%k0msc$i5pWjcW(eLl4f5Yd{U*UkkX6>CG zUgv=`{NuWA!!^*^88JkCu=yNC8u=L~&t5M-8O6BN!z~F?%h4$W*GEcR+x&qqV;aFS zWQP=uHb3Go>Adz9A>Nv6GLUlbAXELAu0Q9|FX3N^P&-B7=P>^O_ESInAkm1Fx+LBl z`Z?TMF16dU>DoT8Lsc3*+d6p^Cs{&9lQ*O=2{`oHK74h<;<$DvhtE`uLJAZKU}#9Y zv0~MsBhImLCp5IEvoliEqdxSgi}>1&&$0gizB*)_v@}ejbp|C0L2H>{X4R| z`mLQVyj3XcH5$uRhXG@mCEFoYAEjS>X8>bxe0d|UvOlC;XBCaWPrzaerdCpeuu)Jc z9Dz%LVB`t_1{A~za|#n`db{{iolIm3*aV**IQjYcGMxD7lmcq;r-I@*o*gr_!!Ie< zW7KOED88iV@}%g}Ppw#m`IAhf%U;qI8)OY8#X@M%DGVtbGf> zxZ3!(6f8(rXed}w??o02b|t{`f#>>zdR@y)NvTOomJ(jD_8f!%0Fph5KYw{2w^Ed1 zW-|burTquqI*}(CGen>^sCJi1jtQRn^HGH06$x9G@T40f-qRmOgry z@QPpkNrd!a_dc;W;n}M?YL&<`N2P!HL~OmAq|nsd$$d}aDNg108uyx!}y=T;^%k4Ww3CA z{wHsMm9DG%S!KeEP zD^aPd4vt^z8%Jq>4Vp%WcI}Sk)o*tbCTDn}h|r?jxy+IwXEFW;=*N&j9ChSV{w~B$ zmcGVO008CORMNtar7x(wW@ElG;uxg%QW{t^-A&_9M~xdspnHy$STx-_4I4nFxPm%r zAQC1|`>=o*<3E)f-^n=YwgZJu;tKpuEjf+qbbVTExRd6^>46+)uq5&^+ux4_9$fyY#M!c>U>E{<(qI@fVv zDI0tMq^@aAmzSkSAG0^6Hd#6Q-5t~$5W zo2vko^!*8a@aa&$wq^vCmS^MCW!i9~C=ZOV4c4+|tw~Y^5?TJ&o2x;)6%fRJC1WHRoSQ?%H|4 zm?;H*2-t*{!8qJJk<7%=$nv{XmZ4Ppm#|nNc}S|^-Qq}F#P^o*`dW}DvQO&&V@97L}4Iq&%LeF-~ zGB!Pt0H@uL0B+6?Cy&?Hq1J6l1jIQk-Tul0<%g+>Ie+K)Z`_ 
zsv9}{{{ZXP)%6M@B9yGc1yEv>-l2vD(77ajfA9NrqH08qN5pnXW!xikl5xNo$Nc)4 z7BO9JNNBWu6V#>CKBsoHiRv?dGp`sCF6UtGBf^i8ck%tlS(yAk;_x%1GnFKP)QWn; ztvOC&9joOY>$z4c>oQ%5V6_}YRi<$p#>s%k!3BmGa7Q`$&tE12LW)2;c=W%Y(l3!# zgC)5Qv0Y$`cKM8Pr9xbVRa8aZOS1(z%8#Ev9Z4}}X#A{puc15X=?w7^X1>XZk=-Jc zsCRV?3uK)C0OdRZ`*HocjVDH^c#Ov)n&mr2^%s#Xv#+Azb1IXk<4PB#FxFQx*=1K)W;=Hi#yI}~&5kkW z9D&5bV%!5=sSFzN=4 z6oKgf09b(&Z>?iAwNX`_)fYR78^5;%4zx-{T1ui*>9=9o@6Pwq^h&ZU7A{K2Y{)kx z5rr($kU83*AMf$cSo~9oO5s>l;-^_YrPu+?m_)q@Z_> z%CPi+;tV2@9|_qyhnVsUPSa6agvVH;PGS(Q|Ve_dE)uwpU*ZoFsugTqtWNyj5l60cGZ zfTutrTon?eKBWA;^^1m)s_FWi@>q}hPhHU`tr7_RBm}-reDRKQd2k6n2anD~)Df6d zPI`Rz+B8x`7?&YdjLQ%bm~)vuMjMH2w%|`b2OqE?Zaq{}uELh~{{X}oG01LK*qf$c zBq2&jUkv1s3GvQBAe@Yh;De5sc8MyZ)8CXU-V-jPzNYHcT4#1soE!t5P7gbLWDNZL z_z@|T%TqVMJ)pBgGRNyQJ3$Oa@3xX9c!}KHvho`kCms(6sb#FoB<-%cd#^YG9u8VG z)-ABK=+Z)DmEu=1*CF2x?&XPHtCBEIFizq`>nJD$RO|Mqu?edPr%0fgkFOieA(wN7 zB0>lSRAlg_RPmBI1pX?iN&zcs3t!L0;lR9d?RAZYWs=nMpHpOkVvlw}z#N7jtAzt@ z1`h|RLXx4L*S4R`0R$LNr#v7^vz5d}go07-8-NTlLmVkm2aBPgvp!w+xz7AxdNf7fIM5(Wd8tjka!${@#m)nKoAXmKaQ~P1Vvp^D3T;f$e^fH zZKnhrFCIb9j|V@$KvrCmNk06cK~W9!&lxJy$ibs5Nn`QtKOPVI=cox}g;)r9=v#{v za>paQ<%HnOJ;bXJ11a(b);#=Y&qtjhRkxpyt3qdn(ww&D-yCUdGJvKsd>m(=j(lKo zjt+U~vpHcy??@Oxh9-PVA@$Zm2z(s!akwAP2f_H~o|`g&bc#2&_q{0y2ZdFuTnMFT z+2afpfS?yu&mJ+8hB(GQIqCC~Sx4D|Xk^3d1&T#fs^xaZzpYn3SPobbw4ca1*wR&>ir&4Gm6_Ue1(^m?Out)(hm)yj;tAnyI9Me~7Su4Y4I{3trSwOmoYqDoHvf zByEs_pudyM&E%d>Uia{B9TKo}3KGshr6>el11yxbr1}Jb^FDF1LvCMJB3T{F0Cux+ z`+og*Wg}>g@lCZm>Hh##bx%#&8yBh7)S{j_3d*Nvp!je>9KwY2^VLb%940sg$W<19#->-ND2~i2j^>D~Mld_)LvPLgpbq_=RO2Pz#)}pV~7`bL&=>tm>97+1E7=?A0YXmcH%vC~LsUIN!05Jv-b)*2&cTTKa z-R>(;qhY?^N()9Y$o~Kme^BT5bJK}SKb>Ady=r1W5`yA6r}k!*r5SI{6lzczCb>Rj zkwD~<=PZ2j&yoiu^yXZ&RfCO9esE}VLJ%rw!v6rgGbB#dM6<^#+qg%gMNr8&KmbGt zxg>?bV~hgC6UR;`OO%r`VQ2Ou-}rVThkOaaJ8nBYGGDx+qpAHV6(oQff~DfyrTsa* zav0^jYw2vjV!0l?zy6R{RA1i=-{sb1&M2onnrY5HbNG}00OCqisisfEcs|h}{&}RN 
zANZj??uA5jhc&$t-i+cpb}Cx4T|HY8ul^0H!;J9m?tz$$C?pbCpB(kg@gIwDydlsc-TkfxdcLJwy6i{X*m4)EQG|;y7SLq!u$0qm@ek02HM`$w;b*BmmAqXr;?) z8?eh}+~3J60HO%(#L>?rMZL_6!I81Mkl(0`o;udUaqI^akg*GI){u$udnW{zrue$8AT$8B@000TDq?(mI_$B-!hEi6vl9d}dbK}hG9?Je5 zJG(*ejTUWFNpf{K<56cznRi*Klfc-&??WGOboP)rAmnx6hl0=Hc!E|l9WJMP0FcE> zHy{=O5Tj83f5dohCx!sQKt8`5xRkIqR)@z=Z4>_hP&C58jQ;Oyg=YkwN2EC%H_7mN z#!}}@Kb(iVHF?kn9z3E|@s1gX?kZwwrT*~5UFo3LSnXcDscIF17LGdc&9Tb(Lm~YO zpC=p);~x_uM6z?vSvgy}*W>Q zS6!33?AE(#)7pCRvTSk`LmG1;#S$nG!k~LSq}EvakQllgsLyLi;)ofIzBZ`4$OJ<>0@*a8^+BPnB&yP zw{A%UDf#kG9SF(V9OFacbob+=D(M}ucF(A1`?X^~wmBMAUF=kxg##quV1;EUT|+vy3R(;Rf$GBAqTu~f+pS(hrn5#RzCdFLK*G1I9k za*{1^Z)P5lxsg2trC|%#ELg(vlvB9iFCg&Va(Ez&etG~_4q-wy@P9}q7KSt$)O}$j zRV$+tupPoibSwc;j!)`QwU2;*9ZNLKAcZeS)K}eJ&=lG~KRcp{(sgUqcLHnAEIYi9 zY>YTQK0Z47-v=LA&sL(*f~Aq8)2ue}xzy+J zlYlY)y+~+-6@_L9Z%m&S(nTd0GP;#G+C~m?IRt)siO2wmo?vj%?!Tv<`+vE+14+Gg z z-4Id{!miRh9-p)F*$Q$FTavgwKO?R<D8u~$siJt6&c1yhRb6a9$(Ll zYj8;?9mHf|h;URMnrq|%`ovRJ*`Tan?{8S}*1kWi?7*Ff2QBhA^Zvi{>*}=kfhHO^`&_)P2!$EkN+!}f~qC{&VV{+UHP?78G|!X^fJ z&t56<*AhV};quF$%%RT7uJypghzLZsS;jdP`@IKHH0Ait-XNFvMW^JpTa5{{YLcc|w2= zB9>h7ss8{EJ?mdd?nyg(-k}wJKT{w=g&mByfD2b@A~8G3G5jd?0b&C6$$U}56~?ex zC8?Hz-q8Rr`%imL4{r8?&5q(kXItDe3RS-Qa`>+I@=e1mdC` zGTKQaZQ#k}m4P|n`2z#w1LVj_N=JFY4dD#Xazi%A63rA6aFQLwbu4 zQe%JoNo8or1xX6I#xub4`(vQWhY9*?4-F;QYe(y;jO3`1RlWOw;D2sUkCFRwWeQ(# z_(ME$5i2uoc_nEA4^m8p3(hh~C&?Z@PmhqrwP4RqexABRL-K`0ib&;^l#?)2+qx7u zRmOj@;PNq^K^-s}CGpzzeWWv_k1bvD9kCL)7Nj8uxHk7k*Z1B5K;9awM%8ve9wF%{A46$LE zl}UUm;BL;(9Ah~02S!qfizt+D@2K{5gii2OZ6(<#)r!WO~612_QthrrKAC`&=i zO*#Gj>jLD$bsKi7SBhHoWYh$TUMGl@WNdQ9kLm#A4EzK6=}VS{sD&#lUF+b``J!7z zc{SRyN)^Ve>SWsGRPD*hBxGcQ+f;chr0vE!Wr=dion1D2>c`mu;mvIki>A4b1-|v5 zkw~!=kj6bj)=0CR5qa0#O<#I^_o zGL6N&1O10zczi9y_-_lrj4Jzta-#sk)?pel=SxhHD$Ggi&X%Pie_4C8vpczH+|jJq zpI(57OtDmW;dn4f5;KK6fj-;v^OTy1hwnK zUTL0Oz@UW@qkFR-k0&GXk?LOxn?6uaxu8>nTSOh_cmnOj=IZ}>^0rjAB44H-U%T0NH{{T4Edz<)A?@rY0`yIomu8}2rF?#dVt*AWK zA&2~KGYX@G9FV6t2L$7vB;lXdUx)aY5Jm=Esfa?<5*DnW7qQ9|NDkF{0_?!S%uM8r 
z^KI4Jtv&4-l$uh&4r3~+Pdj#~{jzb!f92P{xdn~lGj@g7u4I)GJT$vT#pSc`4gmfC z0Qbj8((u`uAwH?;{lVHp>D9Y2*QKC$H1e7*5X*7niQ-ncAqrP!*;qrJ#PMu@2MWYsC4)F6MGhal$0=h++0R(e zbl>)Swsf(2wAAWUxY$bzDV9q_cm@5^PWbXUAn}3L!Ot;W_a~fulZjWvUl?&iE^wQQ z#VX;7mEfBsZD88NlWSU)-BGl?FHLImM-t0w_NGK>Nf>1}Ko0HfKTK}XoO_rB8R*j~ zOCSrF_b-M20HC~QiBCk%IWS0-btNP&V_l!}2h_TlbEIOu?e%X<(QmGiquRIJIz3r^ zY301|n);2M;h4Ke8ZnQ^027=p2v9TTKN0Yr6*+Sh6r()cgGbTyrop!6ezC)RO~d{W z!pq@286Vk{GKL|!_lvoDl9bh3#6NR=b+6t~!33~arA{O28nL&irccgzI3J`RIQ{zb z)Npx-Q|Aw*KbYT{{Yt>%5*rSu8oFQC_OGHeEpDvBO-jrU4MbA&Nn&M;a7IYReBdgt zl1>2Yqffy2{6-YbGZ2A(^QM}Uu;yZ%zv?y!GNxhGm;OmHyy!YT>9lp!?=`IoUiWY_zOOOT+4*0ZV2Fo-*4ilnkKD-aGCF9+~B>yet2sUtMB z01JQ+kx|S5TGV>;k151)d`2jo%s=8A=1fLwmi0Ya+&6A&)4i5s4mA`u6km4dS1jNpFw1KrO)anxWG9`JVe?V*Wr z3|p@?itT0PGnrMRP+SuNsb6_b#DlcpC@0{9@zteeNJ%CBWwboMh!{PjX`_%?tq@OJ zFB+9HI4gxL!wlJ89D$6o{zp+~Q)WW8*l%rH*@rygX%S^Iy*kh(kP>qgL?HkLT#dNF zJ9D3mj0}!|OV1Ny!{^S;7+gZC+gZc*19f{@RQ(S&E9W?pa@X};`8A1w``pomdvt!Ghg)osZasbzgs+6Sn-k(C%6 zCVVDxdWA((0aE}+&ffaNydtQI2&B`6SrEik1a8=qfZ1kKlwgEooB_Fc$pfR96;-H* z_v2qX^MoPYT9W!&Z8}tvPfSVudh?iPKwYtz_{qpS2ntjW)0`fi6Ux!wU`Hp-txaqw zMUL>4H4xPgRbjmpvOyI&T^b~?vQN2`WHBZ37>p<&95y(|LZu}|H!9SMcu;9u>1aZb z-n(w6YW8lYubOq|t*6$S%OZqkW&7z3iAxGe3dC%{jzOTaJh6+M3=p=*y zkWJVIigNz|En*Qy?BJ5;4U_Zu{{T*`M5Z6r6J-QYea|Rezp{c){K)C1eo+G~49^VF zX*Q`0LoO7ObHL;r{{W{+0n-OCw0mFpUhb%GeK4(6r8T6{ugsPikd$cTWtCU~hzBJ<`#pNv$pT;dvg_lt@}I^wbn$q@PTlGBVsZ zlgaUpyyVMQXaz*J^O9(&tP|V+Q(Nd1dQ5v@Slt{sHNgvWG55`Hy zBd*ur$srtN2~ns?E#dpb6UZp2_xyD?krEh0E5xj2e5#0$ zvB1s-GJpGY4D>NO$GLk8M2ku74F^ez1*-&lYz*ua5bo%%?CcM?vpZ)P$oylkl(@bi z;xY55thvsi;0q5R1W1acKB3~>N|jjAL6ockj8bKD>M2$(jmLt%(hhc)Je+~nI*C2w z;CYJ?>D8i99yR)f+6&n|$*Aa($s<#VgH^CjJxCZ68*%%4P7mrrK05l7fN^ATYz(>P ziXoIg+&?$1MEINkMEP7x^fZ)BC#t`k6(Qz`k4>a z4%5@>+}B%D`k}SpG;CH2KCKih{TR%ufIRCMZKa86d=k!}A36BD;goSHUyZ{4bpBzu z)POY}qnNDHW+)b!bMM>h60uLKTlGv(!yEz;BD!Rim9xRm0eC;(3=bJScp*fD07)jT z;q!l$HE6u>{RX{8mg(dkmf}fN$EZ<=3_;-YxPCc2{FayjJ<)$Jez1o)bXTlCm3c36OC3J)yf 
z0FZJAJdcye>bXj+0U&k$JZ;JlW`Q0Uq%zhwq-3(YUKETe*|~Bzc~HT!G7fSNBOO>u zmnelP49stRL@@(MHl1u-1r~IcX?;b88!p-1?f{P)hHzUWBo70qrm4Wn-k&@8bB7QP z^s{!NN?J!K(x8nF<=Xf>=f)gkgSQMZ(g39jBd4eBUFcyLD%?*C0UIa1T~aD>Br-(q6vk3983eWnJOWe<`2CL_3Q|m>qDz4KzwGb!hZTvbw6&vNNOr%V z%;GB&x_?xT3jhZo737Z@2b`X%=31Iz@(=U(v^fAkQ(Z--V_~v2QL$8?VEHElfq}T= zKOdf?N)%N0_v{E5`q>hsZ4xZ-ezCD!dPjguXUI6lK6p6s)1f3gmOnUSA59XZC}nX{ zBb*lDfXT>J3{DR?$B<9p^noDIIrq!>htd}uzMVNHm5i$+JaQ~@VqBu+6&c|Re2?)1 z@9xmf(B7UTN`9fP)h6_}Wd;=GNy3rI&m-h{>NSy`ig5n`wP4}9kdwLP zV|5*o*0_^l1%c1E!*lz29QFZ%xiBd^Wj0j#% zGm)MM=gthIB_gi-Y5R?>3}K3G9{Cb$OIoyz>9vUSBycZ=XOGJPwD1Vzbt)2-sVQ5F zeD~?r5Qkwh2Z5Htv!r{K<2`$;B_Ly zntQYhZ3CIGb%px*x8kngl1^?c0CkLlQvK~Z715Hc~wFnGrUEBNE3 z**7$Ur@!7Kf-3#gHR6UyE37`LUP+DGH5~F3?En=!zXQ%ZgUTm0O+9?;(|DJZF?ML` zdJ#OPXr_S5lDvU8ZE(9=ju>Pg82$cwQqrZmfMN?XJ;S)=3}F@fiXASLtvB1KWe~|U zk%>Jr!Gmq~dGa|&U-tMY08_Q-Wai@Vn4*kdBo2)&8ewi5(dPX z36Pc-H$MI8yVd^yieIZVdk3&DXp1sS++D$4>qT8i-3@02C9uQcLgg1fsmR9)#B@K? zpM^1>5%Gy@Qh&696){=_{{V>U*;BifSQw>^md(kO%(Xw5@UpDBgzF*NySPp_ zDv!bc0EqL)_v`djs^rBh)+K0nUtxA2)A^u4*UT|&&Hfr8-d}bR}2q6Yo#UIC}QdxAJP^2Nh%If0jR0cB}^i6tR1)6{@U2k z#cy2i{^$N6KB>?3Yg5xsmD^f02ELJ`$0pW>vZE@j83A3FdpRR&vz1jm4!r*W;6I0x z!BQto?-EHRu33N|?MR^0#yFf$#h7VaW=;SlOPMr;GAdG4M7X<>ZE)p9gjqDo7O3|X z%33rP&v{OLeM?X=BUkPCm;kYyf4#7vc+LawW0EpGk!#L#wN1Z%M`8KD<3siL0pTd% z*kgvmfF?|=#kEP$QsYFYQQkBxaIdXB-KRB?+TFLQYBuz0vPmWVLe2SHYF1kt-)JFr z@#hC5`M@17;qT#avitI1DVqkUnbM?M84l(dY#uD0w8c zVIYlnAYwMIbN+t-e}BhZHXjW;7O;s)bw`-3VKq{s)7pKTZvCQ=NB0x%RrN^BQ8D<> z$^QVKKet|qgyDF891}Ye-kG4=_}|Jojw8n;>sNcW|<#(4ciYcAfS<%U!d$z&tRCm~aU4_`S|s;%AMlScYRH zQr5vyAWgs{4*{3<&*$T-2*fQQfLIX4_I9Y`dBE!f*U{#+V#=d$X$p*5gvTv)RXjuo z3<}^CBLiqT@tON{p#eF3L3bqUwQBVzXiV`hVXDxD=Y%vVG9U$~SymmK1v^0Z9!c?# zeg{;0(^Z#e(TRO)p!Vh*!2)2Kd`|}8peab0nh7Lpd_#k~FaK*3YRP2bkF9CFH`qj=(t z)RfwV6^UVj0VI?IImz5f>I~~333T%3UmI7Z(B&}YixVxCDMmIx1kuQ%G=bSz;A~H5 zOa*Kx00#h%o`obR_KH|hyjAKM#xut#y6J9Y%n?8a{1)v@wdlL5>{VPwYGmt{NPMG zQk8c1!sxAIYD!tmk%wGH8-f_P3RJPkI2i|yn9M(wV$Rx7*UGNk+-nJ==^%Q#WboC~ zSWi6 
zm0d85-=~%Y;~f5Z>565toVPPqsOUVrA#{V*tj`h8+(#0>GNBtC0SeFR9s+J~qtE9j zpb1WdX4`G8)0WVy8PD+_bm{*9Np%Gc8GDvtp({;e^x?fnWCNCC=_;Nu0X&{M^k0Cm z3B_?0;s^jrPGQwWhu2J5Bmq+bcqhRpPl; z8Oj5=ywAk0#FAX^SKZII*Z>`T+3`jj883@YRxF#kN&f&3(gz?Z%)8baM9FJM_RsSc z{{ZGe;xh%5z=tDhuo)aG7UTjk#&h5gj=1i`y?yKN&Manu<<=sGLtYcJV{B%UCJf5T zhj~5#+*l5I&V9gOSX_Y4-2VV(e;-1|1S%sZ*DJ>nlB||3CRRBlXvjD?QIK(g`uHRa z0#6v~$-}5CEd@Vw^xm+=Q7@(HB092KcXE@dlr&jpjYiN}`~U{^8E0ioCKm@=5liasl0iJ~QBCdFlZvOA0OQ*M0v0m?vh6FVrnc zU3);*^q~Zm;t;jDC3GR!B&4Z}Ke}Tl!jf@=j=LX*<);P2WMWpJJHa}Bpu`_K^NRC{ zC@dq#nwF=!%NTa7WRSS{=l=knzLg3p6^iIfQZBzhdltWa_w72_*QuoG4qUo2_vpM;$9^;hhWO2j`E8GwJLnA&bmdG1Xew5+?~r?L%A}KW3iA% zhM+9F$dJOygnw0VLa!dsa1IW94sHz2SN6wthU`X_Cr!@#JV@;W=-SL3<=gSx-)0K_ zzoG?_QZ@ZvTL6suP5N`dIma1>ZU$}raa9hy{x7+ z0XAcS?KvKQ=l=jsy{QTyL?k_9wQha0$0TK=_kO6G1xP$F#~^|M1adLQj;^73VyxL& zgnHlUm(?vh>R)S#HLZ3Uz1LIIr6o^nGhHMB70A;YXDzqWl@|^l5)6kRc`wA@3!!UY z5yBzfs{=#+O|jLPjcpRjSxCyY4qp8t-J@!V)WL~k9)zH=DQiW)@usIkGfMB>eJZA&|PbUYcrllt_ zSYi&ZPtKsMN+r~mN2S>7&}VFgCLWO6D6T*Nm9WK z0WM1hwR9bNL)D|^OMhz|6(>Rh%P;jpMtq_NAe^XTcHo?HG4eX5C3b)TXZ@iEf})aR zLdClXVKB=qfB18TGN=kww=UhG`SyF5s{!DV5W>ookM4)NSEC-6gK}*Vaambxq$@Y0 zWJL2bwPa{=RnMl(%oo}LAhtOiWbFmegdx@$L9BgS&hXcW(QeIoU~4hS9Bq$un|AVW zI0MKZj)Ni;2tlum3-1SNOd>AQ*?4QT9-HmOOA(NXhu%5zr|UR7(f<^7*sFg9w_E)%7b^S;M5E zjFYeoiSXZmn8RQaeWM)+UUC$+Vm*FurqzdsV_BBOvQK14UP+N_MJ51HLaUYlaB!{^ z5O8tEdUXUO00Y{*{kVFFXcDz}s>ch6Q3oLddz!B}wpE)=_JR4agWKbZ76riDS@4nE5-X3_a zHHajLuiO{@p4Z=l!A+-(5HpSkk>L4=8F@%myt=*buSj?wQ>A@xtiV}VP|5v0(U@IH z+qmQnq+=(@$n(ciN?fkbDQ7KOxFb${xoHgxN^Kc-gt}QVg^iGc7b9xAK>@z4^L9Hbn>`1^e!L$Ybfshaj-7iSY~sK%nGkslD=b^9Ai!?LDB>GhsN19pQNhgyAt7d1Fw>B`RaV?XKe#`OD zrexr(i5Aw)PYP=M%p*voeM$O#OX`k~TD?zB4N8MD*Sk9@RXIM%0O6I`V{p#a8S#Ejyrc4@8JA5ZmV z(;@q4Yt(dzL;|#7$P8Hde1du8{O28emu8F(6=MvR$dwQ|fNR%U#2EEn#3U_%pz&(87kHyNs0=0s} zVE%Ub#W`8$t155cBhsC#{{Z09`oE}04EmLh-0j$o9bRopOENFvg253}jtKYl=d-`4 zI2U`E)WI0@-rU zG0DmHvVJfz)A1kcXB@^L$-v4)lWfdXqIU&ul$O&b>_bM+;Ylu|?Wr z$fX5EXc`kr!+=sOLFz$x%f76C8G0^@P3m?KgSj;cL1U^AT7__V_Kppf$HEYR`D2Zz 
z)IJLSknvtIlM%$nVIy)o${+1Pzx$I{ z_u46O>;C{tsYtrsvtpLFugt3{i)6RNME;aj$EJ1+0#0(K1fg*!e z0@SxYrap7xi};_7Vt%rtTJ6zOuuIEwx zBC)AIrHF6|;gUWuG1V1u$w>bIq=K%b55MspPrzIXXTv;IJYyV`El5cSNOnq;8#9m` zfZ$V7UE?+%Rr|i?ko~)->T|I{+`91_PEI}^!^)BI=bt@IgWwaE zdrVpE)6Vbje<p!UzxXN3^oV^$>h3h}qOkDqf79|Pq6$)_wA$2_>aV8VJX8BTT@_pZ`8(#r1r||c53Jqq)j@UvAfxsK^~QL9_1T( z4Iv^u!z2cF{{RnOTk)^NM}hF;H46Up(o&v9s;>ZP%z+esAHbd|jx+HcuePA&^w@0y*^b@qO0{&gLIs&U zzjY%8Z|UbdBU6FBSKwrf5;?`pIwdF^&Kq3?cRIB-XM00f!%=FcWVHaawqwAwZbtSlZUMp#PjW-M12rCZPM%W-82G;`_&fq{8BZ1V(kforks6K-4^&VF@hI+J0LLs+h1mG-k zuc}Bsqa=Adi(|uONe6MyBycm-8A^5je~X42a)J%4Me0@^m?ep6Q?8%X6aC3)KmmwA zndB(KfHR%HJy?{GYLH2@H8iW}`q`clBGA<>X7vj-O=>dSrqML5JQ4`xeDJCUFnlNq zaCsv{^9*$foPJaJlcJlc3(@eUs93Ty+s3JKKvcU$j=}HoPa?b zgVYL2fh@4WneR?P;RX=dikiZWB?l_49e{~C#>*adw&9$BK-vy6H)qFEOHoR@<@D>x z+V?&X#6rK7)N7Gh2GmuAE*h*P0*-R4w|3*VaaoY784Zv~WggZt!Bl1YpJ!g$1a(rjn=Chjk$M0(@gRS!UuCusU~J^ zk2yw1B$1ME02n{H>%5Bwf;P|L`{`X?>azOwvm|My)0@+;R#?P>Mxr!jvpeIr$;re2b~kXe$YHCZHhtm*)Wz`@vZ zJ-{D#9B#s|9ChL-64tPgS-^cQe|6rmq8wf%VydqaswZJL!Zv@O#t1|uEOmcT8)V7iHt6V_xfa@zzd&f8%LZk11voCA{Ld5 z^AH$)>(&^9q%Zk2b2~*mmM*&|rxV%@BS`rfTyneF%CiCB?8fc6GEAQ_N_03&(0x3u zEU1Y1o&Egg zWKFCJv6EsEkbgnI`RmC3I-4?B!=(o0RBb?DdIvsnVp($0vYj4-*qczXWo_!hR+3ec zNi!3T%8bX5#djPNk`8`4^2K7Y1vx8}(?>RcHi-xg25EHY3Ub$qnnN2Pm12o;=}OKR z$U)DB$r#2!#{lD~*Ep$JrWeuQ!{vWw2P{oN4I7c!ilp;WBscWTWY;@iAw;0AJSh^i zVesWbgUPwtNRm_#x>12=CG|9;R9d|tfYGDLA6~wmFJX6;nP^5+x3xJWnv%CIUB6RT zeoh%O2>hW_KQcVIAbKm`Kr&PdIe5R%AozkqvD{da-|p)f?UX=E(n?8eNF@xn^T zS^c@m+N3CbzBgsO{8LkZi#I<`kMxM>Y-6y9DPLc^mM9W=H^*8mO63)Xoa+YCjr7IEo`Uce}VmW=8tD*j3L*A>Pjyu|f-*)XQNTvjl5;EV>%Vb~{C3fS-f_wu?B&iwFElN{L zoA{lt1n@?)sy%1746#il#vrW~i1w6$O0mZ!fI`FPAOdsar<9eJ4`AE#@OHo4!zk7p zzpBHgUK8Gp>Bml3o?5EgQ~&lT+!ou;7PiY|UwGx7k4l z>3>YZNh%5CZfudp0W3=%f4@ww%(-0w_xx!Jx<|`2@>gfthACHbMDmFplny=LkU8Cf zpPcyVm6WfT*WZ(QLKLhO6s-fr0F6b9KF6^On5AEhu1B=T#^4CfLHIc738%9mGyuNu z%d8VX<{33d+iL9OySU8Iq^4Ce62u-EagIEU@<_);ohfw@T3DJE@~`b!O%LjN%i2^H 
zWN2r`#q=0KJSw3;%P8c90djnSo=ECrHe}qYQyicGTAN$jc*I#kZ4hY|nDo*Gc&Ao> z>6ztJXkCM@&Bv4AF)T>uKHjA!bGQw=3xL)BIYCg?Ap4K$#*^O~S_sm{ov!Kro6^=H zlM+-LLn!m^8EkCxPqRwmj{jq#U&>T7=1Elp?>QcYV6L z-JZ?w=wYX>gfT}vgc^F1A1n{)48x710G>Uq!TW>ZAB`|y6Je6d0%f*yvuJlskLjf% z>Pm5GD9Qr0PX7S$`&?Xdt#?20?!wISXg8~xhVe3Z*EnS#i(rZOXtbc?k?B z)odp}Jhy}6^y98>XTi8R3Tc=i2)}hhomjE)c&;(i59>ZGK4K$uPp;av31)3sYIjo1 z8DyJOkiEhPAdz5}VtL5lyB`?w*LT3Nk~lCDW#Nz#HA+&IUD?{Bk*p-*%0UWZ_2=hb z+ekY=`r)L|(kR1fjrX@&yl}ImvEZ_xO~IJ{21gu!mtFoF;=B=(kufR?6d{4PH;V@! zCsdJ6UwHcO*PFLym-#M<9eL!8!xYm?B<~Dk{3wV39CAs{Pd^=2#C%evA+VAOKkna{ zNH5Xina3(+C8oV!)ADBP}Z%d&p#qLEG&{DVepR% zcJ2Kodq1|LpSQH=)Ftb2ykgYn{6Op{49A{W?Hpirv@x`6+oz$zMOYR9!)p`E5%?#`{Qf$?n>u9bth{-yFB>-&rGdK$rG~E#&6A;sJ4O{t6P^|G<2fIv_vW}pbq1|` zcK7>37Z80*&|ZeDlR_c4O7du6UJH|0 zfi#H(4;g6yJH%k&h8u8FGFW)e#&MC=WXM-)Ggx!eXWN#sBhjEOTFjB&j-|NTX`ak zk<>^D2`dBFt+msvG&iQo;&Bj)Sc#L~^31q)3!LycR>>IW7+z03G}NGnac!;pk6Oc4 z57Vre>{R=h=8D|2!q5e-(W8yt)>DED1C9VEf%)o0NM3aIbQ|CIgA7MFedtEE@5aoU znT~P=recUdBo=N#JDI$Wau|FQ)67R^vK$tG6^1D3pKmdgMGQ)-DzGx;np|K+s=2`@ zk;VuO$jCiJ!D%_w?|3T5Z2i5-%yB)KW*cLcD-nih<=Kz}s}jq*DoHuvP7jWyT)8rU zDJInn-F!B&Xkl23nw!>X0h(yv)l%{l-WiVv3&1>nGx5-c1v82_Pgrsq4qMgL>{gO@ zsRyF7B8eMnu?w&qWlr=YgPu6goOC^sf?SIH{>|>t#s`LKx`3KG_9)9@FejF^nN?<8 zrHe_9c*g{uNMJF7I-CW|K^aa{M{ghuETq;2TVHKb*qPt?baDFEb~fQ0afaU^Tyhmz z@P7^Gj;!Hu^D$G=he4{80NS?S$M=9)2tuYC0RH;gJVoD~&pUbC)P^x~53mY7Aj zpG6J=C-zkezvb7}5^(Zx$V|B*l90ooic->~ASFh+^*8m1Bqtde*qmS=Kb(Kx$5N=o zwU1eU3p8p}`h41r`71`}Q?g8g67wnrj2{3Gf>bk)B={$<68Og~-XajAK$QBqx9=8` zGQ?l;P@l~E54Z2X+sQo}(nP{3!?0Q7EKxxHO05vfWb)s?P4G}$tz#5rquX4N+5jc#Iy?_ejoTN4X*aDXVw=OraDO>g zJZH!rdh$<>r%1^m3JzUeS-$NO%B#llNS|6Yu_Hwpk)i&OSsD;j{e2oHghDMhAkM-dr=BgSUX(<#=q24l+sVB9y2yS-tY|*PwzVwPFKS?fTvA+c)THYWFF;D>U*& zF6M#2Az4`Qy*bEIa!%es>w5z~5ySBV6ND_J@?b{9U61b(C_UrK9p&3sI|I07(sd<9 z0aj=&g~C~d@=|33l?(?ec~imb=#CS`@`f%^L^s%cq@JR_?13Pck#ssaObCe)8_^`f={TjW9oQaCqy+zBrh}4J`?B zN{^r(u42l70I@($zZ8x9cIC}1Lt;DTZ$h@&tVXBQOgw~07=Tgswhqn2o_O)sm-eVy 
zM`o5cBS2^wwG^Z25wk;+t()$A0(Rz{LgmCP+ON)(5wLQL>F`tyhypQWh!HBtDctw#+URWU#?w$iX?s9SXm=LxY?*iT#7x@J@Uq zvVF`B$?8Qg5+OF?EFuk!#gDfn@JSg^FhJnrs_R6tYU%q|%Ex$MNrh6oscNb#wx3$H zyRrIdHR?t(WF-h#f#e=G@-SD<(nnU)Ei%f|`Jb6^$CWGfhy27eN3~e9D*CnKbtEiI z2IC-H@V_8sLBTlBj(V&VqMXcI=zoIS#0>;B`!T^SJBUN7ss(j&V?s+1p^g+WWXS_9 z?Z7;ePRZ`uuiM1(+(8icfzhmL0hpTFd?H36*tA7}04W&ro(6CRNY9R+DiAK^y7M9g zjbQfms6{GG8j`M++F>DA360K*2kgLPd~tw$LX`$NZb!4}tT_fD{{WG^*WpCDA-Xam zC;^f4jx&#rFgd_JPCBqB_fK+%t9f&Q$yazjzKW@ag9<3r;N65vQ>xW&5UU-wfr)DoN=$7R19I*moPvG4c^JX!70Upq1p)wnPYvt#h6V(? zJP~?jz0w>@45wo_KGs&q;P^RT+Xo&xh)Y^Y+g-i&^@A+X3U?)W?Z|yND%hD+M8(!M zQ-hW$qXB)O5;1~)PaRrb9YrNdsO9da7+xll!}=BBs}tLG4TW=sQUZ|5I3VX3-SU4O zQA%?XO%Ji{p&CRsuc$gh8^<^D_i+;J za$X+$8jS=dYd)PZ7*~ztIDcp(BHk_4e?@K-DR|%ac#`0*tf7 zz-kX5jP7Cz{lhG6l=g+r26!ZZ2MRHDNHH1F~3h(RuL24y{o4EL(t%-+?86qb4#`xPZ#x~Y%2PQ z*d&|+SRCgVuRPFBUN}8P( zXOB?s>H<6V`nTP_KFrT1ss-W|z~)`@|lii6bEXw8;M5dqp*; zU$}p~2wSLtlH!C|{NZ|zr6!+Nc`s=ihJtZD7+iYO&mH3R|!^$h7)EnH#~#Wme~boc=nXSB4~mm&@)Wl~=_a0ZZ)W z;t&)KLV*X=MN_tPi0aF!)vcgOB=v7uj68Z;qXja0h)Xz7J+deOoPm$qp_4lSC@C}* zIz8*4{h`>=wqQQY8a(3iTdW8Tbt=<0tlTke9QcrIc!(;GJ6q*!vOlJMEJ0$tQn~2^gC}(cbTo~*R7Epb?w=2PFMH_DE1*y z=LJtV9(tuaA2=iz=m2lu);N`XSI2m~!i8c?xniox-3{ws<{)idi?cMQ(sbQ}QIM9o zVkH1X$C9c5#zuZTbMuU3E@41oT}I|9qFT6=@5N$2ApD@!`JG#>`bAn80Bn^2embTj zZA&Cx3024?ykngHI(USgp9tl3~c>di!uq3dgOm=OHF=~3PNbS8^w(HfBWse8d z*(|sWq+x*0emMi5gY)E(0&Q8i7D7VF>>6ER*F{<}2 zJNhBwygj%ZA@=!p4EZQV*4nrjB%cQpN|BXTV@BU*wXc3}9*(Y1<)tuNYYsE_G6!@~ zB#c}gvTfc}WkCHTf}oF%3`GS<1!mNGcnW(yurS3JR*IbWG?oe!Sdk%KSyY^+@r(~> z003}vf>dXrbfRMgICiOPw_iVzq&pK2K{TuD!CFUv*#%y_$7D^t(TcHOEBZ5xjDUCp zkt;WPaykyY#;vvN%wgMlD2nf=KipU<6#4|K0U%7VVyD_cY+!+mE=N4VW)!7>1D_|; z-upvk4_I-p=+yPMhQz5-*jZN~s7G{+-1`sKIVYAweUfOq@ZNnJz^EE-3eY zPDg`Q4busND%N3+RD>dGs5j0R&5yg+q|aFOp?GSAeB-81Cfr1qFG4=>9;oL`@P}XtRUzX zeO4NkBd1oKYViAI0IYB#hiM(%i){qwX>Tmb2;*-}{{Z<)0)dB5Gr8A&G=T{z(jsOy z>S#}53-1ZFAExqy8Lu+1%PJ^fOM|xxLYCRjkO~r#l>kBd({o=Dpf3hNXc`SZQDVDl z6VF~KoGE7aEWE02P<`b;=khW$(}0yJsLIz+K2H(lYsVngW=ddQn`%xBUZlU(^yV2< 
z222nahAdae#&MC;89)+|R;@pJ?N6IQ65-&RI#XGKoR?yFQ`8bgU)3I)B8)7t%1*+- z1_f|J1jr{i4iHd~QmUF%Y4c~J>#ZPiz2GXns{pTMJ$+4R22W2IFISf&wVtT(uO?KAGy);D+=w>rA>`cA9d)2{!t>aszB+ zjzfmeEJg+oU5~;Td9(PM2xgWX>9`M}E_Bdq74Zs6M_;9W(c|qxb*($bry)OE50?A` z_y_)-d+uPA<%;sgahd~;)2wWYMt2@h=ctOyMENTo#rh!x`ZvIB*h`~gqeWH&BLY3Duu2OcmdA3M3U4>1B*v$YZd2DSZUiU z$fhV{u^S*}+D^|?!D6GHPXuR@mMsi#|b=U4+^PSnJCm z+E;TEoRw3P$Br^vg6=E0r#|}H{{S%#&1eq9mi2gJv8UgL2~h;mAk)w&Ts}6C`-TfE zWQ>p-{vK46$w($+<{Cdb>0P~uWgVjy{wOuJmrv3(di0~UQv*?bQW;Abg0!$l9>6gg zj60mXyrM^dG-3COGbRGOyGNMJ-liHx!OtN70G>MZtki?b z@#$~j*IU&z{@s?gmk#^&BsVO>1pb>CHmFI6An$P4i~)g=zbCIL__qO`YB6O@000sJ zVhJtNSE;-*TsTPU8=kq`8oinngcWXE#GuuV%dB!hyn2ij`@`?gw{XEyaqsKRCZsr) zxr_sp>*3MuMyyWHSZ*6bP_w6L_UEw`d8B9yJ&l-0!$@<6D8tVgDp;S4^`4Mcr*THT zMftT3?OkD*z$dnDErC4^ckO3b)p0zE$faC?Bxs`!5r9xZCC@ktTNXm)u@ev&9~$$k zzmVq8Ko_BhwUE{xSRNfc;Rn=InpBg~0Z~R*ZZ{Xp5gT9~yybJ!3jhp+kFDK}{PfYF zvX_c~)c(Sc>L+JcT}7j-au*`Ls3Y|~Il#-IVeWKgXz~v{1_vV@Zg8F{JB3f}B?mz) zMFWjG5mr4#j3OR}JkMw7{{V7Zvf51;Ba+vrVYLe|v&#(+MGG_!Dz3vbC?C^|u^yyq zeauvQDC4+OC``=2vjak2jsao^GfER^6iagEGYt$%?Yu!)&tTJ^7E^Eq`#CGdVU1WmD zhC}MPUBv?J3=0rJEHDN$%CYLgqy&X_Yp5msJbX0VP>5@q!lthT7vilf@W~{QD--SB z10QmRK5`fj$KV`wR1=pl1%c7q*!kW9(97w{@}*|6EXyeXO(6{=UY5lO!*Sia!sp|_ z;~h%5geSECbn5lB8(2*7^jd^ftgRs;!wZEm%3eZ_PII`Q@nKt#F~`pXsFaniUTd?s zr8*6&Il>i(Yt;rQj8*6L-JfE=u#z*zSIPeX&GEbE$5E+8%aGLP>~iG}(Da+^Xt9e> zeZHwE^jGvv3YH8>8;D>>vz|fsWR5zK36>PBiX;LK&N;Wr7SR0AUVpcaCNQ5&iOlXN zk{K|pM&NA%mF@ zIbs8xf`&Cag7QH*$3m&(BuPjh_&tHz)cL*P@`DlAjcKH|WTzAp`iU${QmKt1Rb^*n zPzVm=Diufgj?e+>Q$0&kmY_SquFQ0xATZ}oARvPZ2$GXZt!Hy6jbp%$7Z(u60|ws9 zh6f;S&Tn?pt07oRuK$dvqZ@>o|{T{MmCb=OKsV`v-$=(!BNz|-2*5{ zXB%s3=r9!M^OKGTnmr9`mqE;QfBay!F4VHd1xPLd@_e(H91`({YP zNJ-(4tMEVwa9HPV4=1H5IClkMhP!e!*0uufEd~^A5}HVPpiyT}uM1gs60KPWrU)v> zBN@OOeW!!B$j=p~l!X$Nk_DVrt|{+cPcO2oCo&0ocov}f z{Gk90NWy4oPeanXW@+PuM2&D5pKlAmBztnlY3C{jjCE3a7D>t&8kQQf^Y?qh+|~pd z+iCV5Ws*ppL&nRrGO@|q`n{}u$M+oaIw@#S0ho_^UAFXrhY{uyto9^(EQ#r|$pDQA 
z7-dqUX+Ni*&hLYh)L}pol^_QO{s-UAkh;P&y*|{#?i0|CRcNjsHx~);O$yk@XAJD#xaTI);^C*Pe#{ey8W(Y4WsMg0&M8={{X^N zuPQkW7BEzt4oda)*TCP@85rq*gT|VsggcSUZjV&+5otJ#=n|$YD#ztt(OZwFD{K9? zt=-fqM=L{HQDjwTZ8?=+3=Y{NRy!n(ysIOx$lJIP*SISZl&O_tVk!vV+s6rr__-V( z7;rppNtAS2d&p4Tt~9gnVk6{DHMJ*}oYQHVf&Tz_k^cbUEHFRfXq;k96XdId`RY&Y z&kjI7`7s_gjS1L31My6}=Oo~DN$(G+HqtBfyN33a+`Zcc>ZefHYT9XQBE;$%B~1Dx z;PW01)0~5W&sj?0q|H}ktFf;}`f`st;=cgT5|RF%w@kN7DAc>Ehn8lGl}&Pm`wt9O z5Y9MivI898XOZc)8$Jif=c+4|xo5r`-tpvk-WiSI%E^k9P)6dOcHhb-Ws)r_tk$lR z9B_^9NXadp03RpdbS@Ax8%82hsp}HuRGWMCF`Mc=*EP*XYpLzJz{dkG>8PLie=mdQ z<0SdUGSxJxMSA?Bw3C2Lxk5?&w?FPY`Eu5R&hT->IXsMH@_hdQ-~RnWSko#nKtYV0 z1N;1R^@vF@wN+&&E4TBWim)b-;8V1mGiUty>BE?hSd#2@rEj`5crGku5|w1%y>f@v zmGA*L+*Jq}BjfYe&%f10l&b_2{{H~}*!Ay&5ZFnoI;eUkFY7gZU8M;Xi~F`sBw}<~ zY&t1qM95IhlyzW(B#;5;Bf+I3D*LA>tv2gd>GzLAdPgIU9Y;{9SjDT>rw~-L6=;^r z6#XwKE=ToCW7q};<@XVj)rONbi#0>k)qrvgsjsD)7OY@alJ20Ity=V?w}dmvJ&E{0Tq?i*T>Dqt32bly z8S2cm1``Rkz|+>hAn+POblLS-#-_I{&Vp09XN(zSmVNt}7ca(J18^X(2M2@EP3#t= zt5E*{DtoK?LQ9C1)q8?nK7@oUR0D6aZYX94hT%lPL=sfpr@O9QE8-4sbezQdX0LiHQh5rDUlvKI2dv8Lst7z+ZHCmLSWcztN zAzs85E25C4Lj`4FpJ>AJ3FjlP0!}3AD%jZi8Tufj%7h3 z6p*gn$_d)o_{hQk06xBwN|c}quMo=sBs6vjlo)|0(qaMUf&P6+6wxAq0zI|#r$b17 zk#;_-UqsZumA~A=ZbdK#TT#vb0E&+}0F%3VA@~G)h$pWY__qf&7`5U_8D?dvs}LJv z&A!GXmQ6x~ADmk4%`$1NUI=80I5vYyOMb2Z-cf^PivF#joSb~%@xd9QV!tuUYe7e~ zsPu_KcYJO+MjkcwV~#dBDx3m5kU_vXB=gi|D_?XTSNZAn(i4FMTJDyM zOrudP&8XDOf(WOb(m>I+z}}fSP~Om4hDO#-uJ>&k&-m9q% zA3)Y>hL$IfxHR^)6Uxh$+@7ET$t)L~08T*}Jzl_KlZ08AnMqw6ydLG;w$0<#0GH_* z-_-nlOMm8?gcWqj=7}prx@}miR<49Mq^z)_t~{VJZjc^10Cm2OFOKc|LO~jU0H8V! 
zT(pSPl!0cCKWjaX+Cs*YMe0Z+-z1=JW|4~mpn=E(k&e4eKN&NegZx4Rc%J%8Z%*u9 z;nZ$2+NcJfBn+v&)VAeyjD_$Vrp*48-MIMTpA;+NG6b@rcOC)=D=B!@^#1^>6(^F^ z5-~$7G>dA|sU(CV6CobvU{2N~;NeLmXRa1^3{p;DAC>yC_4Xiw=pH+ByfmkVF=ZNA z;g(6vn5%9df%^#}q; zLPaEz@?#sanCBTS=LNYOjC7}PWwPeXwjYcBPDk!kC4H!>H&f0p__yf`_hy2`)LxSz2NvYIxtpCdSYq=QJqmGiQ#Ew zjg~-KSoVLu{+J*}!M)i4fr27TsbnTv&)WU~4fefZ$%Xuz7c3ZO-cnM= zNi1YMvq;DE$?6hAs3Z}zoG8w;JRY-noz}#N}8Iz zXA@XPonrfAV%j{HNFYTDvX4|ZyeP)h%AMgrz^aL2VHtotjeY$ztVl~*8k$$puS5o` z1@Q7s>Bl8=Wos_=bLsEOf@CU70fD!JfJar-wIms7{WPVY%VDKVFxJX%}L0M^^ zR2EfmHu|^$gJFozjBW9Vl!<97Dg=3&=;iHu)O3T0%{og@q%}QEL?io)6Gv1=2m(#M z@Gvki6mU2I5!D&%?*k~BH!ADzSg&x<&04kg*KfJqR63lQbASoRCp@1R00V|Ux#~43 zPC*sv@73)IsSN7+GD~7KifJxlXAxM9OnXoO!I?QAunU043(3w7dY)X|rCCKe>v8E< zYuiY00K83UHAOp?Z%M4kuqyq`hEwfdAx;#63lKp&c=B=y>JalnQcRrSAh-(GPEOirAFMuFRuLwrTv&=3j@uR$WmzFi zyI`p*7xyjBJn@1v)FM*yl_tY`YghaEQXJ9_)AebgrS|C48ghmpA=@IW09kRyLv7o% zk_i~VJu;M`M`P?Cxu-hA2rEyzTs@hTLoCQlO3S^WU^bO2kOn;9gZUnMuC!#LYTEDo z?+6$I{-H&+kd_(~C7H~Eu$aU$RJq%N4ls;3@CO*iI<}KBPdgmo%mNor}geoIEAFlKD4Y-SEHK*2(J-a3$}&?aVkh7@g^bNgBm3}7VHE=>!$trb|I ziv~IDEK)j<2H40vF$CawIK~b+6v#u-PEMdwo9_gpv0O=YQv(Vxt} zgd#N%?XKa}<kn$4|D z*}YE&+>M60IeQE}OzxE92P#jyB$Jbc<29!guQbHWB)KQVeNREw1UoT>QI=?6s*_ zp3-Ha61;JV2wj;0vK@DEkWUJ8f(h!B<0&8#pb}Z=OIOq0kVypErXGUb>hxl38f}HI z6ly-sFk8}PW+Z!n#y5L%G6^Hb2TY`ete`E~>u;B>ONQ1R*g|U23kf!@%Hl(@jHD7o zoa6;w6z<)?$P4G3bsE%PxR4v^)Za~u!h+?bR!fUDlVW)hRc2`YH(pCK0fNMZ!OxZi zjydWvGxFsOJoye)K2Wu2?9Szh^vyz_t##dtp;ErnD!IsIe_>n%;%dA9T@5iMnF8)xL+#Q(Dr+i@k+Bq$jPJCemeN{YWGSmqL91 z0K_of*X6#G_#flZKmP#foJ_eJfAu4;>PP-Dlg2n^7mZ;*1&z1pUa3ECV&q2EqU(B3 ze&`X-k=MOgg?Fzm*_Csg5ubzb!yX7YU_keRGP2NI5a5dtbqDF8kCynC4aa;rjxuJF zmFT3FaiIi!tK}7aSF^=F^qlFoS_RZJVp=Pk?JF2;E)h#GWB^J$6aBn(B&>YdGr%Rr zUy7NhGDC+iC?lAxTq* z3IHv_pFL}*8!2haYkS+xqtI}_9q=r2QzsPi-$kb|^A0&$@saoEaoV@5v^qq3y$;^3 z?W+p3=kAzi`pg}eChYL5jjMr-=crb$A3SAJlKx&+c8YUkd_Jklnnb;Plq@P*-lC8A-?{ z7~J^$CFhjqB&8(J@8BFBhy5=OE>D>I zK_}sU7^M-G^kgoKlqM{(;KK+*)VEHu(-9a5CQAu*=2Jp 
za((&`e{&rnhMGHJ>(!36sltXe&@0Fh3k<3Zdx$JKKOAR))1fMy*-hz9Pt%~*u#F(= z8jY!AtkO=X%NX_SxGs#SN~(nig5RJsF4RyMXF2JaBr?gZgVutN(`{`8^>~amY}%~S zRtU)u+|6B1T~rijbvW8L9Gw3E#4xx$5y}*OwO^ly&Kb3iNgT{zGS&-PU|%y37IX}t zV|FqLBOKu19Q>ASfYp!6`#rq=P|)e6B=RJ+>>)!O(yBtEhF@St7&s)H=a2B5fzY~A zmR-wT`u!oyM_c=fGSvHUvql3)D6pQ^I3K8SyJ;LR&m`dS)r6{A!Fn5p`fUE1K#D=N zi7C|Sfiz$`b9jtlU$8gRVaJ>Hjh!Rj8_KJnV~Afr_tcB~|f zdZw@;glu!eg$xJ*PUXuFjP!!nDrRKqzW%rN&@S-ol?7LqQ=d|}YO5w9tamg=3^ph` zcex4%?Z7A`kk})sq=|D7(+7{xgQp>A0$R{C?=3A3oaGC^(U{nx{{T$v2v}uba~!b? zl>-}yIpd&S9Y35a^Ag+o+Tr&t1mp;ii&}-Y;1ahk`8T(}`NfLJ*pRA-)l2Oy-ok(4R3U#H7m zph<>w*=+Xu!89`11eRs8*;BBwQg=VMAhP72KL8&Fqn%`i1bo2QSAVO+g4Bb?qf&;E z_UaNCVwI(6qL*gi&9vYGLIxp+w}$!P6VxBwm_Rk=Yo~>V@J~oDO4csafm~mfNR^m~ z$ug-7jvoPs_?3?sIShPp((RWmZi*gl>M9M$FeEsSH~#<-`VFq~?;4jROOn${vDRsu zY>H;w+?LORn|$So(qIQR#lA6Es>8Zxf#jMFg`l;oE-Gx(vSsCmx}9|kkawCeGKeM{fRcecBGKpyDSlj z>;2SQtH6>IA$ZZw6D2ZA;A7)C>(9PA!p&5vIe#Om+d{)HjoK~9n5DR0k*m?}DwUY} z?y6+<4%JPHm`+rZGC5V}BRLp7c*kKej!^UieQ)lfv2QT5!&*mm)2DH4LjaX(!q?`M zouz_d{YTyy@BzY)aoxv|2KNn=Q;L?kqJcxu*lFd?24T#S0no>d`;)cnN>IfM!?+9x z;$jP9$OHE|$6nnp944%>ijsjsLP@X?-j(#1UA`9%@qjO-Kt9yY_w>jxJYH;1CIOocA6l&6&a`CRCl&L{6bMNn`f5VS)_T}rk zU(~+n(5yxpZ(3Mp)K}GP96T)!NhA&uL}Q;L&t013@F{adpr%n2zuV^%uLPu|lBOPU z<^2PO)00#rtp%I0gG0J^oAH_rv<3xy=Qo5u8zRDT!NS! 
zgTko*XD5TkI>ejBCK;JfD(uZ#`}@S1V(PJ2TXm{cbo+!<0<|HS>$))5fOD1rl2_aM znE7LjX_J()Rz>&s>ssE>cA$@`6e%=%hLrJHM~~B&(N~Gr1TP7KSe!Ed0E7Z^4s+0x zEKACj)bli@Y2~a7kZlXuB(CBq1TH~~)pjZF04U?|7bFfr@J~HeLXs*)KJ~v^A4n|Z zn!%Fk(#uLzHKdWGl0}wwRS-uSDuvot{pMS4@<=LGKyPvC&{`6J@0*sub!XPz5Y?{Gcs_v7&zQAO`JwC1Ok%!k8P%~A@vZ(ouJ#(qpewNwY9C4 zXyp-*>c?~J2OO|r?Iig)$0Mqz7KwzY(_MNGesLgoXlB-lEjrV~D24?q>6b8w@>48~ z25_L@0^AId!)`qqCZ=J`{ued(*O+J$S`DilDHpM&wJ<3xJ78H0Fx&2H~a@h9g;FHEN(Pm_^9X@E4Pi4h}kJn1`P%vmbpQSGO?^`WR~L^<$$gv~{%#xs&aN zB=rv~xl|HzHjqp0`8;^(kcAQuOBTJqZFG848Q!rUe(bc~w03m)CABPKdD^1yni3lf zr+?`nZOb}G4#By#=uYLW1(#Tm)NfbRCRrm^jRQv{UvSZfh>U)%fe8WB zsKTyS0Q1jLtR*WZP%Ujs({^rrw1Rhu7;XLJHk!qcwZAN}GU^hjb@YOIL;;P;gzkPa z%vX)Lf+ODdh^$8yplgvel}_S|(vE79x&9^C&#LYDNl>xjFHka7H?L ziBi;C{XfU=?_UvVU1odbT5z_$p_;NWDHJOl75ttBeR zkj-y*EHz;FXm%HbS6k&CT5-o5GfcJYOzIV1{CMBME;g3fNZ=EmI*=tvlBE!GQ={lP zJ6w4{y<2!q-B_i|FCBH%XEQu;tf5v(m@pB@rHD{Yai4&5!6zURXEB(ja2(c~g4NO; z+R)Cct7=+o5*c)fLOewSy04^c6xrq^TgFs^lAoBanIlS`bNd zu!kXw7$s@Z(zO^Gu&Zh+?)#qBW`ZE`ykLsM5j2PTWCa{DsK^~!mx@%fL8vtjJlx!M zCu36rz?U#LaCVNNtF)fAsUfJ*D^{4)O9$*m_|bzF3RneHU9II99(CzmHVfytylG~nH+WhuJW*I#Rn9f+C1sjaESy_oWW9|cvsUm8XKp0n${f3QZ zydsxAeIQA0h2FsT$HL%IfJ<-glGk> zAL|2BrifW9=#j|Usz(l>t^$iQvGpQ(6;~UMeLc!Dc3>Ry#(I?#X3I&Dq0zP^__l|W zKoALWA8B>xtc+{wH>A=bh|4T9nC2yxH{B$yASAX+xW@x%_VJ#a%`*W=l#s4pKsi&! 
z@Im*aKCkQQ7b_cQOGyW=(b`9lspSi_d>ri`1IYu7k|PNLNkZHu+ormEye%Lry~9Aq zs6}N%Kh_a3l?&lE)o1yfnhJlqs5^yD6{X%VS1?Q3Np&hwD0i zv-E$swM|>M=}8Z5!D+YYzjU*;BFZ6L`r%#rNWV?}2*Jjt3gm|VS;QW6M!Qz)ONix%wV7Y$lyN01< zHQD(^+q?CGl!l!P;s*Z!eCkH)5Auuu0FWLTykuZ!1a+l4V+@1&<@b(bhhd&);ubsF zf3#>%GCwH0RI`H>T6CthI0&WWIXg!INB)EUy;V!7#_Y+dmYUiu^}9_Zj0+f?umfW# zC=cZ4k_Y~sT1>4>VXue~uH`xF3mt`_O6NK?CO~i)#sU81fywH^a|7Kc&L90_8iC!O zq2gfLor|EO6-^ePU8J1HkbZn;{#{NMy;OY6M6Nr;f?M$`{{YCt^fXE|NHu8GtfQal zK!cqAah{_jtMgVV&c)B2{{U8ElzEbVu`{LBC3mnTzeg>cet%)pAZr=3=2s|ywRsx9 zd}z`HUq&r9*`4x>g_3&8ytYWoCNc&`>tw*b7I$_$UbHo zZBiXSONzZjtWNfM5#*A6?pcBWtU_Qe!^h7gjN`AFfy_o^_}faquRd|^#<4M@fYpDY zB}wIiq0Nf$ud53Bp?5?95<7^4(urU{^S9?&oiJBRviDC%M7?%uou`TXgk0gu? zoB%!DODQl)Vp0(mjEs&!{pRa)l=heYpxq*|-zn^yY~@ zpgQUI`qkNm;j0DuZ#9*9BlRjNENp{?U_m^Cw~}}ud=v45)o=m<0M?ZEv^zsT&DcVM zB^0NPu_2aVg+mMtp_7I8DBY9pJn(v-vOpPydROFmTWdo(fTOctQ<1{VmL!63LV)-Y zDwT2*Af2cQ^uPq}Ns=aZH@N)Kv~ zJ;xZ`fEknvgwpPiMIct-cRCujg3npskm z2xn6VI`8xv*097iB0ESLwGC};UaJEbiF&e6D)d<^52BcT$Ipy;V>S%qtApJW(fT9I0_ zt;a@+WKz=Qw`EI!8{Nkk1pfeFGv47RuM7Z|S*+1rOlub%8|!ayc8;G7 z4HHYy^oUUISfQeENIGwJl=yr7N2@q?DWh z(V;{?oUiUqe|~?;#Jn_#KaoCg#YO~eRfEOWC)&I<%hvj~Pauu+}=0Fqxg+PM66$MIw&C=i;rgWb7!MZ_zdX>T)K5w4*! 
zMpgj;WI*n*m&+3xFbv$V#zFCx>{?FTt?j(RkSq-;LU&Azp5D-qHds!V!imAx1A-d{+`EMc&#lMf`e z2J8X}8P7a;S(FlcEm+gnPZ~oF;!{o3D8&q0B$f;E`cA%=JJh!-=Wr?rEu5|n(0KCPlbV&h1Fe%HO)mu9%Dhr?RvjE&4v9btk`c%F7X9`>XNFhPjT8VgAF>jiM8S(npyQ0R9!Cm3RdPXyp&#z>@gXS8ac zOK-N;`a6@XA~|8JB9>`2HjXG`b*lLY>>1yJ$KJbH^1KWA$r(b(BYBYfk z40A!(X1zkNn8!|ervgneC1AjiqXsj;W1b5hf5m_fBE(1u1On-MzpqcV9t;xjC*IhS zc)r;qu3A{^$?L?*%~kiJDoubu2Y4y~;3)?KsLZsb3JV0>3l>mgQOq-MtPBsQ*6us^ zFJi%S71vV1`oJ_xptNk6<=+WKCY z)GaikcyP?^0f`g|Kxp$JT(BFmM;YTIX*~+0g)g);Bz5%g{i_X{k@d@#k{c;DqW3F$ zsT3Eg#u`UPk35o2=I7iw3NQm?^#Q{nDhrm1Y)c2}r7f%6L1ub9A7jB*rZS2@Ryan*Fo6G$oZapkuCHisv8BTIEukVx=Yl5bPcj~FmvaE#NE zcCqD^Mn@`lbt2@aA{2$PwHh}{bpBuhs>DDL4O-$z@EEMtf^ShIioV8)4&D1z7naBz z4}ix!XQ@&%rzi#6t#qliDt=Ig@O2n2OL3mYrm)K+ddTaUho`({xm-xw&Oik6zy&j7wpu-|*5tNo+ds}zpHvxnN!GO$KuM;n}lAU+1s!Ef;OGA7JIdoaH1@9xkicyB{c z!zhaP5!P62Sd6Yu2ipGtxY|n-IX(#2nVrXa>c2!biuSfV0fiQAkOV4;Ze z2EvvY(M1J9sX)o9rkbW4-0ZyeIgRyhybGc$nfagd>qFlJy02PKD% z94x7cDq0dqNH0eW+Vytu(j!`c4x-RSN}|P@v%^#QexA}s`y`mwbpV5q**unA*(C4> zJmajz%t}?j|%+Bf^K@4zLmnR?|03OxOeaOvP z$7Vk*9BJ3V`iCU)5?oe0scDpB)94gilO)U;#9`{mEWiXwB9XEP5(dW6k+YGL(-38{ z-r(E<0(I&QMfB1Dv#=1@?g^7C0tv;L8k!RuFo_VHd??EW0G->oB;@B2c0htxMgZxq z*RkbWQXDD{hfqs`87qCYdE`lqx3w8UB#XFgf%hC_V*mi8v_kCjFR(^P2Q0&Sus9hR3~|&dk`QvHp*zy`^0PH; z;6u0~u|Aqq@{?fKxmGCwX)4Pln%H$Yjz*V$cOo!5aINrE7UYg5UK|9JQlPb-y+OFO zHG#@Wg;nicUA3ti;@O^Ev8>geB@#=O9+}G!!DLT(+r~U+gVkA#nRyW_D>2rOQBQXV zloiX95x2WrO}5kCxn}HtZ3{;f5Jn6$lO%uBkmD*gG>SL`GK}M?Fu0Hsxoe>z#^GLe z)R!*;q=;ipLrhSDjBg;W(AG(A*p5BhhFkiDlprcY6OgCIe1a94wH1-xa>3h`D)pxD z@3a%8G#ZszZCrt8)ZmMFce2G~+(zh|*w{F2+nd{v*cm(}H}SUcAlSpY zl$GwZ?WCj?W=7tVRa2P&IrP_pRex@BqvVs-a<1?*6GL!K0Xo$AbJd~xN5@aAVMA4> zvBMIEVKOX@AsOA4DmQzuHm-BDamhV0rjTylKTBE@0~oJ9=GtH#mT|Vl`DyoQj!^0EcRE>pB>{j8-kaN z!>)w8f|rsD2N~p z$2ldwNhi-liD^J#9%DNcxyve1f`kEjk)pPDhili&WMC2pw~yba0s(8Q$0GilgULLO zKetK|sAyqg0!(~@dGU^gqGGBDNwqcpmSioU5FmwUrjnOAyJ)9O)W zF@l6hX0ae+@xX*}=O?_5Nw|k6E-CS_YP_w3|_}SX)tZT-}j4nxz%gI4vnrUZtehE9p6P=&19mLIg0UK0gtiq=eLYi$9Qurm9- 
zPEG+|ZVqJyWg=w_ZDu+jDmN;;I01{tY4+6%a{aVW{rsM+aICXLR0&SvyMp8$!B`OL z+}yAF#zw*{p88NG5Z<701t&cj;y9) z_6ehA&3N#E?ngn zBo}84{Xih=Q@jZzio%=h>eK6)AzM z_0(u|BSxn1Yz#naQ`o5#VugX342aWDFao+`Ck)In<%UN&Bc{WUvP{utpcgi?AG9+= z)U~SmwQ1z30FrY4pcYuIhg_;7#&NPxd<+rrd>nKWFzb|{7FZ3cH6$HD7c}*WT$;qD zjcz3MYfB&VBV|6{63iEBvjY20;Dt`+z!_%1#?mll9neg+1-df|zO}Vj2n}ly6Tw{Q z&lK_h05FMT5{a&;U9Br|`|mq&0*vhrq~kpb48bs|2QGvTUrqV3fh^f#r)RJ<=cnmZ z)RWUyjpc$s@|BQ{$`p*qM&btGFag2qYF`nY6`5$>v>&suv)-^7Lq>V8_OfZ;%cBNs zS1n;b@X(T|?SeS}02VnU@IE=)$47?78M)O!6S=KE{_hbHq(;r#ucxf`;7gIw5_$%6 zUBqHR!A-<@&yny>I6Y{}#1*hr`uqMNlp$QfTFM+@~MbOptbANgpSZfyY(P8^WBaU1;{# zW6wjN+``DA7IWIKrCTkjT8vq-6lI}dxVnI#Ml#B!Sx5yKvA>$tV0wbJcT;%>AXbdR2gO-smY0SeUP->QST; zB%gL70o=_aBDC0yw0HzOgk+Kd+6m<2Jy%+!u_l1*G_Ld-4al@ODurqFZD|iJd-rA3 zEj+R#-f;G|qA?3xK1p^hw1&%NPT+93Cy6MZFq$N62&r$)vuU%l)X>tpRt>!Nt~I~B z(I~wc0?3Ih=XTAzOuR8FW6nfmhbM;MbjS#jLlz_xO9!y=-rS*$*ocC)uQcx4w8_>u zRG37TRI+jaRgH*X%aO@AV}dvYiOiC~XY?%Hc0PjjxEi}hYaKQvdy=~Y7GZM|5}^nC z-N!!XU7+xPQsd7+(-hL)9Vak?N~DrW-n0|XyA|A>c+~9s#L4{Lj`>0cWxLw^ulc1F$P$; z)W2HjYoO_9G~LWN)hx?;SD^(>A~-2W2@&qEEb+c_OlsR>+S|)VzjGEO5t zt7*x+K765+8U*c{*JRb?g3ZyaOFYn2p#m3;;SB7ts!B?!-dqB4v>ry^b}mdw#RN)| zFT8ao=VsBgeIX=XJKMI~MV=W8{{S#B07synsF8%BkOUxxJBp2f9ozyr9ZN23#7@i; zSFzLQt!qxO%nVBEHuS4vBz--r$ytbj>)VvHl5N_o+wqJ7OCk1?ykvCKC(BaKR5OM@ zeSY?|I|3kUue$QVHkUr0Ru4?a6v3sH)qh)|1gC?AAaR493m6Qbg#bs~7)(v!HMFTJ zHlG}|FGmFESS(Y8^$`Zwke!*>Ldpik_G28C&rXF(T7uRbP}TRZvwh%OcmqqnKAm4v zmLI*&FSR?Ll~2O;Ea0l>9g3U zy^Uzmtor6CG6D%o5RM838;WG$9ti`eR+5FNrScl=`-g4r)a1}Fm`6XK)~OXr^Is`q zXv`)#Vk^TlGJ@E`sbo{M@s`dA8R`LsTEdorKz$mmz3+H^V7Rq9VmQq2O1e!vsglyn zLjmcl6+^-G2{;7qRT;o1auPDLho?pt`Fi+6-j;}J>#WOicrL6FQ;cu3C00;b0l`qb zd$7cT=Z^y%OxSfKE?Fe#{{T2lb%JQcW*7`})`Ch=w&nYY(ne&-Y^t2?V5^Qn$r-`w zH3ThL(4zdQ{En8ehISQ+4L9m8r`vO?=~rxNkxLwrdQsbvio{L?Y{`wl`;}CJpq!9+ z>gFC~+&trmkhLU@L1XVyepQDHbg4QR4z*i1p-SbzQq9 zWj~l**{fi5t)AATN!A{|nl7O%ve-j4r{5acngPVG<~9gGMYx4!7~D4vr#z2o0Q_|;i2yBTr$P&#q4w~d z;zo?N<7iq&X(c|I1}I~$>FY94G}&)xpl(sUQ%Ght2 
z)_A6I0k=?xRFQW@=@|&m1RSr%;Bkx+rpldEOdQyX=t=PmcGB=+g`>9U$9h?2kJE@l z8yO=siboO@3my&*&Q3S@Vpk_|>QrV>Bv!g~Ooeoz*OX3{9w0+6r0$|$UrWOqa= zKtI$ZvNs#IvBATGjCE(WAS!B$cnZHSpnxhOkaROoB{hMpe<-Gr#LS_0B38yo{{YNG z;0*k+IUJs=P^zahl008-mGb2h6A#z!2%|=)46bV%2+d?y9;(NX74wo%x~?`8ft`*!Rs7C^8|7*-NTE3Vc7oMi9Zt&FhSgW-R_QA{e6Fer+HN?@@nkiw9PmSnG6 zb?G9WXys6%s~3(`U^bFT86;nuL|*m0~ra2UhX7NFWIic%K&9wNvQ03X#s< zqAI&5mo5nfi5YKb1wjkrBBTWrlG;DMTUV~c(VXjlMsNK>v0u4#7N?@?u)T3!TNWay zETcBWk(7`MDPMe>hTXf4;zm77;ctoYoDUI_TER%THXDa58 zgc}%{`g8S(HK}cB9myc4tkWb;?%Rk1192b?$WI~1ARmAHU43K1xSkJ>;l?8bd(Bj8 zIj3~`i15FN{svwd2Oq(bS1a`+^&{>%H!INUEPyrv-Or!T;PKVQk0El^jw+vzjz><= ziH7Hl#7`dH2s!9l4JyqD5#myOWwH7A{{Z8rg3RI%thla77$+wm^ysvTt3BwWRkH<) zoyjDz2U{AgnIo`ed`A^=95~uCq~;v{lgH1;Umbr@F-CvHScIeWWKW0(ovY{PA6EPy zhU_>N6dRQ0KlfMb5;mtx6$F-97Oh&5hn9O0E)iSU#ad0?8GWh=V#ExcyL{mZnLSM` zR6hL+QMcBe9y?++F)o@EGRBv5*R>=L>a4QYc-7>~{{T&jqO^HJ4#wVb#(B!aG=u;Y zEH3UImhc*DSVNRUjTGufPSjcm^y%r$yI3tvPcgmECLXJ<~(JFCz~|Ml4TZU zX=3H9{-@P}he0Xgx22>BgB__Y)S3Ny2Mb3FOe zV-gp5Iuy5R#+^Z9OS)&KAe9c|0dmJ{u_3mSTfiHAqZkLM*Eh75O;S^_ug~3egeYPr zHA?8lT2ZP`T|+}UD{^wjW3zY+pdZ!e<2@d793zgu^2RILjiq+{H?r*$P;0D!Ihy!*+>CO4w>hBf_=mq0$7MW9#js#ZGCfOCwer z$!TU15h8)fWjkDPw<*JU@xYBqm?xA0vvfaWzoZBtibGng(Nh(rfU|~;m`Nb@9u(QT zYN2AGLEb?Cl34MRx2X_2ZsXf}J$32wh9@i;HBDwSJbs-@;DKcl*h5``Y*$er_m`Ap zEJ2jN9@0>eV-JSyQc{uKXC}&`NgzLukAyi`P2g9J?mc*{vT1O&@f?O)=3>M!Q<1+a zOJEJ$WFNrlq_m|gI`?!e)ah0gEd>)YX_qWhp%Bw8rl|ytBz08DS;7@fj~qA>X933J z`ncPTyqcvgBp@V(E1_|5%bubLmewCyu;`-1VmPWRH>A+RX?Hpm3>PY!9D=9_+*qG> zKs{BQrIKCUz4&VE28|31XT9M*uc2x3ecV;!lE$89hAHf4X^ifEb5UYUUK;_F2 z;{b3@&`HN!?C~yIh(ey1(DLXluR2y0;GD%ipJM&gdy-8vM>CdV947;Ol9E6&9F4dn z`QQztm*q}WmO2gfr!C)`ydamUhU!BKLgDn?)uLe%4@9Ja!T$hw3I5Hwz#xH-)H;l* zM6gqm9M!{ZPeFZc4k#fFZms*b3|CqGUrv0<{X)^PAgYtW$C8K1PDmpi5k$l!6^6dL z^VgxH17?Mo62cCtR(8>s(VlygG?|VV6(bO$lBvC807ko5WZ;4@#PB+|nOZnw%7G!0BCjhGl*1?ia{IS?SF6%S8z?i>b5hN}lMbcm&$tq?(<6%K=US5gL!hB04o}e`-lppjv>l;h4scpQ z?6UU#$YJ}(HdSM>B#Uk4icBbQi0q1rq<*qWjiLR%-eZ|2Qh^HrxdEuy5?ntzwQCBZ 
z1%~_(>Ci=P-FlQGjoEHUp0d|+%E1D1PV$Y;+>i)u%5l?61kH4$l{lvK*Gf{H^k`Z` z2f6gilE-56LM~mAV_E%C7;kYZ7vpK#a6SO}C!t;+5bEsRlI5A&pD_NwgyUG2mPOVz znPs`DThxZ5T_d&k&6HimT!nw?V5lT4gezd2fExnJ))AK~p2U(Ey^f}j{6IN+!Za@0 z(&^eWPhBfST5FEhy*b8L-GKtMYat7O3EQ2dATD_^h+ZOMRHahXVL&?3?@eoJK_r1l zYHBxscQ;8=C9TeB-}vOwL9rF@Ky*AZbdd^ zk>N5H1&MjLo=SiS-_Os0dJ-lAa@1P>jiKcX@I-oxR(U-dveK+01V=nmq8%ZRM>H_PLDcX4% zb?`~xcq(2cC^=S;utDU-jcr}n#DWzxnw+l|pKASSwK*bYr*lz_%RvwUmPcG-BmtBF z6m10((mQ?AP5aGS;gw{cOX47r z?=0onh~CcQ2rzc#n2r+}NmQwbQl={@wot1X#`1&Tx! z!^fS)Lv3{3m*H`r3uRywxm8FWUnADe@QHX5T>!laCiK%#mDntvp~;=Lu`B93{{Xw% zW%f8pSa0s$RKJ!K?GT7g$0Yt4<1tNFu#E+NbIXi0sR6?q~#ksgr^Sti(cx_RSz>Nb`_6<}6fnIsZ7F&R1b zk}}fDLXwpb9)i5Tc@FEGf&jXMuavbQm* z+DJG!$?I6o%a&ZSQU%Uz$>u{1#q=6L6qkFzwAQtaI#?yqWYb==vNVf2nBjHf(q&P( z@q_Qd3{C+jfD-{ivk94r%GArl8d9Z?>?&m#jow)~4=1wbS&+`+4AHotl05nBbIiA1X)( zjzJ-YMswAaN=6wabAy87ooXKblrV|S1Vh_1+H0~j#Zpkg0Pui#ja%n#HxhGoZ+5C^wHhl`@YBMQ)YM=SLe{EFws_IOX2Sw}0fCkoVl&X< zyIBgBf^GtuhNLs;r;F_ZmZ)oM35r_wY4J&`9i1=U+9Rei< z06>1MEY)gMyEjLKOGjJLf|M;K>CtM}`)NH!BnAz=8aDU^cVuyZNgjHPmCRFR1 zAdW?woH!w&=rxEnDGr+s-|2@%_Aq}@cV;yqO9o=Z0)3}&$pC^KG8P79mU{iY`cf8P zX%Uc8dkGZ@Xc*;*Bv*#Q?2)Bmg)F4|8z&941uKui>cqulC3&tztit+KZ0=p*fUHaE zt4jWpVhj2;%Q}QM5UM=JN~{7BT_a#ImQVp8kQbJG@WFDB0g$|;^zo<Q8`&A|Pj_OG=u5wUg`~ z!BAz6Lx*ezNWniU3l4md<*<63%v|Qp-UCgyJNwbZM&w6Q9u=@K%vWsW%N$v90axTzd+K@?I3MspfS0n0WS zf#scc?Erz%&zUlEfSNpmQR;tvceEEV5b;XT(e$hAhEm)2Obdr$lfO8@!#Dc20Q1xY z1urt{IZ>@oFO^_92yQ4fEUb?$rKGaP;cZM;bc_!rbLZM{--b9mo~)ImODO-wMe42c!x2N9upVJ*A0k2FTSIgNQ{twn(u2au;6Bl*gcw4CE_ll7;<{{S4|ekGUK z;)s=q4DUd$yB=bjv5zbGo8ZYjT}Ksx0$lr%WUaOuE8ktVh05BUIPKG&Emf3zC5x3| z{{W%=zB=?Z%T%=#s4)@qtobu=^9-5EDNwjKj@gfLe`DvUpb%KYCmxw&!OtJ(=b>no z>d_+s5TtBWGv_L$v7elO%c2&Fiy4U;MMJeFO)po}v>TGH>hny|izt%EEOsPm;grPO zdSi5o_~be(XN-06kM#?M%;EfgeEE)3H@=c7aKt|EZ5hKy*B=LQ30VFQB}lpkQZ^!= zzlqa4o z0K#Ge$85X(>`du6egJ8Cti2 zAYr6DdW71l#|zG6Q87}oFi_-;gp37|L1Ne-GM|o|rAnYhM%5#0?nRpHcZk@g@Lg?w zl*X;}qgz+6Yh;kt5}!abzQEF|9i$K)w){48?ehJaq|8=&5ceAoFh^TANE^^ZwYRfI 
zs4Z$bb|OO$+trkrTiQbxjfe}&jQ;?_t~P<5pbnaV!0Br5t-v;+f@-u!(sY8%RW4Su zsh6tBAE3<|z|N|OLk3g*B`qK)+mr-?IwVd?37kSmvnl!3jaQ)ccyKg?+pDYG6+I*s z>-uUShRlXAs4)3dL7W}Uv@T9t;NvVN$y(V&k~&_lAFbgW&{KBOPpm}RWwA$2o)Kmm zaNI=N(x}=Q*Nv5Qpp6iZ+D1%|P76FN0Xd4(|i5?4Ms$ycNEkvYd<*D6?uQPTUfet_xh)SPO z^r-uO!qhIBWr!;aHN`<2%QxHxM@#}n!c+&31m%=|taYr3Sm`BrWV5ZotI$_l5=OQh zo&mvvv`uQCtzDz6R#~MIEB&)gGJ0|mkWaak=MC`3AcA_OmTZDjq8Kx~HN1ySn$Y=J z#N3k|T`OF#wV;9USk_g7DAb%r3D2abA;23(9moeIy03_tGGhM#x?GwHum$uUe1WEr z;Vo$Cwf@T2)E?U{c%_Lq2%0HjSn$fs<&>Ws0;dCL-H=CDWKC3*8B)$-t89EdM?E4X zIk6;b4u1*kRp|!sd{+P z$a=)Vnvq|<-gt+D$5t76_eIM28Q$b@*iol9QZ%Sz|z{lH_KM9PI&) z)X%zQqDti{at(D3A6g2R4X(Cw)M8k~)}^WHniVM@REfPwrjAG_a)i7xY*fONTM)P_f5`e}FRsbd5cNRIw(R;_^auPR3rcx8~TObZ^@ATcDb^)Ly+ z;eL561i*7;soQhVUWa{P=x-7&boUjN7UYughtjQ-ZSzPLJT4@DGq>A=j#Q2?GZK`% z;$*@}KQC<8qeD`{IA+pk6sHQ0EO0!Qo(Yx-i1`T)>HW z7y#-oa`&wrslEC{LEO2B8&g)jURd6Dqv=Xz7a*0R5iwEiQJfxg#enR z$;4yDANDOtlObH94f%v&9q%5swmv25AE}6_<1+eVAlPd9eiK#Nm zhoHSR(%ml&)-4)!I`wRrCUGQg(K6i)n(_07$W_Cz^+wU0sN8UIp0fs3Awgk@a6Bq$ za^P3@hcwoZB|TawwFQN%m{Zkg(ph8L+m#C}s#|~t3Enb1jAx-twICImmVZ`iYid)< z8W@`WnvXhb6yA-9j@5=K0d#gC1OWJDY@Lh-M~skqu&g+ivYY5#!#1arYriNb-WZe} zZn>?dUfWrjTqT;mq^zY`WH>{?1Y{C$p#1F}J4=+ZwI-vRY<6bcT=QLbVXk2%*%& z%N+7O-SHwMV<1MkWMJW6d1nML-~>6HbCRhE1~GYhf=eD)^O8?cP7{_-YKk-13x);v6zLKI(I_6amPGO`iyKHuqP1)xQ@m~t zR31v;7542RNjL>fS!zi=8nzPuJnS^8G>I%)k=DGhNm;MwC=Pq%-*GTD8@=;Y!w?+WgC#B zN>~CLh4$yBD3n<@S6U5iuf5>{d?4yIA5pTh87$mal2$^NVy@n<(0xYr7<<*EWKvbz zDncs)I*k$(LpaLM)xSvV;Qe5;J)&Px7aq};d8GTAwT)=Zc8slDx&+vVB&2&mB?AG2 z#t9_NK6JvxYOrP+4S}r%Y4?ZLv00W#E9rARsVdXBmLx`p?PP6-8;0pOCP4(LQ?;_Y z`RA(rk`$cDB-irtYG`%q2)JmHsL;e~!)jP8Y6-RETHxV%G01bdg91V2PCf_08FP`7 zK35tGHtnXC^D!=v^&0Gc-Zn(^9d@Ntwm2R>Syy&223I)#{{T76NFdB`9KCB-`S|pP z0Gq@#{nHMhk2aYMLdAI1DJ%? 
zwMTK48C);=L4+tkW>^>*;(AT?k*ojyf|if zr?+>*cTmTJljM5B?iS#mKD&fo#sf8 zix4jb&MeIsTNUb!WL1J$;Es*s8&+vvOal`}7{;5z9Jt3UNd4m=Ek!crx|o{LgI~ez z%KIW1N5etAdXT1;tc^~RE2S8s;iE6|%KK4rSFp?=?t?Ii0fLWY}!mj4$kjvdWW22siusn2x^yNF68ZPsx=V6*03eAm0>|+Q@1k- zrTeJb4@p>OD(uFN<;#?bQ@g2C)grAx%I6G)3?(}{P`VmxaP7p^#B}{( zGBf4cc4DdoY@lJ3lj;sY0QBipnaF);2z`c)T7#yetSep!8jH}Pg^f0-&t9SWd~}bA?GIs5|yQ;q=hQM>@3s|(ZiDV)jyOI1=aO!(4LA|YN2A5 z7|@9xISQ1Fpprb~6M~z#Knk!pEiFh16IDkByu~@#?@H*<1r8&LsY#=1R-vwor{DX- z5Tcio$-%?y=KEL+A%{^FXa3~B%0s&l8)JLgw{Z@Xkw|eY!7ZIc&q9RuO@#Gn zOB;G%1C$9Icx{gAt+8?nfw@QnsW9_aCISP6R8v>$a0_*{Vb~asgSBZ?)b>}>B-7-l zVmivTrV*pC$`3LyUAy>Uvjj6X7!%Z!XU$)5tP*bZXQ^_-W-j!lp^c!uN3%ZRDu^eF zvPZ)_76FQM%WlJOJHWtD>Fhhk2*x^zYgQRb3-495q2;4oo}A%;coN=$Qtq1cZisBq z6x}6(_K4ziU|KSZ%dy-GI*q%yT;!gC6PGSvn( z*U(8Qoh(k0prjiS@;e>DsE?VrzXHZ^Nyc6!E}axVDf>l%7j$5G231^-$CJ>Q#xyU= zBo&O-tS|T@0YC$i-bm-7xiL|KP?Nk=`~Cx^PowM5LM;T8X||B7E0||sqdQJXNxd=w zO9$^Idv)(uVzzF zS%uJ7QjZEIMh;637&^N0eE7=s@*1U0DP+4F*z@vYH0xOUSP*X$bae{N9E>Kg4U)1# z#Tppp_WP0J3f`avun9QJ1O6k36r6)5*Q?v>w?xw?HG0kjPz;5WhAa?%HAiDHX83h)rRnc>eWpSNh>U0e?kalrCLhA(N$F}(Yi3k z2rM`lCvF(O%jS}>)Vuw#w@%OM3y z;u05O@xTMm0f$wmwA8wZDlT7{ceTOs@QDFy$73}sLu_Jtw-!%Sq!ykTkd{_(z#-g8 zAFOR{*aYEvDQZcT{ltQ74^CC1I}I<&A||jMDH1qin!%OmEw=U67xX;G1M%9@TI70oRMHp^a%vG4}Je6ir6CgI_ z83JXDI)Tc;AiKFGsZM-@J5i3VN{lmH*rQG>dX-qKO=4*i?R6L&37-N0cF9QoG3&1Tz=6L%1~@+(QR$^&(c)4Mygn9Mj!v(#z>c?6EhjCM?k>^wh8n znZZQL(o;8yXmvf*mSk)1sf#NvpF?W{r)5!l`IfAl#*Jq_YsY zLO}=2((@%HmfZeDy!UZSLl!W-G4v@uL+PRQK?pGE&e9V+AJkMg2N6jJ zHA4d6R?XA)-Kyo)OUmY6FnDdX7PzfEG0j3C^2Q@1$S2#inWaXvdy&=Cp8G{5$h62z(YSXi1dWVLJs}y&Fvisc?%j=9fj@{$lQ5Kr z4)sX8ii*3itGyZk0@Z^k)1#(IB(^EW+H7$(XeKpclFV%H{WimIF`zIYWF!%}2l$5R z@-ikUOmvf>6}YJ9d)ex10hx?)wdkx{r()inIEq3b`-npr{WcAqiulWAH*!WuMIK={+x@&PU1DT(%Y|eK3~hi{EPX<90S4YD z2)X&P?WT-1YxEjHp+#CnCsEZje%RFJ7OcZYKS^X+C73HBDv-(mBoNMsPH~aJ<2i;@ znfSRwB1(&!4NbHb1h(2%1iDRPQ0uy6dW~&IN&a7`LGNjJkbSgnjj>oo8j^U%_V5rA zah%AEEW+Y)NCLps+U`mDHC>@*8bjI~6U*E6S?I@RSQaL0alfG?fwwZmeVJ)o6Nh~5 
z*m(Z{2#RFc0$~#YXSR$ly*hf{1Om{Sl_%6$8L6#$k*ho<-zyp-2N5X2X%86A)3|ar zf^Z5-N{?i2O*J*-+tSs$!#lycPPo=IQC=B#yz)t6)DX;9?UADeH=Y+eg0|e9_)d5O zq_l~B-6{r;R&Cmz?+y#Ybgk-ewNRCs#PZtvaS4yrje{=Sl>;ilSqKZygOG4Y%R*M^ zm#Ev#kAV+yR)-syBGr#tX_Af6JT>IOX%I;c_KllwmQvVKNmgar0S5yB=V4VS8I0)+ z9@U`!H-;(>!WpM63sECRvBMBzBZ5>1Zqc3CZ;YSQpT;~8bLNSOCF{8d)A#Gj3_&nU z4WCrU6rxm&#_k((mzz`s77c~WRd2NghRY3JVqQ~5TJla z-OpAMwPjLLpumls@*3+~v?B)I`wdCy%R;S+QTh@zjY_$cDy~qoD3k!FXddE9mST64 zj;8+9^|ev}tp)Vv-%7(5(kfcEl9ifNaB9Z>`*F(31dhjaewhoD!jvbGlb?f*gFZ$; z2RNqmA2+W4aL|oCe%_?-Ud+Qwo7JpJs{5Y@cw8V?3b;5N`-eOpnqg2OSyB4X?NbzL1=UyCw`;R@+vu9d$KdfAvE$ z%VX04S%Z6yK>@d8AOND?AyWH^31oo9pRT+5QWzwdgZ@LSYZoGhmDx>@i|NZ7C_yJX zND=}v2R?tp;2yK3PL_o}Kx^a9ppR#0u||e_XSejql2n3O&ZJdiwpUS!q=H3K3r8dS z7_LbHQ^8&8<7$(3ZaB$bZ;&X00}2X)^t*~V)!HM>^ocr;qH1>@%E>ZLW&VT%_E>EbR-< zQ&NGXV2dICm^%WvU5m7W3i2XxJ)h%iZrrR(@ zT1dr)ju>`FUb{?KGDw&qi9!Mmz;Zd-0GQ59l=g%>^A#=8m_5fXa4hDr6Wn@cudM6x zYqILoQlVbV7N&-KcFWBg%8R~vnnUlBH43f~Fub4uanKBWxuhv_Gw4Aen^LUw*0t*u z#Jqu&r`8)z*P~LROK`ItthDU}erD(RFQIIR`ag>c$YQ_t47YD4t zF$kEVp_f%XY+iuT^-(TW)(wdOLWgeDg5|4n3mSzu<@Di(TTLWnEFnFtcQj>pBKue| z1Pl-xr!&{tg^(*tHm_D-MzyB9LJke#*!220?8jbpf!a9Cf>`V`l`3*~J0X5@8HrJV za0tNaik7(~kZ4O;fpMUF(oThZ8n7)-Ypbcg_wJXW8?+TAcrqjwbS#H!sEF`2=6hsz4WZWCvUAX`qpbt3% zt0_>I+)(d5+m$>Ao7Mzx9lfXOOvyUeqgFJT;+mVvcg+yw#|&}gB8{%caZ!VS8!CDP zDpAfKR#K;6Yz?^IozgMf2pHNj+2U0w zB?U6WlUCZb(!@5mcMSv@#s;;fT+-5OMJrsfVZfTrlmN$-*}!5CIU|9CpK<4+lZi=A zQ9+=+tM0u7IUB=PT9%6rA&v;*jZDrxDH;e<(+jZ`jsRTl2a_2*IRmK{vxrt^sYb|gjOEp-*>lujNs9re&#sQZ(8;SsMea9UdF!N>3OB##V zfooivQQ7tcK@bRI73$V)J^ui(oV;$W31^8JpdvU)3T49sCm;d=RbwI&3X*gt;gKXb zP%a6*YIdOmLhNj9X5|(L`&Q{6%vxPGJ21w#9y=tlh%BCqMu{LMMlrB(;|ip)BMI(+ zgb7ZS7Ry(ol4)*wT#aFr2xuchr)C?kWy48bK(RNd?dzDGB?#(OPGSv`Kp9|#00*A4 zilnKf&9y*k2MxQ{-fr+gz<4rCV@Z2g*1)AYZ-$_fCfO*-GAT{OkOYcUF(3|3ek#fe zPysat?0Pj@R;`@UknSM8Jz=M%$3sJx`HWJfa8KC8L`*QoJ*><`NWsWmqtsN~M`Z$WQ0@xtXHXr^GQN+tG)6gH zr>KmX#$Rq!0Rh;St*F$i*xn891Xrr*eZ8RQijc!foUE-RZZ|y69#2hMab=V-T#(_g z%ExdEo~)?Oo8Fj7E#aYN{GqM`9V+b+xE6R=a6Gg-5GrIHzZ;7D 
z@zi+tiY)wL-5kF;#n?atv?+QS)b zq*2aSFS+ocP>>rB;kNs^Aob|~01NzUjre_iiCI*l!LH2Pn)JAS?U>NUU?gK@8558c zlX3R;(J}Pn>r&sidw;v3Iv;Y^blxTb5?qc6i9QXV!&GPByYj0~)SnGQc=e5ny>CPxQU3s{-sV*q^UnnN{{W|2!Gw8Y z)tmB)2#@alJnJ2^s?%(tma8b0G8qZ|Cy$VDe{6LG`6$H7T!n`U1QE!Fi`*{|aCq4` ziDs!#K3>`@+EQ87`*%y$E>W%)c8TVOaI%R*rs(3{Iop+xfsolg;0YQ0^To>;EKJ-Y zRC|Pn4j6*Ow)W75v5&UpE47rFjY0wQ4+vh_(rKx>(nj=c${IKzqDX>4xV}pRxj-NV zBOs7Ya4`?!r%WPQe95`ZaYosV%f5^m>o7wb;nft~g;x`P+sE1^K^cXNwYIL_CN=OPg zMk63KYILX4-6btjN{R{?=<0IQP9#CM$m^XA9l{KB+9lN z!yJ0$%T5d$=4cIO4D~#}Ky-My!!+OZb5@zB6n={0|JPgKS|tfedHLB)!{bk^E`G3f zIHhg73S0B=!W0UEjCXS6*C`WP4_dXCHdY;XdsW-m@-RqpY1 zocJMScC6q^t;e^SoA$lI0Lt^{?KWl3H$ zkn-Bmbd%NT%y|4cv20(0_|1^Mrv3)X?Z0M5=gwVCks(7tkC8dh$eN_eCJAf`mYSeMij(aW7QUE}>xcFoaM5LXQj5{gzgsE&L>R)c zUC&21N)Fw@plKa5SH`CMZyXXQ{LhhWl7M}-fjAyYGS6ZL&j-X>l@c&gawetqldsoHF#p+m5vB>c3+n1y@?+ltmYA?lEpXldvL zhfR8C2DbQ$68ViMlMMRAIvZ~w);_N#6YRaY&>B%C=&9sV``%wQ@2p$PQZunaqH2pX zJjGP3Bc`@8twJN2KkuoHX}6*Ro%;OyyI9FknAd~DaJ<#!jKOIR1Jr;K*EU#-8m}eV zNgh)Mr|quUFe_kR_*k`MJ1YHZ-#8ENUpJ0?CB2<~Zug$8QGNnk^?a*=fSuvtR^&iE zD;!5la&*@Uz9i2<nvwrZFSWdN?^pFh;4`5xGMY5Wx;2d(BFYwk{)T*@`8t zhu^-OGS~Z4*1q@wc3=q5xwpzgjgV&QY34+AzJDzcx$9wf4-dCSPu8xwZ`OSD5izMx z22;N& zwX%kUE4N(ZTPuk4hFqigQvmE=(n;^}{~Q?CyRr?ktft!to2*-bAX zQc}dqO(S_1gmxkcz){N)iFGKB@=H}@=A|6ngjXH+xM|0QzPvma^C-e=7xihq4|eqI zslBWHCAM&9yK`12I;&060CdRJG1H5UrqYa742sk!!==xr|~32UZ0ou`s=Nd@2>Y}0$<+yPKk%x8&oJFwQ6FJJ_AI4^ye z>di+ndKeF?KK;~q`BqzJ?nIh_4}g{Gm*mS<6j`Y@{YJ%A(0oj`_3A;^^IG`?zo3|- zrF?irDKb(A{0TIAnJ!)4EM#;%Jfg-&q?PPfqCThkh)WOI4+p2R#orfz_%o+{|EjcI zSi2mvJswo3my?$=BMt!$*dm&SXw&FcM^V_Pr<(LJ!v@cya51x#OG6d^c%_iZH5^OI zuPj$;+;8?5+g_P@Kd!b4Z_2y3EC~qgsC zOpm^c;>a%tTglYcF}HJTWB;{IiLOkL@|_v^tGqsuBWJ4rnt}D(g%nH=rxw=O{;i(G z8rm_nc(SLZs*RM)Vl70S-d(3nNND7m{n~o>txa8JTpBAug-p!K)9rbGvBB|gJbo>m zstsc0h1IqSd4kmVZjokCYF}bmxqg^<=6|1r;5@lZ}P3z?~PK0LO{_;h4Xq%_E?vm$D_=p|m! 
z-j;+OQtdIPN}Ctbrxkw!RdY^F9H&m_bP6$a;WZS-&%jyg{;FaD{$W9(&+Jtn**-Ur zynpyf&r!3Yb#x%hNl51~MJlS8w|h_iYL<$`ouaBq8q}7+Z?CW&Ypk5Fs+q9JS^W_7 zCpcVY^GOV%ZKDN+Yg4?q5CP#(-O*7`Qlh9169??{rJBGf1lW-N=-nrRHm%HcG;jQU zMs>kWE;CUrm$>n(gUJugC0Wld#2BwGSry3**@)7Lv!sEjcllwAKkB-Xpn_rRpQ;FX zQHXw~3D4S5t@yWT<{K$#Qi$9~Bc7J7ynrxOMQm5!uCIfKEX_SfyB=ak1;^EvY6}aP z2>UlJ#{GsW4SVxE1opS6Zbb@YZ46MR2mpoiC8Td-a7-lW)_rU?*f~O!=)A*IUl6

@1R>MICm_vp#;%vV5#sv!7?zuha7O z&F~J>3&}j)anJd=3voj#D!O%~KME$u)?Ju<**m@3 zs||wH%4@5O-5LaDvtJj91uSC0JbKgaW2uPl{EO6RKtBny#8~>Dn`A%wmvIEE6)CtF z(53V<2QeRXS`g3?m=T}gb9(rA)b1ahXS-2y`%`Qr-w7s0eBLgraPJl?_t#Sjw22sH zE-u$C0zvuw_|%Ku$+W@rFcNF=_r9wUophV5^p0zHO~tK{%G9(4O42NzdHi5H!N!xLp%j#KLVY}G@~ zP4}0SFAOJ2Pj&z?Ez{G=FoTYq?XQY0LCSRfc4)!U!<3}vsQ+@e|Q9nSwI8*#pl%v+? zacdM%iS-s;IQt zCix@4r?h&kp0zrimxB}A>vR15{9$c$ETaaBeR%g()(5B7II|idcBo_a>vf}30`OK{ zG@isZh7pTZ))r>R;l+$9BIN`yiPa2DKh3t9K8BuBIAYGXVlfO+ltyn{(9$YpKl(>S zwRHaQii#|FAk{GnTA;3@XjFSX(t74XuIZ;UcFjqGkek`t{#TSqKvRjV$%y zh}$)-B?xox5k~!;6V?x7u^6qJurQQ7igqRl;0v&)-twbwCJe8$U9FqBOR0X%&&#TE zFFFOlmHfja+HT}UB4^J%*1|V)tJ$pQ?mcByoxspwjg<)u6rEKknv@4EPUL+Jom{FJEm_kDS@@*5E<1 z=hhE1;6Y^$)r87EQ#oTk*|+ZBx+X>yC8A*G(Jl~q0Ha6nq(1d>R-$s5;=TA+NfxjJT98Czre^23#HZH+z`>5@S4C_s=JZJ}2HY^%Z|3S75)6b2xq80fNgv59%_(1D+PTT?!6&K9Su#!|<9-NKbJ z>X%o~=$s!6Q^Pz20k&Wt-qSy#FWG!NVO$}!7ibLGk|x%`*!)!lfTeY`my~6zzhlmZ;tke{e5!ybYHb}ARnmVhAfU+n!)3mCL zQ{7FW#HaxE5tS5NE1}+Q7}-vBbJ?c$oVdBD8TY2cJ_y@JifqurqF54*N-8hbg%{Z@ zQRb>7iu~RL+m{XgzFHjwM<60t;#IeR>~MVmMS}q86d6K)d^T+UW>9^w`_3OmwsL01 z*MYjIR|g+`ov5A5>s9${(C~3|Fja#IJhaH1N=@z;*Ph93aisciDTCwLK#0FdXnx-? 
z)<}PScI1styU;LI15jYkWP-v(p0Nw`2$*fp@lf;wc!fF$zv|Mjs^qVpxt)3jJc^;P z91N_L*~MIkxF(rvMP+y6k^{=3VtZ&IncZr(#?`+y8^s0&V9xu%w3 zXP-1YM`>sylT$B<+Dsf+mS->;iBQ!{DAd<&%72$Kfnca12q2qIT;V-DxBLj?t`mZ0s}lrJTVP2vv7#tB}zPvytA#K zYY@mlo}ufZgO9Wgb_ZL?{HaqAOZtunimDz+TeZHssb@GA)FgWEzP5Ra7{>~ffCo($ z1cxDa@SwhMcm|qhShUi}vLxQtmUrl798frfK*tmwni7fj0hEQPEO#++Y zKPLZPAu9^b&FUQzgt-Xf=VatoOlGtjvLvUshGe_T)vHuOo8P-Bo7A@iaSQ@KLT?cxGc9(RmR+v^HwnP4{3HG$r$6*o@b9Z)^=8E@Q57 zr$d(-p%BP0eKV^r*LmTRE z;aLDQ5dnn)U5tdYh}Ypo>y|sX-s1LTWo)W88aEg===D=KO}HEez-F?L`M7^9_Mz3Q zMJfcr@|;C{u`F4W@@%V1h^XzXJu^ksO+7z7^Ty<}WjnQ@4U+Lw>u>LGtEU?}Ub#=q zM|+8LUXOz;o$Og+hfpR3bD_X~QWw#f3t|y+N$o`K#R-;e)5cQ#zxQ;volkK5kMy_H zH2XpaE?u%~w~j>{M!33qQGVRGvG+0El$mn9S)vY?6U4%M8f;BeZ7M)&va0dorUDts zN9lnoONM$M*OkAMtYQ4AW8^l|u!&a+*lV$XB?or)wB5+b;y$6i$VQx{8s9Cs!oARi zJR%)B7b?rVJK=pb;?NKOF6sS`ehRn#OP3Dm+M`^W7HXk9ZNYKvXNg7>k^dN+f}w-- z<$F5mC3B>+klvd_`J2=vr*v+Ag=*tQxAp_7)^WuU`bE344#D(IF)R30d*Rg!Nc$68z>)hZ2(B7t{Dj9rRZu;ioF`|(i!e7|T_qMPXJ z5}mZT>k9+1CBO(>dyd=t0jnrTH~Vq^?GucYU}#>}@twhRQ}W!Y@e^IGh55CBw+RRl zkaVP8aMkaPR6=C^q~E;PVbD%(1npmt`A6FILkwcJ$V;k2<)r#GJ5Q_B&~q1Ybyfkw z>)J=oC(ibr82Ga(`7k+iwx!iIAzIH3zJvWsvtr8LF>ma%U}F)fO9LggQ~pIxPJsVr z*%JoQ7`$-0s+b}d2<)t9pM2qAPtQ}1`SaI}u~#Uq-_$Dq?KAyv-L~VFnPYEmGRVD zk;}*PCi$qz#=MZF=G-ACLFODjRf%EqPM3T02)8lprb6y{9~&2|I|}#NZIL@?Pr;+@J2swFXfyp@`9J(6W`@cO(rVBZ_5b=Arv2k-$#QcA7hUH_PQpeFiKkXE%8At8D)B+Ovqo2ozJk zCPLZ_ski;?VcNs2@W6{ON!ziPBNvReGLVvd^ezvY;ism@1I(T@zNM&n=L zYR3QV)5|+LrK{lL?@}hMOS6rj3|oYg$oV z4m~H711iU=RFotg(Art_No7T9%P!<9v8f(5Ict>CX{8(X-@XxwYQKf=*7$a<;Nk`L zk8_2}Q~qPf?V&@z28GbFZAR=`Zv~%J^7jvUe1}0l<)Nyox}rzFz%LeL2=&qGge1QA zNcDE1j00BC98>=KgsnM2e^>w4XpM-TR z3^S$eQXE1r{+uBCjm-Y#nBcTK4!RRsc>FW=+?6R)9cijHKk`?TqJhO*zbfRkdV=sb!pNYr*R8Mgz&h!4X?klbN8rygc$o-vthTGDG0{;o1*L0n!Og83Di1 z6=r5m%NP>4tzL`$;|1YNwk92kC1Y(NrZY^Im$M=m)@XOcw288wH+t4?q*@5eWpp+WcUa4IbOqT zVx90{)LG0W(Gg|cF|-k{3s_IDy&jl;NY8GphO48#l8{S#OVT{ky4|y;w~9^Ivg_e4P;2gjRcmqAq4{^OH*oHdjji^xxfV0Fk5G@kXv3A)8NG(j`|HN| 
zlqQuAHW5&B%~h3sdTjKK(CYOqCDN+c!=;W`0KXZl-gtKfQtvfw-#UeI_?@{zN8%(k zZ7Tc<0eeo57SLNPjeW>Wz)G>(lxATG(h>8;XTK`C>wjKR{wXi-w>&g8xfJoK!L|vi zXoFr%E7*y}A3fezud6xWWBsF-t<^_}Ab-OFh$g2rdb>RR#D8W%ddCxAq{f=D-%zZ} z-n91ryP=n0aS$E*md1=C_U0FUtf&_KLPj2gyII7nETe2UYS{INNLag&fS~9uYb_)H zkjRaisu-o`RadmPG&iT|Oo$Kp&F0VN$Utug~Rvg2ja_cBTs{@8BZFW zz7p%VQJCuDCD^G)#sS@*E9~VVACXYwHIm-NUOz@m!`jdkzSp9`H3RFXl9Z23Cri@> zC^wkYP3fdA8EKCWoW+Ag+jI5#iwDvm>k(DPX=(^kO%eELf3 zO$d*>;F&)$UTC=6D2z4~=4Sdpkh<4XFYbWfrKo_CfKL{Wkb#0KALp%+}kEZFT#*AG2Q)o<~4sJvtR_+F4V zhi0|YinNFO5ppK?E@k-(n$5ifoDL5!HpHDOh~W?@ao1ITp-Da|AIj*uFqcjM(;sR3 zH7sH@Bx2Cp-d03NEA^`DC5i)b2k;_qk2q$#w*^?C&J__rxE;yPT<3l63i7GBQnAgZ z-n2*G*{3t-TbctG3+dFfpOpA4;+X^IGR}YoMD*xy0lZ^pB+9i@S&f^+*dA&4EHD9k z)+m(KFriCYJdyFZ9&9mlpI4FG$${cZfarq*&}JJlK%=-y5IO3>WhNmBI@jHJijA%f z9vR1hmEEI`L`6I+Nict)B^-ri`$pqrct4n|ga?1zfCyYS^LPJsaCeQ*LwW>_p)*q`QUHis z&D%dcBj1YGn}{(J?C;upDmlv2EitNzpLeXUt_-I7M8nk?ulR#% zY5T1gty`~>x5BhjTihvkE&LbWd5D|^lL3gTDD&9U7;UYCNP@HxE-VUp!_C+FAo;fp zmV(Qyx;Auc{Z?%j#SDnB~Z(^>&%RswgN9KD3q~ zx>O-Rbrqk)T=EnY8UfT(8n!3K&QasfcQp6@fcRS@<#dIVpP&9h#rZx*be`AO9USMK zLBg~@^qhK><9Bk5tYqS+i5f27Ld31DQ+ahIckBl-5OX@u`uVwZV@exOqX4VTG_YO# zx1U7RhU?{>2_6*Y((18QE(rX_s3DjJ2YIUzn+k-k|Gb~^zOMc%3)Z{n>1joR7^V4U z*%||*x=X<#k`G_{9jTh-*=h8H33wM*;h<-fjiP8}Zx}wdOH1sYz*Mf=2wL~(+ zQwk`A|8$WwScr4^-67i<36Ta;E*{|%wQokZwJ3F>swT2$CZTuA#TA*`Ur7S)i(paV z(wQqCm(Dm?v-~P|t6@1Ac2}#s?1OPbTh=7H$dg-}VtNl~HkfJTiz2hS37rs;*jcEt zV!Oun*(F2OX!U4{35So5Tj%FjJIp)&5EZHLphYnFx+_(Zrkx9R=jyKt@^Td{mZq9S zbPu>L3pT&oW_>?tb|_hQ|7WzxSPl8ZNt33>?o*A@N13hfYX{F zCs(}$XG)v?8pi4x(xVw^^{Kn;mO$GI`>W4arBu>UdQ<`!U<^HLwsQ^RfAqEm;2I*O zW=;LCGO+=(?RXrVM{@A8pDS;pmFER~KQdGi#3#CDLVKmOALBuAeER@ZNXfPOW|4Al z5g^Hmy00I|h_qALnoCag+4o0l+DszeL75JrRdrRL9Qmn{{z3Agr z5W?x>^|)MOuH;l=RNKEhp>&lgpqlyy+k>`Xb_O@^Hy0;C1=Dr7XIxg_$fTcMA6OpA*p{N9FyGl3m%12lCD{1d6Hin*PW=>wh=l%yj9Ek zIcUTs?em;dJKlrm_=gD4WVk&`b8Gpvi6Zks-Q=SM0q46H^&7JtzU=;973*%Rw5|p2 zydZcXH(MZMCoV&KReB1f9O4Y_2*T!W5Q*NWQVbiO!Qjm=Rnrz8e}9#g{?FwbT6*uA 
z8!p^?w*bL6x^`>OcHT6S;=G=v#pvgZh%KZua==fl{fy@~A&+5^f=>Q@(l}b)=RtUu zp!S1>U(1uz>@Nlmar;5fWPRpQ4?Q-GWOzm)x zR=wx2E)(ygsv%V`x4I?Q3L>O8)vO36C5PHy3$U}j>}c@l!}qd+<=l;v zYv@5SiE#raCs@&Z*JsWNHdWW!np_?1OvsfI zcT|H+4J9Z*y@rA-FLWHKwpi%>AP70(R^Ub$doxy(E3LTq5#raHNg3~z?NeTs4+d#f zU4tGsL6);ZT%3GDxET3s+c+EDOwc(HPVdtV`2sqxYPI7;T$AN)fxsh_O~+GT11;<7 zus6&hx?~9P&b^1Y!<_oPTjz_uL3HHmg(-A*8fZH45W3{!$_j!2FVHcMl?Jr1-T;=> z`dj)CS+Aig7D%#lD2Zm22n@Rz8`4|}dXdhxV~+VrYYx#APZ z{ThFFzp(@b0@CaG|50=<{!I4&AMe-U?jWfYMRAu?&gEE+cXcDVr4qx8yELbn-OiJ- zZwKTYVj<*|97Yb?Ft&0&%Q+jHP0q&VFk^@NcisPi$84YLbG_fM*YioZQFm>6rMbE{ zfUv$^*pHI0At$tgjL@$qM0>x+G`D9SVHW`)8%)2oKBw4S5Vqh5Oe-1?i6R3Yx)Sw_ zXEUoQ-)T_mdxN(R?jGsaf|2^zAuc-%5A+0BThjYjn zjSOJO8@~E?rkZZNEmd?`Zrre)rDz>c2u4z^>M}26nGKh$B((tjl4*YeLdP!0|G&B$ zqdJK^TOyCcB%ri4W51hAAO|<6^h|G+jD=WVb$$-;=hAnj;EM4?(jc(3`^jC0lfz*v zNrqkS5-4dUT)Dd-&27CY+lDs>>8BQZ0+4u5cU|A|I$iAn_hffmW*M`BOp#|Qx~Xt3 zWtU>+a+V57TaFH!6HM!?o7B)5l6wXYG+$jbd(sR9HqLmysB)w*XA}gR@+rML&_Dn9 z$&Qr=<5izV`!nmDpIxt~#-z-E#=}O@ zS=;&y4GB%feYAU={X-6Pu%&cB&rEiB?QnuZ`wyC{UVQIBlidx_hQa)vi!G5%+;UKu zQ+d)b`jl;u_ikD-0UaVXZ8Kj5n6=hfP)|rNxnnoG+*M#GEYRaa%wXnIFlGo90*TBp zM4p2ATuJqS(*kFNxlwQ;s$-g%vw?mgaZ+$q*lcq9JZooYO_KBAhtC9e%Qf@0&67q? 
z$LX(D+#wy$lwXd?ru}4tjhzVa++xh!@v$JJ6$}jEW6T@0WXq%3EAm#o3m`JDN507EgeSDsYePN+||IbJVo5D#HDuny?tC5%UEV#3#owD)KngOU z6tY@3eI56Y2k7immWriyh_zOWUllzs+wX4GSR;6HU+|l#$4qmy()_u^n~CEL_~kt9 zn5MG>rUH-#^xe-py@jAVZ=J*b`9Jd+B}Y;;@NT3Davs~Ji@EdazK8D9s=Z3>ig=H2 z?SyMR5*8+YH^C83{_ljV==$xoGjBUAT<*yzq2cHg~p z$Cft5l<^pUSwVRN%xKuB`(|asbhvQbC02htQOHhE>Z3 zE-A5^W>=QOC<|$mAfxIx@D1EcBhxnsrJaT#|I9R-vx@5h#_HkEEt)el=j058#iV0A zYHQO5%j44uO?)^?W`$&ybH^5{zj}V8a!2fa=BB196m+jRS98wi*oDo)YtDb99*R7| zG>vLwRCf-Y6Vc_&8b>RmBrmQvKl0)n1wd!HvQBJK`I5$(hLKJ58HqPhmyr|V z5q3XBisojJ}El;jy_$d(~i+tT0@YAYE{q5g%Bh?{dpjd+{mEkv){Am^oo@B zA+7HbE{`3&?)#eks>A}GN8euA4x07nyiDslXQtA6{eD#CET+}%7Qi+%1$8Oh{Th3B z1e^U!xlXg#5OhX%$!`t@b~X*|zn!b~Sq{Tc|6 zs}u7@(-r&<&m=s757;7$_~UVG8*ZZzqdyc)me#I!Ps%%>{gf@D!2htC6O&5roDx_R z-d(UZ%nOL~{G)P8&ZB%$ zxVr*CU1#4E0aq{rx^f0zIgN76u_gq6jhQ)QlGb-Js1ERC1a`@yus9FuW<}(JNM_TC z><9FH*O`h2=Jj4};Lwyl9c%3bz4-@?bGxL!+crbj=|ID|IEC9Fe5cspBg-w~J^6PIWwA4(0ITj|{?rRW_saD@x8SitTxODGe?K`QsmtSC# z;aC~HlSF)Z&u()|B{2iU2Qar-B5>v89Q~GmI!b-=;zt0N;39t}>J+kV zHlrh7ph1q2XD0%qkQMkLhtSdh7yufvLm5P^zkeUD96 zTrM8=kaNY4xOXc~zmyrj9q4ki|KSj4=y-S^4JD|y(YmJ!D7#Ue#7ZYYf_tO6r4QO*Wo5VEVw%-))Lm{Vn8f# zMn015X!^4+I7vUBo0rdZ`aSca$5jv!7B~vcR6JW}b;TiWr#T2+NEe)8PbQL1EImTE z8h+xHFFudVPA3QQpKLsBgBQAcbNjP}#)biC9s~Nlru-);SyWu3^|9RZ; zV;ZRC{d@2{E*m$qH`)Zt?E5h2;JFy_aj0G54f(W4+WR_8WWob8%}+Y%^_(8ZUDn#~ zP;5}&w2b-VkJip1f|}DW*6gHyKsoZ6XXE(a6Z!`T_T9T;ArXK`!1;Wp?Z5)M4|tmp znCoTCSe38PjsnBRAL)SNp#8mIx^P~*M|0XZN+byXkmS=f4^@KegvOWzVfGS z3HYABiz8(6!SMQj?niY{DoTgL&0)Xh>afGw(Qj*)_dQePU+Q&Nwg>5w3a-~!o(Yc- z)BQ@#JbblkE!}OxE~$%I@c7*OiU)zEM=m~q^9LXMTU+e#DX;1Aj zOf%@Zp=$z}d@a%!_0TUh>jKJ5N^Dn@nvd4D{6Wg?FktunP}&Maid6eav4Fxto{^`# zDe=zjq8BrTRN1}rU!O+Oljw5hz6`!45+L)C*`b)v)2&!(jFU(UxFwUhEVT(Cxegf6HJ_yfS&|za_wai;Kx8N^Dyzd8 z5+%*(b(*%w4S%5JyEWhPYhsjD1ey<{TEGk5R+#9P!BPJnsP5i3cMyJI?bw}Kaj&N* zG~bv5J`K@LC4qvM_{%Y9j1z4k61osC($uq9d|SBrCssAokyI2Uvc zsa{TZX*ScW+f+`5{BYG+8~&@DZb);0_lYLilV|v5%Qt+@j*w-PtBWm5nG$g^71tU; 
z86I-KlWtK4esmreU(OaRigh6OZ-rP}uH64;w{fKfZW!sSKRUdWW$yGbV|a&TYNq2!rwvmP8qe2FNmc zs~abH<()kndZEeOu7{&%cOQ&Z3Kp0Py&NdM(Uz%l@Lo44=HhAf&Pvn7gj<7ke&DvY z)6Bi_kJYNT0y{cb2+SU6_5LMBHzf+LNYz zS{ZQ9>wV|!TJm0&3rskR@p{$gmR}&aoz={gGrk5O3OWv+wkfIBQ>ye8SNm*JbD+sa zzg4OIAL^awnci+BHNdSs$J+w};I#tB3HaJ?WwQ^57(WG4?roWP%#AmVd3&Emn!gk@ zoUuA1$#6l`Sr*>;YAKz`1K`X@yo6^^V3;WJ5#~4|Zwq23%TB_ln0=jZ9wA^H zZH{!bdqQfPdK47y)pS2;_U_x+>Z{v%aKb_56l~R-;>JpMEGsFlX799APP>m)9UO$J zwrWYguE%QTrn9KsLSQb4K-tc<+~+e5lgQo?EUn&+KR8M64UN_dzVwFpFxfj}lWA`Y zV*<&Z{O5+Jj=hcAI(^*vFB>qU8#gv?DF~*YX#nI{-2_8w4~@aoG(LzI4lnYl)u$&!O%=JV1a`p>pEeclx0Pbsxzeh-*zuC_#$@IN0iILVcDnKF zddSNCe0^%7Q1s$O@!ou$UFHvR8{B^=0k!>$x$9=tpJ&9*ZyQj!{kr47jGci8 zYEB#eu!M#5tteMrV{dr|T6;vgCU?Wtq$7wT^5?*gkom8D4;k?;Y{QvN}@ENUA@f~*kda2wpb5{CF8#$ZmNtLmg^Rh*j8<~Z4z`=BaQN&4=MfuxqGBv z*NU4Eez5ru9oP2W{tJtfdg2P)+$IhoQO{jcG$?l~4-Wd~_`^3!Y`9W!bjLXIHyXE< z*6+iN2%m>Wvox=0Yk+i2RBtHdN47o_jmSLaDa;zaM5!Nb%|C}!aB7|mVFhme$&33E zS^6-3VmWyAp8sxS%vjrkS-w%FrVMEb)z+w#G-T?hCC}Gsfj)Tj$bazueOKDBg?hs# zu6unHC72vns#NWPg+>Lh&be{i+c#2F5qgd!O9F$~_W`T<_4Cc(lDGdyn15{Y?S6>j zWpeQ3lsnrtUwnEds-JW@_M4m>O-Ja^3Vqz;-TPnueTPs-`yiOrXv6Y*gAl$ zR~GyO9Hqg$9S#iABq>(0afavf%xCScfFZ3JbMjeyPtL3mlJ4D z_@Pc2LWi}!x!6+vnH`%*`|naDWj58SlxBsv+-|5ra_I?x>g;he0=0X4D~P->G#u%Z z<9WSj>F~wN*Exk%S$FC{q?*h`kE=6PAj$WFs)k))KyeFW%a-dZ9(`i7GRU%0pnUt&Hc~&wJYIw|wrGH`cQ7CA_mH_wr zHNKx;_$%CtYv%UFFqDc|giC*(+}x81lRsx7IV)?&v)gIVx4Ep{`ocAhW@S)FxXPTk zIqBP_psp8z@vWL6>w*;c@<=h~dLCN4J>??!LdeEt-^Z{QwfELsuKn`lo012%;K_g~ zYsjKnhqKrLIm&I|s6?9`;hFBJCx{6>SZ>OVjg$(h9Iq|7gFD7ANdQJiJCf>u=)L`w zc^l!I`Z?+eRfGiu9hvEim*a2k@Uo4D`Ee(mQ74VH8BGmrv0FQ#0 zg0zv0kK}N3i(>noFpQ8Wc0mJ0WwKJ!_476#R!jN#&b{TmTZ{B>`?R%o-I^F z8`HwU9XH4Q)^e;ihvBHkjI5%WExiO>1{f&YiZ;e1zaJ4cH$ms$gHA&%pJvN`n0S({ z0{MLFJjv0KUx{^l#2h-bNKYt;#VcMIEBVzwUVx%+s412r=?u+e?rMXVn_T_0t462E?jg&%4Si+#$*rcy{ zQu!wrb?0;HeV&Yrj;P+TE3QmS;5*KTqME01E1t&p;5ch3Af2ek9nlNgo8V)F5%9g| z6hJ=Jf&*yBPlfgM`LP~zS_SB$mC(XLIwLaMO6i*>H>b0CJS!B=CpRViPju-7|JHVk 
z|Nc}x#ATCagT`5De5E=0Z$3{VIi^NI;g$3d4B$Pk&M~_skjHe}WRx*zl07o5s&EM_%BE9+mj2P+GVfpf-J* zTjm@E?9RAk&dQ>mtz4UJQ}Y<^Ek(=;sF6VB40YA&)XRZBvT$S9rHYr+JV{$!OrPhI!$gPNC(CPt*SF8cNBD*~nxFl|#&(Em!b|iF@wL<$ANaEboTszY zz406#=)Yi4q<-WE8^$zy$sCdqSaZ0!CN7ScDqb!1VQt+Q!|ynwSyj(u3)T$Ay z$FL;`{az$3#y4BQ7~E)mh)d@-=&?We*+j?HEjQx$Zo^U5oJY4acZ??t&Y z%U!r+M$##k2a)$EMDr291 z{#a|`h$VDbyxANW|#Lg zg8Ad;(iX^?{1PCE@!M+oWRDZs`&R;GF|k?A!~s#ULdbdUjipT5WocE80EkD=WK$a0 zaG>!yLipQ%6P453Ulx(U)v=Ccy4nJiZI!EEgM)*&0OvVfQw1n3F+u)RJC1a`hKYD- z4v+6*qm4&OLEPnWI6&n zpXM0r!jW|uy5vWvZ7B%peVh0R0GL`43*;m8wBxeZXeJF~mbqoV24{R=RlK82|I!n5 z>K!dPePpH9GwVO$il2U?b(B!^ERFAJ3BVPR_@VX}@Je=%R=3pt;LL1R)Mj^0)dHM6 z11IYUDjYH)FsmOPBdmH^^@Z1Jdxmn~0;scF?==q6XkA$L)y;055fEEUcm;cPoK%I6SnaK zBrDJp=9+sB#W%&&!JdA(%Uqo?vpELDz!h6^*0Ac;AF!sQ%_{xgMS%yy$;w724J&*> zpjFRoi=nukh0(PcbYyXTXfHurIKCNL%W}sAPM%kJT}XU2(b_uMHtbB9{%ZvIkd{(w z_u!4$ObehdBp_x)?a}~~N_F7Hbz{l z=!&ARK$E9{V1hI7(_~V2%d*Ye=@)ce&1y@S$C&_j&n|-qvbB~D%g|v*z+X=iEPxJyan#HtJiT%bgL;=^zVUz&9qz3 z2M>kt`2m9x&q!AzeXnPyFUdsawoFcK=AX8!4=YwwpyS-3Ny{7U)^q|eQr^SUca zlg!Ivv2E(PQ%otiPe(m+1tb8;)l;#)y(RZfc37n{^>Ciz9ky^LglO~DU==dBHMz2NASpCZ12(C`(PcFgZrs&hele^LeO9mK(*|IoW}UonCDH+O`9I>H^|5dk&R_d?cND8 z3ux-?Sj{3@Hvqz5eQCRQmy;w#Ch&QXEqSivwV2uPkKB*|EEXmd< zfPV-fAxMkP1b&az{kS#xT67!40|*T^-_RGmH2jGZe{7=;PEiGBy$@1$6A)JkGg}5M zpxmYoT6JFohBt@b^TgN+tzK3f>>LvFxEYr{>PYf78-SOA`{~UwPKG-5=ng`VPMyD_ zNM; z^px0ro?6xUgCM1=r{hYU#^IL%AoI1!J^&WIM*xMhx48X4xWN51P|dLlo14+LXD?Jf zPk0jhu&#OZsw2SH#y*Wa+22d^uu@Di#4H%@lH5F8g&#HQv5)1Cp^ouJ+3}kLme1;H zNU^Px=>{8_QpsF>95FSE1R)sb@XIS)-BU!8yvepag9bM6Y9s%fb&2b8ZMl0JQ&VQn zE^~qo3y*l}02N75=qtY*Je@DX_Eh3HG{t|CRYyB5Kg;Dru|@*+Y@Em~c^N2LG9oVH z#RH*6&d7|O>Be_=Tz=QaQQFLkn=FqgG4(kj;aE}TmQ%B6h~QI%sNJiZwpHp=w^E)` z-ovoYqKKia8KCv}3L_eklNk$+Fg<#o{TYj7YZi8elx&PCCOJ3v$SeRvj3BuQ2cX}m zm^c8aI!X5b@5iucg@>N!`HrEJQ|UJ>Qb;-*YH-SGJg@I){OJ89+xqS-}}ByS~@qL)euatq%DS zcVE)6&aMZypZ`$c2K&wj?TiASO zd4)`cHb)(ZlZ3JXR8b4oNAB`#%VG3$iD;61ZF37DPt>{b0G4ypUvu08Tmd&=+Vr7y 
zd;UFOm3;5%mVyH!3p9_=BqPouM?24YOG!z8rL9ovfP<*Wd)E!cnYo80*nUk2#+=mw zYRJXu^Nuq>A37?J2j)wmGU3JPI{wyH^Rm^2&1$<4U%p}BNH@hXmoa3>C&HGB&;JEoXpe5j|9$to!bVEX z8hW-Y>iYQLYb=t=MYENt{kzgeA$g^T)Or@IWvDjKSguAeeIyhe9@n0Ngc*- za6bef9w+1!Q6d>7%V>cKuR6ek9*pU)LVy*WC&Iux3p?koNuETDSq>0_)YU-~$k(su zNaDRq%#y6uz%qleaMe!NYNrm>;$KBDYic?1g|3MfG`ia5$#R*xdj-tKZ=Aop=gr!l zOhV6=x}V3X_t_nD7ImOcdHm4UPs>iuH!aSNryV>N{T6JlrF&ZIWIqY4d%3(k3^X8r zsTs}=;zsa5z;#D7=zcs{{9qRtI<6`?EL046)Knn7g4?v*2h8c zAT!kzrc^m4rUE|aFqXYSJC|^`CPvn=4(dP|wFP2qfC&MZJiz>2cy!T;*G%`*v;j8t z8|@?;&dP>Kf#^$>9HN`Aq%A^XecT}m=nSkQ_Lcz$+7{~QNsh`{j#)%ZE2zCI6+iDA zJX)Kgk9+kVjH@3l4>MKM*S60Dp$+%kx;_D8*CQ8&xhN3rLr zY#J78qtlXbr{dRiWW4_7Q$hnY>v^omSx~Dc97hb={h#0fd)j4S{Uzs2i0o>vQ70^6 zRZI#H5C{Ho^@-CjN(bhw!NMgkf8$B>Iqg`&KKd0R>I%~4&xWrfGE9AP9l_ef;KINAoNs>9QGPc4er*1?(5BQnV4AT+Ewl6b)d}D^gJ{T!rYSb zE7N$Ou0@N{4HHSu2~*3VhX|arFXg1|QR= z8H>=?BbF76uS?xyY?<@{8D*%0_Z$WQTd!AbG>@-wWp>-$k9Tn+=x>`k1ZR)UHe}yo z;C;@cV=NrtSX^j*KJYl+%_hIA>(^3I8VI;>>?X;&M|omp-!FUESijT;qR^QUoZpOH z3K{;%%bxJoMkpuK7;r%~@1yT<$pFxkQiD6Xx9pMt>On}@6N()`T3g=l7+Uatz-3o*6 zIhtpM8480v-7#T+sKTp_S^YK$neKoGs=V++E^!`wtum8LE&#G*rSAekOu*3MK!5{P z9fxYUn~A9OH!f~rF0A0DwS8QK3tuME(||Zu#J{s7li4_jh!Oop=7^8Sq*o$Wm_X6= zK0B{YIV&V6Ny4Gw;iSRjneMlB7GiqgCw{5k`gSx-bm9<4dd{v3>%}OIZiWjnQW;G7 z_@c*S877rPBuZ|#R)KctE+2`$M268j4#Fhjq>VhBE;!y$IFZB!Hi7} zC@}3yu3bR5&^papR&+-AFx=A9TDI@olf7x1EmXN70+NzIQ;RbRVUr(l9AYYTOA~gE zxjZ2|Uw&*S;mbLX%eHU}yNS?+!9(QR$%a3nKR@<~#kaO*cwOktuw<4z2UxrtdfB8* zYpO-5&Dk2gg|X+@u*NwwK1^`Ve2yw`<~h$vyxf&D;sI}SC(y0gZm$beIjiPV>dTgl|vAVHF6Q z_54eRUq18E^mt#LZ8)*<)*KFjwHsuEVKmuM_;$YdN=nFQS7)AwZQbaz#?aV7T??51 zyM8LG;EuBE+30}(-DblOar;pTe0v@+^tK}bY$sTCtVo`3Po;J$y1K4*JiRxe{b4U< z)VRTu7SQE2OP1$RxLXdQZk|=icTa*W(7YeN;+1Af-<$$oic%dM@PdI|7(o72xm6Oz zQc+}B6UT6LMeT?d=zv%GixqyJeR~y5g(o*n$E&26X)TZPa_QeG(`HwlNi$zmUh1Li zyCK~fN&Ql{)9wVKI^H!Xxt@j%)=Rw2lO5LwO5ydqI$Nngt-hP0?~&Y$roAHQ7HZyE z!AE+YDpOm5J`Wx-Q*fQ~HM~xzdgDVY4y%@GB~)mTy+xY6c`hsnb`zkY2)7(2nO4#D zo_T{qwsi6FMT&|Od}?9^q3NKwNAd)WQ5(CkZ|w_vc(uxz5Y-(J#xSooY_cVL_47g= 
z?TihrW^V|%W+&iZ{d-_8lrX!8viM?A!Fzo#W~j}@ExlmcS7!y27uYl3by$qp?~oF@ z!2rlD1Q5+kqK+dUM~PH#Z3;H}5OZ8cbO8#^VgQ5s-BiKACZ2^Q@&H$9wE5Ew%2DoP zVGbJVegR2f{>`aH?%H$SNHsa}yylM8NBXY!UYmUf#tJ431Eze4G5~k`wlqsx2cLWF z_OXUEvvlq&-Y$D!n;t8@{~G!FPhJQI!T2;@UoB>*AsuFR8Bdtdw>>kP#ubBwH1{oQ z|5KZDFXL*XQ+(sw0qvdG^E`kP=-1=n>-bN4t&ifiwbH~Se>|^gsjzuzY(7cf>wJX> zwt^*5{m7sl{{oJL9b1Icj%@~UCxpmrk-`{gLWxJ(POxAJ(dzdz)pt$vYa zZO;j-kf3$}Yq%3Vi2G)PAtCYQ%^ zCD=7&MD+y-%*hS_iP|^{AaBAp@_E-b)^O|;=B_}pgY3KU3Vmvu9GmU_JtbbI`sI3X z(hK-Gn9-;nUZ=GpsKgnpBuKM(;qO5R3IF}{Ow*7_pm?4=@U>h8F74qy?s7m#s9Fy7 zMEJ?OiIY)KHuJ5j1p&q5?op0JEh>yUc^+T4boaoHH+- z8I#IXZ^Y`~j9y4BpMt%xs0@vR24WloboT5mWM{zlox9b@K1eaH5f4@<8q5^k0?3>m z?0mjQr?Ykv&k9NxpUOA#oL*xML8UFZI3d<^Jwn96_#qTk)s+H;R1NyS3f&Z7MWuw>o z@VL#Sn#9w`PJrKCJSJBL+&|9nr@|o4eR+0>1 zoIJA4zfd@`qn$@F77>)jB2n?$T2chZ26r9PJ^=qj1l~d?tI*$# z&18^=xUD?-5G=}aa=TiLA~5|?6`P2HhA5m(&3SWuW~MWBa8!#*1V5T@JsD$v<*?;C z(fktn>Ch3R%;X)8n&7Wd6hQ7i=bO177NgHPbf@X9=~DIp;`cmRA~^i%N2P6Tsikbw z>aWJ6ZKn*-tF8x0;noqV`Y;+d^nrS4j!!9mc_rJt!k4%F4fzpY`SkYV3VqLy+GykN zX6ut-?g|+8ZldU_H~Cbvw%08B;lw!L7&?O|2Lk_Vu!iZ z4t>gQR-gIr0T^w$iI&2=?A}Q`O`iPyddYMEb((VfDdI+Lqmxqh-G5qdzNxw9bMC`? 
zp=}xZyR;s|o_k73lUPZZxKJ8no4wq-n~LW1j_6+%ieyE8DS@u}x?spW%gd0Yc?s{92vUUwMJn z{_kegD^m^J&RbqQdF-&f6^Xq-kj`iQ*uaG4; z2X8i?GRl>C5`{h9VLM`rJF24U?8e$G(~sQusi`{dm@Ib#tL0f$wNJ5CDGd&qq)Px< zXzXQt4trsrGPsXUWB3<2tqB@0f(EAh6aXL7{0uRSKOV>&dYr~s525DcjlN&ft<<0@ z@`?CLbT|fP*iQtu1nWEb|3hCgJHhjWxOe3rMumS`HvHpd2vb~cVXQCsQSQTwKd!uc zlloTt-MQ3n`iuGExEV=DP6@e{b+P$w5O&q*6M<0ENZP8>(kk|-ckcYsiE8})>ZP+$ zyBzJF!OXuGuieirk<0Y8HNzTEM*4;ZawYF8Q6)Qe#`C4lAZndaA!`ro;>^vbLN$TA(s;QqxF^;LxJH zgsU$}iSs8-b~t#-pFHf@qIvEwAiHaWq*xaN-*kNkPfw9dDf76N-)CghU^y`iRSWc7 zVL(Cywv6T}*|mx5wujMlcm>r+Hy9NRhC!~HYo0GjyOou_eskA=o~=)zk|0Eo$OVqb zCTa2R>@v*Tg<*^;!d73Q?;XL+)Y>8%Wo%d zpvrM|K(L$}g9O+sd1eRUzeU)v!1fIzdQ{?)hNAU$Ts`$emV3X6UYaaYctV{Z_xkKxc5a%9d zyH_CXV`E#gJ{l49@d1WucM=J+C-Wd4%!teWlQy+{&-5=YEBzZGCF@j0ojzGO-=avZ zO)-a{7vypeKpD8AU*VR3j&ik>9Rn!Q63ze*xxgEWAPa?;9fS3cL7wyPv@S#xd9}Pi zv|kk5#o7Vq(M6iTfUgzH&~!1bNT@zzbGe?g9H{O>t8_fG4oxb^^`z?FC9DMyFpUnd z50e6Ei5TE*Thb>_Y4Wa{?JVTvx5`YG@j3G3vovtXQ60r7oB8+WfBFCJH?&6tYKl&7 zN9Ha;|0+*M{$+9AIq);%!Gp>t%S+gz6eEypO(B22#nZ;^}_N8cUD;&C#71O&REL zCburufiB9}b74iee-9i|xX<1iV!~CdzBWD=jgXV4vOuVWg?lyY4kG0}l8X?+HmzOx z@1;(e_juQ8;_B!uFB+D>3%LPR-9(uewU((SW!~v0f{$;m-=fCvkwwQ=*^_PgXVBg= z5Fo{J7MYEZ5ExDBM;NA9gS)rGuS7e999Mus9imGhwFWb_dJ*6t&^Y;ci0$J!W=hA$ zj#3sv?z`ffxWA8v#nwYbzquLg`)gSH$E$PDGBp`Xo@d&=ET4GnL!T^AU&FY^gm0 z9FeoWE?V`+o&z?P|8p9|WOMWNJATP8J8nX34{zPWHWqoMNkJ-WfN|Ekh`S<+Xil?- z+t_G^PR`d&)>JFaq`vf!!#BM)9eU-kkSv~}lsq|246RJ0dC?%`kTAGA5W7?2SWMI~ zxba(U+GL@sqZ6v4uhaP=$X7?*w~b4gA<~dTYlcSXOqN`FncjV}A#GDY zW-(}T4DG&K#Cti~m+M@8B?C1jz2NES5X+~6U}{22iNT6xPzrI<3Nbl9 z*`1?T`C-8P@dRyyWoYP#52#xgQgn1GB9|4m3<$fJJ^=ZMo&_k2%H{rEk#=@daPc}D zm|k`wpcUUj_W`zN;b%A%-86nUTdU$YzRrjb;Mcaasx>|A40rQ2{D!N6L47EoKmdLi z_2W_8&3u#`6Kzimk($-RfESEf zXZ33@e^u(y+d~GVMmiPzeB$b3P?gxON zo7Mf)><@%68LhG~55}AEf|wP9G^Uum7z>tigB+J_;Uj{Owfp3O1w8P6y?p&%-xp-M zZIg{nObTp8r<+JxMR(Q18^Zv9GBH#*gR3l2^H=Sg$I{E;uqC0<4HP)tr|7}Bf{|m1 zZ}t3l(oJp;j-U-JlJ^3wfnpNB&$6V5GWD|$R_Q%yI6?D^XHWIb>*cEvrkxE@be(m_ 
zGHmdTFVm5=16~Ac`QDd;af?)PtS_M-VDJJf4dQO-C|vM=pJ!aH4pX?isuK6BQ(h^a zQzUo|f##VGgZ~E#CgK~%mMuTmMrF64uythmSjk1FBA8qXZ1IEOT$xwEsrF>1(q5E< z5Kl&XP;D=*@UkPXTHhu-}uZo>d_mx+9Qwg@+KHe1gs;n zEwkZQnAWkgsz-l)?z4D@CC`BMyxr+APDkTi>zLz8JH49(+{~VorJkx#u z$Gfhp!&OpO6d_#YRF36Xj(2q-xuld0!zyPpOL88&uL>bt2q9ER%q)j(*laoHu;k2U zTPBmk*ociC`hD-;U;e{m-+e!a_v`R<(F`5Yw0$^)(ah-=UqBlTQ&h+Y#d@PrKI$FJ zJ(AZ*HG%StLKtigsFKOM0iTldoL*MIL;O99g6I*^je2He?xYCWOII(Hrt`8OPdm5(0!7=3} z)^=SQrlAzzy^k@U0O;<}0x~B#$txn#8;dVaDo@*-PEYIav65C-3f1Rt@eC-X*x-%W zJjd(>X-5k#L?eg7NUq2Wm2k3!cb9R-LxT3Ct}X2*F|KTwi)!lz z1`%qkl6CL!9Wpl>bwp~g!aUTH;UPp+eBV{3t(6(v2DNA3BxOg!13eAh;qVepAVZau z3AAw9x?@$+j875L9~Ms^TDmw6N+gxnCOGL;V?Mvye-9$#8M$&|jEnlXRMBXiwcFJv zuprb_EP3@*;j*f<3fd7hdLg$$Vq`X&V;;HybLn!UT|8ZzOWj1`CXi=@>TFkaVfM{Y zYnvKJc1Rt~w56nL8=}AJmlK0}x1xwvePySor+0;Lcu?R@$ym-jg_M`zS556w%1=GQ zTvulLHIPvu_nUr}-uO1=BA2ZE7-9@9@FgeIh-vxzH+v zX9935t0*ah50{4H?w#6}Y8E?W1x+eQ_qk^+J8TNVJ!VKn8a}lRXw>-ylw;eErp4qQ zGSU)!8!GSe{gwU;D1^xHoYASCu%0aZuw4)HArfX{cA$9d6+~*1rsNgnKKvjKyEnKP zk)3qT#}h}{?wP<$Z9AkQ&TtD<0!!WBF$A6!9%5kRzRKrjk&(hd0BQAe&oOI9i%g^w z247Rd@~~!V~xy*Td-o*sh6e z!2?37r~J2X6<3Ohh^=LMf)=r{DcTkyUvxjxvi3z3eCY&*L7{RN-dgLq^Vjm;0K}Q| z&BI4#LTz1!fD1)f?SNmPoZ=U}G52oFuZ^dU{r7oEiuqKNYkX#cVd3w&J~nSXxm){z z5ODBJ^bQK3jBVi8w+kgz*wSVp(!=g+am?++V$Slg^!UlZmOp;Kmu|G`^5!zQ`=th2 zwkJPDkDjCn7)3hN50O+Nfw|Ok>BWXo)0Q<7+Q2QEMveH^h-uR=U-=e5Z}kIwHX6pY zO<=x7n)p`s{3M(s=dt3*h|Rs%S2TKEarQ!;j)R?WtMR^PVIe%6fmfPYN~hh6wIW(v1p?)V1;2Dtms(L{b07S1M4*B7|MC&$7=PWZbc3Wq?RiVvrTzPYM&Aa6O!XLkPG9dDFV(HRwe$@ss1aX(!Zzxr+bN5UNA?CD&pK)VcAFTu56g&eNE;9~ zy%rk^mF6Ud05{j7hZ_VQ3opFgl)jtrI~<`|q=WnSY1$3ngJ*L7zWs7O-CM2%Pqucv z#TTSmyVoZ+E{I>wl~Y?p_MNkXrsc+K#SVZ})`SFD$I>m7>uA^g3opa8f|(a?#yedk zh-d{arp0XpSCiES&*uS8eZEnw${$KU(hSN|fFTAjkqs$3(hvho_z^fOh-bYuYn2%L z4R&%~vI*#@aAgNLv8Y5uF-Sa2*8I<;;0)(dZg9TWwY)*7fZ#y%1emS#O*5oK^oMi; zc-~v;Pq%?Y4$=wrl_CqXH0U-j`+Y1*)^gX`<(_|T$UB0_*~djQGH$rlKL^=9sw*av zp%Hwws(V$!7zaM;x#^548n%Km06VvnqeuaPo7V4|;S?k425?FO0S-8+&-`kIX+?Kb 
z?foUw+UstaxNwURNWZ65)a=e7UX3Ev7I12Tj3gBf9$ieYr8n=qwJSnb^eL;r(W(fT zw;=(PsNYRxOUFZj3T1(Tk{@n%MXfR68xu$hu(JnHQmXUUzlqeh5vj$}8Q?n`6%_(g zu}+{&G?yCzM5)vmCeRQRTKMe8i>cHeuI=vaX6+{Tg-bP+q z+5crVt*6>vlunfYhE$*5@gXTH_UJt`6TyPa-6}D2M)I>gzkYh|S>QfaDR-^Pp9U~7 z0%ys`_zB&Q|_oHNp6=C*MMtOMc0ZOIZN(zWeBkRLNh) za{d%S*8SY=M~g%CWv?bcXdKPOW^LDd@R*nB`^_jWEAi-SO1Pa+$8I%v6Li2$ zq<@qcXj9(;N!>&ECHVVUCk+#C=Tn0~D<67c4Z2N65D7IxAJIi=4-N6L9~Cs525PG> zkJ5|*(jH4y$&7c2k~%+-U(o6(DH z!A6)U=Szx_-lmxv*XUj@gJ%GHTm5$_!h|Dg^*Wl3%k=A=vV_fka-G_RnwR_Z-7F$N5nr5SrSo$?YPJQhL zBzP;J(7iEBVt_*3o%);XuP5+y_oo3x^+?t#mO%%hf>mPF@u|~%amx~riPJ`-@jHJL0StqP9zrw6MAj@7e4xdqe~vghxbP7p z7nfd#(ExlFT^oS^0F_R@RQX-!bw4OGV)yL63<4!-GEC$9fiEh&B#I%q7_TXKDA5aYb`|R@1D*2YTmrv5(4P${*IUfW+G^jCdOgmv^+SdA+3k^avn41Rx9%>u|>Z z()+t6PX=n)-+L?7ihTvNf%Qqj#Yp{;IllZ8P)t6is`yo|7sG3kCe{RSf+S_%2uo0Av0 z$11WIfaHOceVr}TLUAwfEw=v@W{7-4m?zcrg=Q5rhH%(}M{boZdxG}%3NAbMWO&pi z1A07;+xwztE}e6B-HYS=D7N^^mSwqE;mLe&X;j@3`Ls~xE50!xIjm08V#Kz1psrE6 zIkc(0slt&gGT?Mr8IO=fHcXoGm4T`ubtEOGrIBt8!VJF#CGAbE zO~9#Bbc$FBCqe+%$Uh-zg9^&b&mn_8`eP$d*Os|1&c$M^N2{YV5zqk5!U;sN@;PyY zq(&@9`P~4Rz-K=#$60j2gYNLFvwyHoA)SQ{Z)n2DQ`bfi2LBxmtIM4f-GEd-X3jdr znT?-E6pGw2lS=lLaGeVxgY0S7ZfG+q)6|Zpvt7hvrQaJBdm0w<=KIu->u*^hWS41j zk~YOQn94;%!?3l=$ZBuMRo$w}^OA;usC;Z z3zF7#-%t0$)SY&l>7|56(>%KqVt=4-;y2_?p^ivXD3~WCi=%jbooajezByQ=15dBR z*b){~44l%7H@ki(7hY>@Nl>ZwvTb%2z+%Xq&l&0Re!q-RqV#R zcMYsCP%Ek?mEA$baeP+JVEIwL7-;$E{HKUswM($U1+O~!i0jeB@-^r*y#PM2#^f@K zOD{hE`&{KhK%ybbp9TN`N|s0F?RYn35Nbz-XVp_k_P`0{h@=4UmN6CW zk`6{KdM{0Wnf0b$p<1`ajo)0hU_{ zf9_G1S^;PCi{h(%0J@vSI&7x?Oku>^0lyJj3wOTSI%yX@K-G-CZA`zaHa+3)4>Sjg zC}#JOo)VH&O(>X)!gd`^1`u;yuD^Vbwm?VRi|K!r&)h7zeAgeQZVAb?0N7 zf5q-VqSu*?)(SdD^KMl=t6F_^`s7CIW$`%!n{s1(F|=?*>E80h17tRySD3pzkJ1E= z$-tYm_k-jmP_|S4Ukmk)-Q{BR@`v%;V-PS+eU{_9hN8$u(2(k?W5<0&j;AN@G|%p^7uDUU$x$JXGU6TBM9*lm>IoxhsL|31B7r)gwR*NrVY z{)CH4_x)3t#x^<}TrNX`e4Q3+@QK~SifPu+Bmc0^)@c^N0w(Ii;6bU^uN52k^3C?U z8f2JKzVFG?96Bu?16GheS;cK@e#$?5)K&h4(RVyl=9%^aRv+aciC%hS&VO7%B@R<> 
z!EKd;+$;Da_hUfHU->5w-&?Wcd9Mr~i?zBrg7gy^A?2ss95d^H4x_{xj2p;&aO+v znJRx1mcnN~&9;t@P4l#Ce$%o4ip-ftZs1K5!bP(MnjNAu+a$=J}H{-jGg}q2CCg%zRF$& z7=B3@^lV9*5qBZ+Z(nCPj<2jCn-NwaXmmOE`8Qo%2TGRk_4j+RWo2hDJ4+76U{rU~ z$k|TmO_x^O$hpv*?_XiQCKXyCw+r>Swa+JrXct)ZH`Y~=?Xuc&WpMX1GaxOw;`-KC zY)(?noIUK<(iFSC`mQl=fc|HZ4kdkHJXz!5gHc8MEXPwWg{k_a+wYyL?Z09i?+_Hg z7IM>2qYp zPr<%2=%241-7)9RwY18m<%>H^lwwl76#}Y|>vs_Gh{%HNU>_wT7)gMbVt`W8(CjGKr$Neb_ zTMtmk;mapfUxHI&=jVfZvCrb|#AgI*YP|OP>|pezl+VLAM(>eYRVIA6KUVzZul@7v zsy6qr6P~-N)~?a{zfydCBnX&a&Xp?;znKxjZtx_@g!C;m_TaPwsF(3tw2wp>fv6Nl zYLzygF_+#pdf_r55G%fP8@kx{%L%1B<+!(!Pxh^C;h}4~RTaSlmdx!`lPTl{xTB1l zv$OsB+YuypvEoNiN7AWr$~+(@ha3x|YMv2Yee!wl-MIL$Ypt>!qIO`i?t4iT-#SiK z3?@jU7n;W1PsD^nnOA^@C+RXKT0zjXRNjp0%arM88*0mA!P+iC+W$IGJZJwDq3zPP zSKnj)+ix*EP7BbV=q&8uYkX8rO=(6X)AD=p#h6+sBV-dZ&<8^Z-UYZblzTCoCa8GI z$dJ#Wh7Q*onLw*mWm44ho)L7hn~Nm<=tzPk0#%;$PJJ~&bVqzac#wCyo2klAN@iz* zecJfHGJ`?U@vNCg@26JzeJ0+$XpR#H4&5?#+~_41ljgT;h3hbpGt#Go-JEoYxBX8< z#5>xIcL@0ekOWkPPYrr1TjV-yDn+lsS>~)Z++xF0{12(hL{zDIr$7<3>y!eKIOtu{ zdi?p4=EkkcfF1Co=M88l0ldG;+JM;E}_j)6TIWXwbyu4*gSdO|Nx7(H1=%kfs3Th_;4Ch?JJ77%4 zzVU>;KBM=S@Lo;Fd*(`;;&b@I7w?tB@VZAknO#87+O#{vp!?rXm`#9G$vH%aF)3dQ zp?d>?m@(Vdrb91U{UODm18c@Ri^|n56CrX(5RAA}*28$!4{%~$dd4sB%rcGBC z%E;ly^LpP`HXN$m^BEFu*>JK$5-9oC_N2?4-BU}o{I{V@)z+pwm&X$QN2;DtDlg|n z)_T7(sLEWByL%vzC^gN1!awc=SQcUahCmOEgsxvQfWjU!a3cc->P4~r@#%I*sNAf^|uEj|QFzo13#6$B^|AFxx? 
z*4yw_sOEv)DxqVb-O7_I2{KlC&>xtqFg-I&r3xh?oT6vWp%DSVixM8ARo!J1(^L2b z*o*s{i3%HaZ9ysehSm~2mz<>4+$Ojv5qSBBofHSNrJj9acdlF49Pj*gKCaMS61TMx zO$6ChfAGHa(f8op)7fmtQ@$Zuv9~}kLdXT=Vw>gua7vU4dO*g;uiMs)kYbbq`gi6# z8T858wPw1p^NVB>B4boI3~k3dS+}6t8V%sWppUx;lD8bE<`PO6q4qOI&!S*h+g*F< za{or%g`a!IE`l!p+%tno$z2>WCN^E4C7Oh9>R#idxC2M6(@Wj}uhIMZ0?npyg|)Go z-7Q?{$ugX8Ys@mnYVH3~wSQc0i8_h+3PW}kS*46)+t3p2qUMN;N zQITW!_4*Knrz>8N)Z3cHu5>_wBQZry{I?Lp&Q=g&JC7~hD!uC6vccVgTWV1*%%B}k zuv2_=J^b!JKChGT9PNJ%<*@xynpmVRTmdgui%-aRdKqFx5YN6u|1Q^7o_3BlCPJ=$ zM;lTKI*SK(wRyOb`u1narFk(q~iDfJWmSG9Vi9pnnS6@o>_F^F)%6Rk#j5W>(p3-GZCj^)b97= z%@K3JG><7_al!pf-Z$$0-e6ljn*l=E>mn8tTWJ@9Jr)X8qWA%)<>U7o+wOhkUKe%H z7(IQAT=qryq!Oi#?Ek=#D534(m;5Xrlu)Dd{o!9~!zELN#OU>HPt zqYwFHOXHe@@x7H4vp`a@9lqg@4D7pMF;?n`mUj4>yZ#~_B`f6JoUBamaw91_yrl%^ z+$~l~rsLgx$oK2Y?vy|l0y9xeERH0@22y{*e!iTQx?C{rY36(hh%~l1{tdtR27c7> z81YDlY)c2dOqi)Auwfky5{8iDZZ03&^Qh=WC`V_Runr*16dG0 ztH-(B&O3cBoD|{r5bI|hb0#3KtIhU?P@td6=`!Nw#w60j*uKMi&sxS6R}WRWjO9%^ z-+9L_D*Gt};WaT}o%LURz z)s_0irhR@3hjEw%^j*43)|O0^F=|eU|CkC3LcRCB@#Ry^fo)vdV4JxQ^KbZ}dItLE z9vcfI;BFtk8PZ_v%}m6+1BC?UYTX}TMS%fkMgj~a>#UtGD=p(ld#zsAcP?Mj$bOWm z&7~I#!9akaZGg2phG;GkF>HQxParUF@8od9o8!3i1*)}mbb=l{n?olecw$^e9@fnw z#K_sm$4tDRJ!(R-3W7xe1p>6S*99gQAqzWz*h@0A4%$>=qgX8Onv7}-mFwr!w zIB1ZjHKm<=FL|Ad?DqssAi`|>f&Cv1ALO`fq7+3g8<@-QVUx;-3-Dfobol9FIQ2yR zc9#WXhwMXDZn{S4Wa*hf8giZcDL5|nBvt(-8gbZpK@Se6F7$8oNhSjTZ_xun@wJ7x z`g>O>Hlvb4n28~)1OVNN=!GJ*Cnbm)XXIPvQ@$XO$(jC4=AWY3zSZRGboAm$?1x03!3rqJ1S2ZoE2KLm&OIH$saqbz zaPfm0TI9mFSNB4SqCX8Wp+z_p4MAdcZGsU^T?1uYD~Nv{0R~M7rEO{T^kkr3dkJfR zfx$MX@^toM`Wn*%{BX@Z6KLsj-$?@gW9ZtrIr&H{X+qwc4?;g^qM;3lUOh*qW5Pug zVju&Tw1GAn8enyK6&K+4fIdt{5zX4#;012J&XgffTX)H9CXNgg?Faz!?#5&?<{RET zEeH;OL#IQ~rcJ#o2MV=0JMI=neAFQVl_)9haL4}cr8jmXyNNJ}5SqSy&egtJJE)ju z@MER_u(Qsz^+K_o;U!z>mca&E4<(~#j1|BNg=6pPm?lE7wen@CNuR~}wl05nXiUo1 zzeC{l^Kasu?bnZ{O3#Dv`~%NcO}Rik(=3ncwOzFx6#R`&989B(>VW8iH>pCf zWO>sy8-zOg$nF_S=G~~_`WA6r=vsf@4pETcbD;u|G9o?QX*DLZiJa$a`xUGp*{_Uu 
zcLIIVv-HZaYg1azkv6DQyC%LVf#e6vSHJ|+{5a(+!z;`t{lW{S_d%7T;DlY^sI7RMZe*d^vW|?nHURwil=Y}EqU|2ZBPL1TmtLQA3Yj!!%`*V*vNx6KQ zLOWU?dxO~#gNh=KccE)<1h&X@%mLkI2EZ{!&4H=Ym5VyZ4)X9a%T4a5mG7{t5|y9d zfvlAkSJ%ed+_sfbvDiwl={e`Sz5w(6ObrvbXk*`1S1^$N!rx5Hr1_Zs+F1i&s!baQ zE9jDgs0)L8bsPGX#YE&%CD3(+qs9{ZCYk3)SY1pVzHMq<2$2ApS+aa?ULJs0H^31J z!>Q7#0;+Vv5eTMqy0X!!MW#^%HJBN>xCuK`Cxs7cuF{c!>H)8%gE#|I00yHlChxTlAwhWTKgNM2hT4qGSv5E7VO!W z)K&uripl{CD*)q{FbSGZq3xir^mW9C4!xda@*)|R#;cJ=!0RbT=^8G|~2!e+P^q1CPoSqiI)F3Xk zCCpx}*0XP$A55QLfKQ^L^2s20Erl5%)m<_}q}r;DM~xun)@7$>7*|i!4>}N5X7B49Y;s4{xPbmlHg+y*wl2ZEIij52mPj7q+QAz>fP$pL1rS~AE?()ej_ZB} zeZJkYOzpIjHKjdn-}c#_V1h=MJk_0^4DdXotlYBN{kKp&#oVz^JQb@YoVrkJ-h;K| z9E1qs8DHJ^ar`Q-OZD*lq2%i|VLVX^z(vLYDTl|ejXF>##iqQZC-WsXz~FqXDVc99Nee}i%-PN-9WRmXU^E2xT;!85&v1t#mqMJ6S;b@2fk$Fg-4ZP;rR ztcz_BXSg)7I@Dlsb~QGD+2qC4!;179^#IY23zS)r$wCM)l>S}v#`YXi2`D?%VXL3| zj*qX;T}h}CcJ4BndMm?PsEOIxew8@xsf2t#+7KO($TAke1JM0C!Z2&+nk994)3o|9 z1k&m~WJzF7z{pQ8Ljwbw+k7Z`Njq}?Yst$Blp5f2$6!e+MddeIi}$rTNES$SXk);~ zNr}SaP|o0vuYC72E4SSqOd3?u62}<6wCSyLgEMBP@jl&t9#dO->&@-s!#X8#iRaxz zAcu)F6yR=2j`WNHuB|u%D>jGiy7dRzVu-?ovzP#aeZ7`rN>K{krXkz%WMCsF#D8Aa zJaz5Rr|5n{_`)aXc9K`I(ewD{Q$y0zqo*S*#3aw|PlFO}Eo=8ge9L=W>Ko~SpL?pc z5tjeHI-SHqo2|_k^xYtiD}Z3hPy7$ZgnL#>Xcm$mbo90|{pO^#31%i1(Zw_wyDL(u zUCyYi#SP{3HgsuqCqM)VMKQXc0ORKlC7q(K5A?hb%ZJ=VOX_KiBzgov>SYq{0a#V8 zAf(!qJLCf^gfeTP%3QY6a$9E4;`X}~0$72!^k87a1<|JT4Ja`xd<5T+EC5amyV$9e zSG*R>e%}i(*gx85b78#aB|>LkLon9tsV;Kwhq2pMwfoxmUXs$>x(79QU_w%e~QR3EE8o zhR>UPhNvoWIX${V4Rz)J4@Bk!(7n9W(&mUVl_(kSrRbyKLjb|drtSicG}2E%6mfuy z1c^GRp0~kew8D0{ci*>Mx{Ri+SG7TW65vssNDHkuJxK5IMSRop&CM(iZV zsoX8?X-YJ2^25s)z0qywjy69Ssywn)&A;l@oIS78$x?+N?Z(R82>i&U7_Ui8KtZDA zVwdfow?h{1ei||Ex;WFaxyLTH!J5+RdL(27=m)sFyZ+2D{JQvZjzcjI*LmjWo_nDS z{@1)NZY+O#lN3l}MWH8}B`P;uRbwx28J&tn-E1BsThhNDteCmpmb~1r{twbAGA2^< z>}6O~!uPk#hBm#+=a?UV{kf-Acqz|WrMETu94q(5sZu<#=9azHBcj;yt!^jk7TRpo zXdT5flwOT;l@SuZhPP^!|JPK>+G&n)8`Si~Wo&hIE(GXST|Nmnv~bd%rk$Lr%qa{j zl)E-Gc&Kv}gzd(vdy9|p_o|&&K5SVrd)W*f0*{~kMECUHBn=x79O_2(;68LuRGL@& 
z(QeCyI;kY}`b6N|svy(DTNw4EH{w}FL+*?P0u^tcnFMkVz#f!T>myc?8U`E! zyrRo=>SnXc{ZVJ@j7?p_OS*gEf(Qr$06W_W)I-j6(btDw!gROEV{mu{z6@9_a`-O~ z-PuI0W$KQ!J#@k)!j~4RcGuzn*N9bY9vj6q16m|(Z8IE3`Cr=)u~uoeC1(lgG#gjw zp|Y^CYpaz{PKPT!yOlhBH9o%=T^m|pKIRddYuEg@5*Ne0D{^)~Z%E$Y8_`#Z2&xzw zvw&5WlyfzBl%B-<2@sp>(j{A!?-nuVJ=T4Cwqz3*(hB`dD>yRwjI!u0nvIpy1U9Nf zbNtZ?jPBpo*b+#}(sl`8j46Q~mApMDoSDaY9tTC(F^3pPT{g&G0= zxzf5e^~h&$TxHYpEw7Kb%)kgkEBZ)#c1dh5SKmw~wIsAsp!{%f(M3kj;e(@QuIWSl z9BcuFr#k5AD^F-D>1@mIt(o&M#Dbz@wMnA}YzZA;LJejMlI{bRh)U?1m|jbMXGT$< zt~+L1Wl)i{oPdUS78N^qObPOK2612nps5VI5;g94rC=fxLZzyFoyXzWT#lB=dXkRJ zZbIwUHUbkd0}UWm0dxeQ=PM31ynErhyBqJzvhs}E?s8Jk^#V003m;B7 zW&Ytit-K3!^=4^U(bHQMFKMSujN2}zWv#R37m}D0nsyC^z5Vs1jPl3D$;4}(ujK*9 zV}Eo04s<}&t|;Ad1@kX$jjwM~Xi; z@nyQ?zbkX+nW6aeCQFgm>&E&Ip-l_8Km~vpP~v~}`ew>7>YcOztNTg6G=l6!;N8Fe zbu>kFwGwn20TiWVSzt9hy*Tgksu7~|4=&;Ns^?XMOEH0-s*v?_dMYMHqPmznZAZoO z3G76sPHJacr4`MT0BfB~11^bejc01PfFk_*e>}cTL%TVphl4B4>#P#jcAcG!eabJi zbUuwHVx-ZQ<4sve&}b zOlR&mo)|a&8~*y5+Qqr72<`Fb>lgp?^w$Ib>9bvYD|GHIzCLUJ-Wud&KI7MddyFV> zH3}M6%UH4wXakA=n_GyF zmwUF30m?Z-&WzdtA<^jnsXOS?06DXt)&~+kO6SZ7Er7jMQgIlqMT+LztKBNv)1_bD-VZVw zTc|@SQ9oE)b?2|lhjt#zSGsI*ws$o0$?_!LB|qzFs3m}rsb9s8Y;W40!DVv~33gv( zny(k1oiF4t#4ZwG5+7d;Ko?`@{^E&d59B?>(o~@ffK=(#zA-O7ND%#Jmf>QvoodtK zJ}c*O_tyx?^Z;;Sk=l3x>W$p`JoTHa){75UR?2Z{$1XD{!G9iLJomq=ur(YieO9RQ zAk*@OkmfpEX0#Cmovhh3(4=r@P&u1tmd{l#kN94F_<3>-%elge%10$r*2_yI%e#Zy zL6U|j=n3&9et9P%W0#p-r!MF6j(HiumVq{ZCV=!bAl?ucopl4{lQfR{Y`^kO%-^_D zz?rufEo4;Fd(a_d?FPjQWq}!g;^#L0RUv@fmoE8q^6DC&>co@hO6wv%uY#tJ%EdYx*4OGsX=7^_eE_dVb|r8HousCrg}L;d*z9iYkrE z?LN7wEJPDf#;%69nl|I-w;xF=PLm2$hGzslRLz99F?e1n{qzog4Rmbo#k_O}`@vRG z+M{MM0>mxEx7poZsgMwLU$aHkA?hZ2zC1%mX&+jxZyAMRU-nyXMaupwz11 z2g9L5c<>*>hbOfOvlLP$WPyf}OfUoh#7|ge%tvVqMqmPvxC*=Ym7<4?v+sSWRu4DB z>dJkLzAjAbZhJkVUhlgnXHx&p=@c!>F+LyqM(?!CW>X8tDJC;BKK$~fJi3-+I#_j zSRLz#Fk01|=~?$E(Qt;&Y0LVoW$#1JCohXxJpvR>%C%M9Kl5}KGc?lPPANPNG4MQA z83;7=0VU%bXh+eoP;rn5N7+Oa9ydHcq@a68Qm%WwpF!zb+iK;0eIR^pfyrsMKJw;}$q?~vf=9Romre|AvJkj~Idm!aKW5j$qm;Kj< 
zCuz(jLeCVlu@(qz12S?__5U3o0%FJ9gvs+pDP42kQd=OQo8aliW~!X5OiLO$!wso~ z5;b&dYnu`ofISusF0d$ku)0~~brC;^pU8`ON$GA(*aF82ib4cLQ9F$pi;^0dkpT883)V)5uIG~w)gxCsI zhb+;=cK2QM`eF_h-436>{>OFe4})zUIP;UUS1nG`992!W4GZToErDJi#b>9dR=Gn# zUB%}TeEuPvZW+G}C6>>r0E@u&+|BJM;U$F%oo9i({JvMfSVV2}1{H=Q|0n}XpNQ?c z2n*QRhNKINLTmWN`+DPrYwykT!j1DPZn(iCwbsq)jf|#KzOIEgA{Hu+kKfJ16<-pSKoLqnN;m2)}PZN5Yu78k^x%A8Tc%6 zdhE}KomNL@x;m-%^Og#vCan{)FY+KBB`$~dmfbw%P-oXnyY`pQ512j#Kq4^#29g$e(n(r>Y)5gB5=Qr8e3H28WG~SBM%3I2r?y9}Q8f-Y z6lz8-P5#`&TUk7gVqKfHbf95!Z(;;)Ai!Qm{TvY3(DRW1We@5(>qPIY+Y_a~sUn?vI>2lbnl zZ`X1?t9PQCbj7vA=w1xY9Yk@>t1jrJM&3a^loG=QBOYr7Mk%CU2Se|Ekjv7s#Hie; zASUmX4k7Ay=}xYWws&3}w>`-F%U7mpm}jRJ@HpsP zGKvYOZi0TuS-Y74Jr>fn^>&aU30xD3ptv_Qv{*?)PUAeP0;Fr-w}0;WZkpre5o&nb zxwR{Cy~(=`c*p)rV1#a}dgZ@c)B!Xh>`H^Uza-FObxQTNQr7$BJ0+wlgK$<7)A_H0 z^iLjZqxbMDkKD4`zL)Qw(C5|v>Pmnm&L{$F*vHV)&!S72&HbBLCTQj79^^<=O;<&C zI1VDz|N1{iUGR+$N5#*86*6mdqU>SSIkWyLgmCYulMQ8!BCiy@n@uTY>tayT`>l zys0_6w0pc+0j)aK+BE@I6R1Z$)b+8gsYmH~-tn=E&4>D6#Cr#A=rvxVruhS_m*-HY zVjD&5?2dp&qR$N!4#t!p!|5eNM42F|W>ueOG*TQ9MVN0;gQBARiokjV;CAdNPo<46 zCmop*|576i_fhaD7C1Yny(SYp6$e9%;6*Wxo8i!AqoPw80{>8{5qsBj@u}w{Ueg=5 zs&h`bXKl#OE|2^DP71CWCF!}H9Q{&)*-=`l3-$&{%>ED|;e(AqoTh0)j?H=37 za-*bFJo$S`+7}|hB|J+lMK?rR^q6PVsuP#m&Kmm^%b7OhaBbTe)65`z~kQ+Wc zKral_BM5-KV+H7Fojc{kHfAF;10pk=oSZ(pw8ndStx#7+)Ze&Ota27qfEVM*a(rN6 z$UI7}ODDsuZ?Ya~QO&q|4YM8p7#>BBOJr}QG|WAoL$c29h@g~uM0?fqgc z|NeWVt#HKU`i z0fn>XlTadX+0cU{)^JD=qL9ZtaofY%8Bw%sX67kl`y*PsYE_%4HpaI85%VD>122;iG=1mhD+znmE8wNAE1 zy70g~>c=3-}bpX3fUu4f5Ldf>YGQ_Li2LQ)Rwr=hTahF1;YP;=eL;ZgZ89a+bh4Ef0qRUv+k9 zH!N;nBg^)~NCo7YZ%~>2wQc_6$4=KY*E)u6>jW} z&k3w1+v=!?d6x;FTLNZBkr)YUn}9?UIbK0CTjx%vNbdbx)fHzOW_icbhO(VGN~6#E01Ie1<4m3CUQ?D0T5Te;-WE=G&xeh_`Io)& zHRw6%-tW!rOAZFCIxxg*jc&M0zXWP>in8uZG;$}{!8X=kI05_xCw)D6JtGE<5*59+ zLt%urzKON#JzC`p^8%;7Z5lIF3?rWzH^i*2t|>kw491w~C*7}Iqsy3B2$D7?F$L{M zw}+JN{;`%tNH8|k_WaTwP62IVVg4H3FU9B*GCxEUu@eMW=OgJc;0V6swy)&1^b~7a zn97c-e@-AgfXmh)?MJo9BzGU8pL0nMV006J7kua_TG)dh56Wh4J&CF{i#lltsUl;U 
zAyCMbfn{zukr#V7$PD)#3dC%gx0<#|N&)L{%^Xk#YEZ~>57*6(d(8N6yeKYgxq za^i}wvuD3KfLtv6COX|Uv*5+-pW{^3=iabFv{SG}a~X4`aTqcylKl*)_x)-j!HxU@ z__alqc|-0w6$qy88okYx^uOauLiEvIXM|!1oX6_P^9*6l6tLvA2yMjDuMQ$7-XwnH zjP}}D0cJq2R6O&LBv^V0f2-5VYvyoS-uxIsJuQ;#bS4&9EaD$vJCA-v9 zSBvIC*?!K~yo}r{D@U`9!4WrhE&NxUK6wl1?%n_Z?k(~iN7|G{v~xl_JgYk>jvklR8(Y$$jnc4gk;S(HHx6z0CU zC2Kq29IP+y1qE{zuFXSWQ4AciWzxxZ(}}nyvFAm8U0j`;W7w{(&!u)?Px=PaUNlI^ zuz!x!7s?qTT}+ZObd{UJmr9>-(8E#ZMxaVDwn z0kx-FJ|ud1PhEWU)EN9V{){2?HvrcfALD)qd^2aJ+`&k?l9! z^vcUO6TPyOh;7)INqsUK^xuW+FM`gouH7(#AsVk!!zaJ*IC|u{v^4o_#~iG_s;6rK z_+XThd;q@A04ok9GEo0b7+`HbFJpH!SozF~yvvC$$oCCj>hTz=YH3g349&woE5K9v zgwIGLfhx#P3SCM%gVYt~e}#EQ87U$y$>lEKjl4Ufx9acG#a5)28TS$dA5PyyMZcGR zCL00P6|Yh0x!~>G@Qe zC)h(TW+86R^FVk3G?N=%-94x^Wc;evv|&78e`S4T@ytqiX1wnT?3H2ayrXcCH};pX z@GVlEz#9vNwoIAUV8ZiUv)GKn(COJ(J#XaXrXvT!XflN_G&bh|bv_lWkhZW?7v9!T z^l)``{z9FswVR6$GuJ)`@+f0NOuoM+-MTBZa~!d@h(hdpCAj=9{e;MX^L7dvl%f!# zLCA8-_b*^FD)ah53>*X@BGO3zLav@2HMp0@@jpiU+;E+kPY(3>xrYhz8I-IO+bEya zPWRnyxJ13)a`NHr=mjsWD1QdtiP)}>vvchIoE&zi`nq*Pf+4FMDpYU?|38k-#32du z|KsgwZC!SmrIo8Z%5&vad3~#;si_?kLsV9#ri5hP7pzq}HFasql+=})B_eqMDheyF z6wN%)1cAV$JUA^5Ire+@`v=T~=XpNI`~7;?@u_Mvqa*%_#p=Z+g|qyix||l257q@- zBpU|?JPnj1p`R$t>Jz;e%zKx)VV^aolDHxPn*VPCUl(brO418AQ4D z6z>6If8I_}gVv}AGhv+U0{YV;Ck^DFd zT$SBqYL!C$v*%Y&&))xIo8_6Y-HNZFcOq=%1@$UC_q$TT)X&q6EWyX&vfbj{$s(wW zcSmg&6(o~jHM9t%hV;3->gZ*+)4iu4j}S}*GsD%hWRhHm+^YJ01%93N`9#sXF ze|iV^=hL^sXTw#Ppw!(#_lL)QQ2W)TQQ4NknYqO zJ9??%(KS}yaYUuDc+{(6o1dfdrxU&hW}zX6$yz#B*vcOhmh-ExHuP6_Li~9ydf?k$ z3c>c^*f7eANsIQgLta}~`Ig18*`jX7#~Q(pz8<&E`Y+=n1z`B*rNrzgYUeK<{u5`Hw%HWv zBw~stX!VN5RqH9M|J}nd)zdd8ctZFVSp4@v!x0%jn5~%2PWaMLoTz)XtRZ?|2N0~r zYGM7x&D91IzyG_Y8k@2F*zAj+A9qLummo^E6zBOQxrfa4o(O+#=KOXM;_!+mlXc;zn9?4Krz*4Q3RGrZnbAItcPrd5av8&kvugjt@1~{QQ#eR60N)`t}{uA+B z4nbw#Aer;xpFoFk+CJ(p56Wv%-3JxVItVGJ`(HKE#5vj1Yv$qfA#gMG8WK)7ydpSf%&_7?uw3S)-#lN5iu_$t}aq$%0JWe{7Q5*!l*#}luDJGJvv@=k_o-Dr}ZZmp?zbM zj|usH5rdxVu*fx+g-y|IyJh-!(@m&fd}3X;TUJ_$Ux_%RfgulgH`_^P^nTLnLJ47a 
zM#CeBE{g!E(9pO0I4=xU&tw2KZ**-cP9fJdcO})gaV>SVZvHom8ib2&8!^Kr>*jM*L5Pn!JKn{& z$g5p`Zmnl5K1D&o@Kg;i1AJYRyGQju(4Qb^`*lr@gr9l7f)uT zMEI7_*A(dzzEiOFY!?-C0G6!bb-`_eF!{=A7bWIiL*>9d)Xqo@Wq~}n5%00IvoN_m zY?z*wNw<%inq{omfqpcq%^c}>e#=7gv@@klTI1@)E~V!}%fVc#g&HgbVOV|VVVs$B zqfZw@Z2#)0FxS`&4YPw_!l)84K9jzzrJ@y!(ns?tk%TUK{p!|1!@W>^Ow#6h$DLDC z#`Tr!8^h1J{JUn!ZZ{5vH6GByXkrO2Dbgn%iQ3x`s)}0VL?Nd)e8`+O%}B(-#6ZC< z*3z9$>dkV7GaWG6ony<5x)aTv7Ll4kMD*Zit)i4qf@{rtW!f^Q$hbOQ@Ah`$iE8x7R z(eM4IaD|*`OupIU(1(gbnL#LAZY&tOR^=aP5W^)Gjs+0hcVZ{LfFWG=_TmmgM(VE< znT&5sl()p!B}QUmIrRpDGS8x70a*R%>7*y{ST~`cFagHU{fbGLncS9*iO2E^1D>;@lUR^3QL}Dv!`&1neKn9EaNmY$JDG%- zcj`IQ+*A`ai$7?c#bLvAWpHd#A~XBIW|3oCYs=@ch3=|ai|I?Jy(b(goj#7;uA(}6 z(mbIT7erHn$(CkBKO|-=8z7{P?9;#QZ|0JlyDs1*D^P4Q^8roF!|90F1SE)D;aif% zg5R_wu54>OlsDd(>)Sdf@kN|RHRH1Nj85`EH;h^Qp% zrf{cd%^5<|l!m_K+@R%YoF$SVWJuV)l?#Zy!LZJbd-Nn5Y?(;Y9~A`D4PgHl1?O`p zx5k~a@5Co7a|y3CbrE|;tQuoSIz7AP;z&3cENAE%XqS)S@#&WGpP#<|j5FCeUf)T| z7FV%~P1IpNDo84d!>7!W83mq3uh2fr?oQAs;)jqFrU{h;&& z4P8Z`0HU{-0I*>Q{pzBV1#c8yNgodWQ+Mj;zp@(UE#f!NWG>Y4apbL zhK!U#f%jx%3f0xxhvpPCO9lL!|6(&2a}%+@Cl#9@Od{l1-13Of!4Z>^KA^;cU$9$= z$!$^;YMFLQN3HQkj#fg_SYorIePHb ztLw-QF%iX^#?Jz5U4osc`9GYH1e4wo_O*yNXP^5dt(1|Pa4NU!MQ^E5=ihH!GeN{! 
zA36@vEVC6!#oK0*Odj#i}@w;9tktkbiobW*k62;dqxp0XofBZski&?RCFWZfo z@3tTQ)XS?(2R1ezFY~Tx?1{SpdJ`Hce<-TVWT2qfR5MRg>b}THnQF~RxGI0nROIx2 zbY}WY>!nD3Z%%K;Frov37%UI=FNCPkOFnyW!Sief7pa)|cM^O{jK^1ik`&ylx#ZXU zqFAnSRC!=M8%n8F1 zkO*s%onLvqe^O97+c5I%VqO^B?ID3>v-*@J?D=QaqO#`tPE=YvxRO|V@84R^AX<1< zc$EO(98f`7fL?9IGq*gbtL7B*3(~0H!%7vy*HAYANw)vohUZt?x4ixnwm^YM7q{>n zZHC8K?7;-L_yMZ2Brc3vTim^KF1Ssa`2!(51awnN6)glpX~A!7^AAJR8qyuWO1p&G zCya_x2L(MXNh}DhZN9ACuX?^2KbLjIzU$;%(MC+&>8CS}1x|eh<88?b$Rh13tI@@# zVMN|~YWys_yn;TFT3>o-lH%n3L{7s}f-=%+3l3`Ky0S~sl*5UKJR9S`JF$5zFClSC zQff^e7b*$q#hsAfQ^am2`~u6D6~><`jt;9!>#5YPtO7L( z!;!e*on12#v9kjCG+EcMDmrkFJ_B}xbPl|t!!IkLAAKk(Z+O#pJ0aJyIS0EY%Y!B8 zl^bOWp46}%r6}vl7X*t^H1^sJQy)9>^eDsBis=v48e@#e@eJJso!C7l}okX6Vf-Sy18kMt&>o z*9?57?}XEi6*mXIZNeouzdet!3;_~>*;Z~_23FsJzu1>Hr_0%aQ&imn`Hz_(=n52&t7GJ!s5k|({B8gWwEYrA5rm|_A&wPb!sZN(G!gdZ0W@AFJN z?7}SW$kquYd2VN%rbL}K6}97_8f+oIMZ&c?@Wc6Y=q)cTo*#!`l@Rd0Q3z_!gNhq9 z|5R@}daR+n!lBn`ir`hVShM-(>4y)C#!dHXf05?gMs6;bArybLwVf5tN)h2l5xaM~ z^OBF*>^#03tz?k!!%6n+QN;miL|KPb1 zJ-}<*S<@M2{l34h)*(dqIDk68fKGHJtMe$EOk75kWIR%DODzLk{aWDYbC4k$-;T&I zuaZMRY^~DR;GNSC<`f)iwS&zFD$>xd;=O_O2|4*JK6-}`NYn2&HVpDYuoV5w2fYRQ z%RhSKtF56Mk36z1yI8P0;nrmR_op1r#~t-Wl!9oMn6RfLQu)$}X{}+;%Q9#AX^?P# z$K$8}&?G?$$+nqy{d7^Y-)Pk-HLQlzqOn_k04n6_s1${cR|C;o)R0*f8PJU^3- z$q`S#q3`;4O*AIYb1jN4m{(aW1(M3iFPlPY{OY+bx+5KMtp02EHBbV#1NPu>^Iyr{ zM0 z48&7<(Q~FGwBmSIipBx4vykptxw*2Ica$j0;+a=aC7VHZO(Ffd&ocI(=)RPFW)p;K zNMlojg>({)SZ9L^s=Zxa@n#nktB1MFu*I(%NpkPy0nMQ$Q`00LrvhK`+|SY~oYm7- zqSK>dmr6NhMuZg)I`eP|Xt9&JR&R*va{ER)`Vye!+?9|(uu9KG4PaBL;S=SxAU!ZQ*i z5oSj3@!j2Y#n5{bCOP!^WVPP}GQ^6N;L1k0R+zSe#pmcBfD&LyR-uU3qeEndiYMgs zYw708uB7iVQ{HCnEVA6Hu_^_7@Dvr02n*?_Jb+~Vxy06$C5K|&2ZIryJpbS zK%Bo`U4qO34NP@1D9Saap<`_rqZo(l)BHI#lZjm~>(ny9F>51f*Y;k1v|-rl^xA-* zru@x5-35FqQ9vyM@7|9YiwSxrIZ<3z=;P}zf2Q?V3@mqN zmdGUQQcd~F-|WtO{LRByh3UEQ3U{N8xB2>%;wE%}n#{az2*qi}F-E}s_@1`~kpHVa z*2<>yk_?PvfY8Y8WKAe9%*Xvp9T^v4b2KfF`zz!@inR|_S)dukB_P%K1yRC3+(Xi( 
zH2FJ{?!`h{eebZRsMc@AUc2rVKp=!JgM7Rk-DcUf%m^j21FQe$-!<>134WUGre2kY z>MTuIN6`v$_mm{EsW~RZ{^82wyA0VoRMf-BK>y+vl1Nz9q->p%TzZ+gAI#W>bbs;u zD@OaOtW|fO1v>Tk+4hD^Rk@Au(xB+R70f?$s@ADr3(nE+SJ8v|9dDi7Q~Xs}SvK_X zn4KNH)e&HRGIAlTy#;3Z8fsFEkp#TKU1Yd`h|jcsPOI4~8fF+1x~Oh_dRuyWJ8z@v zj?XmhU}{P4>QoJKZQ-M-^st)HSuaC{y2O_nsfGz7IblVH*i)6eP8;|S)x>!Cxt;;| zgNd0CrLV|@-wr=qbs)*|qPBIDSA8P^~qqV-cZg#Qd^rp%D4GwW^JuwpgR*Q_T zX^ZONi*b&;lzC_!N3WO)iq%fh34KVQ6zMGnJG0WMjeS+|Xn9Yd(c0W3jz5VQq z0njBjH)47MWN_`)z`0_7M*!PBmTJ%oN$?s^>!js2#1s!6bhuLf1X0T&(Uicq+Aw`u z%7wFDxmS>?bHrVS(J5`tj~impp{-Ux!fkzh4(bf*_QZT8{uA*B1osHs0Crm7p3!@v z9$XUp_ky+O9G|Mi%YncoSUk-Ta&52NJpZVa3`RF;Pk(u$9XSuvh{nlo=lHJDhmY+>Zr+2jh;v{%oZ@Gyja5o z8}v1dGY?^0F#E4qETF=cq%nTcdjk}`Gt(Qj1_ zuUtUYob=&UM(XY=fqXC$gb0J-g_y`;H4r*zN)B;}yH;?2W!;14!lMrxoYj{C7EeHZ zW)?ZT;HV}v^AFk5p+R}W!{%G5c`fOe@;eL2Lflv^QM}?4lbDPKci8X0Wgh!|7k{$N zJ@6cJxjIf1`;ufv2||uFv~9C_OOXL8XgPgGjjhx@!`uj*=~3WRx23IcSGSQFK- zeP@6p(hL5D8h-R$0qt`H0tvq4{hpU!gTLL{JpA&JBOglc<6wjXT%9N@k=(E>l}mQF zd;h_4DyX-yO*)ZWsxA&6-wde;o^PRE3azl_zE*e|=*PKhR6l%+{!s;$aku z5W{<-BR1GGK6)z_Y7ac63)66ZV{IOAk1T?G%$Pc9KWcWyL~|Co;(ZoozHX z1XK0zKVVnKDFD^EJl@*4@i}>$pI?0A^kpcjbF1BOPGGw4TC|vw;y`14ArT zuR)J_xQuB%W1f6(YhU(;^J{M04L5RuVRAq1sz|>lYn~{qHj018JwV)0CpydtU-dmY zzqHu4@LUj&-8dIBRu^dCwWZW?;=&kov62v1u*hUnCs!R_>GyWEZs9^z$$q6ddN-lQ zV-^?0yk5c6mE*xH zU_IK7`RaRqtnWoc+4)n2E=jPs+>UTc- zGP%-st``&Wy9KJ0=0jke;V1zaGAuh^6V}5|`%rlHvwDMbf7z6dP@SUt%%JMOfjAkk zYeF@Hz*_!qrz0k|y^1&3Jon71z02k~qw!ZZyN=}t zKeNPJ(N6T-Fs>rSoWwXhIzSS45LKZcC4jN*@uPI_m+tyL4C_)4WZEVFB>hyP!k*HSh z_?u&t4MLlIWiU$FQ2J5ySn#Pa&XH%Ry*l1>V@T3D;e^`U6aQ_#k`{@8aFm!F9jj-O z4yygT#@T--@(eS-mU`7FlG!#>AxTp?Haur0OA+kbK)2U|*=uaKZ(6@!cw^-4o>SF2 zbUnnE6~{kL{n8b&cbpeoYPakCo{BtZwq0fLziR>-UIl1A)$OWdy}1}T;%aZ&sX8Ow zk9nVSQ#H|3y}8Tb+2V7JMcQ;l!D!x>wCJ`j)FxE%$*&&4YG0l59ca7#+tNX>+x}Pea5`u(es{~QlZfle4Ef%V0L3<#ow~5xk1c1pe3mvWXU`g zN!9xA)td{lq|?1pRgQjlX%A0?FpC?-7q^dOS_V5X61tP{Bb19ROYH! 
z*464Q7J>S3w~9eX_4AeQ&kvN=|JJztIm0!%2pqkm;$t7W9+Z>uUM;6Ucuu!6Sd9O; zJAC_AE7fze8VJ!Q=H;Z`uaYuGwYl}DvlAX$g*37cY+HcNOU4amM zXTEJX@gn%4Wx(cb`7JRg(oc!ZM|L!cMv^j8)1vGx-dLY7eEfZVtN%+vN>^Jf$K2Xn zSXAp44Ql8I&*RklB&E{9_DUy z#gqPb|0K`O=O62tMIa*g8daHP97>}{7Tqn(m*WjgW2vV=14>?{dzylr%TWQ5W!j@R zm!{lJtbdX7%6Kv)3ZGUV`0Ui{iG`!*T=D)2FLtM3 z7n*5yq&1WJl-0<_+Njn{sP@7nKmWPu>5FKwi)zyra@myUjYaV^F`-_NxOIFj4tpA_W@ z-ZBnqK7;1}t z!r0gQV53hxC$)iT2+&DkZTXfXz)QzGNq_%1e4p_{W6Ny4#V2KYE#(4>HKW4#O%jQA zwGYaU@pB=^cbDU08}LXZFPYY}ws((vn5COj2E*HM{d$RpaQ_CrC z&(}Ua{d!Mf`)ibwu7UpkfoHSjZJHx`l)G7q)o#b|H6g$kJ1@${&u5iincQFWl50-g z8Tg|2gAMt`?AKZpDG*0W1>uCQ`V9$E43L2i6y4>1JMD&;yMzZ3a-BN545)l^~#lk-_(F!S6?JgDI#}n<`7T zISaMSlKk?62 zYNvLz%|;tFQ7*PXV$2aWlugU~b4sI6TucdH5(co4LsOL#S-TYy@b$>8ZiyTwxOqy! zV0vxHrV+9qYH#dUUxSCKYA2Qv?rFd5qH#pOsowGhzD4T^z7;+5zAduq%e~z*=-r@d{~jl;E(A` ze?2XlKSIIQHwhtXDA-zwuIfjsrSbG$3Cl=e@P}=Z43%Iqoyx zC%i&WrKe%KSL~Je`s4ij-U|QFb1Un|>kloBE*JZd8Wg_-J1JO-3=SuKo8TPn_u{gB z_f1#M1f(lt>wMBBNavU~oxyt}5mBH4`O7W^Hyr(qURLd7l1y_v4Bl!FBNFOjU3@l@ zSF8U(k0NbuhQ7Lz48v|*zQ3jRa=|au&9J#?k-Ka_@mJUH@Z1pAVqm1JU(M-Fjp^#3Q|qGZkm6sGjV`L9`;C-k`}a?3 z&yJr-F`RIGYHIbV?DpO#r#IYbe+_AMi%#GdtzCL>*@MEb-tlB+)~T$yhC4Dpf;xy^ z^eOG4+m@oiC7JHdE)N!JFa04q)|s?(b7^>N?2cK&laiYSxcpY68L5RrWm;#7QH8x% ze`<)OxS5nu!1iAeAF$vap7%!5JTS&oEJ6o50*+>>K6&{Cp*j(-A~lLV^INWKh0G)sS1s9COl}qGzF_i9>^F zD}h3^h+Bi{Leh+`Pmm@uw^R`PT@onqBW%?A{|rJA7|;F_MVN}8v9f4 z>gQamA>L808U}`;!K4}uJw-!$S`nHaLa%J}F`vXNc70-_h8Q}qV3pWwoKk#>d@9D% zXuLf`WVX1zVColG)-ZnlHP4>Q(FxfdH1Xn{kmfFFQ+CCh(lkU?O1!lVJF ziz(bljA0FOc!UxMOnn@G-|m#5s(e~?K^y$GTf>oqQwJ+teZJVb1Qg13W$q+7BZpRc*A+tjFiI571#e=GykMT_el>g3*>`P4DR!} z3gFC`J~!py&g{CBMHV*3#{3Az5q!n1)JmA1kG~BCgH`t|URC!sO*7Z9) zCPBeKUfm+&_^dch=I^Mswsw}32PboQToe{H#EwNq^qt76Y#DK4_6>24iaLwt*#})1 zBp^ZIb@H_Su;)+-5 z>MKFPK|mr^JKM+rpfx^N|5;_bCtQG(i;7)iGIdo+omOpmEt!pYt*6!x^G~eRH2y?p z{?qF1S`_A=7_a#v%m2X`$gNx^*QgLAA{M&Hr&Dm$TVzoKU8U5VV+Qy*Ero@%SSIRX zlMJ}?;l|IBDkli5kzmT@bbw_N6)?jH!RE}ZwtVdJxl#JoQ*W(Xoq0-_uA0x5>sVgX 
zjmu&)QfY60OcWH!9!BzP@+nHp(FXA>71Y26+@{ON9oy(lbf>5HtewwKVkjTFa(~SF zW6XvZSNx^v5{g?;{v>R;VVGyn(7NJM7?8RGO{e${I}r7v_MRWuw0&og6#=alX53Bn7O0sOY`7XO6Ne zlL+D)aY&nQu{+rixpR3}I};oCAxguu($Z6I#B~M)om_o%9k89IGK&)a=39Oa(g+2UQk-OuTro)tMO?;}Qp_>ZdtROiJPrrYyr^ z=TeG#YLQ{^|NVNa{tb^07ZV~xrgjw;U&hdJl6rq<-jfjP-P=NEmNtDe!wwG(Q#axs zV-b!Rhob{PR6udS7%(W3pzbz*~le2*2V@5eBj7X|yn)bBN74 zHQJLN+N{Ec(Elvo5YpEWYF6#?*AQVnxWGmm&*k2_9HHa*U{=7O`$ zERBjn&6OY=^C|OiMJYb|(&cBCD=QrP5;^4;yP#PD>c@#Icl56Gi=a-hY5 zEE6aQ`wiyGC^z#FOaV&2Qt%x4)G3 zubf-bPyu5=odHxS3~)_@U|LjJnPAm#S5$B|OK#}T#^fP^fcU>K8|vj>;x4fz6H@jE zNPcp>ekYSPH3&IF1*4FmJJS)lw<5NYeX~F83x_jO2Jie{5eOt)uHAgp8LYqq= zZqFkJ7|L~|B!?!_voPO^i(YlOh{n7+Tw;E6IcA0fIFe$D63-P_)RdnZ;&t?V`FG8; z^r-gi?kAs%^u)MH0AA!LSs%QmDif(wZ$lLLy{WH%ll4o9p+hBur*+fci%pemr zzAe@+jIjEiZ{Cl7n|hLFu^9YA%_kOfB8I!E^?Lcdza5urVqy)uZW!W+%dN?cy3U70 zMHQ*gv9e7rL9#&6agBc}Ltt>1_NF2{AAx#5ViMZrg)qt3S;CmK$Vb?e_`8(b#5ZPO zQW8fFdXqxBKlakBR2QVC{#ZeMZ_#!i1>>&9%(!&i+v0I5l|J|870Wq@d=DU(mKBUW zu?@5cP^#&5AP>CK#87!v!>39VC>R{)jqsBSv`tqFmTK2e?I)q;u^`I6B zKIN&d1O;AKL;G55RPBT7p^#Tc^Bv(^e4GK?8cUDqsNy1^gy=$ruB#8~+=e9kn3X7F zX`vX~)aU^W+gp&*kMvkMIo^bOsW*t0*R^KrE@pfJM5{P)unMNnnq!;l5}Ozb2&h3L zGJn<8==Usnc#iAWr?M|i@B(OeUakh_Xf2Ztz+?9Xe0tgebb^$^={N6aA%mS@uH8?? 
zi4v-}-OBVh&VS()(;ufKK;rKmn?W?Pgk(0Mv7=0H0V|G7wLyl)tkl?^0weGrm^c(Qp@SWpsX?*$& zycb!&RFOo`Es^)0wHpvE)|P*lP5aDH^J=?qSBPj$TBnIv;~ADH*q z-DAcj@cC?vwR>pT)P(+y2 zKUI1ZlGhg;_DJnWj0>s%IMalme!k*|{-?V2qayUrPx>9qeX?*DO8canUy{v5FfV2t z-x}h{8C1d$aX~;eoIJ_mi6W##gdJf63@rHV7bszphO4dXewcvCh@3?yPA~nJ zWr$>ocHX#8Ftgviu`1S12Pz;n43v;5LU>tQqf$?B zM3MI|)ajpga~d!&s1-xnd;e|tx;6I0W0VIS29d* zmdh!e-tgZweFopfl>LN|ajmpv%9}oBM>y=)JZgCak_s(HPMv`etNvXR0pP_=rneO< z+B4E*dO{wVEee;SZ7rHt<4{AKC)x`w!H23h zXw$6iwHlRABwLB0x-cMd+Jxr;2lc~mC5iROMoRh%3Y0%5@kZ_`i7U}lL8?=4YZhyD z_Dx}AIsc>A*7MH979F=ulT$7Vq4lIX^ta8KZn=siWoB__gb}`L=hAe zTa>T2NOj;(KCf5J4V>6~+pca~?N0svu0pmb-;%L%<{(<`KEzux0#Xh{+@i|HOc1#fMf-mZZlQ7tA|D2YKO$>VX?#RT9Fjwep0hw|iGa!7NuusxD zV0NEJlfQGk&r1p_N5`phz;qx^S7Ryr)(E}tZP$^%7eZTz^LB`B0dw?@_zGQZB!Zy7l{%*U>7(OJ=&D?k;obTcFH|}Hp6Y>& zC=lPSE838`Np;oHFXMq3)|t1(zA-v)h<81*!q#Kl8DYVhZ=v8wH2FYBW}iMv@b8)$ zyWpNs7v1BW0q^Zrm|kK2{gknneS@~f0Y^)y)cZ}7f!Tc8=J#DyCB*Xaw0mho0YTdy zia#jkmCt!F4w+5uZiCmJSf@3_DIO<)vo7yA?tdKm_y86)mkyZ_?2dTi`2O4LBooCv=YtX_d{UQl{Bh^T7DmA6Y*lCI) zI07h&Q1&e1X5bYKsT&fR2C^K+9l7ukvXh!j8PSFlHJ+KrRJS!3QpdFJQeg%|;~HED z7R(id&&la_#dMd|cY^Dz7?!|b3sgQBj+ z^oH+Ejl9dK&+p^&Pi+nUcqdU<*?Fj)!(FgUvJSH-UuC=p12FhD{q7L~=3bR;JG0F@ zcYJ^S;)K_gRQ+cUIK%OlHP}e&BSAEJta^H`=pX3kzb*w7TBpsY(c8WhIrl|PiXzJC zb3jWzjuyJJp(&YwEQ7)^^@ge6_?zod*?c=BQ^4UEZ^o zcD%^bW40{Z9BBx2u8H3>y>xG-7G>v7=U#>wqNt!L+_|W*|IO1d(f*G9rxGxXrtl9( zA^gkc&bJn;Mc1H)U7wOEV%)tjUIt2uFi|vtjN(bz!}QPTm)Ib=h&5Lf+t?S=E|emow|j0r>~gOysC>^Pph!obwjhrqwP2eV*2IocK-4 zfP%Qy2StW+(~!n&?*l}ztAMPS0T7frLJg%sDs7ZU27D`xOGAGZjW~}NRdf?9Cq$R^ zx868})_=46(5u7l9T$NL?zU16`*||@cV67o%I;u^gSFj^83c@h zP$S`4grx@@zw1gxx)rJS;)$@(G+Q&=HKe9I#HYe55=o1Q1Ix&0AwE6pO&t>NwE_zZ z+XbcAHa5Rze6*W~I{?!?gH#Jb2FcT`_w5NAZZ|YL5ifSrj#H_LEh#OZCD%j|D~-vR z`+a@mPt}JNh1FRUbtxfHgZOgd9%=s7|Hh8|QW$iGO#vf_S3^`xx024P2^xPq@uA~H z>7$ag%ueF#1}X);atj3AtI#Wu>=Hp`#ChiX*3Lo`m%=xGI5_#tMt==>O^cGwfKLPX z<%4xZb##bvPOg*51x2Zp(1=pZn3p~#m5e}w!kQb}*vaBqHkl&DA8lfNAzpMcMoon2 
zW778RlQI8FPIlgs-I{?&Ept(e^dJEjENcDaO+x)%hJ5<4%z1Ja1gM`w+FD+p=%-wG zk&DYPlq%Z5M!USZgepOrDyNaPsutlqdRV`u1NOWusGE z(;W)$XXAV3MzgmsCO0215$ql2-?iF!pSy1=;3JZLBF^*>N8a#fH-fsRqR+=?X?Zaw z0~C!NMjI|qsUEzyUz-^qbtXuaecR5i?tC2e!pEtJ*%1WZ`z`o5-3R2JTYz(5swl_G zdjvUmx=w;R%>)0wdZ&5!h^|2JgcIWZhp?bD@|fe%^4*axelE+B#+lu*hR_k{5N>J)3_j+7vbVRe0#T^f!lT4@z1t zw)$n&J?So?be=;nYc2%cZJeUVt0gE|hQKbz5iTvtJS#JAg>}-!+LK9pZUVzi(lwAV z0r@~D{kukV#&R%Cb2fF{F9}HLZ3V8Q)(A`rBCg5g7Z}2#@Z|*xGAPeUGqREe1pk20 z5#2`vMA79&yoOmc9|XKC960mhYk7L>{lzQ%!?w7)Ft~HM2#9%hpRCWTtUhzxTp9Y= zN!2|UxE-+-lnmO_Q{t|Lpdggf(qJ%yLyeol>44fOou&sJRF-}cZm4(7G@8~r%t`hV zI6@pA###*C`pye$TzCIZ%AeCV02%Ib#$v%FPGSqCA%}Um6B{E$5fQJeLNEEvw^)X^! zH7vYrx!jO(+*dX(E9yQ*O?-H%pp+s?_}H9Q<+(lFeyfX9QVoi}(4j1?D#3?^*5wp~ zvl5wmf%b0cyHDC~hUti0$H9{@Gi+*7bZiAy5qgui7Nr(pb)rEY=eajtKShCmW&Rf zD%THYwH&r@#KUBY>xx8@L z;QFdet{1r8yIx)%gh?aOphjZREZttpEO6lU~j3BmLqz}h`fblK6(E{-0LJb zcW~XR&G;F44>g4oD2VaQS7GPtJY%2K{NNEj-QAt2MT=6ZuFWT>w%FjoXVbOv^XqSU zYhG7ErqjN1gkuwaT|0P0eq7U#lF1aY61NUSD0CsPzlzV6I+7kht2h;N`Vg~>%)ZNP zi33v8Fg#4+A5P8%Kd2i;;g%mj{WiaUD_&F>xP0%G1Mw!EYiuMrY90q}4z`6v!C!R6uKLPB@?ziZ6JnDWNi(72G!u^V5$o*LYf z9AoA?mbRcMjnn|ji-CBCh6FYYWjXCD-4i#T&DAo8{@{u-$_4;WV`ah&Qy&THtqkhBKeGO`zPo_Z%F}K1P@_eAhUPHy-IAiz9U(x%w3lVe&%NGiSe% ztD1n(JRXV=Y=9PLFq@)Zm<7t&d4(3 zdQg|lqq-*c=;A8OiPU?}jX`k4#mtIrzE7R;rNBFbpMP;`#b{#m6Y@_4zU@K9UD-#) zxuSeLjJO*f2DF+0auk2iN)!plp`^M+IeR4ITuDGDBFy7f$n`R(2f=2ge-h=MmC@H% znQxvq*jB&We~+S9#-`zdwIq!1T+6U|dMY&NY3^1QMD!v;j!g~Dl1FlpLr5!Khk?{#jI|k6c7nFL=lll_K%d}V@!0E-V~P*6nL*1 zWODwUs_{|Xi+;`xU8+E8UC2hk(;%)f@s>^g%2^<47qPXz%-jh6Ue`eb=LJ7YnV%e! 
zA|`~7XL{DbIH$L(_wNh?7p7jqRii^1_8h;L*!cWeAEDT9I)8#QI z_@;LQ4fOmQ_M|Z1>wiJRVklc1cP5-2qF*nWpJju64g*zM6UMzY?V@pCycn+CEcGu0 zG{57QG;1T|NXW76!-X-nA08x&iV~|}4p~zV%FD}XNRCg@g~I5HcLU#IqEXJS&pfBB zk!%wm0OV%tLA?dp3>?#E!EEE57}G_o;!yxjE&-(I1t*;l`m9)DUfHFmcmm_{MtxSaIvh@CYND{}+9 zR-=LB=_HKdlJW68`O^7{j3pSyShzb_WG@^z*ofWuGR(w0|HwtE-w`!_9R1ZyroYN4 zI-~y6L~8VC7xPI+dplK%?CUKa*d6q{<@GP$_y41L^y4OM)W&5(o<^<-z-)=3M@?;; zQtT42Kg@=ZhL4CeRN%wt09PwEA;{L{mELHaZ<+9R_S%67x zJv&TTaTcOF@gWddib=ssH}{aOE#&-YM2bJE0g|!(*hnpIVr<`6(scvjhj4=EKitA( zUc!mceP$&Ad53K?k0vhsn77->;(xnBNB`KcXy0ib#TVXK{F}Vrb zL$8++(rmjrtT#7cx8C2qyYdQEU>p{c9|9v_4Nyf6bcJb=!(l{7jJV8X3ANIH&bL+zEha zfsx}7@r2 zEDIG;S@RA8Q}dPx+nSa-b!y7gsVi?OB6$H)5LVt&B=dqMh@^(52B;*8T=qNt{?$xx)YnA zOK82FZiFZrEQP=HZCC!xSNiNu!?DZU1q;?|W?)wV3rL=MjPRW}*;Zn1VD4LR=wS@T z-uv;yr6jIC+JBuNJsAre?8S5&!1MBhq4X$O_WN3f>wFIN5ZjEebC1m%$B}FyiinUV zAje*9g9`573b8rTjIPU$Ml8}ZU)n`aYGD$&v2dNni_{wMM#Vu7ib}QPK z10ww7uKqw_%90ZRw6lzaEW5j;cXL<~EscCifXH8FL5=d02S?iMz@cc4K@=weT0iaC z!1Nw+Y1W{KCo2 zF_VsO%diUJbNzE_O8bDe(PyN80j&%Gfecf^ARUJ~-~GZR8vk#%C^(r8*k2`1wIQvx zRFNbzRe-DEs+c=ixY2|F#Rm_Bc@-e z*(q1v~a04Gj`B4%;+kH?~}A)C&O*~vkq_!V#`Fp7_VtIMa*XEKSz6lSOp=^+h9ldf@a_Oi*^TB6w78}sh+n7IgkmV93nac zSBU@s29#h6^-Fg`^GIJjw0ux_zO~V&=`z_O4oFa>LN3iiK*w&?LeZ+mdE@Af z@JXG7WxZd#HJ6vQsgxs@_>qIrWJ8Wv>4y=}If&94K8ea^cxlQlXNpr5m(c#QaC~e+ zFp18mdoCy~T1csQIN{$?*vBNM99TKe$94Vi{FBn^b@KdwJK#stSRb<4cG(V=TG3M-a%W(=N`es9NuB@*gJ2Un{a0!_8 z>zxt=xsSE#EbrN+!Ana@%nWBBu z!-_n7o))<)i{_;VQQ_6!@f7d#;{%*MCtv!{J`Etb{ z=3uozqhW

0%}NpL`h@8Fz&)?NlFxD^7Ne?|k)E>xVit* zXd|0wQDv=ilLWj7C%6Z@9f$zdFoKjMy;XL)mcddhWfREj$nUc+bG(zi zWbaglBzB9a2a$Qqb}KdU4f~$qw{`~UxZ`sM-tG~vsE4KgyHc>mDjSJ^q(G7SosuO< z`$YN808uktC6iDVMK@U;uiTL2wD-w=@K99QULpmCfD3Jc`(}J!M+(dUb!DQ!7ANWr z-Kf=0iKv8b?5=c%RP0W zcKuE=>GW7(NYQVNwGnNbcVg02p{{?&oc3}OAfVMVLM+yZM|e{&+Tna;6+SP{q`&bw zG3h(X41457up&lL0fSbbVwIE!<}?9EXKwESDJ6fkN>oEs`Ga`0-+H=;0XhmmS^BtT zN)OmKpD*zJ_t=>=_rkAcvp`S_%UmBfx6XVCZ~tgo+%85a9JzwK30wzx{Qf&DnRt z{M9{+`eTt~?#aV-Jjs{TFNM}g<`+|r7FIEfi`aE41Uf%Sb;T{_wJM3eN+06N3CSvU z(i6N1OzlE9TqWYJ^DLbm()Iq^@o=jU9cf^*4)!X_ge#myYt5O0@LYO2f9;4B-erqg zgFT>*;8{NUv#ILG)yDqnf7zhA<6LqZ)lXL4uy^PNNwK(3XFaNVJBu?q4@2wO6V`NJ zXC&L!_Obxbt=0iwvlt(VR8?NibBLppCRaOEe5db=dS-!+NiXk!ucwZ*%G*9_6SY+l zQ4ex%=A`B%7)Hjto*ES=4^9c|9yQ5p*yX{n(($e`J$r%l%Nq1m z3iHgW>eAS^i!uA>gL#px(qi^fF1-kRG^%O=p+FHUN0)uPup3V1HRFky*TWupV#a1H z%o?9OZ}IFXEpZ_mq4{JZAVLYvnk7sP_ulD!;GjKC^nG>A-LEXwmo~j``<+qQ*)T8> zj4dSmiz+1JO!dmPd!r~**#UI#cQZmv{xbgfS~d78zS=_Uypb=D@YHpT!~7m#ABF$W z&^sO!bDDJtaZzy=#=NlxhkrK%zJ2ums9SHhlFZXjm`;n5HAnnCpa7P1Npz>B8NWB> z)AMWJPi|1|ZKuz)U_blGyTT)e5=Ac>ny$oFu3P`*if-g|B&Q2`E3S}L-}}zGKhy9A z6LuBn<-T_%j<9=R^7|zogPBTy?ijijWY|e{h(UMcS7-8nNrGn{i0!0oZ0#RB_wi2^3OlTilYvn}JQzLKcFo%KNq!1ldY|4o@4nfLF^vrB|`k{^102B&2nNyX*AcjvnX^M(``4feOi5ByR z6CfX0&6l@hOSaY4lZ052;O?BR#Dr}6pzL?PTZWNFaxWuxQE6@LADOo-MGz%6o?_bT z$=hb4@55C@o&Ey zzcuzjm9~*au}$D;Yik0-B7xd3ro`9OWl&IB3dbz>GkDe?G_Jt@`oU0g7Q>)q(iuZ^3YX!ODD^1h`(XV6aE!e-pgK!J_B%b2lA)Os6by{X`Bl zaMV3FAoh=$;A|4mJM+M-sL~*2A%kk}l=CV;GKB$-j9#?d=fB9vetY8GB3RHl^{mq4`EOz!$bo^&wnj-YeWw~~Z4H>xTT z9CqE;`6`jl7$-Ar)=o}^Rhs6%j1U?ZVxTb&S6Z!j*e|Io1nJ*oOomI@U#e5Lf;?iX;@+8!R-ixPJ_V{Aaa$9 zv1R{^QNL<#R+n1c%D5%45nSQupJ!VSeO$EeTf=;$wmGb>zl5d=J@sQHX_95e20gx_ zX(aa>#yu#laaDD# zTZ5W+<=y;_^=>oiO4S??5>woHmWf%?6K$Pin1^SkFvM$E%SY23nmr#_9YnLBkeEEQ zmN^Y1_}}6WF@L|1X4s?ucvE+O%b6`gQ0~9sMqjs{zUn2+V+4H4u&Q!NHb)KIKoN!; z6o^$^c6PF3*X1Jxx0vZj@3ZKLXuUb_5K`jNB6I-=9$(-CvHYeG(FL1v9gTHPdt>Q0 zx)}2GNv6-qYb`nNTg;q^dv=!-hw`z14vu-ugkjbdlJzOLcuDphRka&`c*3&G8&X{H 
zet{ZY09ub0^&sHprzPN;n!s)w>ikvR7F_bvdUrP;hZz=|r;?nwyDl}3G>b}E+tx&* zbRJeZc})M1g&i;YBzI>bol?}bF{O;YxHJQi;?JOT6ch0#tv@gaz-# zK$_}6O38may4~6rf1y~iUdceAgRlC8+!Pw#YB6TXdOf*S$`lbbVv0-0(=t&vy(qml znR@`Ml`il}bnW(IXIlc@KW|+(x*`JDC>#XhRtXy51^p4S8@<=-5qr!I9hyJYt}&@T z?pe8?2(O|By(HeM?@!p7_5K8vO>Ya&lAR1Rp%)sEjM69k3zA;2&8@kNExLd1L`&WB zqn8F|-GmDKtDx?hw#%HOas9imE1Sl=PV@=r9su~q&SaJ=?rh2DWLZ@^a4vI{gTcYU z)5B3GkdDRAm91P8rYNc;5=*@O$3O?&VJ0#%NE*B0^3zwqxG405e|*Og?6YoyAgAeS4VlW#uf`?3soSU*&8*yKYIJ&T0jN{V>BNAH2?hp z3wU!_q%sT$8xqVKC6US?9|6+4AAO9!Foipl7-!JfFE;DpQfnfGMpisv$w@62hcLjf zYXnOPS=O1A*?D((7V8Il8INKz-GE;Pygm|vJe?;94L|9(tISa%7}%6xIX_>1zxpK* zXF@?M`r}KIA~k}(bwJPN`7h2+w;?+YeRnK1_LuliA--J1r65l-Qm=&TaG9Ap-x)M)J(`NIxSnO(`;3ESd@+ zF9J4(dYf&%<>(a?b_QowChm^uG`6+xtF?%6PD+-v4MhTc0b3$bCTS#W8wy9+t3m~* zV0%RE#9V5w!)C$2Tux70x#VkY_|No0GI(xModA0C4%-`deQw{@MYTcd{1%!#kKMbZ zOZ18RIVXOwrQ4X(0Vv8f0(!h=39v2%O5b_0TWm>8L4Fa2B==T(uWMwg6~JYq2$-L8 zpeggVUxiST{>F!XpiZi)Ce-%G&cp`?CJAHem!DoAyOlY*5Qr{H2qD$~b#~SebxMs2 zQ=qr@k=3bN!hh6Oudqf`=7hCz)`t1-@)IreK)#n1@TRTLL$M#NWKGHm7<%E->$9r! 
zW-MX%*$APUBmrjG{(lW7wndUp=R6(n_&C1foAka#JdF;+I$bMeMJVxsxA+WD6RURK zpW2?q987uFUI${5H~y}-H*gdrmC&wyXuNv0*j`AG3@JN(_io*LzTI3sYpwIH5VpMS z@^+$=R&_#xO050`$}{=MJb9WI^=OWk_I_c)(TFvwL+Mu z^8R&(WzMO8tF+m3T2K<_J6pCztPfPen{yUh?8JI`F2k=7uQ`2)J?aqE8Q^fe(2L;X z%~n|rIQy);EG%eUEKMT$ zjR5`ekxB_8{kg?C`ltH*79!@6v2+c0J~8vsm#4qeKAY5yzx+$X*9S4zuFiJ^s-GGb zM~oYuo=(OvcC&Y7&(DrBjcdx~m4cUrVu0 zoat6Dye=Pp=sANQ_D-DK9!oD7e&R)=n&wH>R=v-b1H|{OiBX%^>W9=4D<-v$=qt%LFo@EGSEkx)9Zn3Zd~unJ4Ml z=2b*rUI_%{rKh7B6yQG)m_R;6qYR1?2d%zk7LQUz93I83n0P*@Wk~Su&V!_6>jq?K z_%%AXDuMSk34}j_l?xi4x2&Bz;~+5l#Y3|Dza49ks66i^Xn17cypWEOftpL@ggWOO zM_vxl)z#bhZQ6I=iz?)$F>wKNz`;xJ3Hv+gONBfulWUdbD-r)pgnn#$IaO$Mg>VlP(4(x_3N|qY|9~?vG7; zq>9}Gc+vxr#L>%h`Z+oGo(2X;bpKbZ<5rGYh=>GLfAZExd+vUTa4U4r*U!-o1Lf~q zy=|tJbrZU^4>;@X*^O))lBe88BOC-4y(R`1IpJc^wCHPfY7TMY>=Zkm4en(;ln=53 zgle~$CLfFj^M7v8&!AsWWhPeRJx$0KAj3O}H3a3etOu zORBYlW7vPnKofjk9vNoP#hl#DI^QheENJCPyuh~+=q8|W6U~LdMwh>gctXpQjvYfP z&s=dG6q&VoF=RlFLEmEe3pQi0mbbEBI*eNdT&4dg2=XoFB}2q{_66X*6aYYaq;>`B z&L55hVzzF7u!vCir?s8z`E%=@U%=Y#$?aH6g(6UA<$Cl4*#4sBN0K2t;LEQ8j}-&8 zbVv6-#+9cnJ@!a3`rnRPSRV_PpKyt^Xx5e*I|B#9(Lo03(QohI@8rm2xqVOae(vYv zKZl?@guBy#I)|Ji- z*3z4neCdGklk}xSIGPelTnMVk5ffQX)?Lzu?aNSa1E0$<~Mx|Q9)B* zO;>+DROBm@v(X#E+}IF$p&a(KZ2fTTdt=n+)=~TTM{qAD<7pFUj*%A-5fsCd#C!qi zWbeK??v0B+W(JT8maWwB8$rEdi>hT_((_R?8eAMm5`K-4kqj;8I&xse5j?78t1b37 zJGo9er}Gjoya;Sw*^q_l779mg|9R7&ASt;IuCxq8v(kH7q!DO7^uWWv|L ze)w=Tc#`Eby1=?*iulIofUkIHt#2lDVR{HmM{U|GgHfAhm!_Cq43v&j$o>iaziP(6 z%Utlto8Ch1RP|@$?hVqMzR)u2%zWW@f0x}vN!D@TjMvaV=JJ{O2 z9AcCPY!xT9Hj8G1R>+B^%;R7euj?4jAJPr*iBqK2{cc=dEzr~Ou+|Iy(Ta~`dZ~YN z5JDHjOhq^2aya*kABA{?yeD;h1a#5C(lulRIKRZCvRjj}@4p+P*j2b#viJjX+aD}S z)+k)L1zOxl@|cO*43ME!ug4WHy#{JARTZ5Y(JxCJZ-a2{sa48-JU4aj@x!sQmG|qW zWYgEp@K{&4L&;3k-z2?%TYyFrScGw}#?>KjcBQ-|-6GWYR-223x8=FApKbHRDdaTn4PaA2!^nn(FOjlnAK8k1u- zMTmiY>3a?P0fyJ#`KSpMKPs+-yr*!96Rmf5bcp+|8{I4erlN&6^pu@>uIMQ^6y;Y z=x#1)EcaCtVktR>ZEn3U+9~^Sb`u-BC_1lO`BYwWmYq&fpd>P)(yIg_KUBV%1}zN$ 
zxotG&5gHm{4F1bTHfk<#41WyzbJ_XBR?b{Q?Y(E(-A}7f6u#cXgcn~2%V2P(fpRpf zk*K&ZjVmi@iA&S>%lZHIslPMaB~5~eb8f7aJnH;OM~TOt07teDVCx3LHTMoqc`!>HF88>qk>bUs;f)C7(~XVf=G(y-H$&2iU58LZLf5%%;6SzrM0^4pD+I zfk=TM3XRo#&Hl&vZ12s!pQ&eGqBhab2atul5Sn!3U7_o555%E=N1RL!uV5ClR6&d1 zu6cHg4*au~`)2rP3T5u^5}yY5#uSw6p3#Y3%do(C=C3$aN!gsw&=^D}Y*jO0$@Vc@ zCl!}-<`DU@Os9WR4*0E6vh9z+RW?p!esi^xmFNtUGj-%vZZdv7GI}255L$_a0#HF; zn8Rkj+QQDVf3X|DGQD=@wZjEIkPwa{4W3o*lUg6)uR7Gc@|xFd3VeW2xiO!*El?c= zjG#|6#kemOf9j_rdi7enPi8xx|1n&3dqmAS8ZBVv1j5P(!1CY5XVlgnkC?B@KeF@g z)P;wa%@34LFlsORJne-A)n2R^HySf=I{)ne@ueuu1c2t` z%5iKNH2v@v_%UMtS>HqDYZrg|XPN(dwa<-(Y~Kpw_PD#Pe^4hY&UsZ^+%)q2X7S_-d-qq7~MAR>V zq&H$iF#Vospx)xm%9#mN;M)nEk+5iWm6dp6IQj1(j!AzB<00~c&(1l@`=ZsTId@ux zDkMb;hIu9Y@G=%1P|AJER&K5xG~*n-gKS69+}Z~iNlV9b(epqNRRCu3`nWqlQbEv; zPgbp7lO5eM-b#CRZ0}gm@V~4B^v?B}F`v96Mwi~x&H9}Sl&x2WSi$jE!m=Sse-`79 zvm7UKU&>Ye!Q;p~z6W4rBi+@pyJ-W$t3Dwf&W-BPRwYh$|L0WuFR#|wE^nBeeZD_u zbN;l`Ndtr0wD=7lh=!NIW7>(n&>q>kg@Zmqmbq5cBjsrNxDgXoy5WiFBs<8gtO5NW z#D-#t$ni-QWUaYn?%wLjU(^RoYJ0mLBD10vWp;*E;!O|^d^K$Di`I3B{wjsXvs`e%d1NuS@MrADCW8q5C6gB@o)%sj`Qi*q3 zN;rzZY;5E*kmv}|TiCDJDwalAXJGuL0DkDhx)>T|kwwH>SLViPBqDP(Vgm_$ZYroe zd{Vcp?x9X`l+IR(Kf=mI0Om{2N!ra#6no2OHLAx^`xxC$a-)T$w$$@Wli_@z@M3(F zmmE{Frzp>BpZ$f0a8Ksv9kV;0)YK?U`bGTE%W3OHJQH!qHvptQST@V_j z7Nx(Q7k;oPNp>LF4`)-TNvIWs>ql0Mvyy&^QSA?uRv92n0rVk;#lArG!?rea-|Qsi@1aYc_cVN=f=8KHw^<@pyif?gL% z_2tQlr5mr9gtdL1YX`a(dqww)kp4+4C7wt01W zgp~DB<|#Tz+KlO%VUPI|HoTa8ek=Qn@kKf{BBG%5qXTZRsf8j{s0H}QplRNFXHV4b zN&o$!aBoH3x@n>1ih^3OMd950{rrTp~{KfrMCC= zUB%ZnB6Xh#d>350`yj}YJ)ieg9gU-NVoB?R= z^>9Yw8a^e_*wwrx z#1L9${!~p$BUsIW4%(9*IOTsBh(*_*Gl(E$icUyswy$5#+UFaF;8GC3a^=U48uC46STQo_0K}EfXtSJwJLSe-)LN@tA zA_eviGLft`9J{^I=@Gyz`;u3Oa1`CH`Y^URqI7duZIqzNumq~?!Uxrd$qm+TVw;!c zv+#2=-~;&Dz0saI+U`G5*7ulh^`|6ozMwh+T1u2Ql_xb7NQIr`sqQcSC~>$}M@bJ` z=*B8?FI%_wiW@bL+)9bJQB-_ja{B9J4Dz1;n!aGy#dLFUJz7pVFOdVdF%EX+;r_w} z$PyjLWvEyPVZ#Tqu4kO00z6x?F+v2?G&t#tar>DZ|jUHDLirc2+pO+Raj+r|I$tw;a}1#PMx;R?18x61YWm^e~DOKT1sR|_BsWA 
z*z?))w|pecVUzZK4Bhs*;u`WyaA<0w2cyIzvON9;*qlcA3rs~s^o4mcqMNz)22Oz z;Tj(L2fi&I#und_n*>hE2x$ocIfQ4PiS+=M{hrwLe{>y*RSOCBZn8CWnXs%FP-KN? z1uPVLsO-R5LWP3p%#zNOs8ggKoX_w7VkKr)D3`h7#WWQDq5v&KcVr+3Gjt&|O%S3~ z$yqC!i`eWnyk9n)k9BsyMkO>MiMoZ)FjY@8y^LdygdwEOViizL3xnHH3sGS$1vJ)z z)*956B72kGUS99mE;NM!9JhowP?t_eq3+b-NUgO42F$J^NGj>Um|II87534+Fsp%R z;y`r5^R>F>-`AX7uiia+>c1V%li8IB)ftj4VSb1TzgQ^$$??kIn#3~_4TxBSz?5Gm z+IqGs@2}eXGXLS$3b2Pc=w)W1NK=C8P7<|ZRQcy6pzE&5N1@SVYB4aZIM~j@9nj~U z>=N~B1@e=^$e_VPOaAzY@`aord$MNFBl5lUBilrv*JN!q)UH&ueV&(Lq6*}`k?jw4 zZ}c+WJ>rFpGfwt8(Fl-t)bM!hS>vVu z@sHR)bYD7m06%3>8C-o zhXlvOLOG74-O#&tFS2E9#^69;D*H}rZ#s783qTQBF|y{a_sbpDF#ZkA>RY#>;%}$h ztJ$UqGnkdd+knDzMj6bHE~W*$Y_`Xl3HV{0ouL;Nqi z1a|z7%one!_x|ts?AM*M|@uX z7W>aXyimR!GfU?-3}$83Y$Au1vXc*1*^pAp!$9l)@ge6)7co?;l=$eCfH9xjchjx9 zuKS5mi(1bDEl=w@GNy=K(;?L_5%V?mmd(Ye20QOhycR<}+FrDP%FZ!UHj- z)hM(`a)F+Z{(7n$c!m(=uEuK)r43F?tf2ERIXa?QC0k{teXwd%bL%|H}D(spCe@-z8m9_6C+DFE7@hH-lbn zdEAE$#(sj*(P+NaUF;`DqSl}$GB&x>QWTwAT~L?lOO&<~7!l|h_!W^Y21*$MJBI_a zyWKux8KV%pC^BG1q81}O#*ff6kNVsrfAb}3s)RJq<-&GZuTH+(dDl6jMjf_A*q7!0 zvNk44Q>%c6_>`S80>7|X!*61+xF@%}L}oMz+e)SnR+IT^-MhWoy;SDtZM&jA{GP<8 zop6^>CndIB|1o~Nr7g%7&NX(kpZtpjH&c!P_q_G96raE6&myuSybnmn$k9G_ABsgr zXe;#yqxEtC`gLE^E&pTpdK3_OihmRj=a8CJVybePJAy)Vnve1NP@S+8pYeaEPd)38p^RKGS0GkNAY$^Jvq%E^*ZTd(9zSWTI|zk2*B zkJ<6<`&nqtdlmksQa@fJQ>Xp6SKqT zpRTR66TlN%r|O6IIn;iKrxhsep2BWV z&novallX%!hV3xK?h)CH^LYc~iI!Ivd~Cx@CbJ9q%5`&02+7Wj5fp(2O$>jq5h$!C zn*Dqa0&sM%-pPW0f>B961mtAlE@*Lhrtg{Xh(aB7#*-wt`jHS)+w8faCn7# zyZ-PuNCh``BAIxN&;vroLp(_Oo+nEUiDIz1m$Flh01ZYM8LaPfS-|FW#buZ%_Ln#kdTqBTMb2^w*OvEo_?jZ;fPfp~Eie;1W<*YuvWxgYSa33h&<~3n5d93;7yv8!3=s zlyh=ngv|ODYUWD!7iSZph1)Z*6HhB=o|RhZd9i#1;O8awL=IR=FS%~6k_(saSh4Q* zI4Ec>qL5TtC%OS%8g%$A=!T|6pj{G_GTB5Bk@tgYfvJ-H7e?4ahUzG zv=F%WP?^H@CeG2cB+Y^{s6FJ?_*-6t_f&H`i8G;cj{8B}yQi}a4Rc+WK(%s*BA_y> zRk;>5BllmZUJ5why@?!}3=I?Xza_bMM&uH*;0pkagjEE!*Pr&wH(TmlPpjRcyvU16 zu*7*)zxMjPg4%xHw2hvvPvX>;9G2?GH|2;WLbVgLxEy63@WbBXS=cRNU-Hb?W8yT# 
z8FDPfn*XJ#e-USWHhL4xG$Mr^#8{9+jUn~wmajc_kwN4UAPFsKQ~6CS}DLfH#S0ldyrnC zV9X3JREgxH!~W!-(r@^zLULn8VH4G_TL%u(99TcSoRFxvf`x?gbL zcC2mh9)_1TtJ8-=4f2RqcL6?KTRN{}-uhthQX5hat*uJ6=S($vqaFvPqWAoPKAKBk zh&gmCMt6&DMuNx~fvR)9C03va8k7;(SK0B$_z$pl(YM)77e{7UupheuP&reF!ZQtL zi&Uh&!dA3@q=qjc>8veZ7fpvEJNZ{eg`w^`<)}oezkNjBxiWXC5+0jifgBWooQKZb zv|bRMp4ig3)%X(aXmCt10T)`GW4?w=r^efzX^#a8Vlz^@Sa?jrH5KMy)B>di2=ZF^$6Hkqsep`u92NelQgv@&o2Pp;tD#-o2hbbqx-iN z`9BrKVV<;ko*-ERezdHsgGLvs$Ze^l;N?FQL62z#)m|s*a^fy=|K0@zV-v>gTNfnp zgqvl}OATZZ+=?Fc8RSVY(t@ra)6O;%k{8VAgN>rZQanhAmxPFt&%TVi>X(gfX7dyu z0=(XBeD>&%b7-8WiH0n`>#69g(&%%Z%0W!dD^7nnBvH#CSnDD5APa6KUBB7Q@AUoM+C7xXf$_67ZVzrsG|NOg=ZDitY!0i8(W zkO0ZUMoK$wZzhIlVkIMqp;x<;&z-%UMdo|8Pn0=LP;XKTb^ThT2&;2N@k*3F0B?bb z#sW!?nz2w(KONXlx>Dk??v#6eLRbvvK!qmH?W!QM;%X4!1s0pr3~CK?bsgpPolPBz7ajZ1A8?nsv#ZvHqV6Pe|nY;Wnw|_~?0{ z`c;wi33uv>R6o7(ozq8Y%v@|{Awo|S`fVz=1E_+O@c5=&DjqGeB_xqNlS#obvIOz@ zS0U~!xEDnyH5}aBnnkdb$J!#LT1!z9g)j68v(xi~_0h|S()w|WMHK3<|90Ht6ca%j z?z%F&i9t6f>_Rt%&n;h8tSr)-vxTyVQPoNP*YNzk@RYx%R_^?S3OFz4j76YLPfesm zPIHfb-_$~T%|9EiEKQ;<_(wzriDhcbS+9pgzj1q5UD@{HzS@D#Wq61407R0@?2B;# zDEasOYJ1v5a^=atE;MgdD*u=~V6`In5)-rxc^U~OU@#*AU918sw#X1Fwr9rXDbFa^ zzI*`PtVq3{o*B``Y|8^6(%S{rAegTyt^8wNUn#2D{&dZQ@G8&Zntw zsy+kodf9=L#rZ?PFgbj5s&`g+p?fX88a}#Ea#mWG2La~1ShfvF^TM7<_SDm^v4T{F z7ss4|?C5|!=@J&*JevOLC2s^BVc{EL%WXPY)HTdi+t|8#I%Qqm5!FMO>YtVM@7wKB zQo_&7nH%1xsYL8cdH!-GcKfmtWsKTnr!Z;+Z#6O|sHz7=uY&dGBKQ5aRkY6&Qfv0q zWVaJ4#@*3QS(eO0HN{c|aCxHqe5J`bQr}1?z-{}p(j}845TIGPp`Tg>P9=XzJx+fOWb7ewvFyky{>s(1F`Pf3gP_uX zUgig69S0`~eVbb7uEbIKO(JXQTP6J7Rt1{Ia#wnDR*L!a`SNTDUzSWAu^j<+U-UOfVs*Du+i{W_=8)MbN4bn+$Y>w-P+f;xhs;a5ohz~F%yWnbr2c$aGd?7 z8nNnU{k2PPW{V1mLyy%yjfA<&a_zrl0lP+xiae$Xj3xuB1O~I%nR7VvEKGcz2hH3WR#XZcH)9=%SL3CKU>%~u; z80s!?c;wHaF8DEfcH3Ao}eCmHUn!g0LBUgGuNrE|(&b ztSw>o7C6G*sU>#r)s(!fEfCe1Xr0+>G`RZ9-?Q)2p?Swa(&OpOH`Q$jv*%2Dr7|6o z6c>Pfl{_pKI5sIeZzT&pa`0Mr&Fu}BY(>?Yeb6WtI=%E}x*ZwTDnk0kv$c&s&ImVC z=!H}5S8x#fB*a9TBX3UUwgQDb^Q}Pr63iXYEJ2|H(u}AD`GqTfk=PUOq8mGS@Nc} 
zz>pc;_f@YC8Lqi8Uo*c{-07=0xyGPVnYh}8guZG;CIWeuY$z+5pTVeGW2*oHXih{u zsVKjxko?knf&R?crOWVo4h z^(r7vvrSR|kd|(}%3e)rB`sFI*SY4HNbdP{J3eKZZ+k@Z2_OPju##I{>^1H(=ohIi zDq9O)8~IAUj-jsEgqm{6GF$$oE{rmUJbNs@$V=G;Tuou6aeTKA{yAdfRmg*aQyk7{ zL`ado%ac-7gCw2VsS$9fp0{hKTzmH0K+lTg;&*-oTNKgZ?3i>~kH-Xcp7WEWuZ8gA5%=q{n{qI!S4mu&L|Q$q#s9vDMNX)H zUVRH@|N181Z1{d^3pu&Si%mwn8ZBL>-$btWcZChNTo|TmugfBXEr6Vz(_L@C$E;x` zw#g~=AJ!q3@9_ljSbsyvm5KKd^HBqIb}*W|T za7+n-6$=ne+p#p2q>r(ZbjWG->t1Qp?h_p@O;=<4U2wEdh3q)KzA#Fz=0>52qe`Ud zSq2TQEGf&16H->AA2|k_y4&)M2FrPRFUMv$5z!qv{tjOB{FE3Tq~NF{@`x-Fl=i?B zQ9W3nom7R1C-dx+X(dmjwVj4td~I1G+W+*ybCO>8$NS4|8K{g?6yNtAHzB_DdVq@H zEn*B&^QcXa3&Ctv7AY8JItezrtco^`G%nEA;xjqXWxvs(g-7y$cc!eY8X7qXdhS>j z!03>zl^rJ40-6w#ZX@j)nTz{{U|u5S9T#{3b4L*flaMq7=!<^$tujdXGUvVz@A*H2( zclp<~iMFhWBFDT#A|}8Dl1&{N7g?MRzoxiQOHi1`DliwB)S{hTv{$OH-aN~P2-nXG z;cg_=$h4q=nO;WyG^rTyz1Mx&t~3EXa)T&N6j-AQQwHa4;sNMb#bDk8Q|TajQ2FIR zgb;}ca|!cTe0vvut29(~fgXf;#zT{}!Ww1$=77Ac7Bxg&ByC+QfP=ysfPaU zTEmxBrR7V!THo1EQGy84tzy>%+YUl7v*8TX| z_I8SVx+9-WO)uSTBAWs0r!DZ^b_mYt;I7ze+0XrwKZdk*UGdAT5svJzrk`*=D=Y)8 zCEBVKTqMhoX9FHZwBz$!tSOPB3yqcb>>pMVrO#vO)GEWp7xg;hVKFJNb+J(tX;TMAG`8A zEnq2NETW}o9<&&?;y zwv}vCGQu)ZTjs=1X0pa8NK?*CYktPax97w25gD^N2fdCTa>*25t++0mBo1}KsUp@q zTGjTyFBG0?4~yons0GAR5!C%Q^bi8E?U>W28M%7oXb1CXl|Y#4vjuJeJbaC&fFyxD z$p$Fl#ICE(czQPR&E{6B#ml9ZR2n;B%teBgM~;XlOvW0Q`D z5(iDOWU5CAVoobtnIe6x_uWE)=;8;2a!GSjuQ<@TdM)(#a7VcAy+*?MmmvX8C8(S2 zw$ERIpTf?ITK3Ph3k>Np7o3`8TeeAIKLDh}3oJr$>1k~FwO6-SrXzcs1q^RY3}-S8 zY&1Vc6~cC2^!|%&$uj&$>%*Ic&lmi_FF!a>cB({fej|IDhpwJ5e$=eA0`Lj(O`Bmd zc!sH&QVKYM>p>3cUIjA#YnUs{iHtJ(twt)KX4 z4uy}u{AY8^pVP{#R!&Umpj&+D=cu+;jJUJ#6a8bZy>0ThZPiTLz2*8%mt3h+@IKIr z=?g2BwRDT&Qcx+51gjB+O1t}CYc1=2TG!ROZtG;|ogS_6!>ubh-)=Pt8%wtm)J$s| zr{-QFv%AGXBiOEW^H;f?8#f(CTWI@f6jk8;9S-TY#k-S>*{@1J_(xYUB7VgF0)Trv&}5*7dl|y${1zT=R_1{v%ISc+y=6=Ls}CB z>BZn0aymrX0pDAIn_ajCI4W{djIalmvQjix72Xz*5ytrH+ff| zlG?v0aax>Tr5QH60+&peRoq3~=5o0&$obAW#RWWmJ_3^83Uj~#{t>7#jM=NZ`{SKygMTb=f2+}_KtacGFFVgJZB2>A-j_o%!&Vv;h=~YD@wv7>YQYIRW*r3D^6C7MZ 
zvs)Q-jT|yRWD}%#q22yI@ssfOCWff8a!y%FZLcuc`*Nf6@l78F_Op6rp_wOGxDjUn z6S(33B5oP{!AdqR)v*paKQ!84MOwG6A+- z4XbTh%*4dA5Mr|oM0D`XjCh=ttT5}dMdkE}Q5+p#GmSG&ON(*h!)wvxc-ag?XzYHq z$hHk>oaiOQdnrL#I5n1M zo^y@!00Nn1zufG&*e12M%m}WdFzTA3=TlF*%j!mQpM8>oi}f&46A<__uz~#V3xdUy znsT#*Dw5+RnbE%wz)u+K)SSI%r0yJ;bTzM=P#;HF>q?jMVoU7F8@ZW!)HlLvSRs^v z^tO1Sr)BT7$+HZTpQI7Z&vi-7jGtBz!BmbMs)q>XBeFjBe1Q6e2 z?9*(J?>o+O7LwxN?j>og6lw$O<2hpUFtBea!L;<6ybLEO)|OE+^)Xh&>JnKeC9qL) z&2Br6Ux&_}pp21z!Nr6fLZpRlCask|bamY_k=wI(c9e2jeUT83CSTyjv1%xCE^Dt2 zVN(xyh?-ajPhBgX9g$VtnbWSY!k(I}UL=g@g9LRF5O)@ddDgXS}ZF|aV zwraJhi#K3MfrqUJm?Rm+sC??;LXN$!H3CRg`UgWxIj*roNu%X9MYAZStCFu$K5%=0b z?Liz^O9hq_x9!tQQ(vr(+%)^8)mB4rmu1CQ{Z6x-7Y?p$Y9EelzPZM;kGQQ*F9Mf3 z(+Xcmj1VJ6k=Ai-!cy&*6lK_>WTN-Oa6_o!g+GCeWn(4t(Eq-uEaEe#d&V|!+gGwz zsd-c8qaR0?U7dB>91!GQ7_}LtvK=qB$-6Nm>{=$20nHoOL&1b0Zh58Il-!IZ-Mx#n z^5BSK%|UW-1A;|J?5A0cN>$A*A|G@ss;nxA+Z8pt%n#v6XJ|ZG)@w(HhF0YbpDu1H z8dHYj0-5>{sNRFvX-CVVAp$Z_`hj!q2z0%CDik(Jo>P-7*&ve#)&N-{!|bqP8z?-W znB(`ErwU%G*-mUVe_T4=y~td-96cSV5w(65S#)e+cgY5so*0b@}w z^Or8YqI9mCHJ5?&foWk^*hv%c3$oh@EM*C4eP+h8Cu|?~UyAI6RaO7H?!{*Bs;6HXm3U*GoQg1 z-~I>#ls5|?`v7JCd5j9w*<0$GuzorAI~OWd@d3BOuQ^i5OT){&AjL1IJm^^aw!sLQQZ#C|U=$*K(F^sXd7M-Gl8RI$lB5P_On6j{#a&rFRxaQ%fU=&Z~w|G0ee0!wpRY_UEi zg!I^UzZoL$5ML!A2G6b*W07Ay zSyeSDS-tx63wY19se%J=pC5j4e!rp0Mzq_aP%^ySt32`^rc~Z~%nF=rvrvG}`^34o zS>gT`sbgi-+cDnwAAbu8yQ8q2Kd1zGuZVItan#D^ym{0ldpFEaeM{Vl?gEMjlDV`k zHNgMY%LNqDd{$T$qc^LWjF-wZwng^=p0FwsV!)T$LKXL)fn=8@a%xM08F3S~oLkh*DDfSk+vk{I+qlK1FJp28( zTp!LHBF6bN%Q)-T8zdVR=(0w_w0cp{sKB(0Q){{pJDi;!pvXPMPC72;RI)kYs%T*w z>s(~B&x+WJ@SaBn>jUmp49vqmWj=CXQ}k#AwaG&KLA%%*$0HvR+7Ph)0Eyd9n2U-O z88-63E)C`3#lxbeU*r@x-`Nn_dc!Gl+Z6~?w`~u9&%2g^cwhy@f-wdI?$J5dYWown zcl?5F6QY)yIo7$k)JS{j#qT(u?nw5)uIik*kP{@rTgU2Q`(FiZVe8fBK(HHhxKl++ z;%xaQ1rzR(qxOks*(7-H^L9LT4ADKzap-TMzUO%r9ULan{Y6rkvaBqJX(TcdirAvK^=;$xQA-}WF<_wtCZFQ@NV8b8{V(5i zg{N0UiwaU#HC$y1-!alCgjI)r!Y?gF#J=nEjJI(>8g6$q3Q`K(SO`9V?|+pM5`&pz|j!_vWBg4{aF#BNsPO_z#I$mqkIyW(CMi 
z!<6a@1H>CCxi(0^S*UvzQyV2>3`c=0J|VmqbO^!K(qM?@O9ritj7kO(kR6YR!whJOvQm0w zosbKTuHE#_;!U?(3+^m~Z2}`m05l|}?8w4kT+%vrNYVk;&s+w}X3Nj;f=XDU~+=Y3E56r=#!njBdYug6Py zg(2L0L^_6sWdl6N(QsuMa9uZqBy1prd7e$K&PZhH<_mQE4CiLlZR?m*Xsy4^fu?{_p-Yrf)|MHa1IMUS7 zJ=}rJvTCrMcV^8y&4CiwP1+&7QHbsOY5c<8zT4fA{AV#Xpy0S*Dm+33?wl0q5Zkhh zv0~NuRpqqbze6Y$M!AIh@wJp!>ET6eRF#$kMGe0^hKdSdsrs}gQ_7T=i{nKontg>p zvPQciZTf3&w+?^Bl!LKgGRuELBjUe*JLcfxSZTL#pe#JBq-%3d2yCLkT8mY7Kdq{( zFdlNaRyfqMcx-_@hw#49a`t8au%x}nOE5$RJ1sE+OAhHhxd$YO1z$bJr+^+dp8KKW zmC9w3R)+3EIFgNg=9KR71m<|7gn;y-5b6|H<9^9}_IbBa4h89ECxmgo50PG@MHbcu zRYehog}VkAgN1-G7FHJbv-#-+Ze1f8(n6bBlD}-n+#I&wF`0JnOvN|Yf7KxOg=r{g zN8dc#aztetX!Ap%*pZ_pfNL-$u@|Yh?_va3wxeDS?$F=9^M0s5%??`8WV88xEICbt`sW-&p=ty}_olrHGKjG(}Is|9R!WXvpz1?tja_FE`9t#qq2m1UJ}bv)zk zTg?gl6HGL+i3_Qne^4vSFS0%TaoOn>`pz|{XJ$x?_w$+cMppeSGo-wQDrI0trU7&c zGQINME%qt0c+lwV zW~IR3;?TQpCYbIH9GZ_Rh_B38v6Gag@SN^ori2>wNB{9>IR{Gc-5JMGGoIxhZRik= zH$X{2LqIFr3beL&-Hhdwh^dk$gqsud2~BvTwX?7}dItBEU;%g=bNlG={Pk~^8(}Kn zAQ3KwQJq%?2jd3LkwPd99jO`xd9G;H{vJ+{(7lG7elihGyozAo6oNoWi!4e+^{@y2 zp;W*>V?G~Tf*OUU@p`HKZPA@QY6qEG4zM@)uUiX5HSLAg)mPVRGxg2Lbe zu05pl%eO9h*1uAirP4s%&75^=i!A@wLcmlM72+n8uy(fTfb*@d2MYFud8fL5Wxs3T zWeY0b6U2Xss5iy4DasMc!u6@rujrpF#_@BIkn3{f#~jHUyh|SBw+AIDgT^RBWxQlj zi&;sp>=u@HzBR9G8yHRAr`6lL4L-SJi?nkZb>7kMxA@h#jXx7{hFbkJ%gvlsx27De zQ;b7!PkKg6&*gHK#){|HJg|AcS^y*s!PPJzLIO`)x+=At!S!P%FumoeuSW#?gg$)7 zcJp%G-RjdFyolSoUqDkd73m%A)V6q5do?sB)xNA9fVFj!NSJ@GM~?fRwWhn?cifLGz_#a2ka1UH<*~*vP6RTrc8VgyCsDIlMP2-1zXGQ)Hcj4~i zXZ++*t)V+RT}JS>eya8x@9=&hiYfKj<$5ZbI8o)3T#Z?(;_jS z-^s$Z;=;aD|Lv5Uj8v}eb2hTO?+gINeM*# z_-&oSq_6y{yBT51lUIjyK>Gz?{%t!Mkja~D@iwk>2H+Z83Lw5rRo=CAWIRbdJ?j45 zq+fE4q9EXR3)*UY!pVNVNz8+CA08x3^!6c<-5!WhL6+XV{!qJSQ+_Y!vk`lD5JsWQ zk!a0JIo3Fqk8N!T%_SK8ojHKN{y#>uj-)kpSMJL?yZ{JJsyMCh!mUTNTd0l{PJau0 zVDC!HF%+&`hdf7>4haM2o1(uDJb3OR>0aHg(i9=WQOuX&ht=z@Z`Ho6PyPo=-t~n4 zd|XvrT{o=xv|4OF+nG!G0x`!mVM-_CVyWLt-gRWnG|C}4 z#$FFB6lQ&>et;nIAS-SGjka;Mt-ZRWHUHza&T}PgXQ>Sbt%L?D2ntw7`NVvzGwVcR 
z4Tq-&*|_~mn%i1kuQD(#eSEvqyUr%I##)NWJQuq$TGu?Fxbs^4y#h5gfrcEQgnhS2 zXV~b|vzkagvmt(JD;`gK-qujF3~sl;dv&#qt*5~+i4~9DF896AvTvh%_8r3x2lAs zx4G|@>jMI~kItr^{az7;6RSb}TSehujm9C2u>EOwdNwN#mv{HT#vI#MW@rLQoE+Uq zZ76J!76zuQOesNpYAPP$EoUtkZsGp!iW4ag8{|+h4-XyTsD1Y6{NR3QEPQ|Znk>6( zBH4)x14cvd-Bz+lt;tH@th=jkY4;?Ss@3aCu5YTytIV82K2{9n(YWXXcKYKz*6K_K|3liss3+!ZQ;e)Q&v=_0-#7wfaC zb1b$-O_)bDmS4HX4n*w zGTR%=U>H%){F!;|s3zZ|G@cM%?o}8}vr;DOH1%q(&xFd+QdxGd({dild~RT(5!9Vy zOBJaou-0H8Kv{}`P#Ii^3pjUf1SZqoy2%KHV;kc;37kHSt}oB%HUFfD@}#k{X|Xcq z;gDEzsXQYFexd}<^LG$?6`eHz7oa5qD^h5(a)8&YnC}*w>|IJ)Xi9?I9YG@Nk+xq0 z;Rai`cryj8+U(1MtMW>qf%74hcd^^l>;{A~X+jLFX!UTx0@eE3p{07$!bY=uZ&AW3 z!OPRlMACVVmAZLgZqN&b9!dT@@&J1*c{AI`d<=df{B|-WxHdlp<6vT&($Ahx$5}hH zG)l)yPWU7d<2YGV*ce(6OBbyqu3R{&+gt!!gH}{d2|aWcZlUrk!&QtLSpM!G!-K0U zVj~NyPfV^}81e%d4e@v<#$>P zcYf9IP(Rd!iBO&B2qQ{@Un6h4ko&0K$hu;t)=goW^)tYVirXg(8OI-T7=Jd(#X{@j zA1VPS%ixUEB)TJHw3qD8gBHvcR>x@NQV}hYS9%|7@>C)5YiD+?ZZ;VoL-O~(wrJA- zbU4`c)r=D6i8&UJS_ebp@W>dR7rZ?3UGmvOLQiT(tItzpibr(+63!OTg2AMMZ1}$~ zc7vJTL_H}?cSn_N$Y*3-X(;U$X=AT%h{ct0tgTZ`c&#|Cq8k~b<+M(K(>^Nf$l36a zX3?oc_H5fj(vh}|V6d^bc9N3jB>T07F;uOi0u^q$lo%U;J#({p)E;7vKYksh8(8G| zefv%p{r`e~>vm`edUN+yPM(fvzBGMmL`O2|yJu)L$Ql(v3kokD&L$-JfKs8V-5t7a zgmhXd9Rb>c?!~W4Y$taq zvbbIvz5Imy?LwpWZOCTqX9=vSvEJ+Gi2`9kFHHP}rc8Zi+Sc+s-?mX5Ym;?|Gutr^ z^6RhU+Oatqw0N-SM@v)-T3H<_V8EHqu4gjz%Q9^r+rB!Z?ef(A>_I03`jh6XE)7_U zlGFaS>;~JSxX}Ob(dJxJF63wFhkOxiqPpx_cB3nMczC$^t;9|ucFfZV)U!o`=~XrJ zC++6{z5w6%vZlqRU5V{;u6n1dyH2XPHlN!Na|Mzg>ycuO|Ie@-%L6XhqbbZMjhc9g zB>7!ChIn-Vz4?wVTSv@eRst8im(N@yrMYavSDWr3p!r5m0!A+K4Nug>@Ro)N!!gj3 zu2}aGV-suOtN^(!bIqT)=!4#yw-sfWshDep8*UDeao5Q7SPMzZ0?9F9kcDG;M+b4U z){h-zP$^l}X-1b6*&h_2emBkWGY_RcrNOz7vmZ`b2s;& z=U-ZcCpqpvy=2~yyFW|jsVoJPhzw-la&!+a|DF0*#pS7{`xlq|uW4$g-e4l2JOqm2 z(B|v{_$*MuTM&$tSc8i+f5tX%*6N)pBeTLAimRLpQ|M(?XAOmZ3luJlpAY2&8aD6kH}-`40};b6n7Bxf|$T{`k**%4%>`^-g3k$I87En#q7yL5IGps1Vy z+jNdh-gVIEDzm_|9GT1Q@O*jOiIy|p?~<*V%-ex7R5KU#I{Jke!?rwAYnuf5Ns72m z_nwI-r)?=WtpN*`kTX|R7r0?3o)t!jOue_qb 
zC>PteodUuZRLm5B->o>bP`N0O`3hV=Qg`mjg{g=FrZ)>COPNI|?1OAYSm6L7pO=0k=_mg;qoFT0{3q$hwKZvzJl!8Q7&MkcJf zPuVV4UM{KN1>I~iqn}|!<0m{F9Av%zhEC&g8xkF@#G|#eXJ!BUv~t#f$Cv8BFKhUf z;WGoY@u_m3{SIfrPJg$l3*AddwPK)QA?&d52du^L1bb1+Cbv;(+>RNf2d2Wt!g}ld zUVKW!sP+EcFBhfZdYC#JxIGBTg32J|)tYC`%vpY}aVhKjGiJ%?YiIg zPkKprxx#!6e}G#2XI)wP&DpOLiTuG(i#CPtvp&6cAh0JbR8fwFh9H(;>n@WnC)=+5 z0g%vV>?ClF<5l0}u?EkVprpk2wZ5DfNpg?uySxs<9Xp=wdRtSQcdsJ~I@t{GhB*R6>f zpX?mSZbY5Uz2Ze^=N0UkZ6?OElq%G6+9rt7hnWtk+Xj`v+l`Cy5$%B(-QSx;2 zdsoIk8^NNJv$LN)Dd`wAgJJBvq6EV5sHhO`YK3`(q9e;4Z~5)%fID?feN0NdMY%uQ zv81x6vu!EiVVz&Phvw%>&)~&E#hrlB4xmN~ymS`97=5*1ndF|_8+v#o0SM;)F*Cf=pe>gmom(CSJ10x8*05@`xLbE7zh% zWlc_+x~?ySwWB6q+f1!J=4~35JYJ@UJ=Euqfb2`-!n0U|t>PZD#8d4+RRHruv8c^h zXVqEk_Z;jGXxAi_7uJJOoXfu?qjytqbq49&6VAOqH$t>W_`Cp8H3VuzhID&I&JJF^Rsu;2Jl) zOBpW=50U(=u>8>V`nC6OG8E6D}^oU)#C}A&h2o!`PoH5xCbj>Tk(QmEU$9 zSB2`LTe;;%ung&P~nbi%m+h*!xk!Lks$lNX)T$jbL!DHP9_RIr#${E!sc5Grz%t z-l4MX?Egz}JGNOlIxP0Cq(h=!#ejXIpet-h2*r~Zw)M0fcE7Dwyr@}@=ka(>oOzWk zzjWTmhLzaj46xRAoRX*~-ThB9SUVLjgc+fJlQNO-TH`Z;g$?K+ggjiUYm_d^=z#R;Dm zMNW>kIe5=DJQ|XS^Rm!I%8lKb*O&Y%kQqAH-|i-nUHo!S0fDEFagvo17bhSc8KioF znj)rjE9K*7$Tu|e_2M`Yx-z;}bLh#rivf$*6e%;Igl_*S=0RIxkEZTL2z&t<*FdBl z3R@{jY2*r;n}(QH#1<~f-gk5uRtzo&n3usbD0T)w%{t9Ri9m)=KqLwyK!UKX?Umzu zR00aZ)eLnw>wo2T@`S+0U>GU{tDc6@0pkjItJ8d>G7uS!tT0&Jy7z&1^Pq0`N!$~% zsd!l!!tL^qX>Xa5D+)S_}Eu%lMO$ zZ%-3J#N&36}vqcWNg;*U+|A24I_Caj_#3vZYC^RC%K@DJONox zig+d5ikB>_8<;&_&!%+Jqwa;NES~PDhx9E<;BPJwZsuB++DSPmjd*1WSS!$itDL#n z$J9b0sfYeUZO_;8P6%+z zT%m`+Xue?hEVqnij~`+?m#Y2b8o`yBU5V$8rZP5SrCTadr_UDJX{J};AQ3Lmi+6O4 zo90hj=N<>{9bR5Jq(D=PBVfcz5Y8co>4MmK>nO8XTFXE=Ekszoqz=yHd%}v`n09lZ znyscr!6Jnec)ME$oH;tz+`>~IOh~{b$I;3km7fny<`QfO5qV(9{W+dACj-|!AZw|o zZdLBHxqJ!Ww~czmMkwz=-zMD&rIZpLe4r{=|Ae!(!@w-D70@s-Lu_KVAEyIe(153= za3r_s48+c{=!_{1YCbr*^MW3wLjD!7lN!P{Ixh|dVp!?{gE_#6wi7XsIb4iu$jsqgp)t;tzp3Jr-XjA- z+AgU1R9&_##Z$ynK`C8RXM5kPK3@s(?iIsRN+`_XQh{!Z~^opUJLy1ZM-M9Y0e$M ztk`dXhFUc(5ARvzZ+ zz`o=kA~AR`7>cw-K#Rz<`O!7~_A15gHG7+>q95^y^yQpq5IQT41-!sTFI%nHMqZ5Y 
zf}0u8E=)PK;WI{>;i9DE{T39r6^L&#xNrA7I9z4bvk8NSbVb}~O)thWstc>Y17ku$ zi;JOf6z(DWQ(LC}d>fZ!ZBQe7xc^M;V&NjDPU1)Xv2>ZLlWyj3F;`aZf`*$Y^S!oA^#(%V5XC5ZVz<3 z8$^%ZG;&z=wVI5jZQ^MyaRKYfxeR09Ky&+bunt27?k{hFU=&px}& zR|QOq%<$&L8u%NsC2L3Vc`_ySn3n|f9qOg#*|9G^J>ikVl1GJzn^|YsuYJScB>U^f z!NfW_!ZYut#_eijdf?q!BzdFAdshoh3=K2Kb&z2YkNcs=>1#X5kMPI*lFT_`hk-O| z{jBonz+4-9D`N5;t&~EusqCYaW%~LT^tGI^cs9lIO+z)=&op!GEgoC~jV8PhyHWy= ze9uX@VaY*#A74gJYdM>oD=D({% zFCHw_Fr+bWR;0m;Xy6sbv69k(<5wI3E~7*u1Ku8ze0gutqwBQLO^sf|b{In9yS)4` zmI~w8b)WqCz~y*Wr3H3(9cimP<7cOp z_*Y<@Q2xoHCqy;|LT_1MQF*>OnjWiurcGf#qa?1NSNCq#FKai8aWPALeE}Aq&{do! zFmoPI>0p=AB7zM)^Qxiw$HV2bs&{Y5>zT4LZV&Vs@OyOq}L|W|ZM5ZS)YMBwQtN64XsSal(Ct#jg4Bni8gsNcYtTYY-Qx zfr&=cGRc*|Y$19f*A-=o7ai^)s_XLC{kid<{p{ZIdU_4n6=afaUn2>pGoJDXqW>L{ z%p3w!EuyT*XUp0#cTXIVL15fBwwk%}T&jBPf~;eG%zK+Vcl-cxb4Bbm)_C7{w^}FJm&`GZz#gJ62{?NEf$j7M`BtO*I&I z9`gQ70eVxki2>LX@dYkQ;>@BK>|PKwKG{tznDI-*?G2uw^vyk47mDLhi&xs?jv zHv8{lJmS`Nte@j_j{>k1@~@95mRVBP90(UiWS2!uHn(EQ({3iymdmFy>(m!lkHi_) zbt6=~pq=ky8#^qMO|!K9$(Zt(;I1w8Mo3GR3{(t=MG2YHI=;W+zN6mxVQF&^_1QPQ zE=D+?U0bAELQ6-gt+&bo=O%~SZ}EFgzM*tAUn8GxLOS~GRPT?;eSx@oH65wTGDgz2 zfC3E`2?#FbUc#`atGf5E`b=$R7^eiL9&ou*E1WA&p~osAv4~_E`YL+=?)=x|J=cG$ zyHK4TdBD2=(qg&#vdG18%(HT5nh;d&o9?YH@_6N0r0avmtqWVHU0%)DGWIZ-*~}bI z;Z`sr{^3xENv`|ezAAMz9kXL85pH?k^$WIbWAn$jpG|`ozn6mDnP-t6v^|W@{zo6T zYpwL2)*@>FtbCJK?&H2zA4@=eAlOFvx20iJQ7!|caG!Dy5sme+#}dz|64H=%(gu=w z>Y?w!*!2zw&$h35{CzU&_bKTuoy2WlsiABL@s0W~vHUoe+@h>>Q1TWfsVR~b2(H@I zb{cl7cf!w@Wz+l9xeGtP9!&BI49pFT{UoEozVV~857M>z}PC5$wEjBr0oqu_uTq8+B4Ktti^Baxq5K; z)($bZ^)k6x79@-M3x7tBig2%+x!G&pC*LN18adf>kvP3_;5fwEjx4IlA|CaLmu-X| zu1RAIz*@}P#2%eFDBuz`dnDDGhqTzOtg8{oPus0^G$i2tb~}bD^DU6%;`+oIA!e$|Lc^GF<7^~ zE1O-5RNf1_j^vs5W;m(+{Zjo0l+=M1o*;|>lxT+*w+ZJI2CMiJ=V6jPH~+8VvH|<4 zm85Sg6$q&?r&a^A9ldm)Dgu34JeAB*$4{5%MH(rCgln8j2$ktOuLGXI(faA8fA#CW zpDp~eWAb{_mcOi7ey|m(wJ0*Omlk}FEI3B`6+gYW1dgrPR|XiZiyFEd);e&R!dF6!Y_o*~kop4f4Y+#&J*ufq zd(Yfvmiq~Do!TK@bgs;Dtjk#uCABSH=5_7k=GQnWF2`LBi)|iQ_h~P^S%{h>wc|P{ 
z%CGYk^t8%vkDXQh^!lgQjZk6&>4HyVDSga;c(P?{{5^^&2Vcm-*r! z&o=O_MAjlV_cep=pA&ABN9RbJEK2=o+0R?YY%G*1ZMl|&tt?avco}D+U{Kf#GuqlN zR`gSlv#fca7lE?Vdl<|65f0d*zO=0d@)0##?P0f45>wn~i2i*lBh$2JQ-E}?G^5>M zCnuzTJkL4%k~m;$;{x+A?+Tk%mX)RmKB*5i57vOJUgMSzy<*^3lpv?_nu#EOMinqH zkFaW)^miCkjhs9_8wo9>_A?>4((xBRQ-+11KB9%pE=>~?DcMd+xis$T*%X49@J}ZW z7!AZR1@J%LaC*#cw0avO4QWFhTz#-v0V@X*Y>*=5-rbI(5o4RupI6)K5z~tX>v|{3 zk8__D#ezsET$$HS!;N?f5An!j2P@y(ndTfgqS7gL9G#@}^kGc?YW|yZEK^bjg=U3y zYTqv2lCWw-lJ2dO8LyL(=+QIPZyPx%aP~?hwt9i`*JoR~vIwnAAtzYTeeo{oID}a1 zd!^~cWd5ybNkAbPt~vv4?(iy277}YVtj*#WfO=a}_79tuNcd1PTn z`+;iBYzAO)|3mVE25$XH`|j#Q+_$Kygz#Cvoy-ZIZ>JH@ zi&Bx{bq2e;1tF;_t;%w4%U~?r5~h9sP6k?`%Qy4q^@9J?8FE?|1WcJr26OAK)G)jU zc$#3A55I*q6j#;s)h#$oJnzx#77<8kGWcSwjk?`vUOo%BEvUk*AT4P8H?UuzWEhAU zy`yctO)*i8@ST{9S-lyqPSb8#;M$v-t-X~%CXJAq*2!jnHNreR}Ewxz$mq_uM+S`73_oBd;# zN_b)~k$kB6|0y~bcec;|kMDW5cF(5TwVfU))>5`Qt&T0quj`zuskL1~1X+s^5ozlX zhu~Q|S`@>`$gtL-B?%>`N8DXaQt-tO$1E`3F&NuBC{_89 z@1*&~e;0c|vry&YVAw5Kq0^K{`D1z!`!sjZEahGYh@Iem2i2i)31g+1bo4v+@#m)8 zGUurXe_42}l)Ro=V%#xkhSh@&vqPM)RhRYrSA|b^3qpz<{P(0(N@jF->cNi#v1A7J zbA!emx;={p0;q6lYSTzw3P+oai?)@zzFmyXr3V}roE)&N_Oe$xs&I}DREtRviy=GH zD_uTtxH&sFk)cUM=BC~bvn5#?fOc4PUGolhpuNKEi+4c|L>rUQ)K&R~aXWjTaB?8c z#r{Uy5=m`XND5b&al{78K6DFO^5MVstKQ+iks5Hv8{;RrgTQ^$6&|<&WN_0^9A_Nu zW4V%c`?_mbY7m_h_;uJV6URIWux7 zy&-S~*K2flD>UN_H^-Z6hwE;$va`M8)<-ltky7sA>j)BemYmf>*^0;dVlk!4;@f*u z=I+n@S!~v=z?t&+S>(wFXshDt&=idAkP@%j1VG`HhrKyRMOMq}51Vx43oqBGM$c)H zzjTe^oKCu+B?oRSg|b;lMbqT(pHyrlx_f@RT|&;9GR(EEpj#>eH$WH6 zh7UM>-nx^%AF1nGDGgaUE@yW5)b1?CT|K^_Pi_DxX@byZiTzTao0W18&`sr0Zp{Ya zWnF3izPFyn)9i6IFP{GUECWNX4NBX7J3GZ;xi4};b(n?XfKc7`{kI!YwksvPzin<` zh{=Fyr~96_v`(G3nlurV&1c>Hx7$z7P%}Y{8MKaT8_}U-%w!zz7I7n+qoSrRjGS3W z_kS~jsu0Kz{~DEXTcFR0*ooVK;&aa~ha3_;z1lL3IW<)I^&@#Qh81cY9*M)kh%zBH zXmyiD+v@fn`Dr^pXUClHzuUc$Gq^p9$@$DCacz>Dx$h=QA{c*n^$DuB>DFhYM6Vii z!RTTnua}dW{pkb3_sbg6QG@77vGIWPQ2(zP-|T*s6F*svcM=}Sbyvx|(ijBE-%c{8 z^@qB%4<^C3R#Cf5JKK5N(f<9?onjR&d;H6?B5`qVw%FdtW92}bZyypwBH}~#xjp|@ 
z$9ZO&S}P`O8hiBaKa1Nl^d%O_tbg*9soAW!vVpk(a2u&fUw1SyC^pZtZFJwpu~-X8N?%^plz#o}ttBVLUc2Y?}Jr zNmRxYhv8-_D7K@0$qfAER$C2dLZI&I1G)FjF!VSgqm=_w4Ew_>aA2aaPD+!$FEvRo zosBDCd`kRLSM>1w!-V{z-YWxUt1gwBKlIN8&1qvk{DcbEVP>oczvRzCzRLJ-@o0Kl zY{YWYWRIBJegxdk6_xU5DG#5$%DA%LK@fVZApNlV?^3NiTk#V0^`0-c%X{n=YO@iZz`I*hNId=xZ{3XT?EaF{|`Ay1Wx7wYxN)^wV;CpYQH- zN6J6`iug3tDdn-F_aN-l+79hs)4vNj7U5Vcjil3uv>C8)B|Ko(_g>GA!c&yI#{4~Z zLY;@1#g6K2f#*=ekdf!Ffwrn$vo~v|hqulNj(WZ_9lm$pjgM+o2&vn$Gp?D05?zq2 z+SL$1(XH`f5q1tf1Aja95}I-|_ykc>bnUkpu3IXK&&an=i?GH~P1SPHoj6di?6%`S#BrH`Yi`wrn&`OWLtj zEN+YI#?}zXh>zsC*2b-4bQx!Og{fH|)ftV()LN(AWjsSKd+M+1<0TfTE4R0$>&jnz zzSNxrOrQe8!8_{(V+539Z#iN>^l*9XJGZbHXz@=IwsJMZR9}uo$&b`?f!wKE)@Au~ zcxYbF0fNnPdoL$DbDEKDcXllC0f)wVzcUf;U7OqH{PQXDn(O<|fZW01iV zVvg6TQJ_h1c)E<04UpW+rT@(_=f|aDe1UM~4I*n-PYuW{)9maqI2A2NAp`_A?_{SC(ldDz8kdWv($MpxM{6fa;}lPS}Ncp&yrStJW2a8{?fz$ z$Tug(OZ`$bAMl-NWBjyhEmy5Bd4ALMW;<-zw?OgliYOiF&w0gvblHwH99egBW`5|5 zg_YIs--`ufikr4*x@~8N^V;TpEE!{1Nv~&@dB%sFY4EPw-s`6Cs%X(1X7)y2YQ;HvYx9iG?KQGE-7 zrL9b9YPci$A~~QD8a#Q07YFjLF6upda|A&T`B!tjKiqlcRy6qFv6ik@8u$8lp2JvY z5#Md>5;6!QTuuxW(0+S<-rPS=#kCy&Lkj9m*?)j5MqImnA#1O`?i&4OMzRx8wI!=e zGXT6y8NBD0y7}RE3WTtA;47}5KdYx8E*+m;zoFz}I|jA>RF_=-X=`YON_Nqcm2dyP zh3mdN`Pbkl0%>;b?LDU(!^AKgF&Q*!(I7o7Z}bQ@pKTtUW8yQ@D%;_apxKwn1(nWyb{9r9usaC=DrkSBvsE*tv;(C2iG7mLQfTM$ z`t9UBvZIA25D!1dOSJafG5xo#Te3zd3RSw1yI_s$w!+@|LHDcVLoKt(t?nUNzHo26 ziQ?Q|f5XN31*ejiPEq!)lASeDSrH5%LfdaUvk{6i!;W=;K>FUY`keRR8C=Saxwj9|; zY?9X6JzI=Tj;0KF4z9x=WEC86s~8UJL7Ncq8>Ee!0URuR*|LP;kbJfS614bU#u56# z7J9!`1h!ECYbf4K1vkSFjd=)xWZAuyIR%g%)N3;z{`CNXfLA*?v}=0gan?8+qDSqn zOTBmo9Euo*6+?mMrjkiL&4r$R(tSE~kmpmfz4al%Yu$6(>RFGqIQ0|WD8*FQ>eklO zG{?LQ|L3EGbJle+IT2~a1`a0RNQIMPiUYQ9?gyVEKi7?fuYAAM5+%3EYjLV!DxEdNG+w3b3I;xMBauB%RIn|F-j6G)kIh2H-Q?twuzRL`$K-CZsHt}1P`QY<} zAHbcgB)r&SmH`nu#)%*LeWq{Tma}26q?vsXmK6n$8LyFkElR44VaacfYRW8kU7Dt^N z^NRGlviQZbo(R@1#l@xLRbG`y8{er#nbtLnZ1?QLI(b^|8_65@WfIO_wWy&!4AbQp z=079!u~uiF$c&t6Nx&kyYJdU!9{Zc=M=e^L^2#dG`%sBr_J#-<@Zcl|QJYo}I}=Qe 
z)TGuQ3BcaC;RHJWrK!^G`r*hi!sudG#V~dVVBrH>JHX!X`;{+y+&RJgwTkbvn!|kQ zZkR#q_*wLU(@6~#Npn6}rCn02z6a@qar$h=*lfV(n1l`_A|d;OuD6PXsV0)&R`f+` zXW=nTyhL5)>%bNA*$@s9n-skAZ()uk_HSzF595Tn@MqG)ZaAhJwL}$GrMe%RpBfKy z)0&zK83+{$+LChB^t>?W%;I=95)t1%`5rGC3YR_N>wTHruZXt2<591|<&a}&nzbT= zxG&&XuXWiuXMKfp{}2rebQdr{<(u&NPvPl<*$nzkpeQsK=HGu5-kF4}Wv;NX*W*`In3p-+Zz3 zpr|bcajM zF2%P=APG!&_PqkDbGyD>ECc2$Mq?RRPv@+5ig)~yyWi1`)NS{CB}_KQTu{$%m_~Gv zgFa`9i=*|q8BG6JWH;c^$yVi~4yhb38=5F3_%NNHf$!UHc+~4%ey*!=s*Jhh=vQ)v z=gd?C82|mzh+;^8sB{F0SYuSx%(aA~mDe!;eDu+Rh26`gD+paAC9;XTz7a|ckEHhI zqJW0$1OPvRLQ9!_k#+eAnq3n5pk`_;g+AeCXB-LvypMICQU?*Y2hz8R20nx#SrtU6 zbuH7As!1xSz6ZZzpic~pN7{3@jMaEg*=pS2*EpA*DA9{^G2zr&E z%~YkvWNp5{xyp17z6|e2*jWZ?F>UpnEzz29=FF-!a$K@aQp=oa&Yj}q%D`~Pt=5Ik z%sEM9UAf#g_t-u=1;)pS7>?M+_2Mn1m!G=4N;p^wqR(E9sFL{CWew${%t14-6U}wj zlS0!}bA&4o=b{`YP~x>YhH-Kjm&GXTg^0XI-ouJf`8#jD!B?~Jn~*pD;PJMqp-?j| z^L@Tz*8xKb6GPH--yWN2cU9?=nm5NimOBlqoc~>3R&om;VN4H@Tj?Kc>K{ae%h&;_ zO`r}Db}U~ah%E_};C}D9rpI#BNO>n%1YX?*369$L+hrx`d&b)~*5U%r^P<#QlwZ`W z!}OXqeSpt9Ify3?mIu3bP)5o2#f%1hGis_;d7{;z0j9BWe0n?a+q|y`}G3q1M{~9`|DEy>hSH#hq3b2PUc(iE)l@ME8BTq zYTnFA^=U$V#X@+&jYS+?_JdpE19{Q6F<6|f8ry@HgSnSb?e=5z4rJwcf=r3$pw}le zadr`qpa9r)pRaT+t(Z)QKa`(q>~lNUjJ)o)9cQt%m*tIGZhmE&{)PsM%8N#=5-L%8 zrfBJ3I>CIJi;((O>z7OkVM;b_c2=snr5kohf>v2$8R6|~aIhrad?8C1*9AOd7w*{= z(4g<6?`^BtU7hOs_}M7fcWhTT0#lAa+lZwyp;!(EHl&4;0w5=_(T2T3cQZt2m1~ip z(A*fnm`kL+&jK>MzTzQCDU~?dAijA~69Dew9pS=4*#V0D3KJ9bXuM>I`#{`8B3!z+zAexBb>1#%dq>|lI%7r^Vp>;Aw#|&&bzEB z^LjoL)h+c!S&P>m<YLi^9eFni=J`MVgH5wi;9CriWGqO46h)(@*OCBW;70 zslSaI`yjlGvZ$CJS+@n+gDLeqv{jf%Lu?rUE-CSN1&lhgurRVe{mOI00Y>40(ukJ^ z{`URdcNH-w7q?{y_a%^&Ns!@_WB?YVbeClk3l_P)GiRKFUV0~Er~?giVmu~AY`l)h zs5wxAf&X3OyH@r>nSr;Y&YTzKv)~X>YK8GP>5{_(qqFjlP<^ zs|VYyCK@F@fnt?$r7Bd3?|Ke)w0*_J?I(h_IAzkrX`h+d-S00u$h)Uj-j%_Eh{+j- zn44JnF?|+7GhkTrGVBY+9_uswwQ&U=8Fd0Kbi2i-=W~or(i@R9PSqeRq znfzN#a2N;iCL4$Ee$SN4DUsv|Id0Ee(_ez!f1Z9Z&7v1XLAk6F{F7KYFQwR2U(c2H zL}Y<|`|6fJ1Q__p$-P{+m4=Jk4Y{br&!xlJX7I@1%geRw=R=D|b}s1Uz@g4S8EM-* 
z2a1ZM@KUq2K@&;uw_Qe7Aabfm*!}L>@i7(pPjVKwI78 z%9>1SeL!m5r1G4nF5y(G^F!D-{Xxxj!ZYKVnyFRGpLu%?|02Y6JQ1fZ9a*@H`L_O8 zmTPCBmACSyS*|@fC{du~giR87({SJX?txy#XJg#c51VT9ZtsL5zg-#xrkLMZiuQl6 zu->6(mZW=rpy`y$<+B0H1BLH2u4`i&hVf;|g~;=Vtyix-DR+;B<-3<|NGz`DbCAC) zqMa8(T`Vp{W-93?;+4C5+!Qv|GuR`5Y!dABJ2W&we{YDa{!Xa1=W9=}mUwTPavP&e z(<^!v$EHIc-@0E^NSJ7oFLIh?Nx|Ht*DUG0?~vhZ9h~%2OR!~G%kZs;@unQ~Sx5TJ zgFpVPqDEaQTrx?0S9Gmx1?IYGEMBzu>pZx(A=g`9Kkn~*SG)l#%UPRBIj)Ly1UKq? zM}2<#?Ee{1mFQDamDknvjOYJ#v8>JketI|c&{gfVQG!$B z+&+JC>!@>toBF;dCwI>fvuJf&mGIu!{abcMN@NquUAeuJOFlnAr=EkaFjU*OUew$v zPW^_Ox|2)7cDL0r*Ru!=xtaR{L^&^08Jwou|H>)XH$JvF#5X<|7ezglA@yio)Hat+ z=9E6Mq^_)8xDxs!`XX}u{U2BV`KWQo{{z>~v-i*5Yh?isGY+T~seXCfk@64@)7S9~ zh||bIRfZE|$UbBcZtz@t+#tIaTbw zOp;rH7i(p}9v}YWOKXW4n56}jHRPXmV0|~FxKNjj1?OfNz>Ip<`o>x&)QtqR^@J`_ z*VYmZ$|5?$-)$hz+b(Xpv8zXH)mfJE7RXS`He3^FC!SNSg4B+$RA_6EH$DNjO&b{> zSS9Z*_vm`eKMfpBay)RbJlzaZXM)IsAFhnfu!2E1W!2#j4m1=mWM2LI;fh_n!i5;b z8f~Lz>9$%9z%AZfW|f~d^(p{ZK44vV+`jD!ZV*L``sET`>5t{ZQf==|BYsOvzFmgB z4G||8gX}8DO3ABjDm}LkrH69+sWy+kOK|I6upp(Ws>3U%#1w&}+k?-?S7dK~a1N-I1BOd{sXiitOFBYUjS>v$M% z598=`QI=?O`;nkH+eZB)EUp%g*hW^3VNuZHo(-X?2(}!0+ttoi%srPIw_Th1rMNx1 zd0}+$ksBxDW=2A1fQ?A*QN3PT%_IkGc2n6ZZkm+fC@~yj1-8?lJZIux`s@F<D=|8L#<1)<&d6OAFK4bP>&w6%hQmDez;OUptS zRR8m984k(R;qB2Lc_~KC5-Q=$4GJ3Oz8V>Z{*YH+&6eORK89F>P-nt`&C+q6s!mxQ z6C0HrRWaS&R+`)r&ywtk=}Gn`hVs1}o+t2Jg3Q2-mao#>QB^EuD=U9mfvECJn4!`u zgC_Y94swQo0@Qj|1P+N$ozo5xN@0z9oT?#;)Xl-ugoAF$ zlwSc)9o~k0IN@f5Ib_Y^J}yUgz8bV7+Adj_DB@pNq>K-Z`70N!4K^_8RB5pjqxhKk z(AIH^W0L92H#nzuQE12=aOtuxcT{!**(WL40j)s$?gJ-#{<}=b@3Dx@^wgLwT`qtU(TJ6V#b>2n|!JSWcrx|EJ!MM>EK<=l67A*}Sl=C$G!Gkq+Fxyaph$ zZsjyqBrL8@rqMLsE;DD&1&*b>U-qF|bc2e_I{TVifBlJPqrED~j%LvlIRk|}$rBTL zqLfZ*hDCLAx&zL>d{}ZZ==i2LGc8k)6Z4V|Kv3+j3Dp48R7n(o`dUU8l>B*FzFBp@ z14bK;#baD+#w*>}oY2g!$jLvl*GfthyUGV(G{dV~UeG7O!UVSv$KVzpWA~3IyLQuSV7IBv!jOI#zz2uXb+#oy>qT8A#pJ z?3Mk0u|H8R|B7D!0?>9M+b9k=M*3oVqCx@6dR98)CWTz*v4eO5MBUwB~Rn5xL^<7*%q(QOXBsKt6=$;zA&A z`FapnXQH+|yO!UFMjaiarru3yVsDy7F^SH9v5Z#fKG9AJ`36x3XKqoE(dh?&SP3Y= 
zVtI7$ljgMp>%U>vPesRe(ZXh>H2VaetI!H?BSTfFRZ%rlG$TG}u;i}b3aILdW^PZp z5lF(C8revDuzh@&6O7&@Fr5RTiONtuYVF&txWZmu+^OVKOG^%s;CY8T9ygcOV0(Vk z@_0JpuTWwp-RBT5?hJdW0JQ8e%K*>Zl*-nWpOT)o z0*6n3xtV{fKzTwE)QGO3)ES>`o@Yrb1w;hZ!Tv%14qCo@{ERl52}Q(jbsbE`#>UF5 zJcbzeEokf1Gw++3d6$O~sgd8s*95R(Px8_R|L28P$48^|Jz6_|#@{n8l-@7(c7t%Q zNIP`%1}9-1L*8QEvobGn*Y+~g+$BE8#jm@|rSo&tH=DaItk@T}$`ArM1I+E2?R*Av zi<3}#ug7Gr=lH}9;5E4KiIw>|E9TZ8Jq8YFK6;Di5M5$Y?pMUmBPQByz)8uWA&KMs z4D8kHh8K?!lM&(Yxo@ZJG0oBevw+yoHCPV3x>1jJaoS)xPEEYrN}iDiaUK4~ehgtm zo(n}7J8oX?E;kC!9&H^Sm$@cK1`62@o&Er{O%iB*@#Zj7Mh2w6^2fUNYC1irk-el^ zaj5{348V>koE~kJ)K(F){HB=t zfX-!h8%7aOcpOI7bu7rqT}H3kgjQjyA_an0ML3NhWTjpqJ?n92h?_e0NhhgIf0U27 zsTRqT!i?Q-U&DRg3-^?_j>`Ib@V^gr^b2^7^s;%2kHil2r!zq{>!+woNl8{>HJkNB zW-55x=lbiwt{a^|5siwM@ZmiCCXZxFJ_xJ#x>32*_F*Aw(5Gf?{0-Gwa@lgvTqYn4 zpy~rw;tDhjlD^&X*b|9O5W~{(P)uxT2%O~tu?kXk4zeMmtlz23Gt`G|!R?rR36{%c zTTU-kV{Hdc|CzsV32O);su&W!l`D4VB0ot$1MEKnIjkyky)tf^5Al6&adiCswv9rJ zJ|=c_Fi6aQ0d*96yMqGZ8&_w7B1#R7tq>G5MgF?_zp>Zm&7=EV;B8IR$be5mmDsQY zAqp4&4!|v~QXsI%X{f;_VfYgH8_)UOmv3e{FuFfjzpIr%#zcW_sId#Gt&rK%YnDYH z&P*tF^^}0NVnb#u~MH;jn9KN@fDEgmYI_6N{}9<%g4{vxwWqLlPRtP~-PBoegbJ#`@!N+nf&yV8*pz&R3sL@V>X4 zUf+9SeIv*Uwbtld%Q4#G?^;mz8s?I33S#oxVl|LL$a_a)m_|)!yuNty)-@kBZm}pLAhl#<3 zoxq#YlvOg@MEcOQd3|eT>xrQtDu+zX3EEiDcb<;U+82@TKHR^FSc&`Dd!JZR1#;8u zP2W3f#%CD?*43!h@KclbOL0Ro=D?n%N{~AvCC{K!7G4;XUl!~rzB5T*5T3ipfHJh+ zwS040lHrgDWdDSKSvC(xIQ_WdRCJWf9;1Js|6vAuIo=|EEx5?cjOx1`E93-Q=eiah zqulGn!rkc@Gk3XoE2Lqt>D`3k_~iWlpcZR9Y<~g_^Y5v1T`;@S<1E*l=m2$qwF0@3 z6Ujk@KW_1M-f)UVdvzgWSrEs{MRP3qLeXmXr@EUO<6{Q!q|XkjT9+*D^}}PEWrn3i z*lNI+W&NY{mrDxYJcoRdy0m;&W!v*r`q;Dex=J%h{Q@kM^_Jvl%|fi zs^BwMUu$cEa{-8{5rrM1vi@yG^?3ekx1lwLbm>V!yQaZpi*M4qiq?H@povj!w@h^ts zBZ8QIn|w>6>n7eMDT12ygRZP`3ep~idD)ASKn%Y=4iw^48W`LPOJ<+;fkrClB?%g> zyU5oh{eTw4H7!nz8;&J-?ru1~O~!Z>VXU=NLF;+w=qVON#S(CuOEq9f$V;uq-Kb{s0!X@kHmbRtiQ%_B*zS?53W|jlcF=*NoCzKd)1_h$~={o5Tg=0WXN^_P};{KUsHz0g`{U+%0QgE%kdrLl(l<|6;lk!2ezWl5 
z0uge&Z-A&8u&&FIb3kn;kC!HVz92l&vqgt?c{RExS66Hw_d?}^eq`XYhnz~i+Yb;$-l=fNp>{nYQI9$e% zkP7X}&P2CWttH|k?`_t9#Fc&`CBB*#JK9uUURd1I)!w-J8dW+{gVSxSs$xOuyjt~w z_l$w*?b-PfN9Whyg@waf{)98`;W98Y4yME3IEWgNPVW@w$x*USdaiZ^{1By;4^3)I z>GthC=;67hCN|GsO2F?8Mx|ixto|=2@J4%dib5HsV^xD}UoW+uj0>uOFfnJmkh~7A z_~@#NUGfx>nCpTVmT?CU_@Q8W;B$P&d&Yl?TyXu`%~qJx1DhL1*qt$*B*$YcVCyFq z*u-DjQYE)9yjjj9M#AS8!Jh2A(C)u-23b&?E0VI%b?tGO>_2@AM)qpvQAg?RQC#H4Uqu$64?W! zNBUxx>dk#2H2FJ^pe43wZ4}KL(V(eU;`>$tZZ8rp;ZkSJe1DO88%Lw#qHi<{o2L*7 z`=cCHvKz;@iq_N0Py1lfWqcG&D#N?|FgiQxwrBT96LRb1lTqvOHtRz8>_kJny}DGj z2^xPrF|st)5~!U5#41S@1z>hTOT7`2pHy&<5T2WVI;(arlHVy_vS_I3?~DNk0jfJQi% z6O}ieYn-l)>gXCvB{xpEW;;G}iKC?7vWb^LwC>NDKWG3%T^myj2Q`o?0LdfQR?MuE zsJa>qy4>97(D>zvHoAO;GfC~unX#n1_>bF@8sa5*`IIs%?JY1P9+?Xrm8X+~SMq2J z?jJ91u-J*`#e$at{_CkJO(c`d5kO(90NDxD!Bw?>_bUCHD+(tkrJJ^lcVlc^K#DXI z?q}1MCVlgale=RsB)UCbm$mSpBv;?PVNHGH`5K6XnEi>1U69SB2}#qs7&!(&;P@*J zNz|h+|1LVe{M}Sp20(ydSG1 z%+)K{w<-E)}M7Clq(+)unLSl&s#O3fCK@P+C zhzUk80wE)jD#x!Z?(;K4W35*OWA?@CX=T;bVAfV1a+0Wvf#RyF&NaV!%N&nNZ~b6B z_MZ1RGW!867JeYa^YB*{I%tE|!->y;SLdvynFjp4m2BeidX5%s zcc%oMA#6W+7qpVux=4BW`3a+Pq=VzS+>7I-@ZjJ&zi$a9UGuU$g!ZxO!hJIL!1sM> z&DXpe$N1}aw}j%A&i*m`a@TvU!cC_3VDgF1&grnXtk4yN?&3cmJ;Ux=XKs9Urtbd1 zN^jz2$5+z8N!|?`wam7PY!<-Uq#_hwv6*5bgiTxCN(&9f9gBoSr;LGIM3BYO`+hjD z&|m9e@2efb@xy;mMCkI~7ax4;`$cv6$i-D^jCjwlkEi^{>~Cf_XPBgsgRBKKY>BUP z2uU6O!}gQ|1g>YUiBiT9%=RReUC_eIhM6balkMr3wdYrmy^w1gVKmXvljq$39G@FI z@k}u8_emiKA7t7}vva28x>h9VlLFA~L3jsAVRPw-(=kQVIlCaQKw5^ZCVdtP)Blw3 z4wIV>SS4-05fa-X7wVY%NUHw=G#0a%=FD4XJYW>Bb7Y&^57@%09z$@fadA8IQ=PvrY|39V*6y z=hzpw>3A4h0l}|R*1bH)HvQ3^6H2y0=6c5)8OG`ih&qv|iyovYe_6i4g zT<6q@bM_m?wzWsnIHW}Tu>dC`#{l0N3t&`YF6-BvjJK@wN1jFP9LHVkz}Zku`Zn&W za~GQt?GDisWihh#uyE;{#9R(mFy>LcK_>4pg*u&}${i5rC8JK69~_{yqAIaVLK;kz z8+w|Apq|Y6W?mX~=gf>$7xL8fVturEUooS5pw>h|dHM8up+4i^E_O!-y|cU-yIY=t z=u_3|9Qr>5gW&8srn(-fE zNwQL#kSThCfHFF$CUkmK@lJp}waJ;DxPxc5s}9eP0~Fzx)SRldxg138sYi;|_oasN zLMGG4-JVGFkUl`)P#}iN=821z)i;i@1f!ZwB?sG?o8~W7y8P(#^-BVl`Zibg#VaJb 
ziWrJ~d(fzfjDUPj24LJIX-`Z!Sjk9yXAI_IUGfY95o{d#++hDC*z2w116q0f0Vw9J z*uUhEAczHVBVO5yl$TaN`flz9sTE{I?N`stOi21QHhq{{Ja(7UXVTObU@)&zg|fh zxSWsa-#p^6FCxeq`9x+5R@vO&(W#?FdO915+rw2qg#6uTX3`HsM$+BW2QD;++6z8)(gt{ z*FRUeguRIGk7HJcH-Ddf*85J*}F!{c|9n4AZ>gJNu-oJ0y`IFWD zL*yvJY1^;If2+4!j7uHWH!D6gOT`~54|*kDui}{#4#&;>hwDJqMYdBkH8Zwc!#I#! z<4#BGV}9mVtKn9+aYB`05#AF?h3I(85C7sF?6G_m@hOeXeq1h3T%xEpeYzSH8RZc6 z=`@X8iv^cUWOXRUQ5;n=FddrjTE9P=6rfAJ(I9M?Pg~y`(5$5Fg&fo5<*Yp~vLwDC zbUIZq0Kiptj0U@Nw`W(ezu6~a3leXw$-LD^EN<9r${h7OhX4H)U{BrgtTV4 zesE*^{(3TfSI(7!y8VYC<1L@Y9kz4ZPQmyy-6-aX)A{&Iwi;Q(-JI3bI_2TG9xHL@$9H__&1CPUC5;A_Giu)#9g?e<>iDFs zd~kf;D9<%=%lT$@V`uO^Z*R(5vgjEPx5p=NFbK{e?1ms0t#hjX#+jVUf1<8EA-&S) zgxYG{p2q=P1|19j0S?Yi0IYBLNbZc~=dbh;14>_ zYAK&VO!dqDTefI*#lod~MK$K?aD5cxkWR%Wbw=1@Z+u!|6&WGm#Dsy?s|_@^R9({b zXe(ga!7Eak^C7T@R<{>ukF?=^yH!$Ijh{XS-caLP-LoN=o2PnO-c5mH)zxli&R%R> z8M$hO7)0-o0F+$DE}+K9z7K7bQiQCOLgnfxm~ho}q&~W-eK^E|z}?!4ARZ!~k2E*S zFZs;Qd6$Ud1Y=L>uz1_Lqv9RQw0PmHeO9PC9tIi9z?d#VE7bbjC8BL{&%uP6oh`dN zu40-)9?tEZ;x`YTc^#5!S!;%n_I;vs&?ueZ?%-9zSA(Mj%nUNR1^siI*XQHR7`Gl@ zOez!6c|LkDT&wPZhD}4kA;yf*D$7A(85Iw80pBBjkMuE_ln=9S_1(ar2u+N9|B95V z(I7Jy=Pe3V)i&7()8j+kj{8g6q&&b>LEu(HdDOas)s8A>+h0(3`b~+Yv-wi2UvmM@ z!o376Vo@mM(dslxazDr&Fe+n;dHHl#rG*Z)Lh*)Urm-H+VVMsr-12K!)OELVG`F%` zeG0*kjfKa6FG4;<#=~z=<}xFBw;XLok3BYs$Ltdw>C|Jh!T)^ZS8{3=ZG9^~CKBW= zB&tXX0F0aBW=OrO>34qA75EmWau#<>9l#j}Ba_Vy&0QZF1p@BF(W_>ftyVH1PnTgb z+kV-9gD_U4DIb-++d%mI^O3ycYrrfDWuD!$W17PiUeSXInL%yxZ~!X;vGlLgsgj=~@w!5d40V18i;0V?*Nk0QX;93?`@%u#Py z2gK{w=fivDL50y% z;PRCI$N5n;KNhgJJ+l;nTNPtjtxkMm?}`Ww2tJU^dww0fS0HS$|D0HJX*-2 z(-%r*1VSclM85h}_J5s(F}K+O>tZm(8y&Pxd<^K&f@1s$$+$A?<@=)CFLcVYjLxnJ zC(SVff8{_e@PeP{6doxXUI- z`U&2;%(~CU+up53W^O`9)mi-PE5XG_napx=EPCIeq^%cXDJpEM+I^9vE`yy)Tcqx9 z)W!Yf2`L4A4EGlI0%J)TFRadlNr$@+zNWRl5CV4*q55FrJz(>KFdACpqg=$^XLmP{ z(`z8GtoF_k@3P6kbp{u0%_!Q~OE2ua!1uCq5;Af&mh5T`eCD$H3GTh`{sq$qkxu1> zADowpYXblIh|tldiYZ`3##{V+}!iRf!D#h)d$`_c~tHWww(oXUl!6yT<01%aY17<6oz%!U}7 
zaZXd-y9XEX`|1vZ_gxH|a|hJJIN*R*-Y;VgZaKTFVo`0){rtZUmc@%rAF~SIgj5AH zxva`|k_v?Uhu!iBFQjKWXhBx?S@%&e*yfln0h#AQnqa=P|K9V?2WV|yUnMsS=V*hf z1OJ$@<+F*{W9`+eOciuWh?qHO1Fa|ysLo+3;z zs>EusS9WKBb%kpg4a6eC-K79ZL8`2xq__F*e)`jgZf3Leow(y%7TK-%PTBtK7WlPD zTCX-Eu_H;3*ajK|0+pfo`pI}|c5Di9SzUDeo*2K2wabv*0CBOu`QkBC1cnilS4%gr zn@sS4$?N($x{>U;iuFj%P9h@RrTB}&#pbFn|E9+L6qQcuRoJZTXe*ud;*Ts=(;0=^ zF|l328AsW$cu@ljCkJnO6PZf(qJxsqDv&s=g?x;GJPMh1IEhVMYS%c{xQ!=0V8+M9 zqPMES#+`$gmxE^PuA3eox<$4t`_TWg-}2bAl`&$p_=H{qk7fbr^!-(n2AP^bc?qWV z*y3tlTBp5pdOjBAAxc^B657Ze9sfk6rPboNJjWpGu6zj1nF3Y|8XO68LbEDmJ{MP= zO#YfqzbR=&NF|bBYi0fQTQ}stdn)^JJLBsP(NpTcd?&qzK}p*?NsI8%7)JLE z9DC83kx&;?9yr7yf`Ekscx1NkiV9PtAM1sO9nVM7lv5aKO7ZsnT>`54bF(zP#RU?~ z4T?J%BV+qIVFP=NS<5|@+vFLqGfw`*$SQGmnxxm=82<_175w+EVr7}U*f6scXGf)@ z8%pafWQSXnP9t|sL2^N3Q~o|U>l^zNS^p?CZo=Vp4Q?uk8FHF>AJ$m_kWg^Ki%>(j zQ}sgczkYty*m{;<$!L5~j<(sJpnPbbKFm#X@I zmd?a2>9l?OJCR&)~rBJ=)Zd$M*wjOSCHs? zw5kT{s&y6esR(L8V-PP6&#t`>915HQ&Vde)+2b=e!wL-p+8tx3?QUsg3SOFlNG)w& ztSLSQ@L`TK=2s}u3Uf@|;UeD$HUjwWis$R1-etrQN9WWs+3#HVyrljupRt9A@}wEd z25MrXv9#vb*u&kD^vwCzQ3FwNXgzU>v^z6=jpc>CA}G!6qm#~74>VfU@%d7?EnsHA zS>Wqfe?JE@4kl8Tyo_Mc^8AghP@W#TlOcvWIrtkC>59x^e}6Ud#&{@V%|o7SHwlC3 zq0T%?kN+kf1H4xBRAytUkEFrN^KQ$m@g9Jz zQ&3pstWd&0)oS-P${Gu)Y3+b|p4xW7$5d0C8{TVXpzP*HgPBsjk*SSDd^%eq1q_2U zKz(`(R-$zvUb8O+dQ*>08=1belXd#VYgX&Eq~|$yzyk+jl*EW2fclHwrY+CQIC6)@ zv#s}|IN^(HyZd*p4{r3~mjf(SSWbV>ME5ZX0yGI)D9(7ZUQw;FG8W7~u<82r$|*Le zQD?_9$Ov3=%faTrfj~iIVlIAhbHk>^#~=fdoq7gMFRa0hH*#M4jZo$mH;87x*2HW; zk4DxEA4m~^5E6$}g?|ok0i6Y1RJ#HdqU$yCdY8&WQ|v}aPt=~5ehE+uc)k>PAQI-o zGXGX-(}vr(zMD@m#U{rDYfyPqM4#&Pyr#F6hZ3>oP{u0sX9(;TEPHU|U4Lra(-S^R zhw8oUGsYW5%26WX>!R_vgvIvn`HcEnF>#WMWOIOKHw8dvjL)aac6wZU&!9s#kC&6Z zVe^XoPH36+wRO|-Bx$?vW^}=r_kj=mpK|7V#a;5jcE)rFjPo=kKPnbvF>KPCrN&BO ziqxmR#_2!XhKr>EDd$Av#2MY=G${{z9ZGcnXpQy3GS!Tg3Y!?_Rv>&^Fr0L?`(UKr zK5ZvTdIMsXnyn1$dbg9mWv696^kRiFnVa~yzGreC?3~pt=62VvR9g$Mr5)t zyzf0xKuZ&SnQfF7I7I-UOYk4pTfeP{XPRQeBuAcv9C%hHCZQedQ`3@1oW1-{t{69+ 
zq&Zp#Rqpn4^^h)?UXet*`w72U)cDz4YLugc3RCDEQpdS3wSGQwQvWCO=CUM4+b{}x z2B}MXu@&;yxtsBr$(||qy%60&?1^4*3UDSrwBi%yYcZWgg7RVIe_o)Sw3e>Ao48a( z$W!@wO|k9{*gv^27!o9Nf?>Os=9tp@W|6Idg9N?ydlDA@-IRY~#ncBOo>D4l_+OFi zu?FU2L35#hl)oZkGq$}P%^23aEa|+V`ca0y%sU`+(TUE1vO!AiCN?h-OSA=g0<|sa zer05wFnKl{yHz>MmThXG_{6h&5laPKO~sD8Lh%^ZZtUYV*6Z702{}6c#zh-&=T*hb zING8x5r49Wy;g9qY~8wD6Q)+W9_*9rhe<{B`t5dFwRArgS|hCkwt@bx-6fzit+rn& zHxj;iFsHocoQ`flD1d}1g*6VOoQ_wUkY6}i zX5|&Gwr%Z&$1S(-#q)IRdC?8EL!ktJpTd;BB1UDP?a!w?EtJd^CiYC=0y1r^cJs+3 zxlR8&G)r4h?vBCMnyp=mv~L(&{&}L7W@rxXbQxF;qY7Y}vUGQ1-=)Fw z#nr9#J3jXNE#E8W;*U%gqbqV`fbe6#P$ru+p;~*$;YXnm#C*ZeTtV`c=;{mqS4OO~?0N%6F8GAnqdUQRD=pRkXy}3c3~2lPHeG$_MA3U_AZYmSNnQJ3EMZ|s=r_?k;hiN zhn*&!QZa@$T9aDsdxxHnN@v+6>K~xt`$(i)x7$QgDd&OyW4<&b@9-Mdo~)C$)SA6c z?dwdNr#_VqBWAh7EO<)E*g$mw!BnTNex@ifV_T~RQt?4xE+&s@nJIDlO6?TFK0-{g z@q&fNFeo{~N|}5>gT8q=7qv+K^K&@auRr>(JOyV#3unpwCjN(}$YyCdg%U{9Wx#nf znY)%;ou2}|J$@}i+OcNe6BA>4wuX3~FS3agp6Na`(6a(z?<4|*mIY8v>P}>Qu8RA1 z_0LkNz=$Z}#c40B{rHwT6}{zMNQa%8SOGyJKByy@++dN6%YJFo_(KIj`GhIURX9Wo z;43wM=Lu~>fWatte4EeNDpv0u_gjYl5f>53ddMIR4coI4X=Lkd92i z;Gee7J9^nw3~$z!$EakM5X@c}Qw*Q{tSZvT?0LYncEL?jt$%MYQR2uLYwMe;^dC~r z-mDBvZM?!^6gjMtU1a4=nr!%J0$=!63a)%&v@?E}7VNV;1z2BoWA_@DE=bIQVaUCF z#`d8x)Bw?a_k6;n+bH|Qf=+BhO>vm4x4OPA*;BLB6dpKbdNhO%P-aPaqs5EeO*t8Le zilcDF%Fr^&;~+AqKcS;u6a z=xyaj^T2X!|0&tOLwVy{l-4*(7h`!`^7Hl=jUEHc!kD8NmYam%+FTSSU4cdmKH6R$ zvakWHp%0Sx5v-;Ms_Y8VtT&UjEM5xsn;;HfXU_XZg46`enSx278|P0YvHg=N-x+Hr z4B%XwNvF)j)4_SspI39fCZf$@x>RsNp5QB%-<=(LVt8hz%(vat^sJJvf*fyYf=PWs z`s;dyyDV*1TIN6Nam}lD%Fo4C#aO$>zeMN%HhkF2&@9%rEb`U-&R~P(W1fzMtcK?6 zh}UB-E=L$F3L44>Ne2TRos2|fPsK;(b#`p(}=eh7e%3e!--(jFr@GW2%r_TvCbD;2OBlAr6%9+bGdEINAF5(b}K-E+B4(pp`Q5mdX zUvIv5Fo_T5ooKvF7q-j~=C=k?Dye!cLoii<=wN9H)bDsG?!( zEXwH1(mvu>IXH_@nHnbs`~VfQVwMblK^enkel`zWmbCfBy&SDyuGRN?Rpe)>d74vb zfY;Ov1vGklGYY7T$-S9E^Re~TN|JhKd0{PGFsIC6TG+USAN_2LAb1@&p50U)d2aD- z_9>*AtTsCOpwL%_#exx22PKv{Tmbf=GvOLvl@4fQ78lyZXFC7gbKlZ% z(3W5)S$q4cktGf({@QO&tS@^j4CF9M1}~Q>^Tw+T(sXFDROC}z#6B3YYV~X}J)?~K)G;-khF)qD 
znv1gr+OmPc)YN;*tb~Iu+bHni&_R@}04@2Lxs5f;i+uHI^Z4Cg6#{H}so<6zJ6XLg zz@filOy0I#o&x11gapQrzrEX#%Nr*r@#=d^N646zH#^Jg+e>>#4mPY=k_Wihkbg7- zFAMdVjgomCC4n}c%q@QXe5Pm1Zks5-yQn}g`E;%*;1*q0N5RDwK?gx!+F#pzn*Mu< zMvf+u&uiv})?F|E#`6*45rQ4SlEpzsXeq0SD-oR#l~77fIZ=b~Uw6JX;0O%`+s#t{ zPR+Wb^-b7`8j3GZ`Z0RtL>{Lz6bT0G{6wDnwZ*>s+m;&dSSm?$IsWBEYI;@VB@;UL z({>4Pu;M5;EqmFB#08zH+FYN--Mq?KO@>(e>+(bgY_1t8XCUnT|{N3`=Qe~9(;-n(wq#}h@(8Qhi|vz|f=+DAs(Rq?q! zS*LX(YmzygxI)?p*S`>!`}Ovvfo+Gf_VMgf(KSQS&szt|u=Yx567Qq?Ug%f7-O<&< z2&U!Z{%VF8urq507E3zT$TxP!w{9tVUuXPcX||wk`cix<2;*;`Hks17Lgo`FJY;JJ z8&l<-QB{?6=YrS~_0NXx=aw)Jx_%=k+bQ=Ylm%rBfU*h3L-~0gn@3agpQ%nhcWtj4z?juP5;?tu$?nuhGUezcRK9f|Q>p8HGhB-v^$1*cE3be-$ z{PNo2Hr1pl=56@B$2J$bg*4FpT}a+Nc)E^bueSQM=2n?%Grx%d{zFr9yppar!!U)~ za@WEzWPO*w&`O!PR&;pnVBeC%SYX}DS@^v!!r{ym=d;SGg0OnojJn$<0 z3d?WpF8{eNHx0a(*Btl)B<77k@z@H@l`QHgJi}NnAh3O>|khQS|y@ zcX7>XRXqC<5p!w)hua;9IU&^+M?{l;0cuf5OIyn^@jYA#BW=$|YsfF((6^9p^^!erY{sL{UvY{Q4SMv>`9Y%R06`ngy8 zSEUGq^H;NKwJuZ{R7L0)9NQ_0vU0ImwlTHItI3iAj1m3QuKqe=JX`%o%vq@|HGwh$IR3KCOl0a_|B(PgQ=&iZ_H40}g7TQ| zll0g?-No)~L#%a2(b~KMc!$<|!E=t^ z0FACK7<`l^cT=w33@&%aCkG#56-?Dxb(W6MhB{*5GKq_!TlGL);lzfOSr*UeA=Owi zanF(lY4*BUybQZ_{6w952m`W#_4KqXI0_?=rr#<4V$l;m&5F*h@HV>Qjj`Gw%J_`| zsl}%qyNC|ud8HqCRy_<4fD4A59SVIY{|_Cz_lY#6&2}-`+eGF{;~_x&2E}9qN<#%yMK&#K5y*TPo7an}xV9K5fVfgB6x# zfR2nKyL>ifZl#I9=0#3StS65@y$U9aMZzb$Gf_g1*(e70ctehZSA3XX*lQ&4uX*%0 zdbHX<<_lA51in9uDs~=LYL<;ty`qa+@$ea?MCCi@P*Iyh`I~!ekb|%M0L2^K3Sm`{ zn45env?6R7CihlA>R&62#_h~@X&pt2+v>qbEneYYmV3eBL#|M6;u)Bu;t;Ph^y zFdA0m5#n!eLrkd_24Gk0u?Sz*iTiQsUr(nUWUN;OmkBWY^1N;^(!;U?GRE9q7wb|d zdydA`{Fj9UgN>!ZrbL&^@y$=4Cp%_}UPK0IASrMM7Bn1Ci2!l4JjySf;Q}{Hb{oMFgWP4qt zU@=pbCDL!>+P^B^f5USB^^Aqs?HYx($-ImWLeVfO7pU)taW6ixNY7vXM*4Ag3Y{jc znGwf8P86;nq%|iyr_vANO{%Neq|gmY^iT)DMTC0;5m$TL*w#E+GVV4M_ny3^gAmeu zcfc|=uF=)d>aD`QKU9(%lSjaiv2GnU?% z)ywKt{HI&*=tN4)?TLA__TkrLOkRmhmHD^rXWmoGJ{JX_bRu(dmnBHlv3gtIlm@4o%!lFTpXh5Ya`At0y1xpveQ7gK z?kJ#N$(gNArphAwCO)L!c1-;1KX-whc`WFQ*PD&6F(-yU|6XyHdj3<`_1mx;%Kbn@#Eq$b6WIISz|M8 
z4%66R;*pUGpU^OWZCNdd>Lt2tJAg>>EX{LSbWv0LtZ36_5p0#3jN#!$ zBeBiTh&QahERN}@EEpy_crDL7+7Oc}hf>XM1|T=mu8;iQ;Le@|VWjFVT5|}_Ss1r) zWcF4qd#pvjgNS@n2eHk_$L&-ZdPVc)o?WeM*^~_h5!r|(aJm4a<}*qoJ&b~k{yW9} z9%ak<(ru5*r7zGo&1*lTC2w&kRZOiFiz9 zW46bD1>T>!w`#q2t1Th%OsC*p2khTH67kYG-KoFoH^^!_0qF;nJBkrtE2-Jw&QMJ~ zhGM;sPP4in*iZ$*kii=L&eo9x22jc3Ihtnz6p{XAMe~w@w88qG?|ZDe_a&FfjG}l` z?yS^nDR3?z+ePdi7*IBO#D%gj)V?N9Kmf{mZuZm(Zg*ueaY$P%vgUwg)8Z z!GViEv{0iKR{}1G_qPbqYw!$;nIwfoGh)~skSSj`BU{7nmF@IxK9i} z5T7v0S~^(TAW>Pn-=>^gO2{AG(VMi*NWvj}SoG$hiqP9*r!`NRj|BVRD17O1BV?CW z7Aj>m?HVMd|DCA6KYA57$qr!PT@>eLd*)_HguO_iMR8j)nQQI;9?P{GXvGP%h{ zmXzxHfHylpKXrXvolg(T! z20=6{#)1kCG1Y-{*3XrWOgnwe7>-&{mWiah`+Fva42mUST09(=ZRAiEnm4*t576*T zax-H{us2Z13EkW7)QBmi5lSQ+y&lJt?rIN#s1_-N9IM$6T3BGAFf11(537j`dRF34 zp-iO0peQd(k_4EdC+Ma+5}|R`n5pcr`+A+4>iO~QU;P68WKfJg52N!NRrPp0uSnK9 zAaMn8=97i#Vz(jJ=y+tTpK&8gq}TH-13x;jJxz(bQG>O{oArYF29Rk{oaUld34Tt7 z>zV(YJ#mG}7zB{!-I-kw?ww=Y7Q01=Lvv31`1=IsuI0OfehB|y)Pm3d_K=6tM%C7;goq*9`BCU%`eFd@6GJTjxvcHy;ln__?^>+pZOyz7`WxiJ@77 zFbK9_UWsvFOYKdk2bCPiD0}DYBT@hE89k68*gkJ~CMt1lsndKAs2^Cbg1n+5KIvjS zC86nItEoLXpzgsp4{Vh#J^fcOnXgGl=onuPFz(3RP{*JRR`Y1`S`27wwW0#m8RNaA zf4N9qADP4+UHiXr;l~`o)r-PEHvRzS#04WF<`-vSnZIMt#1?on^yfhWNs16S;>JoO z)u~p)f9vTkf)O&j@)P!D=RQ)xBsOg*h6O`QdtkE3zp3o0T<|oU)+o4zp8S=1dFz)h zUbuN(8gsG%g4%{bzq8d7Co2HG-QRo47mMdMZiI^NA0FL%&8|7>2Dl7AKOAn(BbOuQKn-sfmq5588<}#ztkk zzWO$9caW8_D_6`FCDC(c9DJp!bdvK2#*m+6Mzc6oinxgdnVk(v|7T0}0O^Qyo%r!L zW)kiTuV?Z|`O)1W1n)tq^e81P!xdZKXzeDy-p1!%r&{uI49)I+KP!QF9)>Q@mhaBS z!DHou$UmX6NOMnB`T?^A;Hdy*Wcbt@j4J`=9MbZ|{x*fxHsGUT=!qAkx#VzScI_?nD(P3ce!ediSP5< zCEU7d2`W;6v&c=YdD|Mw;rMHW_zm+=4NO^%w;ZTO_L~A))1rU8N$mQgagnGrA&}9U zu=&%&53BQWa5sA0jAU-Jm|BJWZ3T;uiaxRY6DOaKI;I(hRanjEMs`VIcBqAG#YXGs zvA%D6g2rgUZxYr_ZH2u>(OJV}5#zZ&NhGbwYPFa)k~LG}Fq1afTb;Epx`blUMap^Z z;%Bn`ExJ&K=J$M@Nnq+-$m@wzup=Z8!;Lbg-!MPFCohcPQ$R4yu znITs{1uld2UVX3=^J!ixdEjKAKn1v9bc+j*Pc`a%sP2*@_|#;hn2qd)u+SAPup6Oy z>CwjYzg~B~%7DVWlzTh#QkjsHY(7-&O(l4OZyh&maDIxEo?;?qstjj+q&wFkE5 
z2d{Aowu44n@D`5eR8;C-gaBEr&NS(w**2*tB$a{(wmBZKh!6k>BzjEM29Ne-)6IJ<)ykqDOuQP^k;M*&U05Uafr)l0Ln_TYx|3|-c!iQkSYtn>5XH;=Ot zJ=cWdp$f@*Z4=s7rzMlBzc1-UyN4+Lte%(MQ|z$}l>qR3t{C zac$-fO563U3%h798@=qRqJ=iY57}@Bg*6{-j8Zh2u|v=rl!Q;JrdteR{)cI~^Af-y z)>>LVZ}l_B#+9r;cK?WI<0hjz*v}(I09Xnjw;wjxO>TY2?_}47$Mf{4a`_JYbHc#c zP3_+)h}LBT#`?KM>IK#y=$ct<(>WVGncfJ)OrXU>rCW|f)H%&5dWN0sztwf*NhWWi zja^37w#(M#7Hw8g@c2=w&Z(NYRjA&k55nItM_=w9TV2y&YHgE^Q3s6lC}~I-RcH)381py?tNeKtjox`CD|NE6M^YU_4uttrE)b@rUg& z;fcZNMs~~XiTK@2oeK??Pjw(RH{LdwFg}ff3iRhhHyNv*mP^gaJ?r^^D?!DVvyey{ zk;xJbWEEX$$v&KRc+qFI6vMT$iiCIN;#9dmApJfG;jXn--Rh)avqpEL@7canL=RPW zGnp4LzEV$IY41B4>s>o3~;}bi1t!WD2{L!x($=Rdnk z)eP=WpTvM7wSR_d)Yvd=VL|qOHQa=R9q2yXe*2GvoVJy`xXasMd#Z>JlBDe!0 z-24{rd>y7`PK1O!yj6HJaGmL~FAutIAh z?9NGN!Uve{L2J8W*;zFh7&hQnLGqk4#hl@l&w@)UqS(AgHhw zs(;?m@A%Xosmc7YNxba$?$z#O$h}gSRBsA6fkTU$X};Zhmg|XQ%7eQH{enW7!?3-l zZTBM2U4axZZELoCDqvwbu6`=2J$aXBn;e)qYVO{x%B=}dv-T9(B;Wrm?WtMCP>X}Gt0;fzXSV+>0KC1Q zkl9eeB2@hT4^6RdG=1J0_3vvYzvqf~vg67=H$tf^2uzDcegcCKIrPX@FZvpOo;qx3Fsdi=}fsps*pDUe>>D4JV%7z8Cs{blS0%G8vkmD1&E0mMU*Y1+f;PgIngI z5r249*%q3^Y(+!#K<2cw#AkG@&)AXo`|1a{u8kMI>|ZID$kR=6~q# zHkzRtT6J%|oj}cRWN!(0f-4%Inwg##58qOppDRj6qQe%%s(J`2Ea+3Q?c(r*IQwRx zhW|jT>JB%iPDLJ&c7~b5AqEI)^T!CuK%)7hJgP@EkDh1>(VOqp85~G4c$tpfHYTEr z#NJBYG`BlkBe(D)yw7UB$eGS>Fxo0ZJH7^0@sBfEO2e+J% zE>~C_dPCuYAS ziNpt1gg52T3Pq@^e;2&kKu(EJDEXMaAfWu7=xjz=F3jU!$}0xbnz*jr-S zV4VL?Wv1gT;Ro6Jf4kHog>TZE>S|UYG{js?UH;aFQ79r57K#*_yydsr5SmMAd;8R( zG5DLK;dHJTTUVnwCdSw!nuA!c2|+uZO*Cz)#SkId5DQ$s0FRQ@?cFaLohm4MA4v=T zgLZtd=+T+jzG^wtc5pZ_WUhV#Gr7^eM*N+&FQ$u4o`gR?(XsILr_Ehz;<*3ZXi{jz zWR3!whv)F=qNS_=KzZ z^S49S1<$v2mq;b$Ntqo9X?o&Cq2lfEa)1)t(5@BP<+w?YZCs(^0qYeov;~*egU9qc zS~aJ&o$p5hrJlRnezl}eXxYSu6&nJyjZ{hUib%BWAevCEYNvMRr^2G8F6+5vkV-(G zDN4BWMVHVUPlY;@tK4m$cF!B53^Di^zC0vh#iVZY8iu{*@RIaqJM_qs;=!Ex+}vtd zQ2B<`tc`2bo3?7nDS*#(lrKcsr}|7iZ=IR>*U$9$}_3eGS|y z&ywGEue&ehzd`-z=pKGJ!-k*^CCP{W@QXL4CNPPkWranfj2_*Vrs9ASGhzz66ea6E zA)mG((TBNuXO$DPRv>uft~Q9(lRkK!apWV+XTv9W#cm^eC0e$PXf9kW$xMVZo{!Et 
ze~Z2)m>j2b8}UbHzf_s)DP+`!?xO@ls7^Yafg{Jxzgz{a63?SV9n&wt1~hn5%If|` z^#pF}9@o00ssSwlTP@WzHm;D3u@toXi0b@LbrWY)`SY|&WMGvPY0qW5LON!ByEeP> z%4F;M?YBC+Gxf%XUbHe_w;<39WGs{S{C6T2@({U&D2 z`$x-64sUOA^3K(*D-o7-(<%@(kzvbL%8J9#M*bQJ-2nJBlZ=?$@JYi_Om1F#-g8J2y+p*NLj!APcs^_~ z-^WLM&yCqy?{y@;#jXPxsp(6a{V2C2F=m-PxL+9UIPC`0XxPrwP#BJk_2H7c>{wyX zaig;o)3~Cxvkq4g4?-BSDL2|=gNHmAI%T2+%wKDJne5W6YFA@Pkwtf3T0 zo*55#<@bV>SBgG7H&DY@*buC`S$6kecA*~>iBFbrZKqOPu;v-eaV7kt53^`EtRxbo zxANm+nl718FV-tJDsb$rr>(PEfR$Zt_WP9*Q!k{Y)CX%byMTw!2FOnP(MPw`@559F zlPDd#zvc|Yf1NkR72-yv-*1=*!JmiEH`z$$G#eu2!PqbyoBc9UTSWl}?zcO&C77~l zDp(6u%#z8dNVgoFcxlXXo3sqL+6RDjM@%S1Yc`hNPFIWcy>&YYAAk0`2kkn#42Cy) zjg%w17o3OZB|@O$kmDmqWC(Pi3DmGlesFQ;Y5CKkP7Z^SfHiLnjF!j)OV~XRVBsre z%v|1vFYp7H47u6F9GpsNIRI;*GZhsiZtj@qs=ZN@osJhMpB`Q$-`V!Yy`()I#GK`3 z4A2BaZQZO%#s-#UD?34EgBYC9CX(m3`_mGgnf>_}$CG0oPVHa#K`pAND6g1uo?6?# zd#W1=!TJ0Mi7xnrwBI&ZdjRyv72c@+f2Pxp75}sDg&h3cQwujYypriv&mhzlX>Jrv z`lP%~dnbe#MGZ8SSn^nQ#D=skvC8+1ywSF^AuuvOz?c&M(Ge5BJXdsDi#tDrlQj|X z>6UzHQU0YjgRv01ZTc#z6q8&T<9y5RLM8eiTJX7s+^es|bDyCOaB+04-FjtLOv_UO zXQ0L$b79!l^HIv*vd+0&9S`Fr- z8RiS9%C9uzR2d79K~{hzDC77$fDLz6MMVTXdymXSoZcF(QYgSzY zjTR$Xi7A4W^yEYyGT!XjgAj`KEL%n0E4XjRFH^Qs*?TyI9+G!I6^ZeQbebH;l9YRSM}VCul^)n-sqzt z&Hi#sa1P&*6^lbDd>G)LpsZu7dzKro4eOaRmktvueZ{3i>i5fYlCbj}dNJB)XG~hn zl!xWVr6E#UZPI6ULos&$?m1-U=sKq6`L6FWXc${ij?xLM_~VTHHB;@{*~gMlKM5=o zg9T011wNP_LO}4HDYh2^F20g>tAY_&q~9Y1iSNc#J>~~&~Ea#$WPK}K8}z4PzEYx=tcCQR~_osznSxM`)tN*@elS7}a+ zr|lH9B9yfh#@lEJHnw-Y^37rGp^i=L)2EYsal%sS3tL1;PK1w75MY)_`1M`#@GV7$ zz@LOt2hc!BMSl$)aa!bEtV7NI2rUWE_ezPUr2PCToNQMKt5{siS#9~kHTPnsC({m7({!f%uf`(( zARj_-7TCp+mZ+xgIv}fMZG+XZ5W~Ye^&b=;c-;~fQ0=9OT~><6#@+uLgYg)4FTTbCVaY@sJS49_dE@3#BBhrapzr)2oCmF|(sm{BMrlT{03gVO%|g zV($I*u&g6f91Mb#t2AYVA_sb!3w^O4mpr&gNaQe!R8l=6U*DXLqflr4Ip;8`8ZE-M z{+`Ld`z@69d|HAuqzNg|f7%SU4MjyC;glY{@}&OriN3i}&FLXRWe@as%YuH2ua|G1 zse`2F{q)mYwq_rYpAqE^UX63HA62D`iT4c$I1cG!z%4tiy4jOnioQj@y^44}hS&S% zwcmBj6%FHKcCnAY3ORq;*Y<38_rmwG5u_sNHtN0|>OH8c)7E{uc6-&8j+ym8mUHU1 
zYs4|HTr*jES%8tW?0{Vm&N8eri*u~_ME~m5q}i9=&uai^_+IT~rgvtPm02Tj5`?iL zxbq8?*!f+}^6K?jH`bQ^L$mFmGUP>8v=SAlXu-~FgcOznvM#eQ0d(goVW2OJkG8jW zJu`CXNu=3X{BoDLAju(c02Z0ao5a|RWYXJYm0IclfV6JjX;V<1nD)csp#$yqsxjGz z$lVLzypaD_{3A3*$DXL^-l%SbIl$8SJ+Gd3^3g`*56aw5U5f?yl4jxIcAS<1*cB@A zjtO>(@8flvUf(|dE$QDqoIkaP#(zFuptvm_&GY(6O_D@aWr%I`)t}h>`R_XZ( z`SIyH&r_#Ti}gRJ_DWL()vX*9i)K$^GowGI9ohD)XMi+b$wNKoWADbic4B3W52^LR z5#wZjj;o}Gn=Sydw}75#Z646L?@u8mHkmoMw9t7{$#t)o#E4IJy2+xCm( zj8&|4r|Q)-`Q|jEHL7K(B8>R1JC3|J+*o0)G)>&9*c#Zdb~&0Zr!B6YO4cwc&!{_o zXs)Hn{#M;aCTqxlR^b!py7($#60<8uZ9kauUUKNvR^0a;_XiBjxR&2VKKW54i*>8s z4V33NZQJs;ZmnG0UzabKKFu?EX7Pk>c~}2%y!Th;O`A5mhFkWTj*eis6TQ*Suc#J% znHBek&t2#jS|OyX_kyGUYhYxa>`LxBBzD}de!mIoIquU~fECIi7U0sAew>rneqx>r z%PVa}({}q6*6zpZ_%+l^5u5k&QT}!X;YY3gWY&@y?ZN$hDqC#7@{JGd z(+tlUQ_JDyJ5$g95qMoa14l>?J$x(_@40|62o3cMm(Iw3L{U2<_rMCRN zlOUZpY6!g1K3Kk#JC_bt%{8#m9K+1h!*<%$aBQHRmpjJs43kq#L7GQ(Ro-oFH{S96 zch9lVnLxeupvn)@q}!Hzl1hH_h}ySCK7(ra{;^%-eL~)|L8FJ#nhV;72iop=e!1Ke zRjVn^AM?%(+}rw!e06XeHI_Y#GoYJ#A5O6yYFcQNS06l-u!I&z@-PlE zrWW2Aiymf5I_ya-wc@;(pl9-ch)W&)4ay8xzwuC%g&V-n^eR>c076>!m=AMVy{GYg zt=KpInS|-lO0}2FdeIiJ0g*vw%J2UFxugDoYqy?0ZV+FAbg7-STk8pnZ!w-7&8V>1 z+Bp|=Qeb%Oy&*p8f$>|J?gN7E`$xyPX-k%EgyerLYEvLvhmvx~Ld&!ZgxzEMduEEi zY$Xm~CF+zDWd+BX%b{WW&Du_1jXiS1$S`a3W>?9PG@F2w^P*AC(~1?^6k?KH5b94G zj|j=?^0wqcpD$@4%CK_PRd&5b(4W+=pXjkt z{&&!xTKFy{32clM`_RP}fGd6%_gm_(0dFibosP;04n8e8_PrmP>?#Fph1MUl+@CJp z9{tZ3S36J#chv&9rB#UcNkceW-d3u&mv-)*c>EtsHqpZX|6lWl!i$4PZgh~AOSLZf z{f}3EZPB|8UyG~R#@pXr(DWIy@t%`@s|tWKH?Mi2PdsigwRtEsHKDp23DftQ5nQ_0 zv~zC7~qpq+d3@tJAy-DM{Z3+ zrnlBdj768R{j!9$+Wet8rq8d{CKl>|0s>B=@-gT8`g<>9p9$Tw&1e>Rc1T2ug*XIy z<)2r7NeTmhE}R%GJ?{ayT?Oa=^r$jepNvxM*Vu-RI%lQnlf4hCYgSp_TV)Og{`}Yf z-7C*QeJ<1@x^S&-pStAurr(-ZwpS329Q?SHneiFbd*t?AkcraQDdPM88lY_f%tij?lKy@A zcXabFIxmp^!uO~Y|D72Pb}<%BSxIf)B5qZyN%zBC)tX5atbV7Xm>Y6M(l=1%kVe{j zF|Eo`LZCkx0Qpy>*A_iOqpUgSEQss~jx~aropU(XG1WNpV}$*$URKkp#IN|ox9{q^ zC%cbgb*v$*v$So~<^u8*Jf>}tYf>F|qaZ&KrW!!N7uIzaLgS~g`^tUy$%V}I|Glz= 
zBJb`6HTVq4$(YMp&kJ&T()UAtym4j?=hv2hj?Do5+eK30;s0L$mvU?s$?1}!ZC%k_ z{N)h#pVq|++W1++ORtU{8eCO7e9h;vX2kp-OpiF3Ag=Y()aeriS1ip44~0+vLZ|;2 zwe6FLQ&lR`wdo@CVk0g`5QaN*-cPzln|QIUFxg$%Aw|w{tSjG)y)|eSJGxw-t`4w4 zT}M>7p*2Ee)>tV!dYX%sCSG7OvB;K4h6@RDjhaD=sHmAeS01MdY_Da_=?$wNUu1&| z4*2ePkmA*}bV2^jqT+$$$!AFmSBp2_{k%{U@2nO`qfb1rN~K|uaqENRN2W$)P4yg; zhaDw$EacI=@@n*OHQKI$6m2q;%HwHwhsOKmQyiS#8G zrP0baV)}A~cI+K9>uFP8FB21xzEort^C$)~*-%s%g{o!fq_mz?q^SYUX zNfpZwxL?R?&h3G;4mpl&Krpbn2y;DfUdLASekAFD=vunoiOkQs$ClSFBd8TZ*jbuQ zY;=ytxP&iF2h318T)@VyHQ7FB5?Db^cG7sp^*e+cw^i>fEv7ssUjMtt*Kt6(VuUj% z^FQ?R0KSptb9d~b{_`s)71~W>1vHcXMDQ#=d-|w^U0Z}MzyIilFFX7HKZKzHNkwPB zK6`9y?1%EDGv>%q&Y%Z#vF`q8vpV*a@rH3x{{iiD^hurjet!Bt-+MrfKpk>YkKmp- zS2eA7LTIiWZDF(V+s-ss>$VsE6F1zR8hRN}_CF|p@7R5u@3FU-*LKT|67yn2dJe7I z;2w@w(VSi4>$egMF^tuex8q@XT%dgGBuq({w*{<{ERL=IJqw_^!iKvlaX9AOn%Ys{ z+hfk_DuQ{bKWP%K1#=?+uR}f$SBO|kLv-G#FlImcbv^UYYD?UU$y)IIP)2xiUhhlAp}QGA1_poad%T2n zSf7zXMGKCm6|?fFOOh@Yp=^a3&|@PlB`{N`wV0M`l(_6;$}OIFJ@d`35q)Vg^M3sO z6oRk&P|ZTr_PJoH3wDQM`?R|0*Ecr1fg|KNr?5|H{u-qO3O}sUjk(Xw)Aq&?QK;bF zNw-ubvuW(pXw`-0SAButw)3au?)fp}o${13Brz*s*K&B?CU-AgT=G?QibFFYZr$QX zo{h08+7y5QW7Gk4)>%{INms0fS)(0@O= zNy|V7<`jUg!FHrEdiw0c@O=#O%^qws9O*eT8@Vz3J@axC(CKI&A;)_G+oRR0ds*oN zpQaH;dj{hTE^-P=6J}kiN75S=uBBTALz!RQgU32=f0w>&T+prjX>70g!49Xdmyj8C zx4vY6U#Y$@ipQQ%P<{`l`z9Hi2&?ypU;`KF~^xEtz4(nnsS?5 zb3f&{W#+ViA}LcU3It@iujkaLm7zx*F=5mxmr52>Srib@g?Yq@}mrQ$Zf z=ls6^0kXV(p8I~iuB&LeJxN-+5#k@qLg=G16bk7d|B38P%ubQ40L3)FaIag=m7FWW z{rccOAw(Qb0Y(rHYD2{M*qjG-!{kK#w-)v#Jiafo)VF$OZik)vsix(~uZAuxg9jjO1ex9_ zxJX))OkLELU&-T3xJ3cG)}`zB+9(^Txd-~?k=#Oy1F-q%QU|THgo9O%9X~aXiPRf8N1B8;%ZE27QK_fz=f&TnY6;~K1N#l@jtD$8*lB3 zIH+3H6-8glYUoZS&LL^X$B>El2(Cfd4hP7Z{oXlWr_uTcUbANv=IV~m!25eV0)O4h zFXm~~3H5N&!@oZnVla&7QC5WdRaZ?3_}#XF^jgOW^^Vf838wFXyS&UAdB4N-nN^xg zq|MRnb9J_!Ltt?F|E2(Hr^)IU*`Ala!|a-@lrfEO1|3B0ekP7-WuYD$IG6sAe`>mf z?axaUHeHaSPFLc;%6YvRaK{AwI_uNX*RO1weOeB55|WqxB%1b_$ZOB&g`I@Nj9Z5n zxjb~T8cVKimLfWhqq=|F7j^o$R((lrf;bO_@rBj)E)^7Ev9KsXep3yIZ0jU;vV|U+ 
zv|jF-LC;&O!PIV$r9ZbIz!0bs4{VNl=~e{;3g<%8X4{-wf!u_#-24aLS^0V4&{ODU z%x-l6;$}o9JVi;uXKI%6(BqB&?>IWJVKdduXsioE)yNiJ%rS696x2jVK9}A)`eCxa zPcIOQ-ZyDla>TZ^^X~shf2Z!}7Uk#_cP>X;+%b0#Q*XibginU~-#-hCTic&}xAXrJ zNU+CHJL9u)pDzzY1dV_2DCktgyck+AH^~lWBn$UbM4YIzO6%fZU`CVaRtP7os(dDw zn#UKW<)AFFbhGF?-Bk`NUlcgp#U1~XUhfT+UrtV~`n;|Sn z_y7dkKQKg34*9Ng|C5Y2XQd>OgwQU{uKc`C-p*a*-t+Q+CKCVth;#X}X#2wQ zu4eu6@@)B&v>w&Z>5-_#k4yR7&ZB=e8~Sfq!euY^!_U<5$A0+jT+`>;7}@@{NJO5O z_1iZ%e|A|6wmv|Z2#8*tsYYxYTt@=H#U&ug2~#CyvCtAy&{z%12O5D+--cl)K+QE| zXxlco$w=bXQT+1|$)7y&s5P(ADnC@ns;Tk^TQ!Af=B3D7)}i(>egb(qF>kf>gmph9 z6?_TisBTG()|r?i*0vTW&hE=y`~QzSri3gz`gY24QAhlQ@3$~G%J}o?4pb<(a|f@o ztTSO>jCk|WxPMrBo+yWXhR#|2RuR`c_pQop+j4z99Q&ks3{Cbb*R#6OGccT9_|Bmp z>`KPvH3?&soqDzNtGlwxlJ#Bl6@z~v|Ni7Wq#$(F)-vJ{l2gKn)lTnBrl7}MZJrJhp~2u%3$Mwh>98x;6yf- zS-J^V(_thJ_uqj%c29uwuYZ=JBi+xZVgvTDr-y_6mHnF27kaYz4ShBks-goB-tqg; zk&Ok{eazB2&#qXP=eyVeL-hmDrQ~u--#Mp#KW6(M0Lu3Xk^}(e_DGJpEURC&Z}*5i zGK`-@~v(cnVRLpjFveVOq`8MJ9m%C;GAT<^~<3wfX%v`UfuMuS}34 z;@V*cgfQcYEIFRfOLe6Ax(NY~nmVJ=KD{`)0as@fA^kfH&B=a5DHHKNVHOk8?)4kDTBbH1(m)0SbaYOV14NHN?ss7 z7lVJ2z^2ZIJiT(F?}++-*vUEqq;}Q#;2L8gyCZ|e_^AJVX9hn5-&1@yBy+DDl)a=+ z?M%;(PikrjG%f$B&p>t+QZ!Un3t${=l$-1s>})0a{srtU&+SMUFWANgEnw>mg*?U>v(`?iO#+| zyKGJcuLq}yfWP{cz19MUhsMwJygpYFJbQv!vaRBSL@8wk3W@7DS~{g~aru4^Vk5|Q zf2Ov6)U%sz0!#i~vE8Fq3sLrRoUp#B@tL;Q(XCrBXINB;_=}|sb&MzaoTj8w% zEIsmGZO3w89)9@mPofI~Gf$NrBZj&|tyvL~?vZLz05M3~cmIFclb$Bp{U)YO2tAnI zS35vP($@+?o|P>wCZP^FSdDieBVe@&?_ies$ynjEdiB5n{$wp9rfYeDm_`j;{%pxs?x1``zMAma9gSK}Qc#kxt65Fk zw0`Q9S6z4@z1{n<(MXl>(>%A!?fbJY%P+i_*N82*e!k^&`ZHp1TWYZVTt{kK7E3ln z#l0EvvUH`_zoMqaOnk`wX5Z$TS;$2d{ zBy18K`(;-c<|?urTK-*B)LAJUItUaWP{awD5l}*Y-MIY$?B3q|{Yjei)!xs~pR*1T z;;>{EbHU6*!CP2c` zg^5~PDQJPsuoeCNa!k|c_3hLg_L_`cmt}HMECU~27`Ih9uUa@-d>v!{UXq}I2^P}t z3)^a;*i1sm>E+6q`LxWlI|G9+^ju}H{akwH$Vz{F1#kgdfoy@j1gq-2%n%&d{zb|v z{3=;*3t^y)^1o3%wtlg5|AC7^19BuWBwuUfU*tgcM-immLyoU0< z@2f)&y#zD(9+oU&v*YfyAsUdp*k#A)*S7fE%5rLMl=evVtohBMCa$xk*NHEl_pNi> 
zDzwbxXV0>Nm#Yi1@XoU9B#sPQSvDd(Hbx#3U?3&-M-Y?&yhn0HPL%Qr#snLOyrO@p zE5+D6FQ)Fw`{xQobh^iHY#Su!K+3wF6~XenaFgqwu`VY(#k{=woT5lbworP`Gv`^nI-ATd}zS`Tl!*U+UF22wLV=x%kw*0${j)&!n1sA_Jg}s zsB@j*o^@PmW&-$9&DF#{k>(D>|F(U6!eAeoa}l#=g5ud{(H}E87CFYJ+;hV=M~vZ) zaZ7#$0uNvn9Xpg;cr0{S+Wq{^s|kXur?VilzA$>|BZyS%YsNwHVsIA~A zkTS#>Xy~wEs=kRt*`yo%3%}GjKtAUbjfOVw%-Kz#_}xVTGC6XU6l^HK(wa0|4RV9~ z8AnU%UeVI{J1jcRXnt}R<8EIi<_mM}Pf0b*Qh)$EqK??^2D+pMmsNnprBn?c+ll#>dcGy*?(Sb;wBE@HX9kq08v0;QU-oNlnC5UJNt$}* z05BUz5ia$n3|e#=lkU{Qq60A|q0XtRcx^1cI_XBARpLuk<~o`O0nEM5;J6io7m@4# z8gp>oh@9OYL7uevoY}@A2XmZGA9<;A>=7Q(6kzOU6c!KVl_wsy_Lf*ZE-Rxzx!jP@ zMy`Sit1We$y`MVoH?j0@u70O;v(j$b5lxDNBvXE?zCaF|%C|TfQ&JX=Zs0r|>JjR> zzK7LBR$qAD&KVNCn%Y7!l7yMhXeF#Y(-D$|4a$=Jn8~g5Y6lXp3|3Jy{`!5#K7ROA z^`W$aNw@AovjdQk@EdV!k?gB6l9&_#gsrC6n}Zq#`bDs4^Od|A z2|Wu`hVRKbKYySXkEun6y_=&u>L^#qK~f=kRfqshw5Xw1Rwe3j7TZP-Uh+zI>!xmO zO#Wo78NDg*7hZmH$XhSN{@L3@4?JEK+iv^F3};3~`%}q!Vb=!whh(8vDatEC6CBKo zmrI#}KtTKKVzeB?k$KdXSI4&!k00!P@`f4uSv5~fkI_+P6qZ5q6egO+gEW|h_ZHJ& zoeg7i64z9AqTh^L{@uQjaUz4x3!rP2_!H%CR;6T;;~8Z3yueL zK&WNiOq}!mR?xlX{9J4#O$Ji7 z40zDm8V^M1P4L1tau`?DMO{z5IOnr}XYLc2F8e0Uj0RjMS!^=C48kzE<=DgHq-QqJ)&WHKc`2l=05YVGi}K%hu%?VYizM(#_bO_l2A&SP zt>-|uXslc3hxy*0aZ49ot89<%VI0esO#BLk`Hp6nkX_?I8WpQL!I)V!EXVp02{us* zU@Xm;tFNlIM*cFily%{0z~-5GQeQlVJ9iiw2T%8!g(4Ihc!gmM%E+6pZP3V?FIPFO zl-cO;PFF4ecr7GGmBNcUmEtqjfEfs%9hPoWLs%(l znankKznR?m=E_m3DSHJm?uQfEX&GWdES7G%3(u0nI48C{T$e~4-CMqg#;>G>hqo_p zlZ$`TII~(pp3$$zwiN}%?Cf+u?mYI8y)dhjXdQbx+TwO5d)qQz0{SF6?J!x}$DDdH ze^Qhr-KX%F8?m+R5F|`Ph~ij6s-uP&d=z9~NXhX%2YmF69G;|wckV;E4#5KVQyI>w zZjRBmISA78GgN)RuL2FZnE$<;S>*bSHxKL%Hk91H6`9%pO37}JZe+mf88q1!Gs2jc z2KP(Jw``o;C#e*tHg2J!r35Ie5)4c+T7gGND^{xK}n5U z!3pGU0YQJgz-Pp$_`qhC$8Wn>*2l>P5f3NPLTaFd9Hb67P`PKtV7&Gs(;E@IyvCV& zRj@hEmPYo}eHJd*)82|H+wzb%mV1>(_Tfz_@jK&*z+&K5!4A;M_v9k8FCm&b+J-*1 zW*wC5#x~EJ`-p4U?3K@MT}gL+{gN7E-0Pf~o1VzKp2=}!C|Dm4o^?O(U~o3A@VLzw zzMWL$hRxtn)os7coZ?v?>!~)OdqI+83<^k_lf#35<mgTkUkKVn&Fy%7j7P?G00Lx>^M*MbqfA 
zQMo+kb(rc?_f~a?O@9p6j4&7Gl3>j4QTB`GV5=LVV-;-QiWH7AZk#oUYC*i$UfDRB z`F?i@R|G6^J29!SVj1Z!nrDPyQ9+tw?&Ok&qg)=;QH$8noe!l-s;Q&IZvb6btnT8 zVY_47M?C0su4?RsGVA7&{D_SX`}ZfIh9F>5zPlNv>Wwoe5433Zj2RcQxvN7R6E>)`5v zn;NW$H>*q8Sz5ZRHu#NY9T*aj&I$hh%DH(J4>?ZVU0SE?(0MuNne_AKWpNjxU`gAX zn9T%)C&n-4(oPNQcvk52fc>K_kC$u2;F(2;oCSh{V2BhUok2`6sz%m09YfofTTnqq zVRgvcCR97+S=~tRfmghgnIcH>0HPv>oUj`Q+H&>J>gLMnakI52nYKb391>Z)Qz_(N zz*uEU2&r#kw@@yE9X3&GLg>Ra!KqpQVW_xNPKuhBgI^=_EX(4`!k0hJpc9%#gwaaO3s!oA zSE4@n>2LmYbV!x=sXtGJ@(T}wu zh33uI>)9TJHyB*2{#ZfS1@HnxQ&? zJP`-?%rI7r0~unq|1Zm);>#qPyVS-HiLKGi8)J96(D-Lgw>zcwMY1BJ4a z3W3Kb%nlH~Ud=k$zmeW-re0D5^cDUyCf1E`{iB}e-=BP&F6Ba|%P;hyta>p7&ai zXhZC=R8Zg6P80)mp$jf>&@qg_eFA6=odUI@E?`gAcU_c4DyNp?s<*FED z6K1m&19X_ubi-yc)VGIh>T_gdnmNLngTC}C=6cBw2ymkFch zmwz4m`;^N?C^1ZmQNcfcTmVp)#MJlMfd;+!yqpuuN>S#Ei(jmK!LeHQ!_r>Pvl|+Q z;Oes4IL>W<8Qm!egatH+3Q9x46tvOm`S?cR#Z}GG+X}q#L~Pr*knOMV8n$tAmEt{I z62#>xs5x6xE^d;gtpE;ejIXBd6tgtO?gC4R>86Oy+@6vM; z79x8m>QtR(R&&vIk`FvlNJ!N|-=^t1{cS984NRC`Jp{7STRnJ(RN-uH>dZ=FTQvOf z6R3JjJC;p__p83{r3m-su;%*3Y=C>cTA3E!#w^L9wFdkqOY zw5tt}dpbPqaK7G8olz6QE#HPc@H+68a6W7!*vk+tF}5iGytiOV>@y(UhlABd*h`bD z`NhC}EtuCJw^*y;_kCQ`URZpAj@HjL8h5E2U3uk0(Vw!tm9tWdGV_5Qij0F9b||;# zS2Rs4pY|A=$u=5LmaQH&1NRcwA|!h_Sk#n1R2RNZp={BQEpM|%{PGf$Q7rd)#c*n_ zJ~@(lU9pvfhqqn6=)({TPNd~t9L=)2NAHmdC{(yiNcWp`1!V)+BkfpBDf&edZ3_m2 zRMhN(<8}4EG|6iC^6Vok*@qim8U9lXppv^*HfmVA72_=D1gARcLO^v*2lZAW21F|d zI#v5oTza>lu-@-U#FeoI<*QqXPnj!G&OqZV1HQGyzF9Z$`K+&VQg*F28nF}YDYB0a z{Mm$B41TddZb(gk&@a@>qJE>&r6N~dIZHlqx~A?MgGVZpzrK}vpT!hsQG)UHn+w#0 z`jPvio3}azr-o@+r?J`3JY6Q|%Rjw16x&vDhz=h>SrSi87tMhy27(kS2Obj`tlZ)X zb9ohQ?3kvV^;9u!5@f8G_egu{ch=-kZEm*3W;f~(zt0eyF4-;N{gDMd5z*a?4E*+c zdlKGt-Zhpq2NB>a%Gk|R@U&{-Xm->JmtOU+?409l@pZc?EoC5$DTxqcXR*(j)v2Hd zY9X3Y%T$h>WvR!6SO0^WMU6y&uv!lmP;3yBzt+|kCoel}pK$n&o?kYB_{RWl=?UEI zLd^=g`}%VclKuQdW2uD_5(m*rZ#F9B&ZU>t&p4eAW0)&@_4n=Eck9S^**afoF4o0D zi)Y(pf@`|_A`|CahjIx2(AMd+%*n9ADe?71OY$0gD>L+>s#CINlj=hAd_+_NOcVP> zlSYSJt;GsrY_MpcwJx$_WxJglq0JWUv3Cw@Q3OcUHlfMKmL8YzXXAFJ(g~JXO`tx{ 
zkgc9P_;oXF>VhY#2Dd;}&iAho%712tx9TpgXP6}keArSKvh_9fol!A{m2*ike; zYa)zT4;r>#YT21PFat%DQG@`5_6)u;X7)j45CQ8;{&T*x(`BAXHS8(hD$8TIs-Ym} z76CHQZCHsbf=o?AE}fMoE6%);D8CV zYY4h2M|1b3g>CuzEjOo2sx=xvQwo*S#orcvhsE5zFuP;IJ}NHw&V&u z>Xuh0%ifAVJiEpnJ-UT#Zi-Z)IugnkIfeK{OGmkFPRdNNP&TC2N|XU{pbEhG>+es# zz_>>_}KW5ZN2DocpiE z=@m$EesVihGYJ28;^gOCv9ZiEs)1v3U3g=l#dhRHv$GJ~G_}L75XSuo+x-X!r+3Oe zcsVUas>4dwXQNK{)TCyb#&>m$<@RLsZ}hzT=kj=Qy=$qSKOMDB{J8`aQ30;Bgfd$c zP&@!G169?os*^CT=*vzIb>nU0SG`WwT^ZdY{_)S?=!L?kSv?8zTQ8=$j7a z-Bc5Ul8$RvRV{&#%oTJ~>*LalSl$f`WAmT<$F(9r776Yu}}~Py?q$Y@?E^ zaUfN32mnr9qZ*n6xxpqk8;?yLr|s^WTSI}esGIBqAQ`YaB#b)FZGBf4R0XmPsAj>V z-T@QznwIOF4WRia$xaY8K?fPt|JcmvfP4McW{E-PA4p&p9@fJA+n&j4| z+qKoh>m;SqHa1Y+)mg-@>ewV2*g2C)$He2q=o*Shv9{rCvT1A^-8wr-qOMiPWx~cZOZzVt_4n#ebQqqVqT1n1LJG5?>tSM_ zA^W(05<$T}`J{ zkTCg|BHpE2fTcMd!>|cEjeR ziTmM#qz4TR&lzIbYpPQKO~@J0#VVrqTYR>-Z2}5vnzBj4_YqSt0jEy){& z%hLaLeCNCtc2{7+E(jZnOL)v#UI=14?yiC@`pE7T$sCOTFsof&R-=Q`(gL@fI-^an zW?nAwn5+83bzP|kmb=%|jb1HfiL^VamY!xnfKO+;*Xk&jCT45u(3_zkVt1J2`7}Hf zLxsFn%jP1LE(-51EQ_+ZM%ZGv#wDo7YIde_)D3=<)p-&h1CQ{6>-DYN;Tf_v8yzBi z%JglfUh%Uk2{Y>w>A^c~Z7fIwA{IF$*TbdAYnf>i8e)K`IOEDhxUkUMT$j{rxIi%+ zmaLa4d=?fmG$H=L^EKNztK5_aitTcwwjP|ju`U0`S#nA$`Sq(_Yj~HM0{oIOvLptg zE4watE8si0NZP--lZl-?=wR9LI<`e@#bz}@@(rQ?&ViyjQs`!#*aZ1?0tYWN=qkSd z6I#Y#O@!qwHDH`v311`TR#aD_K#2}pFfjT9^R{a|Cez&t8Ic-@2a(!o_Ivn1Qkd-3 zM3VByj%6hk?AtoEf7L`qhfMOQLXk@LOEd7t2om5kn0sDrMoM?fh-n=R99JB|*8=bt z(e>Xv_y`p=`XfnHu*41(6ed3Xu~(I`U3|VG%e`&`=Si)EhP1oRzu4@&FtYeDD83Dd z<`uXaCa>gT!F_ zI}&v4W?i+bS^$SFFBN)uGEmR3xs$&l`6+ObU!>UBo4uQKU&uCr;vIH=PGn+^fZ#BJ zmBoFZW4p2NS7O>0q`Irh3V#rxMD#LHx6hL zSAM<{Mc&+DWVLQn*t(MPSs;s%(NxY^jKUSfr2o6Swu@QbZ`YJ`Ad&v|g07zZrtXN=RZ@jjW()PCU+U72J=cO0uukLKS$L!QR=_&ML+gOz2vCgEk z?A;h_k?i1fD-^jk(CmEE<(F3TL#&ZW0AbNjw(dG^CQ<1tBihHMrRQ3LlbY3dM+IHX zu#~Ttv3rIc`X$s$bz!otFByXwQ45Vvs2Qs8r|o~F)89<~{mHEr&*)L-r9){Zm#c!3 z)26blyY!4f3C?e(r^BHF=$F@Hpv7p-?!lzuJ0726)|9S*=Z*@O<>jr0x#ZERWfoV7 z2i0Tj22H|Pq^mBA1=3lt7$l=-Q{E|y>dLWVFBe7!KJ_>I{jsNx6ykzR*ydGOBBNeY 
zBh&xW2c!!DrR#~?)aSC@d-zEkGdyg?h=O^dz}R6DT_kkJY{oYVUCC72CV#!3X8#IJGhs&BFh(L3nIm{tgX|tW(`hVx!cHANAI_ZJZLp z&{i2V-@B>LvWHE3&R8P(M>Pl|BhT#`tZ;stCL9%H`G;*fT=KfZ-OjfuP5Ixgbu zn8oKoC`m*>`wpGSWK!{RsmK!7H;>x_3v^Jky*LulbPl3A^~e$X?+K`)@=? z!@P*mer*89ay$ka7Xf|=q?ulY!ax)qAT#g7*@$=%zv- zpA*I<>?Y)3hq2c&l4tjy8P?HjctZ&vLt>X0bDK-hU*3!k^HX^f^z}}92#nMEI!0_& z9nwT-v|ZG&j+5}?%m>YZp4pXxA4H z@#$a2osNC8+4V97Eax>HfG8{~06!b3LloG|w*#r)qIWG7mbcA>v|r+%oJdo z9b|?icf%N{j?EOcad6RG`=1_}CtIg%MX?0*TUMH3C~#txy0~g8Sd;Y6opbjJX()uz z)q((wUiR7SYs#^BlCL8&PoSC&62+yPj$CP=0;*1Yu@Fk&_t*U6y|Ecz(%F5o)ATiL z3^qOv8|bsjjVA|Lu1?9F0{TbBf%EqaxGB>u9VG|uCmDRZV4SU_)NJV6C&{0>cFAgb z1d+Xttg?@Pe^UL;m=)Mkb0AOn7Ij&)Zlmw)=&)@S8}evKRu?6E#7o-#YVRKx*MuVTr@iEl<22sbD=fE4TXM0v2i(m#cwS>;}&_+QVb%PQ5eWzQ- zn-$NmVs8h3{PAN*BP%z-kEh<-9Bd)ltv@BEWp&tb9Q)ScxRJC##7dH^`ZvDy%88$| zFKSO3uC-HDus5BFxF)8f^WKqAr!yT$S-&!=(DVjFp>n&Hh8B$wXKtcwZjNzs8X@sm z8oh4Fo-)gI7a4@IheO+|34EJLOkp~0irdN&b8IuAhCA;bHpm4is|fjZN(CH~vqAz< zc2_Q&UjLq~Bs3HzB!@(>?7q)FbSJ4uEZVFZKv~cJ;vZ616TID~jpNIXL2l zcpNF`tlQEjpvW9Kp^Er|Jb3!F|;rPxxJA#2(Z5blriT7*F7m4F%eTph@6+tS+Of()xAk8_<7W& zPBKe2>}X{{?io)eKynhGVH4O6J{Gjk!G`CLKg6fRzcgb4W4QvMfUrPcAD!a=F2hIZ zecMfg723HiUP3+Zz1D&lzv91&xZ+k}RcQRzE}iTxPt8_0X9G-|=Q`LG35q!F|B9t??V zttiJ=e4lPJ9cV>~i5dV}MnYRwPo#MHYLi}s&~g-+v`vQ^HIwcA8_f_ z_hvBVRFhn$|1Dg{!oInhB5xY~h&qnI0cfMGhJnH1B)cy=GcMm)kO`jMvisw^el`Zf z!ka8M!_|aTSNYws@YzzdE;H*DWfm@3H~*UBcEIO&`J%D9syyCBP`nrZggo#5s3>TR zaYw6cKvPOjes?S%{uk6 zCtVdb88;JSsoQBYJTx2}TD&G8Ez+g2nu&oR2@(I`!v zeCUxDE7N=S4oKKC;h?1Gk1RzuR)picu1?yyb#bc9C>IO14s?c_sBzBndg91ME~k~l zD!EoY14Ux#vnOa(kDBchoGY7mWM+njzg zJ(JH?fMN%FY{N)?t~_9*zKvS|AJNd?p_qyQlM35uIrsCZ78iciHQG&V1u2YA=WOt- zp@vU`MVQC9t=uvNW7C>Va+(1NMd`<<>hP_zFAl6g%V<%7SXwa8pVM?l1c%jt|DM+c zu{N_#0X!JxmXmF@b_nfYiZ(w8cZaV2#Uaz2qC(R)>;GI4K6P2OvBjiLQ_o12X{9-{ z2iM*H5Lpq9Xz0F1y>I)H1Ab570&4gW(}?@qK9tm#7Y93B<%z0*x+YZc*-#|VuJ4+P zLSAIx3uteL78~um*-e>A)pO+Qm`?*O9`?&gVO%+=2&$a_qmJT5KCdg>tWCcpe*d9e z-Coxb8hKW;cq?ybPt9_1!N;IWU^nL>D6x0Bo0EvWIL3XvnOtEw0HybZ< 
z4d&RQ1Y5ITIW1y0&z-8c;&nS4zf~JYW;GwGt4LAr8lhx$tfKs^FDKr$Ef1rIA{*Dr z2#fc5sW@b1Ru9Uxxku%wBXH2oR2<{j9uJ0QeeArER+g?@z>{SgM^D9$RDl`xG5rV6|+{^)yPHsthESsWbAEc5b8m+U$;qw${YyOBn7 z43sxASz$S97gJJ0h%E~CO`&Sl(2_`tG6*I4!iw)bfyDwhN;H#K>zHt&ql38>Uv`v| zh(FRWP?d>O90PMfdCcxA6unXSg8N0e;j|NczA)zL3pUk^65>@F>L^-E8{@arO-pH5 zt}g{I1kD?9z=JmYL1(#|zMJtx6uVT{KCMgsGLtEcG+5M4s`TD_c!V-w^EoM`<|wrB ze%`ef7P2IkRpom!(vvPo679SZ!vPwV8@%+k@Sh`#%nE(=sK@o<#6;IN7g^sC-F3nF zSy_|j@~#oyJr#?kUz~F_7WeS_zanz9d;mz%oPLAn>GCq z8>9=6!T~e~{eu+8PMNFWT--hQ2b6|BBk~e!yY86TL8eaVW6q8 zVwT?Nl(Pxs!LGA(IpW*Mjbq`&FoU_x^PHIUWP@rG#JGPFyVM-jnA7huM5z_>0G&474oKXuhPGY3oMfXHormC_*W z>$zxi0dshQ#oB^=AlJX~8ColLQWJ4e5@o8;lmf{eZB(FaeCd6cAMD4+35O*q=Vc1* z2gy?V$XE44uS8c-_{juYTR;kY^r}9@WdzpO5ctJhMQ-Ym-rc50a2SLPQqrv@Wo3dW zmO*}&xt()VU{|COr+%#ck6&0b;_k`5{VB~@t?blvRCCaPGLm+E*v5XT3_|9bT>%Sj zMAv_IT3)cF&y{j^GdnRfCiy8KDFOMg)U~9~TG3H?b$0+g45vE#@)BlOzFYn%8oW1I zc~$-KM;9LT-jUl=P&`|v3_dzX(j3)pvr8xg`5^us2pBv7bL?MS7ihQ`<68C1)5hDD z6`r9CL$adRs6XrWCn4kW)!Tl{+}Wu~ANqv)_=hd3XJ(}b_5#lOz5LVBnzUPBz4B%L2TyPJMuBv#knoMo!B{6cw#_Wos2TT9 z&=0+@J9LV<6+)VqhT)=-A9y2R=2X=$Ihk`b>o{2bPF-j(=oyKW2O5i98pb1ASTm#8 zy^g`VrL0Ft8I_l+S!-ac9NQF@YtGBG99P7Fv{Lx%k8b4MxKO+3xGGVAg33io0pCGzh%LbmmeBDn>cXX@Jb0hWB zsK)B>p9ZvxHkVKMoQFqYaJcgDmJw)slH=vcX&td`RoZRvCjQb!G98f?{&VKRC#%>f z2OqgH>;)aq8$=yOfmlLn7H#+hGn3?sw{KG$yuC6qA1RyTl%kDqzE4#NSmSh)K;vd* z;B1!A{LK_)R#ySvjl?z5jox*BRh|M0B4;-PSOT#NX=}e!p3^VFn9&@AcvvfV#4Isj z)vFA4SaGCpt87+ahpyVUoWpKh-tG53`vQ~cDHOMiTrGmX-zHHOw`w2?pcO3&14lwR z54}wtvp+8SBg!W#juy3vO18Pag|7U7$wWS1$$0Vv(WU#e%Xg24x#LayNWna71bmsl zKVjJ=sT@-Xj6*R6I~CFa8Lv$6p@gwifqAg<&7Ju8hQF{^UH*{n+j9g}LkjkqC9wNA zTg|vAc?$68A#QH)Iy=4*9F~Vn?O_q@LsPqq_MxC8AH(I2wJa*eQKmR;rDvKbZ%I$A zebCgf{hF;7Tzjd6Jlh>?npgg?qEBFOFbT4*@cdWcOxzwfN{8V7BpYy>ZHK6V&c3!o zJ1qmcvJ~l1+%~*mHnEFiVxw4VPHpC~7~ABToRx1L5ZA*hEdHBgx1~S$oV1!Qxy@iZgY9|kfx-T>@+$I<9oKcxFctFWdfNqpdM5?94E;<7>HWo@}<$J;SbhdbXcSD#qZ=II}U9_+Yl53-p;Of7+kA2N*i!-UGG#7WLG6+7=gZ&~#cuYdK|5&AqH2*_T($05> 
zm-k9!p)+}-GV98sYHJ#|xzDX~Hu$E{89P1D)XR#IHT6(;MYt#3z6H_W4Sz)JLq&Uy zIQ-DUEwNu>dIDssB-j>biE`eEMUcd0d{(QRlSDAVjY}>gy2_1S5Tp@|sPMh4LODA-je;T8giIwQU`2&xBW`)6_0{dF{=^AN7vgKh{FsAB8j?CkK?UHS*c?oNIl@2ILKEl;zb6 zO--XNyv(vzHmhp&e5n0Y7#SXsLK~vpfM+$e1Y6N>45wFjH4LevM7&!14CA?sQQ5F@ zdq@&-F0xzj#QCQDCDqi%>cKD;M)Ic41`MBQ!NOdn6>ziP@}kixq3u^7RdjszhhfjT zxR%l1Gt7NcmjlCxG>1p3&z4@2JSK(osXqqK-iD|1%d|15t3?MFYvPD5Fq8F^#o;sH z1jn+g{`dB$Pd0o2HT%?%-9(G{2rDWebA$ zamfdtzKkndZ%v--uZ%wt%-I(C zFSL*1lWRU;km|>48IkYqq)FG#gdIRf8nOX;I1IuXPAYtPjMvQ1iz`y8FF91s1hb8> z!*aJJD`2~pXhE^Pv%01CZjO1HDwOiqYH;!n1t&@dn=55Ij^mM4IHLoj|8xC-puP<+ zPgTYtPG(2$=IW<$MW*QgD7`Bgk20UiTb5vUQziV?qz!436vG=pIA}Djh_VVaqkJX) zoRlB-{kAuo2xDRW%Q0wB__%}v6gpz|(6->3Mi+}L5F*xq`YOJyJM!djjA3r(*iq#= z`c)>Ho3QljuxGM+?sr+XD_xu3MNN~{_YV|P-w?EYlXuBc*XG@iLW!Q3)+v-#Yratl zTjWfzbk2l?FGVaD{Dm;dI?W$Rq6$M64D>OQ14d07k3GL2MNDy6ct${tsmV257U9UVrI_w*_hkq8a%fkM+bBSXQ7GOQ zohBA6e3Y_u!&)R8WrPGIQDFHia>YtO7?sYgZro1TVBsgcO@dd@e&#h39IY+Gfl=Sa zj|d;6EP`u+I4>4SbSd7bCHj|C2oQf>`YXp^h!y-1B8lOLo|SH$UDHbTSQlQ-BfB4l z#pF+27wsL=D0bWUY+X6Jxi5P;>@A_2ps9K0D|ZH+>tVY~!9xs7;-Dk}hFvlqO98<) zW!5eDplBV4B&y;cO24tGfz(BNZQ8I*;E;ND7#xKW+$}qQ^YvRF)3lKnuQwVjj5az0 zo0?lnBUt5IcF8vHF#pSYx3F*bC*Y0}y(t}12hB7R@3cVxakn3M{1u-kfQ|%$#w1Q? 
z@)7R`5BmG5J4<%(o77w?mL_Mb8B@_al{x#fe$cWIheQ(!mn=^&!n53KI$0bwbk(^k zs#y_iJ@`glJ`4P>Fm@wJ4oNFE$#PvT?{2FD*yRgP?Q5|#x3p+g=cZ}OjZpm2F}t#O zUTX;h4CwRlC;2@H>l#3j53@^q{$~7%#U=6|wKci-y0x0}ZaRaQA)IOd@cS`COfF*B zBU5zizvfn~HcGe0R^VFt(i7?4A(s|dgPOP&%yC^RD}$cOK``CQjT@wh36oc!XZZ}7 zuXZ=j2kX=4@6z#ge2lN!CR=#@d7v@V?&&#IEIFzuffMA&|_3#tUbmVLuO6P5mYCDh(XDx>4Bhev`y|yN1jTfruT9-gQMr_V6M@ zD(aztXiZ`__5P!|kL^;59~cgH$QK+-ke!1X8xWZ7PJS!aqhM#?!P+Ke{7iJVTdOAP z84pRZczGDh4{1rEzh=x0NcMkZEFQzWF4aqPGf(%);E z$RwBq&gN2yuB(<{RrIdu;Xo|r%$1TGD7LzvR<_7Jjz~ogr*NSjkN;%y+CsvX2_$O6 z{DJV@!=tvJ3D7JMl18RcLfh16v@NKWa9~D!t)UG8O`O` zsAbi>>V=V0+8O+j9*JZE3bVx9gG?imoJ@47sE7eE)|?daf|tQc>VNm41cz6kjdc!p z8=VhQLPqIJ&KQKxYFN=B`?~!8G%?9DnRrUReiNJ$ov|~t%-Tk`{Y^M3u})-83eKv0 z-0@;016`W(y4bQ^(a~7(*GA1`3S}b7@*I1)<6n4Nk(eoZhcQOW`c?0>!G@!WyXvyo z3d;I*c?|?VDI2Z+es^zz!1A?!dv&kFVxvCV=)i$JbsMQKF7)kG?@iqoXja+J>K4q^ z%M;|l509T759O)PvLu-3AKC=Nk2%lTD}nbugnHB8BJ8EF_$>mwV@R;&?J(}ZkdEMt zG4*YWi&N}!1we|jH!Gx|3-Mht;@b^>x;l#j!_}sJ&`t9>BQiW2mM-FA(ykIn*rtZY zheMs5l$rmpqBHSJGHu&%?@UcIY1yWhmQJ}fu9Iu#Hf1ho=46QiqEoJjNbZ>1JJYD8 zsUs#Prskdkk_(W4`c7^sl8S-}f{9DHpo06__k90={(d6QbKlo>p2s0Dz%b1mbg#v+ zk*kT|P*6=cd~uo-Q%yyOym?WPjD&so-%5I#(VEav;i*TWi_~jFA@enC;0F;L*5My) zRQ!mgp-X->5dv?Fzxsi<+XkUZ0z(r;8WbKWmHa;C9Hh6b&If|z+mtk^7yF<1w4|hj zHS^3{T+qzC+)ucW5|Rta_loivN<@5CKa0uqZly}WnQz=Gx00B<398#a595RIN_-B? 
zNi(4mQUlIq;?UZT2q0kB5--*0Ry(s28QNw&3P_Q4QrmjU7NEXwsEAtqxKr#XdBEvQ zlXf%8Rtqb#Dm}N0qOIZx>9v-+xoLDgE1jk$gw~MnrduK$LNarYT{~ZmpbNXInTf%H zX07FOu@OlX9*9bW{v=*lhUb`PPePb2UeQL+mSCM@CoLTfv3K-X}sE{+5XK3>|mGJbLCHlL-z|_FpHm-JoKHDTI+dM=v z=?xTT+3Ku&lYWbg&!y?M7Y{m{q;{!6$lB$4>l^VkW3U0++VF{BU^4T}6Ms#DVm@0^ zvQ&-I^Pv?pP_Y(6U$2jL?iR-Hp++ifHX(s+VQuGJGj}RxCmp-N^F>@J@J7} z&mefqN~}1huyWU_P!&ihkW|ec9Vkn`JdI{eucSZU(;gH#Auo%@F-4APvzFE9!hkeq zo*7=I=sk)W2OhhCT~HC+QkrqSAj=>zxE8}*hu^#XmWy-VGYY~fpS(WS6EmAq-4)sJ zzR_?t4KTe-1J;*FXClX|)K%em9wwSgT%6ErtD6WE@;7H-+Eb`|eZM}B$sHQsvgq(T z`3f0lt1z5s7aS9jlu`rjtueVpG8>6BV_qfpxA{D8m$8|>8VKudKLecO*2ja1Nx!Gs zTg8(`hwR1h;QPZcHvqwAt@rsUN@oaq&1;coV7k(C!C0U38v|#l=?yrP@*D zoL@IGc3;o!x=iXGJ5K{es7s;UgXBEA%h$vhv3rJ+t!8@1Df8NztxR`h<_5RA9gOSp zPYuGEQDF(-W^@xEzYLo&D%6Z1W>Jc+Pl!{xid{h?Ywi~&zHX?*6tlVcUBmNNd!imS zIkliQaw~Q{tLV!z|1AIgRY&5m;}G|bH^a^Rcr3|;bQ%@G7ZnNSaSmf!9?XlUd%sK1)ys+miRsGNw8mJr-?3BF<<0YO6 z6{#@kZWeB^s!Zn*4Vnz97u!Mw<9`Cs71WOn1ZMytYw?oe-*I5Gve9lv&NdZ-r99QF zo1=HX9dgd@25e$k;PZS?J1;&Z#y_V&5Eio6s|~?h>o}5L%{vt)k`v}gC7bleHNr)b zgUy+SL9=YqYvj(r-xTyu-dpLeLxJyrV_5e4dA(+y|-*Q3t}#7gH+CqB`)VbNDT<& zv$RkcyZ1X`6MQ>`=;dQ$J=ODA00Ftx-ia?rg)B2~9HgssRQK#k8pbFi91x(2QZGDS z*FO>B?Vm<}8}F;E_!DU4(Ot$JuvdNw&dtCBL<_9WvWWTx?!q%CCxTFuBxN^dI}hN6 zZ4lOX4<~D;Q092^bxo#Mdt}?T{_&)l18E#!1Ep<-5vuos=$u4WQ&Y7k_69PPk<0vrn2N`w8)8fVI6(Rjycw&LSviST)UllA>XpkFd`1N?y`eSs0uEJ zFTS+ws?KXV$SnrmKyjId*0-cL0L6;yenE}UD0NvM*HxV(k<*B8M;1cLgqm2VV|(li zx(uMRsu-6QeLVOc9cFZXLHc#4vp`9Tj8iU&S$mA#js#7!Gxn zJb_v6I8a8Xd*LPAk|%1UI@j=_uM=Y%u~g}P7XF-d(X(nM6ad`Z`_$A==G%+5Lu@CgnQ%Bx?%2poJ!TWeKWfC<&A9*LrS+%~Q~ zfK&tJwn93nLbjy5ujcsw1Ge#rJKf`g0^?L3RfHWJZ{>twu3dd4LT&=;2Jjtw^7JFVG+5dtg9HVmmnh&r+U#ZeGK)w?st68b%+92=)CTjJ}GytVpK()V!vOE zRw@exewMxFo%T4S3(G{m#OFD&zdepcn6gX#W$LF=-(n*j!JBc&*chqP7`k8XZivv_ zH4STv&bq*WRQMSImq0tr-vAYcR6(4j&Dgs-ze z4%2W3#i$(T-Wd?z<^b4!M9J*VfnIFGTbP|B#2;7$dqeH)dIJHUA3-ysvB{LT8y6lb zOG>lsjf6*%W0%-)8W+`;Ef}M4t4oL+xc}sl;?A9^h2)?Aa;*)MNG9omv-s3C0RCX= 
zA?Tr6DsQ5+XR;v4L8!h4$~&g&WRmuQtDXD;c$12C4gCWTfN@d-uz@9tIV{f{mT)#c zt0h(<&k&3cI48^-`C;w6?G2#B|At5&7H)n^*nFh4*nl+91&;~$(ncBIb=3>7tH4W+U`id&pHc~$?KFhG`(da1wF>6-O=|Y6W zbAKx3ERs@47>OaLs;Lbodd7baiB?dUm$@shPZWyxBmUr!_!Ip?b?n<73bo1p1UJhc zYcrvr*gZcV zlJM(*g0*g`9zL>>H|m;ik-$JEK{I^k5nqQXHG~G8%w&y3b1Codl3!Og^6R|W5wAZr z)2I3Ts_bQxm_DJvM*^l5R%V9fS}c7#G*=_4_ucI*F<6;+O1=d`qv}G7!ow;?tSy7B zPw{(>5iIg;C5@onY9ND>?}bG1`$-j_tJPjfNBxA4>7OCi`^ou?SZq;MotAEo>XUZ-TcW1DuQ-%*dC#9 zU_H2$$3m#iz(?9zjvYh2H$1j6_7zK6qi)2y2*Q?%9zCzdk54lV%*#+=^b-VB;wY)c zF;^?E19hy)NiIHW36MFo>m^RaCwuO4g_yifyy9}IKR5Aat`?iHNTH<~j8TF@c$QxM zt15t_WY}m~BM=NYXMn!djID0s4e8N8^(UJ+&DR!Zhoh-n=54xv&j#wo1Evft4FdD~ z8bo9IDXZ*4T4J97V_+m!!;hX>P}58ArBV<_K2zur`2yfiwH#;u)lg%#ywGb_(2~3% zVU@EQuD#SgNN)lu@Emmr;4#*3a!qp_hmtQrpoo=u8hEJpaBPB#x36;{TKS-m zCN&(h^<(x;#8V0P%oLtcPcN8vxC}qwh2h@2g40&I3CFo3k@_g*SEAc-uBw`PyWN0@ zog||Q$I1VVF4pVv>ix_rJ*NxT_xkDe!rcXf!-N?5M?#CGX?TBQd`6p3iC+QLVZ2`e zd&XRmTWCAVGkcf#TVqn%4=rc~%1p-uw0V$VDgY~`f>l$gX+ZuFa6Prb3A0{zDFjU51k%IWb|y{TX;pByjz6S^v_n#`A1cAks8bUhl$! 
z_AC`*-aBhdE4Yxknk>P$lqz9*JURZ22O{Z_u97_S0?pGM5fx5J8*>$S$21W*+uc|Z zt^I@FcrOJwSpdHpw6c5$Kp?EV^WLJ}2p+PA)Y>O}-tA@Dsg>>L)g{lAm_syH4Gkt& z!5~{y8~lqfZ^m;8ZLL8>;`O~~kF|4Z`k|u!I& ziZw+|TzT-TcflRgC9VK~OBmck=9?vFg0!kRQHJ+h$o|h()c+>o;fAaCIkvs!zQ}%h zTLIeeQ}eD85+mXX`g3WCG~`U(fs|QTfCS(XFKoY;Ds;3OxeU*8!i(;$?DO&-nPtKR zvBLnep_>8*Fv{T6*H{OEs%d7#s&2E{-1X7am?HOMJ~tkS&;1@cx`X;yT*IE+3O9fR zc#gNDA%!H>&`#Eun?sVIY!GQW+huesNc8hoyV#qlBUVtrpk112t#k%=J>`spF@Gyr z%(JX6k>bsP3D}G-TV*L5QI<0c&dqO8?SEDN;@-Uk_(bdrXlyej0wJ2yN%(Z8Qs8G| zHMxjSdFs(+Z4a<1Ejl)KC?j#u@@<|7rrWrb`ydtABQe(ns>a<-#fH~HLd!u5fc%ACc6NOD0-MG#z20Ug%wi@LQv=U^bm6qr4k&%|Y4 zoAg{g&!iw|;H4sBe2Fo)K3e^-5F+q}3TPX4PtC6I1@81THx8*lgr8;Sdp=lI-EDig zMqkV+5L5#AOwA%-(zNAok@Pa$tc^WE<7 z=%YYlngpw(GRVbif8A}e3I2U#??0avPNsxmo=j?&p=$1ADL*ycO)_}cBAd$j_$R5R z<(;1{ViTm*vvd7m$qWp!(DPECViL{BRT};?lgt0g}*}&v5gVPO6D1d7$DywMUHAOyiMgm*7+cBI4 zgTSiEs02w}Yo5pOjVL&Eu3{wNc8QN?+1)+eamm@bAI{j+{*R#u8YLQ0<+=d1tYwh@0T03iMK|r@1Dq+gsZy`2R?Dj zaPt1rDic1~#(N?P0bNOuP^Z7m-$IHCgg$m7V4^xaLU+8<&90Is?lcJ~op%}frA+)a z{~UCIi7~-6DjcE-28f^il&-P)xE-LaB0PvQ{3Ye?Hb#G6kd*NyXq6}i~+e8m>e zcbHjtL;a^ItOI{&PtY!(-7Yn89bU6h@0bYTJP;<3wjzHl%W$Bj_hW6J~Wt}(dLWgks!UfkepFhnk+@&8)-E_;+_nWYbr!>!GxGFh$aaMUs0 z0Z;7uk}UBb=GoM-^^J{*oOmSlA`394S^DG6q1{`qQk~9ZT?W%N>8P4o`dyga3Ao$Y zIAbg4!Cu{72H=20&bVoYF#@*zvno;C;klh7Y3Wg7ztcMJzXml(m9mloQ=&-T@atcj ze0*R)J48R1`gK{!U1LA?E6@s&Erl$cag$O?)9cOPN$XG5Rz0$9R{BN38(6^yjuvx8 z&ry*4v4Mso0FR%7rM=e1oz&Z^J51GZp1ao%B(CA8yH{Y6;r8=U)8P7^xJr!67bsnN z^!ZxKkC+5E%pS0%Tc{bgN4$acAT?Q$R2(Oxbbs$e>8mHShxfok=p9&PsE3b1slgw9*5Pvu70HmE<17TUX+)BQd*FTgTM zB>|a_8gIxi&{*%XP^10m*45i2Mi|P+`<3;KrS)ygX0C8G9-X8*(}o+eF$~FXepVw@ z^_5zrE(Ikyb}S}eS_!Xvn^X>CH$r=KY78478V6#({?H5q%z12^1Sjd zVv^FpE@)~5)(Q49NZGK$1Y{MK1^HdA*ACfh*P`}nxTyfKpL_E=e(C-=E97xC=XIrq zv&QYtLL9eAoeHnAw1tg-kfOWfBoOX|O7xdfp6?H~67}n59D0uVa1j*wblaN^cF2wsO~hge}%7 z_s3HI^n6fQg)W+v?R8@vJ%RQ|KAiN$AXapGlmhqjY0dGBZK4jXp{?#0H@{m{U92O8 zQY9!&qzcI$(yR8`L%C;}<^*a`F4$cN4EJ-F zydiBt0BRJ)N)1VA>dRMgho9lhp`!ulM&0&5bq1=z@RjZA^llZqj7nx~#Jr)2gTy3T}GhM&T?OLZvSS 
zJ&K(RCVCdjKZooUPw1Dn`3b4>v&}iD5m4XSM6Gs6D)Zir7-Dzdw?ktxV9lfIbRmo= zfO-PrG6>Y%PaK-#x#HM>Mn{O*>OIM-4ugB%gK)s`5^1NmNQiE9(g4Znvj84!raG?S zLwmvuy#L+gN`&Xs`H*IqUr}^^F`qIFUn&A4NUCmMbDT%A^QBJzt2L{p#0TIun;u^C z5dSEdr7!~#s{MgLA*SrxArF?S^Wb(7Es?mO@DoPIU8+P_`Mi}HwrpzPF)!Ocfg^=c zr6N;oQuQoISz=;V`f_OdDplA(UbZkXkQ6$H(LXN=)_9xs|Bf*fy9KYFPt!w{-jJq} zpRNH@LZ(epR8maxznA=)`tpE#l)g#`vum5#jq00-@k|a9cxOF6kRQ|W%(%uuJ`Zg&dP}CMc87bAY@^d z&+iD<{Aa&>31VLNp)o#}zdh95sNXli2!zhzj^%s5={4nXl7<;5Z4zf+mFpLW&#x+P zQRwtj$D~XyZ@`P&)7D?mdF!gMOyP;0D_`1x=-M93_+JfO39w!SbhIcuAXxW|2enxy zTFRmoqqC91$9f5w=1mBkD?lX)M84dkAb8IFpUMF9Auy#GCtMz_+tc2%NFl3whcVCi zYv9Q?;XQ`L8oRPKG7s3$?i;n!`}Hw%f+KJ?d=rLSso*l1l_DWO96=+o#u}f@Ta? z+K~iIzrZ4T;X37iVUwPhevi1W@Fa$X&baRPOuLyI?On#ficGVAdVnxe6900Z*)Duf zkoC+OE*qPp3t~K^)%hzuozT=qZHjr>52f)NMknA~Ntc)*+)CL2SDhcOhzrcE@YC`~ z5r>v!Q-N6bJA(%*Ld=Pa4*(X!zU$3l(gQlz40W^55oehX72*-vyY}&RrU`K%Ejn!p zYD1v6d^>a+W71=LY9*Oc_kL3$rfPw0K;AI*3zEw7^C06+rXm;Gvfbd)LjcKScy8~j zzbD!xbTT!P^t?b*kYALw2BwtIwSpwEA`-!6~D9sRyhC+kdQqISXn){>&>@O+J*(Sp*zX7 z@#ZwK1FZNaH_$t!fmvITh9r?RPZN^o(KsL@2}9~-MbS; z4uQ4aNItg$7%NgA&HYa4qlBI zqoSfNSU|(aVglhFqK^sZ0zC0!ud^zG(3h`GjZZMQ!ItMe6Z?cK2T#f{?WnYdmC2Z) z;|$o`^%Jzy|5AlzK_Q#B8Q}qig$0|>``NNR2BCwTT1%$58RRvik($Ic(Hvncl0mQt zW(Ji@QcT_qCtpSm$djWUOtwa%ZqzG*SN0`qs^J>o>~mfcgA0!&MUhR$%~({frxy#_ z$y*~3gm<>h$Upe_*yjB~c*AcZQ7AH?69xAXxCkC(oAdjr!{OL#W01DBd~zM8qg#~- zR4h8LCsmgnuWOaOz&|gDvs=)RNdgM1cdZtkT9SKN|+&a&GVM-f2tMgCQx63Kj6Ur_3XN;<>u>-)Ib z_RJu;K~D`f*`85iK^a^t_U}+XQMU4^4e z_~&|7kCS}U#Q+K7KYP%jb@!73jESqtz(xR;b)vz_UF&(26EZ^UdET~N;BIiCs&hjl z@EyV?3YgTec0~*WM&~V~W6(-lDV?ybVp-R%n%eodgwB0{zvUcCFm8;8HTDr#ZL!i#Q~sJl27pSgOkFpDU$lFm7Z)=qD9PP%K^(;P5o`^AA#>u0~koh zkOvU$B4F^&%uPmI0vhkYacj$d(!Cp|e%cH;2q(g{E8f6_=poa6)buJ(HFKcZuscvT zB|yq0&pNJZq`5O0g`o!xD<^oPo!L7{C6wy%P-W`sw?n~Og*b(`A4sMVpjcLf#&3OX ziKV*RCD0xd3mlPvx#oV{ZKQlSv~Q!`Rdb~D{>0d5YoW;t(U<=utawe8R3YAcx*Vm+N_Zg56U%W-^PWT~ndeCKCxXHIGk;*7d~WYi&j`Ax-YqiV 
zM7w?e;a=6;fhKqTIP?lCd4T@o)an#gjM*T@N;YGgpL6s>DZpdzt+eC8~Y(B2I+bq?H(0PdTD2Y~iUpR^j!zFgNsuiqkfi%eIv_7AX$3UA_npXk-X<s{_uwb#s|EzSdDbCt$jJAPsF{Rb|ujA9ZTHxeLvb3wuaH?J`h z=INGc$?k=((oj&v7P`v|pR%W^beC0YU-_@8G~U3XaQ6eLBB%wN7l#x1&S2e$b#>f9 zjQ@tu$~upX61%#en)iB!B=y=>Y`C{xu$z}K87&3To2Pw1s1|>?$mIj;GWFAFQyXwI z%hVic`+3XXgXs*NBwrr6^zD#iU|Wc1fS*}xk38<%A;#(@!S06uKXG1^_e%TIZgODo zB+Ksjc&OVO*aWn-bK~@Hw82l!97o=eer``B>p1lj&qfqo`q(@#eCb?J%k1UB4eTp2 z^4cz+WKf;icdJfw3D=}J-U=ZatB>KAF@&KI18W^q+ z$m5b9{yPI`vu8n<>m)?gWw|XZM}BxTs&y#^JhL=ZA=mB=$V!8ADBzs zMlxcB2enf;+C5TI zS~a(soHUZ90Wtw#cOOJP9A|;QXPRT+sv`Ir_i@hYXW@`frv%1L@c^GVf!Q@)2XKFfhK#I1M(QqU+|M=# zE~}DX4BI}k4~O*=bP69bURbu zd_(%NQ9lB7AxoWgffJ5UaSRjsGXc1F_>kHgqaGHXui&Gn-ieUODdBO03J&AB+axWN0u)YmoJNknz_vjSLdQ`*r2MSv}38h;JDMU=@l(RG? zJ9^*mHWOB>mFdyIHCU=ivmsrGbVA4{2gp(UgJ3v>dHpBPWVq0vf8sR7&7|W%f$NV< zS!BrlJuDIUdi^Du_y`2p;sxN4u|gm_mhTB+;>O=L^6K+rU+8!k@&IR>ge&Y_ki~IB z`ZBRzW-&>s&Rgkb^)ysU<%D+#H6^e(g`&+No9=UrNyF}ea^c>d0B?R4L@5M-r_h-7 z8Ol=0#OcX)TG~X1vEXY2Y+-bi$KyZl)p{>Lylpb*YC$5JCY(roy@OCkHWv%#F-o}ek za0`^qkujiaTusX3YLD=p@Tkq}BzEh5*9+U9)0X%bRm7U_vf-mFInV~ClK*xoum9b? 
zXt`C5E6@av4(*c?sGz6h0>@gX8lXzvPf)+^>IqXYo+0rq@fiMj4HPfzts~n6Rp?L4 zrIJN~L)H$j=daslh8<`k)@luj0!RALqQ>x-QR@L?D_pi-EBh#$(gw~(iyYhhmQ@0q z>Oz!v?xbq+{hpS!H8JyJ*v6_tZJFFcCK#4!In_c1*7AWd>SK6>VrAevPoaFCo>`!L zlUjW2li$7sFjHYplzE1Q?iyWJ(B#&AgV z)q2fbd<-ymBeubkK5aA?yBfS|{QB^hws4ez+OC1pW~_iCfDG4KwMGAqx<2r}O|D6r z{wLEq!0%-IL%F9=Rp(16wmeSeq#4=>`aZr7P;dvxc-P{>^TLguwOyK10xp64fi!Eq zn3}OqzM-c=aXS~gnmR6aBI%PTQnjZLrLc?#9!*S~H`fal6AMjA+p~D%vv6?5wJGjL zJkKDWlKJcp+Q=n@`G>|H-~r;Ap2U(>iZJa zmL}Rr?1x$rm$!wbnH&gIp1GxY=E~l9TelTe&UWww4mGRce1YplBbW4!e+Ht&7z)FZ zY@OvHn?JC!xh}TLuTJv}3=~|*H3vR0l%}y!KfA><@JVC_LbTx`;Y){*F~hzKx3D93 zi*aAvrqmS^?-tjrbWMv+(UF-f%k@BkoIz`c@seze1QqXBV4ZP}aDs{bK|_9W_|Z<4 zfzAb%oBxe;FHWLeVAZ%IjZV!8M#ue-H`5Pj3Zk4MA}2y&AuyzAZjTAbj1eB%1gx}v zL@ixjStpj@ninE;&h+5+tx5b3i`U zGBm|OH@9j71WGP`_hYvD`XJz_bc>&68T2f9#m<7kIcS{lrO*93artZuHqONzzLa0| zIL>PoMRG6*$MpQYXCam#JFeG-+8XeP;fD2w^^}*SwL;<)jRw%GH5>Jk=fzg3yVkLU z6Fh8?3{nv!TQua?fOif2A=bslai?QFmj^;w zjNaS|goXVVM~X1yEpTquLr1rtWL`}{J>o}ebebDr2>YcMkW?!&9vO?!$W0#Dh%Z9w z=h6yPlNQ>}pv;A4=b09$H&EBwP+hhrUw$H-r@4x|hO<;gFL7a!%~+$op9r@d1C2sB zRqDk>?se_18%=lm1mnHKAfL&P#glri4tqMUF=ienRXIA(Nw%FF zE0jAI<9;5=uG@OtEkG47jw+va3KcNlZJG;#ijGh|lFgX$E0~pW2`ta6XJ?duT(yi2 z-9+d#LMN&gB^7N;uuLzf|A;r5R&<^M2l``}ICmLo%}xQaA$r7o2U+&#v#Ah|-{#xf z1yl_cNrqPU*(ZYJe!Os^N8won#=`HfQP2U4d#S_#)`7b_<_CNy2Tp@D`&wd8{nL6? 
zmewuq?^3M4NH6q%MaiQGgbOEXOb)c#Nn`$>MOoz-*1vqpY)zHZ#rcL2*xfXU?I0&5 z#Hm_Qv?#E5O3?3TQTTybnfdViRH+VpB)a|7xNySROsAQn+48KuyQlwB?(e-v3oe2E zm~r9g<+~2n_Q@aBBKGZTd-q(Wc2Aj?ru0b4?!0iV-Q!slrdNT&?mihAc&Vd)Sl!|G z28pNV`?@96P>9kf;*BLd=&L3)+Tx#E=3yD)KG9?uK;iiQU`8=VdW9c(tvq9`<_`Xg zPo1bF!1XhFXDr`{X<36|Ob$HVdr)`&Fg>ngLE_TKv0yVj7=#ahnSYQKR?OPb$-^l>BwDOQbv-z^7>ZB*TbZPtw{iIaUKLZtbT{?} zI}2DZCv77JmPYqU!XL^ef%eqaY8Uj3Qrp|NqrS=wbWi7cAbpyICCaIJK50D3h^*P2 zFgxpI7VlE+dD9~<7O*FVD`EE_^VDBIg_IscCu<>~(U06K_e(}yhq0g|6h`a%+T}0H z@j1p0ttT0cbssv=LcuI>HZ&Vu`Is8_8iRIi7+zaQD*%+CO%(4b+%*lVoW`lu^M&On zIKfd?CMk+%y$T^laRY0!G~sZ=FGf9k&zDo}PFZ`#z5WzB>}Mn}3n_8U{P*OlMo2x@ zy6iyxs`fvieM!flf7P&cOyth119nf$9#-p@LN>rdd{Xh~=9zN9wh z_g*1eDF5hmM$c&WdYM$?^>Cu4QD@5Y6639;I^$1_J72S7zf98pZu>bAMCDk!RYC_& zluhnKC7XziMSrZ7PK=<2bd-bQ8pOp@_m480hx7Nq36PP$LWOvgo%Vo**{iwF4JQnF zH8!m${C&8izimxPQO z{^u*6s>rmIV+f1(=CqfFQ$q9V@bD$F|E-M4na1y~{Dd>}deC9@gTbsxo1`yx$RX|_ zr>01JHX4i*<$gP)wW&e={ufs{xwg!8tI{9$?6wlbTCBRRmahu+b z|Kx7H(FkiDbV9_TL&hIkDlBV&FL|HAUm+()&(Y%(Rls@N=-9`8twh@5^Mj!;kB_Xh zOP}w=%w!R%t0C8qj3Bh=O&$8U&q|Ox0Y9U6iTx+IqPBF9@DDg zU;m^CD+Dd>Ei=cU^Nj0xE50MDEB4L3J*&8+*&u|tf!T~~SA}e4-lMhCR#Ho<&<4c| zzdiV;_ek4F$VM}=L9q1C*q4~IUgmP5$|?iDRQ3Txv7rZxDH_^&+0|+lVm5kGC$*}y!{RcmcCmT) z9UAQwjr$mV<>`;gp^b0k>n{5JXFRTcZ|^2B4nHu$;DH0g4)Jjkmm||nay`$=LvrDj&jnpn2oz*q&kORy(cAx<{3D0bes1gg_oK>j ze<_^Y%Gf52$DZ%6Jij@A-`6f<}j=XZHUbQe$*0Ev( zw$O%@HyC?|=Ww?pPrmZ-ujtlZr&TTMmjX|-^&6VhZC+-zw;IXzT7egxv?-+9@^36O zaKOfjZj?F!&5=T=P3Tx`wikQt=W)_^8gvvZHp}KIwWrVkyp|>@fCUA zwcYq({Dj2hL?$7=weLb7WaaM8W^L=R-U7cy~ z03N1TZ*4@M`5#kU0mr$Kr99TWaR0fUGbtxO9?dk4ZJf=}`&d3cU1s3Pr@2OLm=<(I z*>~Xs|IVttV{kW!)!pmv0 zvcxJQMGtn1M!jBn>arC~)Ipv7uI{^Y(^*R9@OI;zqNv1?yHG9xjN!lHphY!|W#yE1 za&qN<)#~2RNiIcQZqJ@WagCp9DcXH3heT7Lw>-G7#z0?sRLW~!T5q2Xm#=zs9;WEz zy7i3o2@|;hPcXQ?4g+5mB6hm|^5YBbV-o*iooyI@_Wc`J;jvOCQIQr9l%eN>+I7(@ zk4Uj(<_)jC0HEAmqcRa2sE1y5$eF6#a7?t;qkR#N2$yxRwNEAxdf4;)u|fw6MFg&{7L0gcJbN#aFdHF_XSZtK6?n{UOTB0#n zbZhH$sWK{tFw50TrKFre}b~!-^vP2J@)72<-E;O 
zGmj^o2ih%ua+s0<-;x0^_itRmt7QZxtG{OTMfjy(r3w)9EZtD^ZuWwVu74kc9T>Di>5z2hO#m*&Txeh9ei#<+j&zJqr)kLmDPXmNieQBSPbY74WmV1o=?h-t>lkp_Pk zC3gPzS5H|*AQ-qh7TSN8F)Tm9Q(cH{bG_AidU#L;WaKWpnrzlOGn#gZMjcrqMK3Fe zpuhvxro7`3JFvP(pE~~_;};7B0}pVPCk^Frf44$$q2$CUGft}Tc_~*)=m83%Zoy!l zcva{5zezqd%jZ<(g^ycU8^w`BcMiiwGYNtBxyX7?* zn=dZ9=putHNK#vrIO6!X6aX{|LnfBHD%mZ}`t;DGTC>299Co{1l9 zqc4yn%c`Cm*msp&D)1VlRC0r8d>M>Hd7j3aZ-f*^hZF+0pl`(T$9G+w-4* zPO&(gFlhr;#Ht?uet`1s$HTpPP(i<6vhBu~@TAQo0HU9v_g6$F8JSeDP1z2KbBfIA z9LP}9+KT&Z2JmxmoTSW~e_n3NRk`x=f6{WsITec_bn7MIZufSP4nF1INu#GfGg|Pn z`LXDv%W?}Li{aDhNt~iyPH+tRYDVw5KZ=-5xN#ExO682!&G3Ei(#|Yk!a2O`<+XBu zCCI=DoN*0ppKJ5PJ+vT-_0z{;4OVDPV#Z>L|BEYv^Gc)kD+dHLajU|Mb9}m2Y<2EP zD|f&DAG9l+b?rHeqBSmPeCJaiyEnirf6j}ldNKUsVlfSx^d?hcSC~HOcLG>&6DB{Q zn0Ba&0r5o_%sOoUXbh=yP{3#FcD2H`N|!+gv~UeCZ|>dLy`w8S|JyNoW?*J>P2{AU zwVs*|X$0l_|0ni?eArn)es{GuN``HZmXv2;>efd_?iar@^Los8nTW|}KvafZ^It+q z91L<&M3ZVbH_{-@Y_Ho1K-`Kc(0s=%xsO}!1CHpGPlx;=tFyX?ED{N{v@M1-r>pHp zsm;KD*COP4;-rv0J?)Ap&4Fl~Ic^1X#!(dXX?A=Odh2(X^%Doh>Lc|0iUBgY(yqU_ zd@+i-zfr+ySB4EsTv|9op zc+(yCF1^-=$IUsenXa%8#DI5>|G;3+ypxLWQ{_|UjFUb?9x)&h4vOpN_5Ih=Buu}9 zJoVL-gr$cY+a-b(0t{}qYvfjNYk2ABB42<4r%gHejKu%Z|Cs2;ZHeR0kAqsH+*cf* z#t0q7^RBEXKpEiu#0Q!($C&kKQw&6AmWu=K|2FihM`p-{R~%8Cd?>C^VH9C9>5*+M9w^+={GE-#L=WD)@T|-PED-ppkfX2|ON^TK$!?HQdPw zN!H6#=))9Ec9Bz=PD| zTPD&)hgQ}{bIjb+l}}lX&4Dk& Date: Thu, 11 Aug 2022 09:53:50 +0000 Subject: [PATCH 66/94] first commit for modnet and modify nano --- .../detection/nanodet_plus/cpp/infer.cc | 2 +- .../detection/nanodet_plus/python/infer.py | 2 +- examples/vision/matting/modnet/cpp/README.md | 10 +- examples/vision/matting/modnet/cpp/infer.cc | 109 ++++++++++++++++++ .../vision/matting/modnet/python/README.md | 10 +- .../vision/matting/modnet/python/infer.py | 53 +++++++++ 6 files changed, 174 insertions(+), 12 deletions(-) create mode 100644 examples/vision/matting/modnet/cpp/infer.cc create mode 100644 examples/vision/matting/modnet/python/infer.py diff --git 
a/examples/vision/detection/nanodet_plus/cpp/infer.cc b/examples/vision/detection/nanodet_plus/cpp/infer.cc index debc6db9b1..b155c4a795 100644 --- a/examples/vision/detection/nanodet_plus/cpp/infer.cc +++ b/examples/vision/detection/nanodet_plus/cpp/infer.cc @@ -63,7 +63,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { auto option = fastdeploy::RuntimeOption(); option.UseGpu(); option.UseTrtBackend(); - option.SetTrtInputShape("images", {1, 3, 640, 640}); + option.SetTrtInputShape("images", {1, 3, 320, 320}); auto model = fastdeploy::vision::detection::NanoDetPlus(model_file, "", option); if (!model.Initialized()) { diff --git a/examples/vision/detection/nanodet_plus/python/infer.py b/examples/vision/detection/nanodet_plus/python/infer.py index 8ad585f3df..58a0f866bc 100644 --- a/examples/vision/detection/nanodet_plus/python/infer.py +++ b/examples/vision/detection/nanodet_plus/python/infer.py @@ -31,7 +31,7 @@ def build_option(args): if args.use_trt: option.use_trt_backend() - option.set_trt_input_shape("images", [1, 3, 640, 640]) + option.set_trt_input_shape("images", [1, 3, 320, 320]) return option diff --git a/examples/vision/matting/modnet/cpp/README.md b/examples/vision/matting/modnet/cpp/README.md index 82226ae4c8..0e735a63b4 100644 --- a/examples/vision/matting/modnet/cpp/README.md +++ b/examples/vision/matting/modnet/cpp/README.md @@ -18,16 +18,16 @@ cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j #下载官方转换好的MODNet模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic__portrait_matting.onnx -wget todo +wget https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic_portrait_matting.onnx +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_matting_input.jpg # CPU推理 -./infer_demo modnet_photographic__portrait_matting.onnx todo 0 +./infer_demo modnet_photographic_portrait_matting.onnx test_lite_matting_input.jpg 0 # GPU推理 -./infer_demo modnet_photographic__portrait_matting.onnx todo 1 +./infer_demo modnet_photographic_portrait_matting.onnx test_lite_matting_input.jpg 1 # GPU上TensorRT推理 -./infer_demo modnet_photographic__portrait_matting.onnx todo 2 +./infer_demo modnet_photographic_portrait_matting.onnx test_lite_matting_input.jpg 2 ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/matting/modnet/cpp/infer.cc b/examples/vision/matting/modnet/cpp/infer.cc new file mode 100644 index 0000000000..dbc0e38fa6 --- /dev/null +++ b/examples/vision/matting/modnet/cpp/infer.cc @@ -0,0 +1,109 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::matting::MODNet(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + // 设置推理size, 必须和模型文件一致 + model.size = {256, 256}; + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::MattingResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisMattingAlpha(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::matting::MODNet(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + // 设置推理size, 必须和模型文件一致 + model.size = {256, 256}; + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::MattingResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisMattingAlpha(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("input", {1, 3, 256, 256}); + auto model = fastdeploy::vision::matting::MODNet(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + // 设置推理size, 必须和模型文件一致 + model.size = {256, 256}; + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::MattingResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisMattingAlpha(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./modnet_photographic_portrait_matting.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/matting/modnet/python/README.md b/examples/vision/matting/modnet/python/README.md index d7b1149f8e..2ec5e4f267 100644 --- a/examples/vision/matting/modnet/python/README.md +++ b/examples/vision/matting/modnet/python/README.md @@ -9,8 +9,8 @@ ``` #下载modnet模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic__portrait_matting.onnx -wget todo +wget https://bj.bcebos.com/paddlehub/fastdeploy/modnet_photographic_portrait_matting.onnx +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_matting_input.jpg #下载部署示例代码 @@ -18,11 +18,11 @@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/modnet/python/ # CPU推理 -python infer.py --model modnet_photographic__portrait_matting.onnx --image todo --device cpu +python infer.py --model modnet_photographic_portrait_matting.onnx 
--image test_lite_matting_input.jpg --device cpu # GPU推理 -python infer.py --model modnet_photographic__portrait_matting.onnx --image todo --device gpu +python infer.py --model modnet_photographic_portrait_matting.onnx --image test_lite_matting_input.jpg --device gpu # GPU上使用TensorRT推理 -python infer.py --model modnet_photographic__portrait_matting.onnx --image todo --device gpu --use_trt True +python infer.py --model modnet_photographic_portrait_matting.onnx --image test_lite_matting_input.jpg --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/matting/modnet/python/infer.py b/examples/vision/matting/modnet/python/infer.py new file mode 100644 index 0000000000..5403d66f3e --- /dev/null +++ b/examples/vision/matting/modnet/python/infer.py @@ -0,0 +1,53 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of modnet onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("input", [1, 3, 256, 256]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.matting.MODNet(args.model, runtime_option=runtime_option) + +#设置推理size, 必须和模型文件一致 +model.size = (256, 256) +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_matting_alpha(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) 
+print("Visualized result save in ./visualized_result.jpg") From 217bb398f3386753d1ff2f7d763385eeafc342d7 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 12:14:54 +0000 Subject: [PATCH 67/94] yolor scaledyolov4 v5lite --- .../vision/detection/scaledyolov4/README.md | 40 +++++++ .../detection/scaledyolov4/cpp/CMakeLists.txt | 14 +++ .../detection/scaledyolov4/cpp/README.md | 85 ++++++++++++++ .../detection/scaledyolov4/cpp/infer.cc | 107 ++++++++++++++++++ .../detection/scaledyolov4/python/README.md | 79 +++++++++++++ .../detection/scaledyolov4/python/infer.py | 52 +++++++++ examples/vision/detection/yolor/README.md | 40 +++++++ .../vision/detection/yolor/cpp/CMakeLists.txt | 14 +++ examples/vision/detection/yolor/cpp/README.md | 85 ++++++++++++++ examples/vision/detection/yolor/cpp/infer.cc | 106 +++++++++++++++++ .../vision/detection/yolor/python/README.md | 79 +++++++++++++ .../vision/detection/yolor/python/infer.py | 51 +++++++++ .../vision/detection/yolov5lite/README.md | 68 +++++++++++ .../detection/yolov5lite/cpp/CMakeLists.txt | 14 +++ .../vision/detection/yolov5lite/cpp/README.md | 85 ++++++++++++++ .../vision/detection/yolov5lite/cpp/infer.cc | 107 ++++++++++++++++++ .../detection/yolov5lite/python/README.md | 79 +++++++++++++ .../detection/yolov5lite/python/infer.py | 52 +++++++++ 18 files changed, 1157 insertions(+) create mode 100644 examples/vision/detection/scaledyolov4/README.md create mode 100644 examples/vision/detection/scaledyolov4/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/scaledyolov4/cpp/README.md create mode 100644 examples/vision/detection/scaledyolov4/cpp/infer.cc create mode 100644 examples/vision/detection/scaledyolov4/python/README.md create mode 100644 examples/vision/detection/scaledyolov4/python/infer.py create mode 100644 examples/vision/detection/yolor/README.md create mode 100644 examples/vision/detection/yolor/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/yolor/cpp/README.md 
create mode 100644 examples/vision/detection/yolor/cpp/infer.cc create mode 100644 examples/vision/detection/yolor/python/README.md create mode 100644 examples/vision/detection/yolor/python/infer.py create mode 100644 examples/vision/detection/yolov5lite/README.md create mode 100644 examples/vision/detection/yolov5lite/cpp/CMakeLists.txt create mode 100644 examples/vision/detection/yolov5lite/cpp/README.md create mode 100644 examples/vision/detection/yolov5lite/cpp/infer.cc create mode 100644 examples/vision/detection/yolov5lite/python/README.md create mode 100644 examples/vision/detection/yolov5lite/python/infer.py diff --git a/examples/vision/detection/scaledyolov4/README.md b/examples/vision/detection/scaledyolov4/README.md new file mode 100644 index 0000000000..9b8a8e3f87 --- /dev/null +++ b/examples/vision/detection/scaledyolov4/README.md @@ -0,0 +1,40 @@ +# ScaledYOLOv4准备部署模型 + +- ScaledYOLOv4部署实现来自[ScaledYOLOv4 branch yolov4-large](https://github.com/WongKinYiu/ScaledYOLOv4)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)。 + + - (1)[预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; + - (2)自己数据训练的ScaledYOLOv4模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 + + +## 导出ONNX模型 + + + 访问[ScaledYOLOv4](https://github.com/WongKinYiu/ScaledYOLOv4)官方github库,按照指引下载安装,下载`scaledyolov4.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现问题,可以参考[ScaledYOLOv4#401](https://github.com/WongKinYiu/ScaledYOLOv4/issues/401)的解决办法 + + ``` + #下载ScaledYOLOv4模型文件 + Download from the goole drive https://drive.google.com/file/d/1aXZZE999sHMP1gev60XhNChtHPRMH3Fz/view?usp=sharing + + # 导出onnx格式文件 + python models/export.py --weights PATH/TO/scaledyolov4-xx.pt --img-size 640 + ``` + + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了ScaledYOLOv4导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| 
[ScaledYOLOv4-P5](https://bj.bcebos.com/paddlehub/fastdeploy/yolov4-p5.onnx) | 271MB | 51.2% | +| [ScaledYOLOv4-P5+BoF](https://bj.bcebos.com/paddlehub/fastdeploy/yolov4-p5_.onnx) | 271MB | 51.7% | +| [ScaledYOLOv4-P6](https://bj.bcebos.com/paddlehub/fastdeploy/yolov4-p6.onnx) | 487MB | 53.9% | +| [ScaledYOLOv4-P6+BoF](https://bj.bcebos.com/paddlehub/fastdeploy/yolov4-p6_.onnx) | 487MB | 54.4% | +| [ScaledYOLOv4-P7](https://bj.bcebos.com/paddlehub/fastdeploy/yolov4-p7.onnx) | 1.1GB | 55.0% | + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/detection/scaledyolov4/cpp/CMakeLists.txt b/examples/vision/detection/scaledyolov4/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/detection/scaledyolov4/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/detection/scaledyolov4/cpp/README.md b/examples/vision/detection/scaledyolov4/cpp/README.md new file mode 100644 index 0000000000..66aa9c8b73 --- /dev/null +++ b/examples/vision/detection/scaledyolov4/cpp/README.md @@ -0,0 +1,85 @@ +# ScaledYOLOv4 C++部署示例 + +本目录下提供`infer.cc`快速完成ScaledYOLOv4在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的ScaledYOLOv4模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/scaled_yolov4-p5.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +# CPU推理 +./infer_demo scaled_yolov4-p5.onnx 000000014439.jpg 0 +# GPU推理 +./infer_demo scaled_yolov4-p5.onnx 000000014439.jpg 1 +# GPU上TensorRT推理 +./infer_demo scaled_yolov4-p5.onnx 000000014439.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## ScaledYOLOv4 C++接口 + +### ScaledYOLOv4类 + +``` +fastdeploy::vision::detection::ScaledYOLOv4( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +ScaledYOLOv4模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> ScaledYOLOv4::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- 
[模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/scaledyolov4/cpp/infer.cc b/examples/vision/detection/scaledyolov4/cpp/infer.cc new file mode 100644 index 0000000000..0452909714 --- /dev/null +++ b/examples/vision/detection/scaledyolov4/cpp/infer.cc @@ -0,0 +1,107 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::detection::ScaledYOLOv4(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = + fastdeploy::vision::detection::ScaledYOLOv4(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = + fastdeploy::vision::detection::ScaledYOLOv4(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model scaled_yolov4-p5.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/detection/scaledyolov4/python/README.md b/examples/vision/detection/scaledyolov4/python/README.md new file mode 100644 index 0000000000..bec85e0236 --- /dev/null +++ b/examples/vision/detection/scaledyolov4/python/README.md @@ -0,0 +1,79 @@ +# ScaledYOLOv4 Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成ScaledYOLOv4在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载scaledyolov4模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/scaled_yolov4-p5.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/scaledyolov4/python/ + +# CPU推理 +python infer.py --model scaled_yolov4-p5.onnx --image 000000014439.jpg --device cpu +# GPU推理 +python infer.py --model scaled_yolov4-p5.onnx --image 000000014439.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model scaled_yolov4-p5.onnx --image 000000014439.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## ScaledYOLOv4 Python接口 + +``` +fastdeploy.vision.detection.ScaledYOLOv4(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +ScaledYOLOv4模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> ScaledYOLOv4.predict(image_data, conf_threshold=0.25, 
nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [ScaledYOLOv4 模型介绍](..) +- [ScaledYOLOv4 C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/scaledyolov4/python/infer.py b/examples/vision/detection/scaledyolov4/python/infer.py new file mode 100644 index 0000000000..eaf287f2c1 --- /dev/null +++ b/examples/vision/detection/scaledyolov4/python/infer.py @@ -0,0 +1,52 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of scaledyolov4 onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + 
option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.detection.ScaledYOLOv4( + args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/detection/yolor/README.md b/examples/vision/detection/yolor/README.md new file mode 100644 index 0000000000..772e0c6c1b --- /dev/null +++ b/examples/vision/detection/yolor/README.md @@ -0,0 +1,40 @@ +# YOLOR准备部署模型 + +- YOLOR部署实现来自[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)。 + + - (1)[预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; + - (2)自己数据训练的YOLOR模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 + + +## 导出ONNX模型 + + + 访问[YOLOR](https://github.com/WongKinYiu/yolor)官方github库,按照指引下载安装,下载`yolor.pt` 模型,利用 `models/export.py` 得到`onnx`格式文件。如果您导出的`onnx`模型出现精度不达标或者是数据维度的问题,可以参考[yolor#32](https://github.com/WongKinYiu/yolor/issues/32)的解决办法 + + ``` + #下载yolor模型文件 + wget https://github.com/WongKinYiu/yolor/releases/download/weights/yolor-d6-paper-570.pt + + # 导出onnx格式文件 + python models/export.py --weights PATH/TO/yolor-xx-xx-xx.pt --img-size 640 + ``` + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了YOLOR导出的各系列模型,开发者可直接下载使用。 + +| 模型 | 大小 | 精度 | +|:---------------------------------------------------------------- |:----- |:----- | +| [YOLOR-P6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-p6-paper-541-640-640.onnx) | 143MB | 54.1% | +| [YOLOR-W6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-w6-paper-555-640-640.onnx) | 305MB | 
55.5% | +| [YOLOR-E6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-e6-paper-564-640-640.onnx ) | 443MB | 56.4% | +| [YOLOR-D6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-570-640-640.onnx) | 580MB | 57.0% | +| [YOLOR-D6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-573-640-640.onnx) | 580MB | 57.3% | + + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/detection/yolor/cpp/CMakeLists.txt b/examples/vision/detection/yolor/cpp/CMakeLists.txt new file mode 100644 index 0000000000..fea1a2888b --- /dev/null +++ b/examples/vision/detection/yolor/cpp/CMakeLists.txt @@ -0,0 +1,14 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + +include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) +# 添加FastDeploy库依赖 +target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/detection/yolor/cpp/README.md b/examples/vision/detection/yolor/cpp/README.md new file mode 100644 index 0000000000..fb6543bcb1 --- /dev/null +++ b/examples/vision/detection/yolor/cpp/README.md @@ -0,0 +1,85 @@ +# YOLOR C++部署示例 + +本目录下提供`infer.cc`快速完成YOLOR在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的YOLOR模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolor-p6-paper-541-640-640.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +# CPU推理 +./infer_demo yolor-p6-paper-541-640-640.onnx 000000014439.jpg 0 +# GPU推理 +./infer_demo yolor-p6-paper-541-640-640.onnx 000000014439.jpg 1 +# GPU上TensorRT推理 +./infer_demo yolor-p6-paper-541-640-640.onnx 000000014439.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOR C++接口 + +### YOLOR类 + +``` +fastdeploy::vision::detection::YOLOR( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOR模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOR::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` +> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +- 
[模型介绍](../../) +- [Python部署](../python) +- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolor/cpp/infer.cc b/examples/vision/detection/yolor/cpp/infer.cc new file mode 100644 index 0000000000..dfa4cde0ff --- /dev/null +++ b/examples/vision/detection/yolor/cpp/infer.cc @@ -0,0 +1,106 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, const std::string& image_file) { + auto model = fastdeploy::vision::detection::YOLOR(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::detection::YOLOR(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." 
<< std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = fastdeploy::vision::detection::YOLOR(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout + << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model ./yolor-p6-paper-541-640-640.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/detection/yolor/python/README.md b/examples/vision/detection/yolor/python/README.md new file mode 100644 index 0000000000..268e897e2a --- /dev/null +++ b/examples/vision/detection/yolor/python/README.md @@ -0,0 +1,79 @@ +# YOLOR Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOR在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载YOLOR模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/yolor-p6-paper-541-640-640.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolor/python/ + +# CPU推理 +python infer.py --model yolor-p6-paper-541-640-640.onnx --image 000000014439.jpg --device cpu +# GPU推理 +python infer.py --model yolor-p6-paper-541-640-640.onnx --image 000000014439.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model yolor-p6-paper-541-640-640.onnx --image 000000014439.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## YOLOR Python接口 + +``` +fastdeploy.vision.detection.YOLOR(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOR模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOR.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 
模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOR 模型介绍](..) +- [YOLOR C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolor/python/infer.py b/examples/vision/detection/yolor/python/infer.py new file mode 100644 index 0000000000..f701796228 --- /dev/null +++ b/examples/vision/detection/yolor/python/infer.py @@ -0,0 +1,51 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolor onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return 
option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.detection.YOLOR(args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") diff --git a/examples/vision/detection/yolov5lite/README.md b/examples/vision/detection/yolov5lite/README.md new file mode 100644 index 0000000000..9e63ca7e09 --- /dev/null +++ b/examples/vision/detection/yolov5lite/README.md @@ -0,0 +1,68 @@ +# YOLOv5Lite准备部署模型 + +- YOLOv5Lite部署实现来自[YOLOv5-Lite-v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) +代码,和[基于COCO的预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)。 + + - (1)[预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; + - (2)自己数据训练的YOLOv5Lite模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 + + +## 导出ONNX模型 + +- 自动获取 + 访问[YOLOv5Lite](https://github.com/ppogg/YOLOv5-Lite) +官方github库,按照指引下载安装,下载`yolov5-lite-xx.onnx` 模型(Tips:官方提供的ONNX文件目前是没有decode模块的) + ``` + #下载yolov5-lite模型文件(.onnx) + Download from https://drive.google.com/file/d/1bJByk9eoS6pv8Z3N4bcLRCV3i7uk24aU/view + 官方Repo也支持百度云下载 + ``` + +- 手动获取 + + 访问[YOLOv5Lite](https://github.com/ppogg/YOLOv5-Lite) +官方github库,按照指引下载安装,下载`yolov5-lite-xx.pt` 模型,利用 `export.py` 得到`onnx`格式文件。 + + - 导出含有decode模块的ONNX文件 + + 首先需要参考[YOLOv5-Lite#189](https://github.com/ppogg/YOLOv5-Lite/pull/189)的解决办法,修改代码。 + + ``` + #下载yolov5-lite模型文件(.pt) + Download from https://drive.google.com/file/d/1oftzqOREGqDCerf7DtD5BZp9YWELlkMe/view + 官方Repo也支持百度云下载 + + # 导出onnx格式文件 + python export.py --grid --dynamic --concat --weights PATH/TO/yolov5-lite-xx.pt + + # 移动onnx文件到demo目录 + cp PATH/TO/yolov5lite.onnx PATH/TO/model_zoo/vision/yolov5lite/ + ``` + - 
导出无decode模块的ONNX文件(不需要修改代码)
+
+  ```
+  #下载yolov5-lite模型文件
+  Download from https://drive.google.com/file/d/1oftzqOREGqDCerf7DtD5BZp9YWELlkMe/view
+  官方Repo也支持百度云下载
+
+  # 导出onnx格式文件
+  python export.py --grid --dynamic --weights PATH/TO/yolov5-lite-xx.pt
+
+```
+
+## 下载预训练ONNX模型
+
+为了方便开发者的测试,下面提供了YOLOv5Lite导出的各系列模型,开发者可直接下载使用。
+
+| 模型 | 大小 | 精度 |
+|:---------------------------------------------------------------- |:----- |:----- |
+| [YOLOv5Lite-e](https://bj.bcebos.com/paddlehub/fastdeploy/v5Lite-e-sim-320.onnx) | 3.1MB | 35.1% |
+| [YOLOv5Lite-s](https://bj.bcebos.com/paddlehub/fastdeploy/v5Lite-s-sim-416.onnx) | 6.3MB | 42.0% |
+| [YOLOv5Lite-c](https://bj.bcebos.com/paddlehub/fastdeploy/v5Lite-c-sim-512.onnx) | 18MB | 50.9% |
+| [YOLOv5Lite-g](https://bj.bcebos.com/paddlehub/fastdeploy/v5Lite-g-sim-640.onnx) | 21MB | 57.6% |
+
+
+## 详细部署文档
+
+- [Python部署](python)
+- [C++部署](cpp)
diff --git a/examples/vision/detection/yolov5lite/cpp/CMakeLists.txt b/examples/vision/detection/yolov5lite/cpp/CMakeLists.txt
new file mode 100644
index 0000000000..fea1a2888b
--- /dev/null
+++ b/examples/vision/detection/yolov5lite/cpp/CMakeLists.txt
@@ -0,0 +1,14 @@
+PROJECT(infer_demo C CXX)
+CMAKE_MINIMUM_REQUIRED (VERSION 3.12)
+
+# 指定下载解压后的fastdeploy库路径
+option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.")
+
+include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake)
+
+# 添加FastDeploy依赖头文件
+include_directories(${FASTDEPLOY_INCS})
+
+add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc)
+# 添加FastDeploy库依赖
+target_link_libraries(infer_demo ${FASTDEPLOY_LIBS})
diff --git a/examples/vision/detection/yolov5lite/cpp/README.md b/examples/vision/detection/yolov5lite/cpp/README.md
new file mode 100644
index 0000000000..2fad44e34b
--- /dev/null
+++ b/examples/vision/detection/yolov5lite/cpp/README.md
@@ -0,0 +1,85 @@
+# YOLOv5Lite C++部署示例
+
+本目录下提供`infer.cc`快速完成YOLOv5Lite在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。
+
+在部署前,需确认以下两个步骤
+
+- 1. 
软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) + +以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 + +``` +mkdir build +cd build +wget https://xxx.tgz +tar xvf fastdeploy-linux-x64-0.2.0.tgz +cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 +make -j + +#下载官方转换好的YOLOv5Lite模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/v5Lite-g-sim-640.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +# CPU推理 +./infer_demo v5Lite-g-sim-640.onnx 000000014439.jpg 0 +# GPU推理 +./infer_demo v5Lite-g-sim-640.onnx 000000014439.jpg 1 +# GPU上TensorRT推理 +./infer_demo v5Lite-g-sim-640.onnx 000000014439.jpg 2 +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv5Lite C++接口 + +### YOLOv5Lite类 + +``` +fastdeploy::vision::detection::YOLOv5Lite( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +YOLOv5Lite模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX格式 + +#### Predict函数 + +> ``` +> YOLOv5Lite::Predict(cv::Mat* im, DetectionResult* result, +> float conf_threshold = 0.25, +> float nms_iou_threshold = 0.5) +> ``` +> +> 模型预测接口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **im**: 输入图像,注意需为HWC,BGR格式 +> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **conf_threshold**: 检测框置信度过滤阈值 +> > * **nms_iou_threshold**: NMS处理过程中iou阈值 + +### 类成员变量 + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 
114]
+> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=true` 表示不使用填充的方式,默认值为`is_no_pad=false`
+> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高调整为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false`
+> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32`
+
+- [模型介绍](../../)
+- [Python部署](../python)
+- [视觉模型预测结果](../../../../../docs/api/vision_results/)
diff --git a/examples/vision/detection/yolov5lite/cpp/infer.cc b/examples/vision/detection/yolov5lite/cpp/infer.cc
new file mode 100644
index 0000000000..a899c48c6f
--- /dev/null
+++ b/examples/vision/detection/yolov5lite/cpp/infer.cc
@@ -0,0 +1,107 @@
+// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include "fastdeploy/vision.h"
+
+void CpuInfer(const std::string& model_file, const std::string& image_file) {
+  auto model = fastdeploy::vision::detection::YOLOv5Lite(model_file);
+  if (!model.Initialized()) {
+    std::cerr << "Failed to initialize." << std::endl;
+    return;
+  }
+
+  auto im = cv::imread(image_file);
+  auto im_bak = im.clone();
+
+  fastdeploy::vision::DetectionResult res;
+  if (!model.Predict(&im, &res)) {
+    std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void GpuInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = + fastdeploy::vision::detection::YOLOv5Lite(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." << std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +void TrtInfer(const std::string& model_file, const std::string& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("images", {1, 3, 640, 640}); + auto model = + fastdeploy::vision::detection::YOLOv5Lite(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + auto im = cv::imread(image_file); + auto im_bak = im.clone(); + + fastdeploy::vision::DetectionResult res; + if (!model.Predict(&im, &res)) { + std::cerr << "Failed to predict." 
<< std::endl; + return; + } + + auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); + cv::imwrite("vis_result.jpg", vis_im); + std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 4) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_model ./v5Lite-g-sim-640.onnx ./test.jpeg 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." + << std::endl; + return -1; + } + + if (std::atoi(argv[3]) == 0) { + CpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 1) { + GpuInfer(argv[1], argv[2]); + } else if (std::atoi(argv[3]) == 2) { + TrtInfer(argv[1], argv[2]); + } + return 0; +} diff --git a/examples/vision/detection/yolov5lite/python/README.md b/examples/vision/detection/yolov5lite/python/README.md new file mode 100644 index 0000000000..7b67636d93 --- /dev/null +++ b/examples/vision/detection/yolov5lite/python/README.md @@ -0,0 +1,79 @@ +# YOLOv5Lite Python部署示例 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +本目录下提供`infer.py`快速完成YOLOv5Lite在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载YOLOv5Lite模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/v5Lite-g-sim-640.onnx +wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/000000014439.jpg + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/yolov5lite/python/ + +# CPU推理 +python infer.py --model v5Lite-g-sim-640.onnx --image 000000014439.jpg --device cpu +# GPU推理 +python infer.py --model v5Lite-g-sim-640.onnx --image 000000014439.jpg --device gpu +# GPU上使用TensorRT推理 +python infer.py --model v5Lite-g-sim-640.onnx --image 000000014439.jpg --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## YOLOv5Lite Python接口 + +``` +fastdeploy.vision.detection.YOLOv5Lite(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +YOLOv5Lite模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> YOLOv5Lite.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * 
**is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + + + +## 其它文档 + +- [YOLOv5Lite 模型介绍](..) +- [YOLOv5Lite C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/detection/yolov5lite/python/infer.py b/examples/vision/detection/yolov5lite/python/infer.py new file mode 100644 index 0000000000..0236c182f1 --- /dev/null +++ b/examples/vision/detection/yolov5lite/python/infer.py @@ -0,0 +1,52 @@ +import fastdeploy as fd +import cv2 + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of yolov5lite onnx model.") + parser.add_argument( + "--image", required=True, help="Path of test image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("images", [1, 3, 640, 640]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.detection.YOLOv5Lite( + args.model, runtime_option=runtime_option) + +# 预测图片检测结果 +im = cv2.imread(args.image) +result = model.predict(im) + +# 预测结果可视化 +vis_im = fd.vision.vis_detection(im, result) +cv2.imwrite("visualized_result.jpg", vis_im) +print("Visualized result save in ./visualized_result.jpg") From de3b4de6f8e8a4f29fd580d91fb2307dc7d1386d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 14:43:48 +0000 Subject: [PATCH 68/94] first commit for insightface --- 
examples/vision/faceid/arcface/README.md | 40 ----- .../vision/faceid/arcface/cpp/CMakeLists.txt | 14 -- .../vision/faceid/arcface/python/README.md | 79 --------- examples/vision/faceid/insightface/README.md | 57 +++++++ .../faceid/insightface/cpp/CMakeLists.txt | 22 +++ .../{arcface => insightface}/cpp/README.md | 24 ++- .../faceid/insightface/cpp/infer_arcface.cc | 156 ++++++++++++++++++ .../faceid/insightface/cpp/infer_cosface.cc | 156 ++++++++++++++++++ .../insightface/cpp/infer_partial_fc.cc | 156 ++++++++++++++++++ .../faceid/insightface/cpp/infer_vpl.cc | 156 ++++++++++++++++++ .../faceid/insightface/python/README.md | 88 ++++++++++ .../insightface/python/infer_arcface.py | 90 ++++++++++ .../insightface/python/infer_cosface.py | 90 ++++++++++ .../insightface/python/infer_partial_fc.py | 90 ++++++++++ .../faceid/insightface/python/infer_vpl.py | 90 ++++++++++ examples/vision/faceid/partial_fc/README.md | 37 ----- .../faceid/partial_fc/cpp/CMakeLists.txt | 14 -- .../vision/faceid/partial_fc/cpp/README.md | 85 ---------- .../vision/faceid/partial_fc/python/README.md | 79 --------- 19 files changed, 1166 insertions(+), 357 deletions(-) delete mode 100644 examples/vision/faceid/arcface/README.md delete mode 100644 examples/vision/faceid/arcface/cpp/CMakeLists.txt delete mode 100644 examples/vision/faceid/arcface/python/README.md create mode 100644 examples/vision/faceid/insightface/README.md create mode 100644 examples/vision/faceid/insightface/cpp/CMakeLists.txt rename examples/vision/faceid/{arcface => insightface}/cpp/README.md (63%) create mode 100644 examples/vision/faceid/insightface/cpp/infer_arcface.cc create mode 100644 examples/vision/faceid/insightface/cpp/infer_cosface.cc create mode 100644 examples/vision/faceid/insightface/cpp/infer_partial_fc.cc create mode 100644 examples/vision/faceid/insightface/cpp/infer_vpl.cc create mode 100644 examples/vision/faceid/insightface/python/README.md create mode 100644 
examples/vision/faceid/insightface/python/infer_arcface.py create mode 100644 examples/vision/faceid/insightface/python/infer_cosface.py create mode 100644 examples/vision/faceid/insightface/python/infer_partial_fc.py create mode 100644 examples/vision/faceid/insightface/python/infer_vpl.py delete mode 100644 examples/vision/faceid/partial_fc/README.md delete mode 100644 examples/vision/faceid/partial_fc/cpp/CMakeLists.txt delete mode 100644 examples/vision/faceid/partial_fc/cpp/README.md delete mode 100644 examples/vision/faceid/partial_fc/python/README.md diff --git a/examples/vision/faceid/arcface/README.md b/examples/vision/faceid/arcface/README.md deleted file mode 100644 index cb93054029..0000000000 --- a/examples/vision/faceid/arcface/README.md +++ /dev/null @@ -1,40 +0,0 @@ -# ArcFace准备部署模型 - -## 模型版本说明 - -- [ArcFace CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) - - (1)[链接中](https://github.com/deepinsight/insightface/commit/babb9a5)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - - (2)开发者基于自己数据训练的ArcFace CommitID:babb9a5模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 - -## 导出ONNX模型 - -访问[ArcFace](https://github.com/deepinsight/insightface/tree/master/recognition/arcface_torch)官方github库,按照指引下载安装,下载pt模型文件,利用 `torch2onnx.py` 得到`onnx`格式文件。 - -* 下载ArcFace模型文件 - ``` - Link: https://pan.baidu.com/share/init?surl=CL-l4zWqsI1oDuEEYVhj-g code: e8pw - ``` - -* 导出onnx格式文件 - ```bash - PYTHONPATH=. 
python ./torch2onnx.py ms1mv3_arcface_r100_fp16/backbone.pth --output ms1mv3_arcface_r100.onnx --network r100 --simplify 1 - ``` - -## 下载预训练ONNX模型 - - - -todo - - -## 详细部署文档 - -- [Python部署](python) -- [C++部署](cpp) diff --git a/examples/vision/faceid/arcface/cpp/CMakeLists.txt b/examples/vision/faceid/arcface/cpp/CMakeLists.txt deleted file mode 100644 index fea1a2888b..0000000000 --- a/examples/vision/faceid/arcface/cpp/CMakeLists.txt +++ /dev/null @@ -1,14 +0,0 @@ -PROJECT(infer_demo C CXX) -CMAKE_MINIMUM_REQUIRED (VERSION 3.12) - -# 指定下载解压后的fastdeploy库路径 -option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") - -include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) - -# 添加FastDeploy依赖头文件 -include_directories(${FASTDEPLOY_INCS}) - -add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) -# 添加FastDeploy库依赖 -target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/faceid/arcface/python/README.md b/examples/vision/faceid/arcface/python/README.md deleted file mode 100644 index 034b93049e..0000000000 --- a/examples/vision/faceid/arcface/python/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# ArcFace Python部署示例 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) -- 2. 
FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) - -本目录下提供`infer.py`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 - -``` -#下载arcface模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r34.onnx -wget todo - - -#下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/arcface/python/ - -# CPU推理 -python infer.py --model ms1mv3_arcface_r34.onnx --image todo --device cpu -# GPU推理 -python infer.py --model ms1mv3_arcface_r34.onnx --image todo --device gpu -# GPU上使用TensorRT推理 -python infer.py --model ms1mv3_arcface_r34.onnx --image todo --device gpu --use_trt True -``` - -运行完成可视化结果如下图所示 - - - -## ArcFace Python接口 - -``` -fastdeploy.vision.faceid.ArcFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) -``` - -ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 - -**参数** - -> * **model_file**(str): 模型文件路径 -> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 -> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 -> * **model_format**(Frontend): 模型格式,默认为ONNX - -### predict函数 - -> ``` -> ArcFace.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) -> ``` -> -> 模型预测结口,输入图像直接输出检测结果。 -> -> **参数** -> -> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 -> > * **conf_threshold**(float): 检测框置信度过滤阈值 -> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 - -> **返回** -> -> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) - -### 类成员属性 - -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * 
**stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` - - - -## 其它文档 - -- [ArcFace 模型介绍](..) -- [ArcFace C++部署](../cpp) -- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/faceid/insightface/README.md b/examples/vision/faceid/insightface/README.md new file mode 100644 index 0000000000..17affb4cd3 --- /dev/null +++ b/examples/vision/faceid/insightface/README.md @@ -0,0 +1,57 @@ +# InsightFace准备部署模型 + +## 模型版本说明 + +- [InsightFace](https://github.com/deepinsight/insightface/commit/babb9a5) + - (1)[链接中](https://github.com/deepinsight/insightface/commit/babb9a5)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)开发者基于自己数据训练的InsightFace模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + + +## 支持模型列表 +目前FastDeploy支持如下模型的部署 +- ArcFace +- CosFace +- PartialFC +- VPL + + +## 导出ONNX模型 +以ArcFace为例: +访问[ArcFace](https://github.com/deepinsight/insightface/tree/master/recognition/arcface_torch)官方github库,按照指引下载安装,下载pt模型文件,利用 `torch2onnx.py` 得到`onnx`格式文件。 + +* 下载ArcFace模型文件 + ``` + Link: https://pan.baidu.com/share/init?surl=CL-l4zWqsI1oDuEEYVhj-g code: e8pw + ``` + +* 导出onnx格式文件 + ```bash + PYTHONPATH=. 
python ./torch2onnx.py ms1mv3_arcface_r100_fp16/backbone.pth --output ms1mv3_arcface_r100.onnx --network r100 --simplify 1 + ``` + +## 下载预训练ONNX模型 + +为了方便开发者的测试,下面提供了RetinaFace导出的各系列模型,开发者可直接下载使用。 +其中精度指标来源于InsightFace中对各模型的介绍,详情各参考InsightFace中的说明 + +| 模型 | 大小 | 精度 (AgeDB_30) | +|:---------------------------------------------------------------- |:----- |:----- | +| [CosFace-r18](https://bj.bcebos.com/paddlehub/fastdeploy/glint360k_cosface_r18.onnx) | 92MB | 97.7 | +| [CosFace-r34](https://bj.bcebos.com/paddlehub/fastdeploy/glint360k_cosface_r34.onnx) | 131MB | 98.3| +| [CosFace-r50](https://bj.bcebos.com/paddlehub/fastdeploy/glint360k_cosface_r50.onnx) | 167MB | 98.3 | +| [CosFace-r100](https://bj.bcebos.com/paddlehub/fastdeploy/glint360k_cosface_r100.onnx) | 249MB | 98.4 | +| [ArcFace-r18](https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r18.onnx) | 92MB | 97.7 | +| [ArcFace-r34](https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r34.onnx) | 131MB | 98.1| +| [ArcFace-r50](https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r50.onnx) | 167MB | - | +| [ArcFace-r100](https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r100.onnx) | 249MB | 98.4 | +| [ArcFace-r100_lr0.1](https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_r100_lr01.onnx) | 249MB | 98.4 | +| [PartialFC-r34](https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx) | 167MB | -| +| [PartialFC-r50](https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r100.onnx) | 249MB | - | + + + + +## 详细部署文档 + +- [Python部署](python) +- [C++部署](cpp) diff --git a/examples/vision/faceid/insightface/cpp/CMakeLists.txt b/examples/vision/faceid/insightface/cpp/CMakeLists.txt new file mode 100644 index 0000000000..56c1d4cb9e --- /dev/null +++ b/examples/vision/faceid/insightface/cpp/CMakeLists.txt @@ -0,0 +1,22 @@ +PROJECT(infer_demo C CXX) +CMAKE_MINIMUM_REQUIRED (VERSION 3.12) + +# 指定下载解压后的fastdeploy库路径 +option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") + 
+include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) + +# 添加FastDeploy依赖头文件 +include_directories(${FASTDEPLOY_INCS}) + +add_executable(infer_arcface_demo ${PROJECT_SOURCE_DIR}/infer_arcface.cc) +target_link_libraries(infer_arcface_demo ${FASTDEPLOY_LIBS}) + +add_executable(infer_cosface_demo ${PROJECT_SOURCE_DIR}/infer_cosface.cc) +target_link_libraries(infer_cosface_demo ${FASTDEPLOY_LIBS}) + +add_executable(infer_vpl_demo ${PROJECT_SOURCE_DIR}/infer_vpl.cc) +target_link_libraries(infer_vpl_demo ${FASTDEPLOY_LIBS}) + +add_executable(infer_partial_fc_demo ${PROJECT_SOURCE_DIR}/infer_partial_fc.cc) +target_link_libraries(infer_partial_fc_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/faceid/arcface/cpp/README.md b/examples/vision/faceid/insightface/cpp/README.md similarity index 63% rename from examples/vision/faceid/arcface/cpp/README.md rename to examples/vision/faceid/insightface/cpp/README.md index 505d144bbb..03b61684d5 100644 --- a/examples/vision/faceid/arcface/cpp/README.md +++ b/examples/vision/faceid/insightface/cpp/README.md @@ -1,6 +1,6 @@ -# ArcFace C++部署示例 +# InsightFace C++部署示例 -本目录下提供`infer.cc`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 +以ArcFace为例提供`infer_arcface.cc`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 在部署前,需确认以下两个步骤 @@ -18,28 +18,30 @@ cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j #下载官方转换好的ArcFace模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r34.onnx -wget todo +wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r100.onnx +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_0.png +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_1.png +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_2.png # CPU推理 -./infer_demo ms1mv3_arcface_r34.onnx todo 0 +./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png test_lite_focal_arcface_2.png 0 # GPU推理 -./infer_demo ms1mv3_arcface_r34.onnx todo 1 +./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png test_lite_focal_arcface_2.png 1 # GPU上TensorRT推理 -./infer_demo ms1mv3_arcface_r34.onnx todo 2 +./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png test_lite_focal_arcface_2.png 2 ``` 运行完成可视化结果如下图所示 - + ## ArcFace C++接口 ### ArcFace类 ``` -fastdeploy::vision::faceid::ArcFace( +fastdeploy::vision::detection::ArcFace( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), @@ -79,6 +81,10 @@ ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` > > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **downsample_strides**(vector<int>): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] +> > * **landmarks_per_face**(int): 如果使用具有人脸关键点的输出, 可以修改人脸关键点数量, 
默认值为`landmarks_per_face=5` +> > * **use_kps**(bool): 通过此参数可以设置模型是否使用关键点,如果ONNX文件没有关键点输出则需要将`use_kps=false`, 并将`landmarks_per_face=0`, 默认值为`use_kps=true` +> > * **num_anchors**(int): 通过此参数可以设置每个锚点预测的anchor数量, 需要跟进训练模型的参数设定, 默认值为`num_anchors=2` - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/faceid/insightface/cpp/infer_arcface.cc b/examples/vision/faceid/insightface/cpp/infer_arcface.cc new file mode 100644 index 0000000000..f8415c8d29 --- /dev/null +++ b/examples/vision/faceid/insightface/cpp/infer_arcface.cc @@ -0,0 +1,156 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto model = fastdeploy::vision::faceid::ArcFace(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" 
<< std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void GpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::faceid::ArcFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! 
Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void TrtInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("data", {1, 3, 112, 112}); + auto model = fastdeploy::vision::faceid::ArcFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 6) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " + "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " + "test_lite_focal_arcface_2.png 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + std::vector image_files = {argv[2], argv[3], argv[4]}; + if (std::atoi(argv[5]) == 0) { + CpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 1) { + GpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 2) { + TrtInfer(argv[1], image_files); + } + return 0; +} diff --git a/examples/vision/faceid/insightface/cpp/infer_cosface.cc b/examples/vision/faceid/insightface/cpp/infer_cosface.cc new file mode 100644 index 0000000000..311679d422 --- /dev/null +++ b/examples/vision/faceid/insightface/cpp/infer_cosface.cc @@ -0,0 +1,156 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto model = fastdeploy::vision::faceid::CosFace(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." 
<< std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void GpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! 
Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void TrtInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("data", {1, 3, 112, 112}); + auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 6) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " + "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " + "test_lite_focal_arcface_2.png 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + std::vector image_files = {argv[2], argv[3], argv[4]}; + if (std::atoi(argv[5]) == 0) { + CpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 1) { + GpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 2) { + TrtInfer(argv[1], image_files); + } + return 0; +} diff --git a/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc b/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc new file mode 100644 index 0000000000..311679d422 --- /dev/null +++ b/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc @@ -0,0 +1,156 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto model = fastdeploy::vision::faceid::CosFace(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." 
<< std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void GpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! 
Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void TrtInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("data", {1, 3, 112, 112}); + auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 6) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " + "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " + "test_lite_focal_arcface_2.png 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + std::vector image_files = {argv[2], argv[3], argv[4]}; + if (std::atoi(argv[5]) == 0) { + CpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 1) { + GpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 2) { + TrtInfer(argv[1], image_files); + } + return 0; +} diff --git a/examples/vision/faceid/insightface/cpp/infer_vpl.cc b/examples/vision/faceid/insightface/cpp/infer_vpl.cc new file mode 100644 index 0000000000..311679d422 --- /dev/null +++ b/examples/vision/faceid/insightface/cpp/infer_vpl.cc @@ -0,0 +1,156 @@ +// Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#include "fastdeploy/vision.h" + +void CpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto model = fastdeploy::vision::faceid::CosFace(model_file); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" 
<< std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void GpuInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! 
Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +void TrtInfer(const std::string& model_file, + const std::vector& image_file) { + auto option = fastdeploy::RuntimeOption(); + option.UseGpu(); + option.UseTrtBackend(); + option.SetTrtInputShape("data", {1, 3, 112, 112}); + auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + if (!model.Initialized()) { + std::cerr << "Failed to initialize." << std::endl; + return; + } + + cv::Mat face0 = cv::imread(image_file[0]); + cv::Mat face1 = cv::imread(image_file[1]); + cv::Mat face2 = cv::imread(image_file[2]); + + fastdeploy::vision::FaceRecognitionResult res0; + fastdeploy::vision::FaceRecognitionResult res1; + fastdeploy::vision::FaceRecognitionResult res2; + + if ((!model.Predict(&face0, &res0)) || (!model.Predict(&face1, &res1)) || + (!model.Predict(&face2, &res2))) { + std::cerr << "Prediction Failed." << std::endl; + } + + std::cout << "Prediction Done!" << std::endl; + + // 输出预测框结果 + std::cout << "--- [Face 0]:" << res0.Str(); + std::cout << "--- [Face 1]:" << res1.Str(); + std::cout << "--- [Face 2]:" << res2.Str(); + + // 计算余弦相似度 + float cosine01 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res1.embedding, model.l2_normalize); + float cosine02 = fastdeploy::vision::utils::CosineSimilarity( + res0.embedding, res2.embedding, model.l2_normalize); + std::cout << "Detect Done! Cosine 01: " << cosine01 + << ", Cosine 02:" << cosine02 << std::endl; +} + +int main(int argc, char* argv[]) { + if (argc < 6) { + std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " + "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " + "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " + "test_lite_focal_arcface_2.png 0" + << std::endl; + std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " + "with gpu; 2: run with gpu and use tensorrt backend." 
+ << std::endl; + return -1; + } + + std::vector image_files = {argv[2], argv[3], argv[4]}; + if (std::atoi(argv[5]) == 0) { + CpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 1) { + GpuInfer(argv[1], image_files); + } else if (std::atoi(argv[5]) == 2) { + TrtInfer(argv[1], image_files); + } + return 0; +} diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md new file mode 100644 index 0000000000..d079098db0 --- /dev/null +++ b/examples/vision/faceid/insightface/python/README.md @@ -0,0 +1,88 @@ +# InsightFace Python部署示例 +本目录下提供infer_xxx.cc快速完成InsighFace模型包括ArcFace\CosFace\VPL\Partial_FC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 + +在部署前,需确认以下两个步骤 + +- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) +- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) + +以ArcFace为例子, 提供`infer_arcface.py`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 + +``` +#下载ArcFace模型文件和测试图片 +wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r100.onnx +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_0.png +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_1.png +wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_2.png + + +#下载部署示例代码 +git clone https://github.com/PaddlePaddle/FastDeploy.git +cd examples/vison/detection/insightface/python/ + +# CPU推理 +python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.png --face_positive test_lite_focal_arcface_1.png --face_negative test_lite_focal_arcface_2.png --device cpu +# GPU推理 +python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.png --face_positive test_lite_focal_arcface_1.png --face_negative 
test_lite_focal_arcface_2.png --device gpu +# GPU上使用TensorRT推理 +python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.png --face_positive test_lite_focal_arcface_1.png --face_negative test_lite_focal_arcface_2.png --device gpu --use_trt True +``` + +运行完成可视化结果如下图所示 + + + +## ArcFace Python接口 + +``` +fastdeploy.vision.detection.ArcFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.detection.CosFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.detection.PartialFC(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.detection.VPL(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +``` + +ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 + +**参数** + +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX + +### predict函数 + +> ``` +> ArcFace.predict(image_data, conf_threshold=0.25, nms_iou_threshold=0.5) +> ``` +> +> 模型预测结口,输入图像直接输出检测结果。 +> +> **参数** +> +> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 +> > * **conf_threshold**(float): 检测框置信度过滤阈值 +> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 + +> **返回** +> +> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) + +### 类成员属性 + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` +> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` +> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 
默认值为`stride=32` +> > * **downsample_strides**(list[int]): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] +> > * **landmarks_per_face**(int): 如果使用具有人脸关键点的输出, 可以修改人脸关键点数量, 默认值为`landmarks_per_face=5` +> > * **use_kps**(bool): 通过此参数可以设置模型是否使用关键点,如果ONNX文件没有关键点输出则需要将`use_kps=False`, 并将`landmarks_per_face=0`, 默认值为`use_kps=True` +> > * **num_anchors**(int): 通过此参数可以设置每个锚点预测的anchor数量, 需要跟进训练模型的参数设定, 默认值为`num_anchors=2` + + +## 其它文档 + +- [InsightFace 模型介绍](..) +- [InsightFace C++部署](../cpp) +- [模型预测结果说明](../../../../../docs/api/vision_results/) diff --git a/examples/vision/faceid/insightface/python/infer_arcface.py b/examples/vision/faceid/insightface/python/infer_arcface.py new file mode 100644 index 0000000000..2d725026e1 --- /dev/null +++ b/examples/vision/faceid/insightface/python/infer_arcface.py @@ -0,0 +1,90 @@ +import fastdeploy as fd +import cv2 +import numpy as np + + +# 余弦相似度 +def cosine_similarity(a, b): + a = np.array(a) + b = np.array(b) + mul_a = np.linalg.norm(a, ord=2) + mul_b = np.linalg.norm(b, ord=2) + mul_ab = np.dot(a, b) + return mul_ab / (np.sqrt(mul_a) * np.sqrt(mul_b)) + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of scrfd onnx model.") + parser.add_argument( + "--face", required=True, help="Path of test face image file.") + parser.add_argument( + "--face_positive", + required=True, + help="Path of test face_positive image file.") + parser.add_argument( + "--face_negative", + required=True, + help="Path of test face_negative image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + 
option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("data", [1, 3, 112, 112]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.faceid.ArcFace(args.model, runtime_option=runtime_option) + +# 加载图片 +face0 = cv2.imread(args.face) # 0,1 同一个人 +face1 = cv2.imread(args.face_positive) +face2 = cv2.imread(args.face_negative) # 0,2 不同的人 + +# 设置 l2 normalize +model.l2_normalize = True + +# 预测图片检测结果 +result0 = model.predict(face0) +result1 = model.predict(face1) +result2 = model.predict(face2) + +# 计算余弦相似度 +embedding0 = result0.embedding +embedding1 = result1.embedding +embedding2 = result2.embedding + +cosine01 = cosine_similarity(embedding0, embedding1) +cosine02 = cosine_similarity(embedding0, embedding2) + +# 打印结果 +print(result0, end="") +print(result1, end="") +print(result2, end="") +print("Cosine 01: ", cosine01) +print("Cosine 02: ", cosine02) +print(model.runtime_option) diff --git a/examples/vision/faceid/insightface/python/infer_cosface.py b/examples/vision/faceid/insightface/python/infer_cosface.py new file mode 100644 index 0000000000..07f1a0b14b --- /dev/null +++ b/examples/vision/faceid/insightface/python/infer_cosface.py @@ -0,0 +1,90 @@ +import fastdeploy as fd +import cv2 +import numpy as np + + +# 余弦相似度 +def cosine_similarity(a, b): + a = np.array(a) + b = np.array(b) + mul_a = np.linalg.norm(a, ord=2) + mul_b = np.linalg.norm(b, ord=2) + mul_ab = np.dot(a, b) + return mul_ab / (np.sqrt(mul_a) * np.sqrt(mul_b)) + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of scrfd onnx model.") + parser.add_argument( + "--face", required=True, help="Path of test face image file.") + parser.add_argument( + "--face_positive", + required=True, + help="Path of test face_positive image file.") + parser.add_argument( + "--face_negative", + required=True, + 
help="Path of test face_negative image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("data", [1, 3, 112, 112]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.faceid.CosFace(args.model, runtime_option=runtime_option) + +# 加载图片 +face0 = cv2.imread(args.face) # 0,1 同一个人 +face1 = cv2.imread(args.face_positive) +face2 = cv2.imread(args.face_negative) # 0,2 不同的人 + +# 设置 l2 normalize +model.l2_normalize = True + +# 预测图片检测结果 +result0 = model.predict(face0) +result1 = model.predict(face1) +result2 = model.predict(face2) + +# 计算余弦相似度 +embedding0 = result0.embedding +embedding1 = result1.embedding +embedding2 = result2.embedding + +cosine01 = cosine_similarity(embedding0, embedding1) +cosine02 = cosine_similarity(embedding0, embedding2) + +# 打印结果 +print(result0, end="") +print(result1, end="") +print(result2, end="") +print("Cosine 01: ", cosine01) +print("Cosine 02: ", cosine02) +print(model.runtime_option) diff --git a/examples/vision/faceid/insightface/python/infer_partial_fc.py b/examples/vision/faceid/insightface/python/infer_partial_fc.py new file mode 100644 index 0000000000..07f1a0b14b --- /dev/null +++ b/examples/vision/faceid/insightface/python/infer_partial_fc.py @@ -0,0 +1,90 @@ +import fastdeploy as fd +import cv2 +import numpy as np + + +# 余弦相似度 +def cosine_similarity(a, b): + a = np.array(a) + b = np.array(b) + mul_a = np.linalg.norm(a, ord=2) + mul_b = np.linalg.norm(b, ord=2) + mul_ab = np.dot(a, b) + return mul_ab / (np.sqrt(mul_a) * np.sqrt(mul_b)) + + +def 
parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of scrfd onnx model.") + parser.add_argument( + "--face", required=True, help="Path of test face image file.") + parser.add_argument( + "--face_positive", + required=True, + help="Path of test face_positive image file.") + parser.add_argument( + "--face_negative", + required=True, + help="Path of test face_negative image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("data", [1, 3, 112, 112]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.faceid.CosFace(args.model, runtime_option=runtime_option) + +# 加载图片 +face0 = cv2.imread(args.face) # 0,1 同一个人 +face1 = cv2.imread(args.face_positive) +face2 = cv2.imread(args.face_negative) # 0,2 不同的人 + +# 设置 l2 normalize +model.l2_normalize = True + +# 预测图片检测结果 +result0 = model.predict(face0) +result1 = model.predict(face1) +result2 = model.predict(face2) + +# 计算余弦相似度 +embedding0 = result0.embedding +embedding1 = result1.embedding +embedding2 = result2.embedding + +cosine01 = cosine_similarity(embedding0, embedding1) +cosine02 = cosine_similarity(embedding0, embedding2) + +# 打印结果 +print(result0, end="") +print(result1, end="") +print(result2, end="") +print("Cosine 01: ", cosine01) +print("Cosine 02: ", cosine02) +print(model.runtime_option) diff --git a/examples/vision/faceid/insightface/python/infer_vpl.py b/examples/vision/faceid/insightface/python/infer_vpl.py new file mode 100644 index 
0000000000..07f1a0b14b --- /dev/null +++ b/examples/vision/faceid/insightface/python/infer_vpl.py @@ -0,0 +1,90 @@ +import fastdeploy as fd +import cv2 +import numpy as np + + +# 余弦相似度 +def cosine_similarity(a, b): + a = np.array(a) + b = np.array(b) + mul_a = np.linalg.norm(a, ord=2) + mul_b = np.linalg.norm(b, ord=2) + mul_ab = np.dot(a, b) + return mul_ab / (np.sqrt(mul_a) * np.sqrt(mul_b)) + + +def parse_arguments(): + import argparse + import ast + parser = argparse.ArgumentParser() + parser.add_argument( + "--model", required=True, help="Path of scrfd onnx model.") + parser.add_argument( + "--face", required=True, help="Path of test face image file.") + parser.add_argument( + "--face_positive", + required=True, + help="Path of test face_positive image file.") + parser.add_argument( + "--face_negative", + required=True, + help="Path of test face_negative image file.") + parser.add_argument( + "--device", + type=str, + default='cpu', + help="Type of inference device, support 'cpu' or 'gpu'.") + parser.add_argument( + "--use_trt", + type=ast.literal_eval, + default=False, + help="Wether to use tensorrt.") + return parser.parse_args() + + +def build_option(args): + option = fd.RuntimeOption() + + if args.device.lower() == "gpu": + option.use_gpu() + + if args.use_trt: + option.use_trt_backend() + option.set_trt_input_shape("data", [1, 3, 112, 112]) + return option + + +args = parse_arguments() + +# 配置runtime,加载模型 +runtime_option = build_option(args) +model = fd.vision.faceid.CosFace(args.model, runtime_option=runtime_option) + +# 加载图片 +face0 = cv2.imread(args.face) # 0,1 同一个人 +face1 = cv2.imread(args.face_positive) +face2 = cv2.imread(args.face_negative) # 0,2 不同的人 + +# 设置 l2 normalize +model.l2_normalize = True + +# 预测图片检测结果 +result0 = model.predict(face0) +result1 = model.predict(face1) +result2 = model.predict(face2) + +# 计算余弦相似度 +embedding0 = result0.embedding +embedding1 = result1.embedding +embedding2 = result2.embedding + +cosine01 = 
cosine_similarity(embedding0, embedding1) +cosine02 = cosine_similarity(embedding0, embedding2) + +# 打印结果 +print(result0, end="") +print(result1, end="") +print(result2, end="") +print("Cosine 01: ", cosine01) +print("Cosine 02: ", cosine02) +print(model.runtime_option) diff --git a/examples/vision/faceid/partial_fc/README.md b/examples/vision/faceid/partial_fc/README.md deleted file mode 100644 index ca03ba2e79..0000000000 --- a/examples/vision/faceid/partial_fc/README.md +++ /dev/null @@ -1,37 +0,0 @@ - - - - - - -## 下载预训练ONNX模型 - -为了方便开发者的测试,下面提供了RetinaFace导出的各系列模型,开发者可直接下载使用。 - -| 模型 | 大小 | 精度 | -|:---------------------------------------------------------------- |:----- |:----- | -| [partial_fc_glint360k_r50](https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx) | 167MB | - | -| [partial_fc_glint360k_r100](https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r100.onnx) | 249MB | -| - - - -## 详细部署文档 - -- [Python部署](python) -- [C++部署](cpp) diff --git a/examples/vision/faceid/partial_fc/cpp/CMakeLists.txt b/examples/vision/faceid/partial_fc/cpp/CMakeLists.txt deleted file mode 100644 index fea1a2888b..0000000000 --- a/examples/vision/faceid/partial_fc/cpp/CMakeLists.txt +++ /dev/null @@ -1,14 +0,0 @@ -PROJECT(infer_demo C CXX) -CMAKE_MINIMUM_REQUIRED (VERSION 3.12) - -# 指定下载解压后的fastdeploy库路径 -option(FASTDEPLOY_INSTALL_DIR "Path of downloaded fastdeploy sdk.") - -include(${FASTDEPLOY_INSTALL_DIR}/FastDeploy.cmake) - -# 添加FastDeploy依赖头文件 -include_directories(${FASTDEPLOY_INCS}) - -add_executable(infer_demo ${PROJECT_SOURCE_DIR}/infer.cc) -# 添加FastDeploy库依赖 -target_link_libraries(infer_demo ${FASTDEPLOY_LIBS}) diff --git a/examples/vision/faceid/partial_fc/cpp/README.md b/examples/vision/faceid/partial_fc/cpp/README.md deleted file mode 100644 index 20a2f0eb6e..0000000000 --- a/examples/vision/faceid/partial_fc/cpp/README.md +++ /dev/null @@ -1,85 +0,0 @@ -# PartialFC C++部署示例 - 
-本目录下提供`infer.cc`快速完成PartialFC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) -- 2. 根据开发环境,下载预编译部署库和samples代码,参考[FastDeploy预编译库](../../../../../docs/compile/prebuild_libraries.md) - -以Linux上CPU推理为例,在本目录执行如下命令即可完成编译测试 - -``` -mkdir build -cd build -wget https://xxx.tgz -tar xvf fastdeploy-linux-x64-0.2.0.tgz -cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 -make -j - -#下载官方转换好的PartialFC模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx -wget todo - - -# CPU推理 -./infer_demo partial_fc_glint360k_r50.onnx todo 0 -# GPU推理 -./infer_demo partial_fc_glint360k_r50.onnx todo 1 -# GPU上TensorRT推理 -./infer_demo partial_fc_glint360k_r50.onnx todo 2 -``` - -运行完成可视化结果如下图所示 - - - -## PartialFC C++接口 - -### PartialFC类 - -``` -fastdeploy::vision::faceid::PartialFC( - const string& model_file, - const string& params_file = "", - const RuntimeOption& runtime_option = RuntimeOption(), - const Frontend& model_format = Frontend::ONNX) -``` - -PartialFC模型加载和初始化,其中model_file为导出的ONNX模型格式。 - -**参数** - -> * **model_file**(str): 模型文件路径 -> * **params_file**(str): 参数文件路径,当模型格式为ONNX时,此参数传入空字符串即可 -> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 -> * **model_format**(Frontend): 模型格式,默认为ONNX格式 - -#### Predict函数 - -> ``` -> PartialFC::Predict(cv::Mat* im, DetectionResult* result, -> float conf_threshold = 0.25, -> float nms_iou_threshold = 0.5) -> ``` -> -> 模型预测接口,输入图像直接输出检测结果。 -> -> **参数** -> -> > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) -> > * **conf_threshold**: 检测框置信度过滤阈值 -> > * **nms_iou_threshold**: NMS处理过程中iou阈值 - -### 类成员变量 - -> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] 
-> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` - -- [模型介绍](../../) -- [Python部署](../python) -- [视觉模型预测结果](../../../../../docs/api/vision_results/) diff --git a/examples/vision/faceid/partial_fc/python/README.md b/examples/vision/faceid/partial_fc/python/README.md deleted file mode 100644 index 6189e99c47..0000000000 --- a/examples/vision/faceid/partial_fc/python/README.md +++ /dev/null @@ -1,79 +0,0 @@ -# PartialFC Python部署示例 - -在部署前,需确认以下两个步骤 - -- 1. 软硬件环境满足要求,参考[FastDeploy环境要求](../../../../../docs/quick_start/requirements.md) -- 2. FastDeploy Python whl包安装,参考[FastDeploy Python安装](../../../../../docs/quick_start/install.md) - -本目录下提供`infer.py`快速完成PartialFC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。执行如下脚本即可完成 - -``` -#下载partial_fc模型文件和测试图片 -wget https://bj.bcebos.com/paddlehub/fastdeploy/partial_fc_glint360k_r50.onnx -wget todo - - -#下载部署示例代码 -git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/partial_fc/python/ - -# CPU推理 -python infer.py --model partial_fc_glint360k_r50.onnx --image todo --device cpu -# GPU推理 -python infer.py --model partial_fc_glint360k_r50.onnx --image todo --device gpu -# GPU上使用TensorRT推理 -python infer.py --model partial_fc_glint360k_r50.onnx --image todo --device gpu --use_trt True -``` - -运行完成可视化结果如下图所示 - - - -## PartialFC Python接口 - -``` -fastdeploy.vision.faceid.PartialFC(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) -``` - -PartialFC模型加载和初始化,其中model_file为导出的ONNX模型格式 - -**参数** - -> * **model_file**(str): 模型文件路径 -> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 -> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 -> * **model_format**(Frontend): 模型格式,默认为ONNX - -### predict函数 - -> ``` -> PartialFC.predict(image_data, 
conf_threshold=0.25, nms_iou_threshold=0.5) -> ``` -> -> 模型预测结口,输入图像直接输出检测结果。 -> -> **参数** -> -> > * **image_data**(np.ndarray): 输入数据,注意需为HWC,BGR格式 -> > * **conf_threshold**(float): 检测框置信度过滤阈值 -> > * **nms_iou_threshold**(float): NMS处理过程中iou阈值 - -> **返回** -> -> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) - -### 类成员属性 - -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` - - - -## 其它文档 - -- [PartialFC 模型介绍](..) -- [PartialFC C++部署](../cpp) -- [模型预测结果说明](../../../../../docs/api/vision_results/) From eecf5ced643e078162e2a64ece7263880fa4c80d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 14:46:43 +0000 Subject: [PATCH 69/94] first commit for insightface --- examples/vision/faceid/insightface/cpp/README.md | 2 +- examples/vision/faceid/insightface/python/README.md | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/examples/vision/faceid/insightface/cpp/README.md b/examples/vision/faceid/insightface/cpp/README.md index 03b61684d5..6fdf812887 100644 --- a/examples/vision/faceid/insightface/cpp/README.md +++ b/examples/vision/faceid/insightface/cpp/README.md @@ -1,5 +1,5 @@ # InsightFace C++部署示例 - +本目录下提供infer_xxx.cc快速完成InsighFace模型包括ArcFace\CosFace\VPL\Partial_FC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 以ArcFace为例提供`infer_arcface.cc`快速完成ArcFace在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 在部署前,需确认以下两个步骤 diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index d079098db0..6bc55fe53e 
100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -1,5 +1,5 @@ # InsightFace Python部署示例 -本目录下提供infer_xxx.cc快速完成InsighFace模型包括ArcFace\CosFace\VPL\Partial_FC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 +本目录下提供infer_xxx.py快速完成InsightFace模型包括ArcFace\CosFace\VPL\Partial_FC在CPU/GPU,以及GPU上通过TensorRT加速部署的示例。 在部署前,需确认以下两个步骤 From c04773f15c5a58437b017cfc3423ed57b793b3bb Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Thu, 11 Aug 2022 14:52:19 +0000 Subject: [PATCH 70/94] first commit for insightface --- examples/vision/faceid/insightface/cpp/infer_partial_fc.cc | 6 +++--- examples/vision/faceid/insightface/cpp/infer_vpl.cc | 6 +++--- .../vision/faceid/insightface/python/infer_partial_fc.py | 2 +- examples/vision/faceid/insightface/python/infer_vpl.py | 2 +- 4 files changed, 8 insertions(+), 8 deletions(-) diff --git a/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc b/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc index 311679d422..9e0c920c46 100644 --- a/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc +++ b/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc @@ -16,7 +16,7 @@ void CpuInfer(const std::string& model_file, const std::vector& image_file) { - auto model = fastdeploy::vision::faceid::CosFace(model_file); + auto model = fastdeploy::vision::faceid::PartialFC(model_file); if (!model.Initialized()) { std::cerr << "Failed to initialize." << std::endl; return; @@ -55,7 +55,7 @@ void GpuInfer(const std::string& model_file, const std::vector& image_file) { auto option = fastdeploy::RuntimeOption(); option.UseGpu(); - auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + auto model = fastdeploy::vision::faceid::PartialFC(model_file, "", option); if (!model.Initialized()) { std::cerr << "Failed to initialize." 
<< std::endl; return; @@ -96,7 +96,7 @@ void TrtInfer(const std::string& model_file, option.UseGpu(); option.UseTrtBackend(); option.SetTrtInputShape("data", {1, 3, 112, 112}); - auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + auto model = fastdeploy::vision::faceid::PartialFC(model_file, "", option); if (!model.Initialized()) { std::cerr << "Failed to initialize." << std::endl; return; diff --git a/examples/vision/faceid/insightface/cpp/infer_vpl.cc b/examples/vision/faceid/insightface/cpp/infer_vpl.cc index 311679d422..04a2d30f4a 100644 --- a/examples/vision/faceid/insightface/cpp/infer_vpl.cc +++ b/examples/vision/faceid/insightface/cpp/infer_vpl.cc @@ -16,7 +16,7 @@ void CpuInfer(const std::string& model_file, const std::vector& image_file) { - auto model = fastdeploy::vision::faceid::CosFace(model_file); + auto model = fastdeploy::vision::faceid::VPL(model_file); if (!model.Initialized()) { std::cerr << "Failed to initialize." << std::endl; return; @@ -55,7 +55,7 @@ void GpuInfer(const std::string& model_file, const std::vector& image_file) { auto option = fastdeploy::RuntimeOption(); option.UseGpu(); - auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + auto model = fastdeploy::vision::faceid::VPL(model_file, "", option); if (!model.Initialized()) { std::cerr << "Failed to initialize." << std::endl; return; @@ -96,7 +96,7 @@ void TrtInfer(const std::string& model_file, option.UseGpu(); option.UseTrtBackend(); option.SetTrtInputShape("data", {1, 3, 112, 112}); - auto model = fastdeploy::vision::faceid::CosFace(model_file, "", option); + auto model = fastdeploy::vision::faceid::VPL(model_file, "", option); if (!model.Initialized()) { std::cerr << "Failed to initialize." 
<< std::endl; return; diff --git a/examples/vision/faceid/insightface/python/infer_partial_fc.py b/examples/vision/faceid/insightface/python/infer_partial_fc.py index 07f1a0b14b..b931af0dff 100644 --- a/examples/vision/faceid/insightface/python/infer_partial_fc.py +++ b/examples/vision/faceid/insightface/python/infer_partial_fc.py @@ -58,7 +58,7 @@ def build_option(args): # 配置runtime,加载模型 runtime_option = build_option(args) -model = fd.vision.faceid.CosFace(args.model, runtime_option=runtime_option) +model = fd.vision.faceid.PartialFC(args.model, runtime_option=runtime_option) # 加载图片 face0 = cv2.imread(args.face) # 0,1 同一个人 diff --git a/examples/vision/faceid/insightface/python/infer_vpl.py b/examples/vision/faceid/insightface/python/infer_vpl.py index 07f1a0b14b..14c25bfb47 100644 --- a/examples/vision/faceid/insightface/python/infer_vpl.py +++ b/examples/vision/faceid/insightface/python/infer_vpl.py @@ -58,7 +58,7 @@ def build_option(args): # 配置runtime,加载模型 runtime_option = build_option(args) -model = fd.vision.faceid.CosFace(args.model, runtime_option=runtime_option) +model = fd.vision.faceid.VPL(args.model, runtime_option=runtime_option) # 加载图片 face0 = cv2.imread(args.face) # 0,1 同一个人 From 6a7ed21b96b7feed9828e9afa0130b08154baed8 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 05:23:58 +0000 Subject: [PATCH 71/94] docs --- .../vision_results/face_detection_result.md | 34 +++++++++++++++++++ examples/vision/README.md | 3 +- 2 files changed, 36 insertions(+), 1 deletion(-) create mode 100644 docs/api/vision_results/face_detection_result.md diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md new file mode 100644 index 0000000000..8e04ecd194 --- /dev/null +++ b/docs/api/vision_results/face_detection_result.md @@ -0,0 +1,34 @@ +# DetectionResult 目标检测结果 + +DetectionResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 + +## C++ 结构体 + 
+`fastdeploy::vision::DetectionResult` + +``` +struct FaceDetectionResult { + std::vector> boxes; + std::vector> landmarks; + std::vector scores; + ResultType type = ResultType::FACE_DETECTION; + int landmarks_per_face; + void Clear(); + std::string Str(); +}; +``` + +- **boxes**: 成员变量,表示单张图片检测出来的所有目标框坐标,`boxes.size()`表示框的个数,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 +- **scores**: 成员变量,表示单张图片检测出来的所有目标置信度,其元素个数与`boxes.size()`一致 +- **landmarks**: 成员变量,表示单张图片检测出来的所有人脸的关键点,其元素个数与`boxes.size()`一致 +- **landmarks_per_face**: 成员变量,表示每个人脸框中的关键点的数量。 +- **Clear()**: 成员函数,用于清除结构体中存储的结果 +- **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) + +## Python结构体 + +`fastdeploy.vision.DetectionResult` + +- **boxes**(list of list(float)): 成员变量,表示单张图片检测出来的所有目标框坐标。boxes是一个list,其每个元素为一个长度为4的list, 表示为一个框,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 +- **scores**(list of float): 成员变量,表示单张图片检测出来的所有目标置信度 +- **label_ids(list of int): 成员变量,表示单张图片检测出来的所有目标类别 diff --git a/examples/vision/README.md b/examples/vision/README.md index 31f1dd09d7..4f67a0be64 100644 --- a/examples/vision/README.md +++ b/examples/vision/README.md @@ -7,7 +7,8 @@ | Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../../../docs/api/vision_results/detection_result.md) | | Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | [SegmentationResult](../../../../docs/api/vision_results/segmentation_result.md) | | Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../../../docs/api/vision_results/classification_result.md) | - +| FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../../../docs/api/vision_results/face_detection_result.md) | +| Matting | 抠图,输入图像,返回图片的前景每个像素点的Alpha值 | [MattingResult](../../../../docs/api/vision_results/matting_result.md) | ## FastDeploy API设计 视觉模型具有较有统一任务范式,在设计API时(包括C++/Python),FastDeploy将视觉模型的部署拆分为四个步骤 From 46497fb1e5e8b8bf77adc380dd81046d397bd61d Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 05:25:13 +0000 Subject: [PATCH 
72/94] docs --- examples/vision/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/README.md b/examples/vision/README.md index 4f67a0be64..38a0643134 100644 --- a/examples/vision/README.md +++ b/examples/vision/README.md @@ -4,7 +4,7 @@ | 任务类型 | 说明 | 预测结果结构体 | |:-------------- |:----------------------------------- |:-------------------------------------------------------------------------------- | -| Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../../../docs/api/vision_results/detection_result.md) | +| Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../../docs/api/vision_results/detection_result.md) | | Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | [SegmentationResult](../../../../docs/api/vision_results/segmentation_result.md) | | Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../../../docs/api/vision_results/classification_result.md) | | FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../../../docs/api/vision_results/face_detection_result.md) | From fa29760ac36487ee3706a85a64a2f2c7e6be033a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 05:28:11 +0000 Subject: [PATCH 73/94] docs --- examples/vision/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/README.md b/examples/vision/README.md index 38a0643134..5168f43285 100644 --- a/examples/vision/README.md +++ b/examples/vision/README.md @@ -5,7 +5,7 @@ | 任务类型 | 说明 | 预测结果结构体 | |:-------------- |:----------------------------------- |:-------------------------------------------------------------------------------- | | Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../../docs/api/vision_results/detection_result.md) | -| Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | [SegmentationResult](../../../../docs/api/vision_results/segmentation_result.md) | +| Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | 
[SegmentationResult](../../docs/api/vision_results/segmentation_result.md) | | Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../../../docs/api/vision_results/classification_result.md) | | FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../../../docs/api/vision_results/face_detection_result.md) | | Matting | 抠图,输入图像,返回图片的前景每个像素点的Alpha值 | [MattingResult](../../../../docs/api/vision_results/matting_result.md) | From d76a4b73fe30f2da4e255bbab24e8c7a71c7b516 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 05:31:00 +0000 Subject: [PATCH 74/94] docs --- examples/vision/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/README.md b/examples/vision/README.md index 5168f43285..2b2e0cfd0c 100644 --- a/examples/vision/README.md +++ b/examples/vision/README.md @@ -7,7 +7,7 @@ | Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../../docs/api/vision_results/detection_result.md) | | Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | [SegmentationResult](../../docs/api/vision_results/segmentation_result.md) | | Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../../../docs/api/vision_results/classification_result.md) | -| FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../../../docs/api/vision_results/face_detection_result.md) | +| FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../docs/api/vision_results/face_detection_result.md) | | Matting | 抠图,输入图像,返回图片的前景每个像素点的Alpha值 | [MattingResult](../../../../docs/api/vision_results/matting_result.md) | ## FastDeploy API设计 From 569b4e4803de8254a2cd20ecb0efaedae5f84017 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 05:40:28 +0000 Subject: [PATCH 75/94] docs --- docs/api/vision_results/README.md | 4 ++- docs/api/vision_results/matting_result.md | 34 +++++++++++++++++++++++ examples/vision/README.md | 6 ++-- 3 files changed, 40 
insertions(+), 4 deletions(-) create mode 100644 docs/api/vision_results/matting_result.md diff --git a/docs/api/vision_results/README.md b/docs/api/vision_results/README.md index 0a05aeaf8c..844388cca8 100644 --- a/docs/api/vision_results/README.md +++ b/docs/api/vision_results/README.md @@ -4,5 +4,7 @@ FastDeploy根据视觉模型的任务类型,定义了不同的结构体(`csrcs | 结构体 | 文档 | 说明 | 相应模型 | | :----- | :--- | :---- | :------- | -| ClassificationResult | [C++/Python文档](./classificiation_result.md) | 图像分类返回结果 | ResNet50、MobileNetV3等 | +| ClassificationResult | [C++/Python文档](./classification_result.md) | 图像分类返回结果 | ResNet50、MobileNetV3等 | | DetectionResult | [C++/Python文档](./detection_result.md) | 目标检测返回结果 | PPYOLOE、YOLOv7系列模型等 | +| FaceDetectionResult | [C++/Python文档](./face_detection_result.md) | 人脸检测返回结果 | RetinaFace、SCRFD系列模型等 | +| MattingResult | [C++/Python文档](./matting_result.md) | 抠图返回结果 | MODNet系列模型等 | diff --git a/docs/api/vision_results/matting_result.md b/docs/api/vision_results/matting_result.md new file mode 100644 index 0000000000..175b5ad694 --- /dev/null +++ b/docs/api/vision_results/matting_result.md @@ -0,0 +1,34 @@ +# DetectionResult 目标检测结果 + +DetectionResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 + +## C++ 结构体 + +`fastdeploy::vision::DetectionResult` + +``` +struct MattingResult { + std::vector alpha; // h x w + std::vector foreground; // h x w x c (c=3 default) + std::vector shape; + bool contain_foreground = false; + void Clear(); + std::string Str(); +}; +``` + +- **alpha**: 是一维向量,为预测的alpha透明度的值,值域为[0.,1.],长度为hxw,h,w为输入图像的高和宽 +- **foreground**: 是一维向量,为预测的前景,值域为[0.,255.],长度为hxwxc,h,w为输入图像的高和宽,c一般为3,foreground不是一定有的,只有模型本身预测了前景,这个属性才会有效 +- **contain_foreground**: 表示预测的结果是否包含前景 +- **shape**: 表示输出结果的shape,当contain_foreground为false,shape只包含(h,w),当contain_foreground为true,shape包含(h,w,c), c一般为3 +- **Clear()**: 成员函数,用于清除结构体中存储的结果 +- **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) + + +## Python结构体 + +`fastdeploy.vision.DetectionResult` + +- 
**boxes**(list of list(float)): 成员变量,表示单张图片检测出来的所有目标框坐标。boxes是一个list,其每个元素为一个长度为4的list, 表示为一个框,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 +- **scores**(list of float): 成员变量,表示单张图片检测出来的所有目标置信度 +- **label_ids(list of int): 成员变量,表示单张图片检测出来的所有目标类别 diff --git a/examples/vision/README.md b/examples/vision/README.md index 2b2e0cfd0c..9f05d2d7f6 100644 --- a/examples/vision/README.md +++ b/examples/vision/README.md @@ -4,11 +4,11 @@ | 任务类型 | 说明 | 预测结果结构体 | |:-------------- |:----------------------------------- |:-------------------------------------------------------------------------------- | -| Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../../docs/api/vision_results/detection_result.md) | +| Detection | 目标检测,输入图像,检测图像中物体位置,并返回检测框坐标及类别和置信度 | [DetectionResult](../../docs/api/vision_results/detection_result.md) | | Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | [SegmentationResult](../../docs/api/vision_results/segmentation_result.md) | -| Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../../../docs/api/vision_results/classification_result.md) | +| Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../docs/api/vision_results/classification_result.md) | | FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../docs/api/vision_results/face_detection_result.md) | -| Matting | 抠图,输入图像,返回图片的前景每个像素点的Alpha值 | [MattingResult](../../../../docs/api/vision_results/matting_result.md) | +| Matting | 抠图,输入图像,返回图片的前景每个像素点的Alpha值 | [MattingResult](../../docs/api/vision_results/matting_result.md) | ## FastDeploy API设计 视觉模型具有较有统一任务范式,在设计API时(包括C++/Python),FastDeploy将视觉模型的部署拆分为四个步骤 From 1c6c9922a227118f0df6c8f6b2b603dd5228335c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 08:17:53 +0000 Subject: [PATCH 76/94] add print for detect and modify docs --- docs/api/vision_results/face_detection_result.md | 8 ++++---- docs/api/vision_results/matting_result.md | 15 ++++++++------- 
.../classification/paddleclas/python/infer.py | 5 +++-- .../vision/detection/nanodet_plus/cpp/README.md | 2 +- .../vision/detection/nanodet_plus/cpp/infer.cc | 4 +++- .../detection/nanodet_plus/python/README.md | 2 +- .../vision/detection/nanodet_plus/python/infer.py | 3 ++- .../vision/detection/scaledyolov4/cpp/README.md | 2 +- .../vision/detection/scaledyolov4/cpp/infer.cc | 3 +++ .../detection/scaledyolov4/python/README.md | 2 +- .../vision/detection/scaledyolov4/python/infer.py | 3 ++- examples/vision/detection/yolor/cpp/README.md | 2 +- examples/vision/detection/yolor/cpp/infer.cc | 3 +++ examples/vision/detection/yolor/python/README.md | 2 +- examples/vision/detection/yolor/python/infer.py | 3 ++- examples/vision/detection/yolov5/cpp/README.md | 2 +- examples/vision/detection/yolov5/cpp/infer.cc | 3 +++ examples/vision/detection/yolov5/python/README.md | 2 +- examples/vision/detection/yolov5/python/infer.py | 3 ++- .../vision/detection/yolov5lite/cpp/README.md | 2 +- examples/vision/detection/yolov5lite/cpp/infer.cc | 3 +++ .../vision/detection/yolov5lite/python/README.md | 2 +- .../vision/detection/yolov5lite/python/infer.py | 3 ++- examples/vision/detection/yolov6/cpp/README.md | 2 +- examples/vision/detection/yolov6/cpp/infer.cc | 3 +++ examples/vision/detection/yolov6/python/README.md | 2 +- examples/vision/detection/yolov6/python/infer.py | 3 ++- examples/vision/detection/yolov7/cpp/infer.cc | 3 +++ examples/vision/detection/yolov7/python/infer.py | 2 +- examples/vision/detection/yolox/cpp/README.md | 2 +- examples/vision/detection/yolox/cpp/infer.cc | 3 +++ examples/vision/detection/yolox/python/README.md | 2 +- examples/vision/detection/yolox/python/infer.py | 4 ++-- examples/vision/facedet/retinaface/cpp/README.md | 2 +- examples/vision/facedet/retinaface/cpp/infer.cc | 3 +++ .../vision/facedet/retinaface/python/README.md | 2 +- .../vision/facedet/retinaface/python/infer.py | 4 ++-- examples/vision/facedet/scrfd/cpp/README.md | 2 +- 
examples/vision/facedet/scrfd/cpp/infer.cc | 3 +++ examples/vision/facedet/scrfd/python/README.md | 2 +- examples/vision/facedet/scrfd/python/infer.py | 4 ++-- examples/vision/facedet/ultraface/cpp/README.md | 2 +- examples/vision/facedet/ultraface/cpp/infer.cc | 3 +++ .../vision/facedet/ultraface/python/README.md | 2 +- examples/vision/facedet/ultraface/python/infer.py | 4 ++-- examples/vision/facedet/yolov5face/cpp/README.md | 2 +- examples/vision/facedet/yolov5face/cpp/infer.cc | 3 +++ .../vision/facedet/yolov5face/python/README.md | 2 +- .../vision/facedet/yolov5face/python/infer.py | 4 ++-- examples/vision/faceid/insightface/cpp/README.md | 12 ++++++------ .../faceid/insightface/cpp/infer_arcface.cc | 4 ++-- .../faceid/insightface/cpp/infer_cosface.cc | 4 ++-- .../faceid/insightface/cpp/infer_partial_fc.cc | 4 ++-- .../vision/faceid/insightface/cpp/infer_vpl.cc | 4 ++-- .../vision/faceid/insightface/python/README.md | 12 ++++++------ examples/vision/matting/modnet/cpp/README.md | 3 ++- examples/vision/matting/modnet/python/README.md | 3 ++- examples/vision/matting/modnet/python/infer.py | 2 +- fastdeploy/vision/evaluation/detection.py | 4 ++-- 59 files changed, 123 insertions(+), 78 deletions(-) diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md index 8e04ecd194..3a0e54e0a3 100644 --- a/docs/api/vision_results/face_detection_result.md +++ b/docs/api/vision_results/face_detection_result.md @@ -1,10 +1,10 @@ -# DetectionResult 目标检测结果 +# FaceDetectionResult 目标检测结果 DetectionResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 ## C++ 结构体 -`fastdeploy::vision::DetectionResult` +`fastdeploy::vision::FaceDetectionResult` ``` struct FaceDetectionResult { @@ -27,8 +27,8 @@ struct FaceDetectionResult { ## Python结构体 -`fastdeploy.vision.DetectionResult` +`fastdeploy.vision.FaceDetectionResult` - **boxes**(list of list(float)): 成员变量,表示单张图片检测出来的所有目标框坐标。boxes是一个list,其每个元素为一个长度为4的list, 
表示为一个框,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 - **scores**(list of float): 成员变量,表示单张图片检测出来的所有目标置信度 -- **label_ids(list of int): 成员变量,表示单张图片检测出来的所有目标类别 +- **landmarks**: 成员变量,表示单张图片检测出来的所有人脸的关键点 diff --git a/docs/api/vision_results/matting_result.md b/docs/api/vision_results/matting_result.md index 175b5ad694..74f8937c47 100644 --- a/docs/api/vision_results/matting_result.md +++ b/docs/api/vision_results/matting_result.md @@ -1,10 +1,10 @@ -# DetectionResult 目标检测结果 +# MattingResult 抠图结果 -DetectionResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 +MattingResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明模型预测的alpha透明度的值和预测的前景。 ## C++ 结构体 -`fastdeploy::vision::DetectionResult` +`fastdeploy::vision::MattingResult` ``` struct MattingResult { @@ -27,8 +27,9 @@ struct MattingResult { ## Python结构体 -`fastdeploy.vision.DetectionResult` +`fastdeploy.vision.MattingResult` -- **boxes**(list of list(float)): 成员变量,表示单张图片检测出来的所有目标框坐标。boxes是一个list,其每个元素为一个长度为4的list, 表示为一个框,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 -- **scores**(list of float): 成员变量,表示单张图片检测出来的所有目标置信度 -- **label_ids(list of int): 成员变量,表示单张图片检测出来的所有目标类别 +- **alpha**: 是一维向量,为预测的alpha透明度的值,值域为[0.,1.],长度为hxw,h,w为输入图像的高和宽 +- **foreground**: 是一维向量,为预测的前景,值域为[0.,255.],长度为hxwxc,h,w为输入图像的高和宽,c一般为3,foreground不是一定有的,只有模型本身预测了前景,这个属性才会有效 +- **contain_foreground**: 表示预测的结果是否包含前景 +- **shape**: 表示输出结果的shape,当contain_foreground为false,shape只包含(h,w),当contain_foreground为true,shape包含(h,w,c), c一般为3 diff --git a/examples/vision/classification/paddleclas/python/infer.py b/examples/vision/classification/paddleclas/python/infer.py index b3a02be2ee..6fdb45cf69 100644 --- a/examples/vision/classification/paddleclas/python/infer.py +++ b/examples/vision/classification/paddleclas/python/infer.py @@ -39,9 +39,10 @@ def build_option(args): # 配置runtime,加载模型 runtime_option = build_option(args) -model = fd.vision.classification.PaddleClasModel(args.model, runtime_option=runtime_option) 
+model = fd.vision.classification.PaddleClasModel( + args.model, runtime_option=runtime_option) # 预测图片分类结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) print(result) diff --git a/examples/vision/detection/nanodet_plus/cpp/README.md b/examples/vision/detection/nanodet_plus/cpp/README.md index 2dbee5e31d..d1abfd3700 100644 --- a/examples/vision/detection/nanodet_plus/cpp/README.md +++ b/examples/vision/detection/nanodet_plus/cpp/README.md @@ -32,7 +32,7 @@ wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## NanoDetPlus C++接口 diff --git a/examples/vision/detection/nanodet_plus/cpp/infer.cc b/examples/vision/detection/nanodet_plus/cpp/infer.cc index b155c4a795..8443639cc5 100644 --- a/examples/vision/detection/nanodet_plus/cpp/infer.cc +++ b/examples/vision/detection/nanodet_plus/cpp/infer.cc @@ -29,7 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } - + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); std::cout << "Visualized result saved in ./vis_result.jpg" << std::endl; @@ -53,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -79,6 +80,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/nanodet_plus/python/README.md b/examples/vision/detection/nanodet_plus/python/README.md index 7aff8059f9..641c1ab5a4 100644 --- a/examples/vision/detection/nanodet_plus/python/README.md +++ b/examples/vision/detection/nanodet_plus/python/README.md @@ -26,7 +26,7 @@ python infer.py --model nanodet-plus-m_320.onnx --image 000000014439.jpg --devic 运行完成可视化结果如下图所示 - + ## NanoDetPlus Python接口 diff --git a/examples/vision/detection/nanodet_plus/python/infer.py b/examples/vision/detection/nanodet_plus/python/infer.py index 58a0f866bc..a0667db005 100644 --- a/examples/vision/detection/nanodet_plus/python/infer.py +++ b/examples/vision/detection/nanodet_plus/python/infer.py @@ -44,7 +44,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/scaledyolov4/cpp/README.md b/examples/vision/detection/scaledyolov4/cpp/README.md index 66aa9c8b73..d13e926ab8 100644 --- a/examples/vision/detection/scaledyolov4/cpp/README.md +++ b/examples/vision/detection/scaledyolov4/cpp/README.md @@ -32,7 +32,7 @@ wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## ScaledYOLOv4 C++接口 diff --git a/examples/vision/detection/scaledyolov4/cpp/infer.cc b/examples/vision/detection/scaledyolov4/cpp/infer.cc index 0452909714..7d912b2230 100644 --- a/examples/vision/detection/scaledyolov4/cpp/infer.cc +++ b/examples/vision/detection/scaledyolov4/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -53,6 +54,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -79,6 +81,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/scaledyolov4/python/README.md b/examples/vision/detection/scaledyolov4/python/README.md index bec85e0236..f198330832 100644 --- a/examples/vision/detection/scaledyolov4/python/README.md +++ b/examples/vision/detection/scaledyolov4/python/README.md @@ -27,7 +27,7 @@ python infer.py --model scaled_yolov4-p5.onnx --image 000000014439.jpg --device 运行完成可视化结果如下图所示 - + ## ScaledYOLOv4 Python接口 diff --git a/examples/vision/detection/scaledyolov4/python/infer.py b/examples/vision/detection/scaledyolov4/python/infer.py index eaf287f2c1..d3052381c4 100644 --- a/examples/vision/detection/scaledyolov4/python/infer.py +++ b/examples/vision/detection/scaledyolov4/python/infer.py @@ -44,7 +44,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/yolor/cpp/README.md b/examples/vision/detection/yolor/cpp/README.md index fb6543bcb1..731c37585a 100644 --- a/examples/vision/detection/yolor/cpp/README.md +++ b/examples/vision/detection/yolor/cpp/README.md @@ -32,7 +32,7 @@ wget 
https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## YOLOR C++接口 diff --git a/examples/vision/detection/yolor/cpp/infer.cc b/examples/vision/detection/yolor/cpp/infer.cc index dfa4cde0ff..0fe8913d47 100644 --- a/examples/vision/detection/yolor/cpp/infer.cc +++ b/examples/vision/detection/yolor/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/yolor/python/README.md b/examples/vision/detection/yolor/python/README.md index 268e897e2a..d2c57cca0a 100644 --- a/examples/vision/detection/yolor/python/README.md +++ b/examples/vision/detection/yolor/python/README.md @@ -27,7 +27,7 @@ python infer.py --model yolor-p6-paper-541-640-640.onnx --image 000000014439.jpg 运行完成可视化结果如下图所示 - + ## YOLOR Python接口 diff --git a/examples/vision/detection/yolor/python/infer.py b/examples/vision/detection/yolor/python/infer.py index f701796228..ca0c9f453c 100644 --- a/examples/vision/detection/yolor/python/infer.py +++ b/examples/vision/detection/yolor/python/infer.py @@ -43,7 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/yolov5/cpp/README.md b/examples/vision/detection/yolov5/cpp/README.md index feb44d13df..99e67a332c 100644 --- a/examples/vision/detection/yolov5/cpp/README.md +++ b/examples/vision/detection/yolov5/cpp/README.md @@ -32,7 +32,7 @@ wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## YOLOv5 C++接口 diff --git a/examples/vision/detection/yolov5/cpp/infer.cc b/examples/vision/detection/yolov5/cpp/infer.cc index ef3e47ea1f..a7ac1fe813 100644 --- a/examples/vision/detection/yolov5/cpp/infer.cc +++ b/examples/vision/detection/yolov5/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/yolov5/python/README.md b/examples/vision/detection/yolov5/python/README.md index 57cdba44cb..2a9bd4e18d 100644 --- a/examples/vision/detection/yolov5/python/README.md +++ b/examples/vision/detection/yolov5/python/README.md @@ -27,7 +27,7 @@ python infer.py --model yolov5s.onnx --image 000000014439.jpg --device gpu --use 运行完成可视化结果如下图所示 - + ## YOLOv5 Python接口 diff --git a/examples/vision/detection/yolov5/python/infer.py b/examples/vision/detection/yolov5/python/infer.py index 3f7a91f99d..2cf3a03849 100644 --- a/examples/vision/detection/yolov5/python/infer.py +++ b/examples/vision/detection/yolov5/python/infer.py @@ -43,7 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/yolov5lite/cpp/README.md b/examples/vision/detection/yolov5lite/cpp/README.md index 2fad44e34b..a622d2e8fd 100644 --- a/examples/vision/detection/yolov5lite/cpp/README.md +++ b/examples/vision/detection/yolov5lite/cpp/README.md @@ -32,7 +32,7 @@ wget 
https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## YOLOv5Lite C++接口 diff --git a/examples/vision/detection/yolov5lite/cpp/infer.cc b/examples/vision/detection/yolov5lite/cpp/infer.cc index a899c48c6f..ac32bca933 100644 --- a/examples/vision/detection/yolov5lite/cpp/infer.cc +++ b/examples/vision/detection/yolov5lite/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -53,6 +54,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -79,6 +81,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/yolov5lite/python/README.md b/examples/vision/detection/yolov5lite/python/README.md index 7b67636d93..95c412f028 100644 --- a/examples/vision/detection/yolov5lite/python/README.md +++ b/examples/vision/detection/yolov5lite/python/README.md @@ -27,7 +27,7 @@ python infer.py --model v5Lite-g-sim-640.onnx --image 000000014439.jpg --device 运行完成可视化结果如下图所示 - + ## YOLOv5Lite Python接口 diff --git a/examples/vision/detection/yolov5lite/python/infer.py b/examples/vision/detection/yolov5lite/python/infer.py index 0236c182f1..0a4312c451 100644 --- a/examples/vision/detection/yolov5lite/python/infer.py +++ b/examples/vision/detection/yolov5lite/python/infer.py @@ -44,7 +44,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/yolov6/cpp/README.md b/examples/vision/detection/yolov6/cpp/README.md index 5a73f8b55e..9d346e59a8 100644 --- a/examples/vision/detection/yolov6/cpp/README.md +++ b/examples/vision/detection/yolov6/cpp/README.md @@ -32,7 +32,7 @@ wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## YOLOv6 C++接口 diff --git a/examples/vision/detection/yolov6/cpp/infer.cc b/examples/vision/detection/yolov6/cpp/infer.cc index affb655771..72b2e7bede 100644 --- a/examples/vision/detection/yolov6/cpp/infer.cc +++ b/examples/vision/detection/yolov6/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/yolov6/python/README.md b/examples/vision/detection/yolov6/python/README.md index 35c35b2084..0734c37273 100644 --- a/examples/vision/detection/yolov6/python/README.md +++ b/examples/vision/detection/yolov6/python/README.md @@ -27,7 +27,7 @@ python infer.py --model yolov6s.onnx --image 000000014439.jpg --device gpu --use 运行完成可视化结果如下图所示 - + ## YOLOv6 Python接口 diff --git a/examples/vision/detection/yolov6/python/infer.py b/examples/vision/detection/yolov6/python/infer.py index 060f29258d..47ea4cd3a4 100644 --- a/examples/vision/detection/yolov6/python/infer.py +++ b/examples/vision/detection/yolov6/python/infer.py @@ -43,7 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/yolov7/cpp/infer.cc b/examples/vision/detection/yolov7/cpp/infer.cc index 1ddca8f1c8..cf79a16ad7 100644 --- a/examples/vision/detection/yolov7/cpp/infer.cc +++ b/examples/vision/detection/yolov7/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { 
std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/yolov7/python/infer.py b/examples/vision/detection/yolov7/python/infer.py index 574755c3a3..aff8cbe304 100644 --- a/examples/vision/detection/yolov7/python/infer.py +++ b/examples/vision/detection/yolov7/python/infer.py @@ -43,7 +43,7 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) diff --git a/examples/vision/detection/yolox/cpp/README.md b/examples/vision/detection/yolox/cpp/README.md index abe7611266..4709c23797 100644 --- a/examples/vision/detection/yolox/cpp/README.md +++ b/examples/vision/detection/yolox/cpp/README.md @@ -32,7 +32,7 @@ wget https://gitee.com/paddlepaddle/PaddleDetection/raw/release/2.4/demo/0000000 运行完成可视化结果如下图所示 - + ## YOLOX C++接口 diff --git a/examples/vision/detection/yolox/cpp/infer.cc b/examples/vision/detection/yolox/cpp/infer.cc index 627142c1cb..2eeaccbf89 100644 --- a/examples/vision/detection/yolox/cpp/infer.cc +++ b/examples/vision/detection/yolox/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { 
std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/detection/yolox/python/README.md b/examples/vision/detection/yolox/python/README.md index 8c4fff5f3f..e93d058ac8 100644 --- a/examples/vision/detection/yolox/python/README.md +++ b/examples/vision/detection/yolox/python/README.md @@ -26,7 +26,7 @@ python infer.py --model yolox_s.onnx --image 000000014439.jpg --device gpu --use 运行完成可视化结果如下图所示 - + ## YOLOX Python接口 diff --git a/examples/vision/detection/yolox/python/infer.py b/examples/vision/detection/yolox/python/infer.py index 131f91d710..8af3d9c4c3 100644 --- a/examples/vision/detection/yolox/python/infer.py +++ b/examples/vision/detection/yolox/python/infer.py @@ -43,8 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) - +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_detection(im, result) cv2.imwrite("visualized_result.jpg", vis_im) diff --git a/examples/vision/facedet/retinaface/cpp/README.md b/examples/vision/facedet/retinaface/cpp/README.md index 95a8d4a616..261cc8f44d 100644 --- a/examples/vision/facedet/retinaface/cpp/README.md +++ b/examples/vision/facedet/retinaface/cpp/README.md @@ -32,7 
+32,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li 运行完成可视化结果如下图所示 - + ## RetinaFace C++接口 diff --git a/examples/vision/facedet/retinaface/cpp/infer.cc b/examples/vision/facedet/retinaface/cpp/infer.cc index ddda3d78a5..a1fd27b6e8 100644 --- a/examples/vision/facedet/retinaface/cpp/infer.cc +++ b/examples/vision/facedet/retinaface/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index 9d4e9a6979..1e72072b55 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -28,7 +28,7 @@ python infer.py --model Pytorch_RetinaFace_mobile0.25-640-640.onnx --image test_ 运行完成可视化结果如下图所示 - + ## RetinaFace Python接口 diff --git a/examples/vision/facedet/retinaface/python/infer.py b/examples/vision/facedet/retinaface/python/infer.py index 16e38c7f6d..3b0152b1c9 100644 --- a/examples/vision/facedet/retinaface/python/infer.py +++ b/examples/vision/facedet/retinaface/python/infer.py @@ -43,8 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) - +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_face_detection(im, result) cv2.imwrite("visualized_result.jpg", vis_im) diff --git a/examples/vision/facedet/scrfd/cpp/README.md b/examples/vision/facedet/scrfd/cpp/README.md index d01ad619a6..b467857e89 100644 --- a/examples/vision/facedet/scrfd/cpp/README.md +++ b/examples/vision/facedet/scrfd/cpp/README.md @@ -32,7 +32,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li 运行完成可视化结果如下图所示 - + ## SCRFD C++接口 diff --git a/examples/vision/facedet/scrfd/cpp/infer.cc b/examples/vision/facedet/scrfd/cpp/infer.cc index ba68974d39..c804218ee3 100644 --- a/examples/vision/facedet/scrfd/cpp/infer.cc +++ b/examples/vision/facedet/scrfd/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/facedet/scrfd/python/README.md b/examples/vision/facedet/scrfd/python/README.md index 87e668b550..93bacc3aa6 100644 --- a/examples/vision/facedet/scrfd/python/README.md +++ b/examples/vision/facedet/scrfd/python/README.md @@ -27,7 +27,7 @@ python infer.py --model scrfd_500m_bnkps_shape640x640.onnx --image test_lite_fac 运行完成可视化结果如下图所示 - + ## SCRFD Python接口 diff --git a/examples/vision/facedet/scrfd/python/infer.py b/examples/vision/facedet/scrfd/python/infer.py index 828877fbd8..a99e66385f 100644 --- a/examples/vision/facedet/scrfd/python/infer.py +++ b/examples/vision/facedet/scrfd/python/infer.py @@ -43,8 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) - +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_face_detection(im, result) cv2.imwrite("visualized_result.jpg", vis_im) diff --git a/examples/vision/facedet/ultraface/cpp/README.md b/examples/vision/facedet/ultraface/cpp/README.md index 768d459be9..6670e8903f 100644 --- a/examples/vision/facedet/ultraface/cpp/README.md +++ b/examples/vision/facedet/ultraface/cpp/README.md @@ -32,7 +32,7 @@ wget 
https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li 运行完成可视化结果如下图所示 - + ## UltraFace C++接口 diff --git a/examples/vision/facedet/ultraface/cpp/infer.cc b/examples/vision/facedet/ultraface/cpp/infer.cc index 2467b12c0b..b45bb3b0b3 100644 --- a/examples/vision/facedet/ultraface/cpp/infer.cc +++ b/examples/vision/facedet/ultraface/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/facedet/ultraface/python/README.md b/examples/vision/facedet/ultraface/python/README.md index 39be281b7d..4430b01431 100644 --- a/examples/vision/facedet/ultraface/python/README.md +++ b/examples/vision/facedet/ultraface/python/README.md @@ -28,7 +28,7 @@ python infer.py --model version-RFB-320.onnx --image test_lite_face_detector_3.j 运行完成可视化结果如下图所示 - + ## UltraFace Python接口 diff --git a/examples/vision/facedet/ultraface/python/infer.py b/examples/vision/facedet/ultraface/python/infer.py index e8084333ca..5399110b9a 100644 --- a/examples/vision/facedet/ultraface/python/infer.py +++ b/examples/vision/facedet/ultraface/python/infer.py @@ -43,8 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) - +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_face_detection(im, result) cv2.imwrite("visualized_result.jpg", vis_im) diff --git a/examples/vision/facedet/yolov5face/cpp/README.md b/examples/vision/facedet/yolov5face/cpp/README.md index 77ad27ea07..ff6ce8c4bf 100644 --- a/examples/vision/facedet/yolov5face/cpp/README.md +++ b/examples/vision/facedet/yolov5face/cpp/README.md @@ -32,7 +32,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li 运行完成可视化结果如下图所示 - + ## YOLOv5Face C++接口 diff --git a/examples/vision/facedet/yolov5face/cpp/infer.cc b/examples/vision/facedet/yolov5face/cpp/infer.cc index 4cbf9d3de7..418834e1e4 100644 --- a/examples/vision/facedet/yolov5face/cpp/infer.cc +++ b/examples/vision/facedet/yolov5face/cpp/infer.cc @@ -29,6 +29,7 @@ void CpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." 
<< std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -52,6 +53,7 @@ void GpuInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); @@ -77,6 +79,7 @@ void TrtInfer(const std::string& model_file, const std::string& image_file) { std::cerr << "Failed to predict." << std::endl; return; } + std::cout << res.Str() << std::endl; auto vis_im = fastdeploy::vision::Visualize::VisFaceDetection(im_bak, res); cv::imwrite("vis_result.jpg", vis_im); diff --git a/examples/vision/facedet/yolov5face/python/README.md b/examples/vision/facedet/yolov5face/python/README.md index ec3f42bde3..72839d3afd 100644 --- a/examples/vision/facedet/yolov5face/python/README.md +++ b/examples/vision/facedet/yolov5face/python/README.md @@ -27,7 +27,7 @@ python infer.py --model yolov5s-face.onnx --image test_lite_face_detector_3.jpg 运行完成可视化结果如下图所示 - + ## YOLOv5Face Python接口 diff --git a/examples/vision/facedet/yolov5face/python/infer.py b/examples/vision/facedet/yolov5face/python/infer.py index eae11254f1..a9f0446826 100644 --- a/examples/vision/facedet/yolov5face/python/infer.py +++ b/examples/vision/facedet/yolov5face/python/infer.py @@ -43,8 +43,8 @@ def build_option(args): # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) - +result = model.predict(im.copy()) +print(result) # 预测结果可视化 vis_im = fd.vision.vis_face_detection(im, result) cv2.imwrite("visualized_result.jpg", vis_im) diff --git a/examples/vision/faceid/insightface/cpp/README.md b/examples/vision/faceid/insightface/cpp/README.md index 6fdf812887..4d6e7c7131 100644 --- a/examples/vision/faceid/insightface/cpp/README.md +++ b/examples/vision/faceid/insightface/cpp/README.md @@ 
-19,17 +19,17 @@ make -j #下载官方转换好的ArcFace模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r100.onnx -wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_0.png -wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_1.png -wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_2.png +wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_0.JPG +wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_1.JPG +wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_2.JPG # CPU推理 -./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png test_lite_focal_arcface_2.png 0 +./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG test_lite_focal_arcface_2.JPG 0 # GPU推理 -./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png test_lite_focal_arcface_2.png 1 +./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG test_lite_focal_arcface_2.JPG 1 # GPU上TensorRT推理 -./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png test_lite_focal_arcface_2.png 2 +./infer_arcface_demo ms1mv3_arcface_r100.onnx test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG test_lite_focal_arcface_2.JPG 2 ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/faceid/insightface/cpp/infer_arcface.cc b/examples/vision/faceid/insightface/cpp/infer_arcface.cc index f8415c8d29..0c8824f0e9 100644 --- a/examples/vision/faceid/insightface/cpp/infer_arcface.cc +++ b/examples/vision/faceid/insightface/cpp/infer_arcface.cc @@ -135,8 +135,8 @@ int main(int argc, char* argv[]) { if (argc < 6) { std::cout << 
"Usage: infer_demo path/to/model path/to/image run_option, " "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " - "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " - "test_lite_focal_arcface_2.png 0" + "test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG " + "test_lite_focal_arcface_2.JPG 0" << std::endl; std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " "with gpu; 2: run with gpu and use tensorrt backend." diff --git a/examples/vision/faceid/insightface/cpp/infer_cosface.cc b/examples/vision/faceid/insightface/cpp/infer_cosface.cc index 311679d422..d08e71d513 100644 --- a/examples/vision/faceid/insightface/cpp/infer_cosface.cc +++ b/examples/vision/faceid/insightface/cpp/infer_cosface.cc @@ -135,8 +135,8 @@ int main(int argc, char* argv[]) { if (argc < 6) { std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " - "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " - "test_lite_focal_arcface_2.png 0" + "test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG " + "test_lite_focal_arcface_2.JPG 0" << std::endl; std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " "with gpu; 2: run with gpu and use tensorrt backend." 
diff --git a/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc b/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc index 9e0c920c46..762276ccff 100644 --- a/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc +++ b/examples/vision/faceid/insightface/cpp/infer_partial_fc.cc @@ -135,8 +135,8 @@ int main(int argc, char* argv[]) { if (argc < 6) { std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " - "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " - "test_lite_focal_arcface_2.png 0" + "test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG " + "test_lite_focal_arcface_2.JPG 0" << std::endl; std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " "with gpu; 2: run with gpu and use tensorrt backend." diff --git a/examples/vision/faceid/insightface/cpp/infer_vpl.cc b/examples/vision/faceid/insightface/cpp/infer_vpl.cc index 04a2d30f4a..7f570966fb 100644 --- a/examples/vision/faceid/insightface/cpp/infer_vpl.cc +++ b/examples/vision/faceid/insightface/cpp/infer_vpl.cc @@ -135,8 +135,8 @@ int main(int argc, char* argv[]) { if (argc < 6) { std::cout << "Usage: infer_demo path/to/model path/to/image run_option, " "e.g ./infer_arcface_demo ms1mv3_arcface_r100.onnx " - "test_lite_focal_arcface_0.png test_lite_focal_arcface_1.png " - "test_lite_focal_arcface_2.png 0" + "test_lite_focal_arcface_0.JPG test_lite_focal_arcface_1.JPG " + "test_lite_focal_arcface_2.JPG 0" << std::endl; std::cout << "The data type of run_option is int, 0: run with cpu; 1: run " "with gpu; 2: run with gpu and use tensorrt backend." 
diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index 6bc55fe53e..0c735db3a0 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -11,9 +11,9 @@ ``` #下载ArcFace模型文件和测试图片 wget https://bj.bcebos.com/paddlehub/fastdeploy/ms1mv3_arcface_r100.onnx -wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_0.png -wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_1.png -wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/lite/resources/test_lite_focal_arcface_2.png +wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_0.JPG +wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_1.JPG +wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_2.JPG #下载部署示例代码 @@ -21,11 +21,11 @@ git clone https://github.com/PaddlePaddle/FastDeploy.git cd examples/vison/detection/insightface/python/ # CPU推理 -python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.png --face_positive test_lite_focal_arcface_1.png --face_negative test_lite_focal_arcface_2.png --device cpu +python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.JPG --face_positive test_lite_focal_arcface_1.JPG --face_negative test_lite_focal_arcface_2.JPG --device cpu # GPU推理 -python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.png --face_positive test_lite_focal_arcface_1.png --face_negative test_lite_focal_arcface_2.png --device gpu +python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.JPG --face_positive test_lite_focal_arcface_1.JPG --face_negative test_lite_focal_arcface_2.JPG --device gpu # GPU上使用TensorRT推理 -python infer_arcface.py 
--model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.png --face_positive test_lite_focal_arcface_1.png --face_negative test_lite_focal_arcface_2.png --device gpu --use_trt True +python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.JPG --face_positive test_lite_focal_arcface_1.JPG --face_negative test_lite_focal_arcface_2.JPG --device gpu --use_trt True ``` 运行完成可视化结果如下图所示 diff --git a/examples/vision/matting/modnet/cpp/README.md b/examples/vision/matting/modnet/cpp/README.md index df250c1990..e3ef2c1aa9 100644 --- a/examples/vision/matting/modnet/cpp/README.md +++ b/examples/vision/matting/modnet/cpp/README.md @@ -33,7 +33,8 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li 运行完成可视化结果如下图所示 - + + ## MODNet C++接口 diff --git a/examples/vision/matting/modnet/python/README.md b/examples/vision/matting/modnet/python/README.md index 22b02c76f9..0bc9b84937 100644 --- a/examples/vision/matting/modnet/python/README.md +++ b/examples/vision/matting/modnet/python/README.md @@ -29,7 +29,8 @@ python infer.py --model modnet_photographic_portrait_matting.onnx --image test_l 运行完成可视化结果如下图所示 - + + ## MODNet Python接口 diff --git a/examples/vision/matting/modnet/python/infer.py b/examples/vision/matting/modnet/python/infer.py index 5403d66f3e..5980e1a1fc 100644 --- a/examples/vision/matting/modnet/python/infer.py +++ b/examples/vision/matting/modnet/python/infer.py @@ -45,7 +45,7 @@ def build_option(args): model.size = (256, 256) # 预测图片检测结果 im = cv2.imread(args.image) -result = model.predict(im) +result = model.predict(im.copy()) # 预测结果可视化 vis_im = fd.vision.vis_matting_alpha(im, result) diff --git a/fastdeploy/vision/evaluation/detection.py b/fastdeploy/vision/evaluation/detection.py index 4750756b75..33b1fd5e27 100644 --- a/fastdeploy/vision/evaluation/detection.py +++ b/fastdeploy/vision/evaluation/detection.py @@ -26,7 +26,7 @@ def eval_detection(model, from .utils import CocoDetection from .utils 
import COCOMetric import cv2 - from tqdm import trange + from tqdm import trange if conf_threshold is not None or nms_iou_threshold is not None: assert conf_threshold is not None and nms_iou_threshold is not None, "The conf_threshold and nms_iou_threshold should be setted at the same time" @@ -54,7 +54,7 @@ def eval_detection(model, im = cv2.imread(image_info["image"]) im_id = image_info["im_id"] if conf_threshold is None and nms_iou_threshold is None: - result = model.predict(im) + result = model.predict(im.copy()) else: result = model.predict(im, conf_threshold, nms_iou_threshold) pred = { From 16eb9395c822e9c91f4e31d29637be4280472079 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 08:26:26 +0000 Subject: [PATCH 77/94] docs --- examples/vision/detection/yolox/python/README.md | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/examples/vision/detection/yolox/python/README.md b/examples/vision/detection/yolox/python/README.md index e93d058ac8..f91b28611b 100644 --- a/examples/vision/detection/yolox/python/README.md +++ b/examples/vision/detection/yolox/python/README.md @@ -38,10 +38,9 @@ YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式 **参数** -> * **model_file**(str): 模型文件路径 -> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 -> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 -> * **model_format**(Frontend): 模型格式,默认为ONNX +>* **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +* **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +* **is_decode_exported**(bool): 表示导出的YOLOX的onnx模型文件是否带坐标反算的decode部分, 默认值为`is_decode_exported=False`,官方默认的导出不带decode部分,如果您导出的模型带了decode,请将此参数设置为True ### predict函数 From 7219015ebed9dd07fd69b539144227676e480d9f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 08:34:26 +0000 Subject: [PATCH 78/94] docs --- .../vision/detection/nanodet_plus/cpp/README.md | 10 +++++----- 
.../detection/nanodet_plus/python/README.md | 10 +++++----- examples/vision/detection/yolov6/cpp/README.md | 2 +- examples/vision/detection/yolov6/python/README.md | 3 ++- examples/vision/detection/yolox/cpp/README.md | 3 +-- examples/vision/detection/yolox/python/README.md | 15 +++++++-------- examples/vision/facedet/retinaface/cpp/README.md | 8 ++++---- .../vision/facedet/retinaface/python/README.md | 8 ++++---- examples/vision/facedet/ultraface/cpp/README.md | 6 +----- .../vision/facedet/ultraface/python/README.md | 8 +------- examples/vision/facedet/yolov5face/cpp/README.md | 3 ++- .../vision/facedet/yolov5face/python/README.md | 5 ++--- examples/vision/faceid/insightface/cpp/README.md | 15 ++++++--------- .../vision/faceid/insightface/python/README.md | 15 ++++++--------- examples/vision/matting/modnet/cpp/README.md | 10 +++++----- examples/vision/matting/modnet/python/README.md | 10 +++++----- 16 files changed, 57 insertions(+), 74 deletions(-) diff --git a/examples/vision/detection/nanodet_plus/cpp/README.md b/examples/vision/detection/nanodet_plus/cpp/README.md index d1abfd3700..a2aa7763cc 100644 --- a/examples/vision/detection/nanodet_plus/cpp/README.md +++ b/examples/vision/detection/nanodet_plus/cpp/README.md @@ -74,11 +74,11 @@ NanoDetPlus模型加载和初始化,其中model_file为导出的ONNX模型格 ### 类成员变量 -> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 320] +> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 
包含三个浮点型元素, 分别表示三个通道的值, 默认值为[0, 0, 0] +> > * **keep_ratio**(bool): 通过此参数指定resize时是否保持宽高比例不变,默认是fasle. +> > * **reg_max**(int): GFL回归中的reg_max参数,默认是7. +> > * **downsample_strides**(vector<int>): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32, 64] - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/detection/nanodet_plus/python/README.md b/examples/vision/detection/nanodet_plus/python/README.md index 641c1ab5a4..de3421cd9b 100644 --- a/examples/vision/detection/nanodet_plus/python/README.md +++ b/examples/vision/detection/nanodet_plus/python/README.md @@ -63,11 +63,11 @@ NanoDetPlus模型加载和初始化,其中model_file为导出的ONNX模型格 ### 类成员属性 -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 320] +> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[0, 0, 0] +> > * **keep_ratio**(bool): 通过此参数指定resize时是否保持宽高比例不变,默认是fasle. +> > * **reg_max**(int): GFL回归中的reg_max参数,默认是7. 
+> > * **downsample_strides**(list[int]): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32, 64] diff --git a/examples/vision/detection/yolov6/cpp/README.md b/examples/vision/detection/yolov6/cpp/README.md index 9d346e59a8..5bfe49c24a 100644 --- a/examples/vision/detection/yolov6/cpp/README.md +++ b/examples/vision/detection/yolov6/cpp/README.md @@ -78,7 +78,7 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32` - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/detection/yolov6/python/README.md b/examples/vision/detection/yolov6/python/README.md index 0734c37273..214a076682 100644 --- a/examples/vision/detection/yolov6/python/README.md +++ b/examples/vision/detection/yolov6/python/README.md @@ -64,11 +64,12 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员属性 + > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` +> > * **stride**(int): 配合`is_mini_padide`成员变量使用, 默认值为`stride=32` diff --git a/examples/vision/detection/yolox/cpp/README.md b/examples/vision/detection/yolox/cpp/README.md index 4709c23797..f80aa4fe4d 
100644 --- a/examples/vision/detection/yolox/cpp/README.md +++ b/examples/vision/detection/yolox/cpp/README.md @@ -77,8 +77,7 @@ YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **is_decode_exported**(bool): 表示导出的YOLOX的onnx模型文件是否带坐标反算的decode部分, 默认值为`is_decode_exported=false`,官方默认的导出不带decode部分,如果您导出的模型带了decode,请将此参数设置为true - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/detection/yolox/python/README.md b/examples/vision/detection/yolox/python/README.md index f91b28611b..5fb526e6e4 100644 --- a/examples/vision/detection/yolox/python/README.md +++ b/examples/vision/detection/yolox/python/README.md @@ -38,9 +38,10 @@ YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式 **参数** ->* **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -* **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -* **is_decode_exported**(bool): 表示导出的YOLOX的onnx模型文件是否带坐标反算的decode部分, 默认值为`is_decode_exported=False`,官方默认的导出不带decode部分,如果您导出的模型带了decode,请将此参数设置为True +> * **model_file**(str): 模型文件路径 +> * **params_file**(str): 参数文件路径,当模型格式为ONNX格式时,此参数无需设定 +> * **runtime_option**(RuntimeOption): 后端推理配置,默认为None,即采用默认配置 +> * **model_format**(Frontend): 模型格式,默认为ONNX ### predict函数 @@ -62,11 +63,9 @@ YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员属性 -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 
包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` +> >* **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] +> >* **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] +> >* **is_decode_exported**(bool): 表示导出的YOLOX的onnx模型文件是否带坐标反算的decode部分, 默认值为`is_decode_exported=False`,官方默认的导出不带decode部分,如果您导出的模型带了decode,请将此参数设置为True diff --git a/examples/vision/facedet/retinaface/cpp/README.md b/examples/vision/facedet/retinaface/cpp/README.md index 261cc8f44d..d5b0123b0f 100644 --- a/examples/vision/facedet/retinaface/cpp/README.md +++ b/examples/vision/facedet/retinaface/cpp/README.md @@ -75,10 +75,10 @@ RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格 ### 类成员变量 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **variance**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[0, 0, 0] +> > * **min_sizes**(vector<vector<int>>): retinaface中的anchor的宽高设置,默认是 {{16, 32}, {64, 128}, {256, 512}},分别和步长8、16和32对应 +> > * **downsample_strides**(vector<int>): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] +> > * **landmarks_per_face**(int): 指定当前模型检测的人脸所带的关键点个数,默认为5. 
- [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index 1e72072b55..4ac14f9ac1 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -66,10 +66,10 @@ RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格 ### 类成员属性 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` +> > * **variance**(list[float]): 通过此参数可以指定retinaface中的方差variance值,默认是[0.1,0.2], 一般不用修改. +> > * **min_sizes**(list[list[int]]): retinaface中的anchor的宽高设置,默认是 {{16, 32}, {64, 128}, {256, 512}},分别和步长8、16和32对应 +> > * **downsample_strides**(list[int]): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] +> > * **landmarks_per_face**(int): 指定当前模型检测的人脸所带的关键点个数,默认为5. 
diff --git a/examples/vision/facedet/ultraface/cpp/README.md b/examples/vision/facedet/ultraface/cpp/README.md index 6670e8903f..dd9a5ed2b4 100644 --- a/examples/vision/facedet/ultraface/cpp/README.md +++ b/examples/vision/facedet/ultraface/cpp/README.md @@ -74,11 +74,7 @@ UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员变量 -> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 240] - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/facedet/ultraface/python/README.md b/examples/vision/facedet/ultraface/python/README.md index 4430b01431..be6c2b9300 100644 --- a/examples/vision/facedet/ultraface/python/README.md +++ b/examples/vision/facedet/ultraface/python/README.md @@ -65,13 +65,7 @@ UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员属性 -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` - - +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 240] ## 其它文档 diff --git a/examples/vision/facedet/yolov5face/cpp/README.md 
b/examples/vision/facedet/yolov5face/cpp/README.md index ff6ce8c4bf..0f228aea0f 100644 --- a/examples/vision/facedet/yolov5face/cpp/README.md +++ b/examples/vision/facedet/yolov5face/cpp/README.md @@ -78,7 +78,8 @@ YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格 > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **landmarks_per_face**(int): 指定当前模型检测的人脸所带的关键点个数,默认为5. - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/facedet/yolov5face/python/README.md b/examples/vision/facedet/yolov5face/python/README.md index 72839d3afd..91ccbbaf3e 100644 --- a/examples/vision/facedet/yolov5face/python/README.md +++ b/examples/vision/facedet/yolov5face/python/README.md @@ -68,9 +68,8 @@ YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格 > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] > > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` > > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` - - +> > * **stride**(int): 配合`is_mini_pad`成员变量使用, 默认值为`stride=32` +> > * **landmarks_per_face**(int): 指定当前模型检测的人脸所带的关键点个数,默认为5. 
## 其它文档 diff --git a/examples/vision/faceid/insightface/cpp/README.md b/examples/vision/faceid/insightface/cpp/README.md index 4d6e7c7131..512d9345a0 100644 --- a/examples/vision/faceid/insightface/cpp/README.md +++ b/examples/vision/faceid/insightface/cpp/README.md @@ -76,15 +76,12 @@ ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员变量 -> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` -> > * **downsample_strides**(vector<int>): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] -> > * **landmarks_per_face**(int): 如果使用具有人脸关键点的输出, 可以修改人脸关键点数量, 默认值为`landmarks_per_face=5` -> > * **use_kps**(bool): 通过此参数可以设置模型是否使用关键点,如果ONNX文件没有关键点输出则需要将`use_kps=false`, 并将`landmarks_per_face=0`, 默认值为`use_kps=true` -> > * **num_anchors**(int): 通过此参数可以设置每个锚点预测的anchor数量, 需要跟进训练模型的参数设定, 默认值为`num_anchors=2` + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[112, 112] +> > * **alpha**(vector<float>): 预处理归一化的alpha值,计算公式为`x'=x*alpha+beta`,alpha默认为[1. / 127.5, 1.f / 127.5, 1. 
/ 127.5] +> > * **beta**(vector<float>): 预处理归一化的beta值,计算公式为`x'=x*alpha+beta`,beta默认为[-1.f, -1.f, -1.f] +> > * **swap_rb**(bool): 预处理是否将BGR转换成RGB,默认true +> > * **l2_normalize**(bool): 输出人脸向量之前是否执行l2归一化,默认false - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index 0c735db3a0..1538d3780c 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -70,15 +70,12 @@ ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员属性 -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` -> > * **downsample_strides**(list[int]): 通过此参数可以修改生成anchor的特征图的下采样倍数, 包含三个整型元素, 分别表示默认的生成anchor的下采样倍数, 默认值为[8, 16, 32] -> > * **landmarks_per_face**(int): 如果使用具有人脸关键点的输出, 可以修改人脸关键点数量, 默认值为`landmarks_per_face=5` -> > * **use_kps**(bool): 通过此参数可以设置模型是否使用关键点,如果ONNX文件没有关键点输出则需要将`use_kps=False`, 并将`landmarks_per_face=0`, 默认值为`use_kps=True` -> > * **num_anchors**(int): 通过此参数可以设置每个锚点预测的anchor数量, 需要跟进训练模型的参数设定, 默认值为`num_anchors=2` + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[112, 112] +> > * **alpha**(list[float]): 预处理归一化的alpha值,计算公式为`x'=x*alpha+beta`,alpha默认为[1. / 127.5, 1.f / 127.5, 1. 
/ 127.5] +> > * **beta**(list[float]): 预处理归一化的beta值,计算公式为`x'=x*alpha+beta`,beta默认为[-1.f, -1.f, -1.f] +> > * **swap_rb**(bool): 预处理是否将BGR转换成RGB,默认True +> > * **l2_normalize**(bool): 输出人脸向量之前是否执行l2归一化,默认False ## 其它文档 diff --git a/examples/vision/matting/modnet/cpp/README.md b/examples/vision/matting/modnet/cpp/README.md index e3ef2c1aa9..4d31123905 100644 --- a/examples/vision/matting/modnet/cpp/README.md +++ b/examples/vision/matting/modnet/cpp/README.md @@ -76,11 +76,11 @@ MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式。 ### 类成员变量 -> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=ture` 表示不使用填充的方式,默认值为`is_no_pad=false` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=false` -> > * **stride**(int): 配合`stris_mini_pad`成员变量使用, 默认值为`stride=32` + +> > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[256, 256] +> > * **alpha**(vector<float>): 预处理归一化的alpha值,计算公式为`x'=x*alpha+beta`,alpha默认为[1. / 127.5, 1.f / 127.5, 1. 
/ 127.5] +> > * **beta**(vector<float>): 预处理归一化的beta值,计算公式为`x'=x*alpha+beta`,beta默认为[-1.f, -1.f, -1.f] +> > * **swap_rb**(bool): 预处理是否将BGR转换成RGB,默认true - [模型介绍](../../) - [Python部署](../python) diff --git a/examples/vision/matting/modnet/python/README.md b/examples/vision/matting/modnet/python/README.md index 0bc9b84937..9faf718769 100644 --- a/examples/vision/matting/modnet/python/README.md +++ b/examples/vision/matting/modnet/python/README.md @@ -67,11 +67,11 @@ MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式 ### 类成员属性 -> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] -> > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] -> > * **is_no_pad**(bool): 通过此参数让图片是否通过填充的方式进行resize, `is_no_pad=True` 表示不使用填充的方式,默认值为`is_no_pad=False` -> > * **is_mini_pad**(bool): 通过此参数可以将resize之后图像的宽高这是为最接近`size`成员变量的值, 并且满足填充的像素大小是可以被`stride`成员变量整除的。默认值为`is_mini_pad=False` -> > * **stride**(int): 配合`stris_mini_padide`成员变量使用, 默认值为`stride=32` + +> > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[256, 256] +> > * **alpha**(list[float]): 预处理归一化的alpha值,计算公式为`x'=x*alpha+beta`,alpha默认为[1. / 127.5, 1.f / 127.5, 1. 
/ 127.5] +> > * **beta**(list[float]): 预处理归一化的beta值,计算公式为`x'=x*alpha+beta`,beta默认为[-1.f, -1.f, -1.f] +> > * **swap_rb**(bool): 预处理是否将BGR转换成RGB,默认True From a1fbc84e1ffe8b02d55f7d192ab2822ababffd42 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 08:56:21 +0000 Subject: [PATCH 79/94] docs --- docs/api/vision_results/face_detection_result.md | 2 +- examples/vision/faceid/insightface/python/README.md | 7 +++++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md index 3a0e54e0a3..ad2ad8ffcf 100644 --- a/docs/api/vision_results/face_detection_result.md +++ b/docs/api/vision_results/face_detection_result.md @@ -1,6 +1,6 @@ # FaceDetectionResult 目标检测结果 -DetectionResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 +FaceDetectionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 ## C++ 结构体 diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index 1538d3780c..b43425c02e 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -30,8 +30,11 @@ python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_ 运行完成可视化结果如下图所示 - - +

+ + + +
## ArcFace Python接口 ``` From 9ddb5d2d2ce8827bc4f12d1de6d184df8e609c0c Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 08:58:34 +0000 Subject: [PATCH 80/94] docs test for insightface --- examples/vision/faceid/insightface/python/README.md | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index b43425c02e..6ef74bdefb 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -30,11 +30,13 @@ python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_ 运行完成可视化结果如下图所示 -
+ + + ## ArcFace Python接口 ``` From af3d357937ef3c328d755b8434f32a3640f66e33 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 09:01:09 +0000 Subject: [PATCH 81/94] docs test for insightface again --- examples/vision/faceid/insightface/python/README.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index 6ef74bdefb..2fca741f3a 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -30,12 +30,12 @@ python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_ 运行完成可视化结果如下图所示 - - +
+ + + +
+ ## ArcFace Python接口 From eb0b421bd32e2b6c2a1af5329439ba50f32fe575 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 09:03:33 +0000 Subject: [PATCH 82/94] docs test for insightface --- examples/vision/faceid/insightface/python/README.md | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index 2fca741f3a..a10e244bc5 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -32,10 +32,18 @@ python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_
- +
- + +``` +Prediction Done! +--- [Face 0]:FaceRecognitionResult: [Dim(512), Min(-2.309220), Max(2.372197), Mean(0.016987)] +--- [Face 1]:FaceRecognitionResult: [Dim(512), Min(-2.288258), Max(1.995104), Mean(-0.003400)] +--- [Face 2]:FaceRecognitionResult: [Dim(512), Min(-3.243411), Max(3.875866), Mean(-0.030682)] +Detect Done! Cosine 01: 0.814385, Cosine 02:-0.059388 + +``` ## ArcFace Python接口 From bc53d99e4fd844c0faa860475d5a828090d25522 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 12:03:25 +0000 Subject: [PATCH 83/94] modify all wrong expressions in docs --- .../vision_results/face_detection_result.md | 2 +- docs/api/vision_results/matting_result.md | 2 +- examples/vision/detection/README.md | 14 +++++ .../vision/detection/nanodet_plus/README.md | 9 +++- .../detection/nanodet_plus/cpp/README.md | 5 +- .../detection/nanodet_plus/python/README.md | 2 + .../vision/detection/scaledyolov4/README.md | 7 ++- .../detection/scaledyolov4/cpp/README.md | 4 +- .../detection/scaledyolov4/python/README.md | 5 ++ examples/vision/detection/yolor/README.md | 22 +++++--- examples/vision/detection/yolor/cpp/README.md | 4 +- .../vision/detection/yolor/python/README.md | 2 + examples/vision/detection/yolov5/README.md | 9 +++- .../vision/detection/yolov5/cpp/README.md | 4 +- .../vision/detection/yolov5/python/README.md | 2 + .../vision/detection/yolov5lite/README.md | 12 +++-- .../vision/detection/yolov5lite/cpp/README.md | 4 +- .../detection/yolov5lite/python/README.md | 2 + examples/vision/detection/yolov6/README.md | 12 ++++- .../vision/detection/yolov6/cpp/README.md | 4 +- .../vision/detection/yolov6/python/README.md | 2 + examples/vision/detection/yolov7/README.md | 10 ++-- .../vision/detection/yolov7/cpp/README.md | 4 +- .../vision/detection/yolov7/python/README.md | 2 + examples/vision/detection/yolox/README.md | 12 ++++- examples/vision/detection/yolox/cpp/README.md | 4 +- .../vision/detection/yolox/python/README.md | 2 + examples/vision/facedet/README.md | 10 
++-- examples/vision/facedet/retinaface/README.md | 7 ++- .../vision/facedet/retinaface/cpp/README.md | 8 +-- .../facedet/retinaface/python/README.md | 6 ++- examples/vision/facedet/scrfd/README.md | 10 ++-- examples/vision/facedet/scrfd/cpp/README.md | 10 ++-- .../vision/facedet/scrfd/python/README.md | 8 +-- examples/vision/facedet/ultraface/README.md | 7 ++- .../vision/facedet/ultraface/cpp/README.md | 8 +-- .../vision/facedet/ultraface/python/README.md | 6 ++- examples/vision/facedet/yolov5face/README.md | 13 +++-- .../vision/facedet/yolov5face/cpp/README.md | 8 +-- .../facedet/yolov5face/python/README.md | 6 ++- examples/vision/faceid/README.md | 10 ++++ examples/vision/faceid/insightface/README.md | 7 ++- .../vision/faceid/insightface/cpp/README.md | 53 ++++++++++++++++--- .../faceid/insightface/python/README.md | 16 +++--- examples/vision/matting/README.md | 7 +++ examples/vision/matting/modnet/README.md | 7 ++- examples/vision/matting/modnet/cpp/README.md | 8 +-- .../vision/matting/modnet/python/README.md | 6 ++- 48 files changed, 294 insertions(+), 90 deletions(-) create mode 100644 examples/vision/detection/README.md create mode 100644 examples/vision/faceid/README.md create mode 100644 examples/vision/matting/README.md diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md index ad2ad8ffcf..6c9c09f007 100644 --- a/docs/api/vision_results/face_detection_result.md +++ b/docs/api/vision_results/face_detection_result.md @@ -1,4 +1,4 @@ -# FaceDetectionResult 目标检测结果 +# FaceDetectionResult 人脸检测结果 FaceDetectionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 diff --git a/docs/api/vision_results/matting_result.md b/docs/api/vision_results/matting_result.md index 74f8937c47..3418400eca 100644 --- a/docs/api/vision_results/matting_result.md +++ b/docs/api/vision_results/matting_result.md @@ -1,4 +1,4 @@ -# MattingResult 目标检测结果 +# MattingResult 抠图结果 MattingResult 
代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 diff --git a/examples/vision/detection/README.md b/examples/vision/detection/README.md new file mode 100644 index 0000000000..72e7072eeb --- /dev/null +++ b/examples/vision/detection/README.md @@ -0,0 +1,14 @@ +目标检测模型 + +FastDeploy目前支持如下目标检测模型部署 + +| 模型 | 说明 | 模型格式 | 版本 | +| :--- | :--- | :------- | :--- | +| [nanodet_plus](./nanodet_plus) | NanoDetPlus系列模型 | ONNX | Release/v1.0.0-alpha-1 | +| [yolov5](./yolov5) | YOLOv5系列模型 | ONNX | Release/v6.0 | +| [yolov5lite](./yolov5lite) | YOLOv5-Lite系列模型 | ONNX | Release/v1.4 | +| [yolov6](./yolov6) | YOLOv6系列模型 | ONNX | Release/0.1.0 | +| [yolov7](./yolov7) | YOLOv7系列模型 | ONNX | Release/0.1 | +| [yolor](./yolor) | YOLOR系列模型 | ONNX | Release/weights | +| [yolox](./yolox) | YOLOX系列模型 | ONNX | Release/v0.1.1 | +| [scaledyolov4](./scaledyolov4) | ScaledYOLOv4系列模型 | ONNX | CommitID:6768003 | diff --git a/examples/vision/detection/nanodet_plus/README.md b/examples/vision/detection/nanodet_plus/README.md index a50c9579c0..a295e122fc 100644 --- a/examples/vision/detection/nanodet_plus/README.md +++ b/examples/vision/detection/nanodet_plus/README.md @@ -2,10 +2,10 @@ ## 模型版本说明 -- NanoDetPlus部署实现来自[NanoDetPlus v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) 分支代码,基于coco的[预训练模型](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)。 +- NanoDetPlus部署实现来自[NanoDetPlus](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) 的代码,基于coco的[预训练模型](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)。 - (1)[预训练模型](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)的*.onnx可直接进行部署; - - (2)自己训练的模型,导出ONNX模型后,参考[详细部署教程](#详细部署文档)完成部署。 + - (2)自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 ## 下载预训练ONNX模型 @@ -21,3 +21,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[NanoDetPlus v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) 编写 diff --git 
a/examples/vision/detection/nanodet_plus/cpp/README.md b/examples/vision/detection/nanodet_plus/cpp/README.md index a2aa7763cc..82b310113e 100644 --- a/examples/vision/detection/nanodet_plus/cpp/README.md +++ b/examples/vision/detection/nanodet_plus/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -74,6 +74,9 @@ NanoDetPlus模型加载和初始化,其中model_file为导出的ONNX模型格 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 + > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 320] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[0, 0, 0] > > * **keep_ratio**(bool): 通过此参数指定resize时是否保持宽高比例不变,默认是fasle. diff --git a/examples/vision/detection/nanodet_plus/python/README.md b/examples/vision/detection/nanodet_plus/python/README.md index de3421cd9b..5501823758 100644 --- a/examples/vision/detection/nanodet_plus/python/README.md +++ b/examples/vision/detection/nanodet_plus/python/README.md @@ -62,6 +62,8 @@ NanoDetPlus模型加载和初始化,其中model_file为导出的ONNX模型格 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 320] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[0, 0, 0] diff --git a/examples/vision/detection/scaledyolov4/README.md b/examples/vision/detection/scaledyolov4/README.md index 9b8a8e3f87..5a0ba000f1 100644 --- a/examples/vision/detection/scaledyolov4/README.md +++ b/examples/vision/detection/scaledyolov4/README.md @@ -1,6 +1,6 @@ # ScaledYOLOv4准备部署模型 -- ScaledYOLOv4部署实现来自[ScaledYOLOv4 branch 
yolov4-large](https://github.com/WongKinYiu/ScaledYOLOv4)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)。 +- ScaledYOLOv4部署实现来自[ScaledYOLOv4](https://github.com/WongKinYiu/ScaledYOLOv4)的代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)。 - (1)[预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; - (2)自己数据训练的ScaledYOLOv4模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 @@ -38,3 +38,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[ScaledYOLOv4 CommitID: 6768003](https://github.com/WongKinYiu/ScaledYOLOv4/commit/676800364a3446900b9e8407bc880ea2127b3415) 编写 diff --git a/examples/vision/detection/scaledyolov4/cpp/README.md b/examples/vision/detection/scaledyolov4/cpp/README.md index d13e926ab8..0d372877f2 100644 --- a/examples/vision/detection/scaledyolov4/cpp/README.md +++ b/examples/vision/detection/scaledyolov4/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ ScaledYOLOv4模型加载和初始化,其中model_file为导出的ONNX模型格 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector&lt;int&gt;): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector&lt;float&gt;): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/scaledyolov4/python/README.md b/examples/vision/detection/scaledyolov4/python/README.md index f198330832..f9fdaabb89 100644 --- a/examples/vision/detection/scaledyolov4/python/README.md +++ b/examples/vision/detection/scaledyolov4/python/README.md @@ -63,6 +63,8 @@ ScaledYOLOv4模型加载和初始化,其中model_file为导出的ONNX模型格 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolor/README.md b/examples/vision/detection/yolor/README.md index 772e0c6c1b..7889eac9f6 100644 --- a/examples/vision/detection/yolor/README.md +++ b/examples/vision/detection/yolor/README.md @@ -1,6 +1,6 @@ # YOLOR准备部署模型 -- YOLOR部署实现来自[YOLOR weights](https://github.com/WongKinYiu/yolor/releases/tag/weights)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)。 +- YOLOR部署实现来自[YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights)的代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)。 - (1)[预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; -
(2)自己数据训练的YOLOR模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 @@ -25,12 +25,16 @@ | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | -| [YOLOR-P6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-p6-paper-541-640-640.onnx) | 143MB | 54.1% | -| [YOLOR-W6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-w6-paper-555-640-640.onnx) | 305MB | 55.5% | -| [YOLOR-E6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-e6-paper-564-640-640.onnx ) | 443MB | 56.4% | -| [YOLOR-D6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-570-640-640.onnx) | 580MB | 57.0% | -| [YOLOR-D6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-573-640-640.onnx) | 580MB | 57.3% | - +| [YOLOR-P6-1280](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-p6-paper-541-1280-1280.onnx) | 143MB | 54.1% | +| [YOLOR-W6-1280](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-w6-paper-555-1280-1280.onnx) | 305MB | 55.5% | +| [YOLOR-E6-1280](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-e6-paper-564-1280-1280.onnx ) | 443MB | 56.4% | +| [YOLOR-D6-1280](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-570-1280-1280.onnx) | 580MB | 57.0% | +| [YOLOR-D6-1280](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-573-1280-1280.onnx) | 580MB | 57.3% | +| [YOLOR-P6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-p6-paper-541-640-640.onnx) | 143MB | - | +| [YOLOR-W6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-w6-paper-555-640-640.onnx) | 305MB | - | +| [YOLOR-E6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-e6-paper-564-640-640.onnx ) | 443MB | - | +| [YOLOR-D6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-570-640-640.onnx) | 580MB | - | +| [YOLOR-D6](https://bj.bcebos.com/paddlehub/fastdeploy/yolor-d6-paper-573-640-640.onnx) | 580MB | - | @@ -38,3 +42,7 @@ - [Python部署](python) - [C++部署](cpp) + +## 版本说明 + +- 本版本文档和代码基于[YOLOR 
weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) 编写 diff --git a/examples/vision/detection/yolor/cpp/README.md b/examples/vision/detection/yolor/cpp/README.md index 731c37585a..7798c7a436 100644 --- a/examples/vision/detection/yolor/cpp/README.md +++ b/examples/vision/detection/yolor/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ YOLOR模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolor/python/README.md b/examples/vision/detection/yolor/python/README.md index d2c57cca0a..9fb737a01e 100644 --- a/examples/vision/detection/yolor/python/README.md +++ b/examples/vision/detection/yolor/python/README.md @@ -63,6 +63,8 @@ YOLOR模型加载和初始化,其中model_file为导出的ONNX模型格式 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolov5/README.md b/examples/vision/detection/yolov5/README.md index 0937a34ea5..e83dcdd504 100644 --- a/examples/vision/detection/yolov5/README.md +++ b/examples/vision/detection/yolov5/README.md @@ -2,14 +2,14 @@ ## 模型版本说明 -- YOLOv5 v6.0部署模型实现来自[YOLOv5 
v6.0分支](https://github.com/ultralytics/yolov5/tree/v6.0),和[基于COCO的预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0) +- YOLOv5 v6.0部署模型实现来自[YOLOv5](https://github.com/ultralytics/yolov5/tree/v6.0),和[基于COCO的预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0) - (1)[预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0)的*.onnx可直接进行部署; - (2)开发者基于自己数据训练的YOLOv5 v6.0模型,可使用[YOLOv5](https://github.com/ultralytics/yolov5)中的`export.py`导出ONNX文件后后,完成部署。 ## 下载预训练ONNX模型 -为了方便开发者的测试,下面提供了YOLOv7导出的各系列模型,开发者可直接下载使用。 +为了方便开发者的测试,下面提供了YOLOv5导出的各系列模型,开发者可直接下载使用。 | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | @@ -26,3 +26,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[YOLOv5 v6.0](https://github.com/ultralytics/yolov5/tree/v6.0) 编写 diff --git a/examples/vision/detection/yolov5/cpp/README.md b/examples/vision/detection/yolov5/cpp/README.md index 99e67a332c..998046e096 100644 --- a/examples/vision/detection/yolov5/cpp/README.md +++ b/examples/vision/detection/yolov5/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ YOLOv5模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolov5/python/README.md b/examples/vision/detection/yolov5/python/README.md index 2a9bd4e18d..8048d7b7ca 100644 --- a/examples/vision/detection/yolov5/python/README.md +++ b/examples/vision/detection/yolov5/python/README.md @@ -63,6 +63,8 @@ YOLOv5模型加载和初始化,其中model_file为导出的ONNX模型格式 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolov5lite/README.md b/examples/vision/detection/yolov5lite/README.md index 9e63ca7e09..4b95967d1d 100644 --- a/examples/vision/detection/yolov5lite/README.md +++ b/examples/vision/detection/yolov5lite/README.md @@ -1,6 +1,6 @@ # YOLOv5Lite准备部署模型 -- YOLOv5Lite部署实现来自[YOLOv5-Lite-v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) +- YOLOv5Lite部署实现来自[YOLOv5-Lite](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) 代码,和[基于COCO的预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)。 - (1)[预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; @@ -35,8 +35,7 @@ # 导出onnx格式文件 python export.py --grid --dynamic --concat --weights PATH/TO/yolov5-lite-xx.pt - # 移动onnx文件到demo目录 - cp PATH/TO/yolov5lite.onnx 
PATH/TO/model_zoo/vision/yolov5lite/ + ``` - 导出无decode模块的ONNX文件(不需要修改代码) @@ -48,7 +47,7 @@ # 导出onnx格式文件 python export.py --grid --dynamic --weights PATH/TO/yolov5-lite-xx.pt -``` + ``` ## 下载预训练ONNX模型 @@ -66,3 +65,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[YOLOv5-Lite v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) 编写 diff --git a/examples/vision/detection/yolov5lite/cpp/README.md b/examples/vision/detection/yolov5lite/cpp/README.md index a622d2e8fd..88d6accf4c 100644 --- a/examples/vision/detection/yolov5lite/cpp/README.md +++ b/examples/vision/detection/yolov5lite/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ YOLOv5Lite模型加载和初始化,其中model_file为导出的ONNX模型格 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolov5lite/python/README.md b/examples/vision/detection/yolov5lite/python/README.md index 95c412f028..01e1a90dab 100644 --- a/examples/vision/detection/yolov5lite/python/README.md +++ b/examples/vision/detection/yolov5lite/python/README.md @@ -63,6 +63,8 @@ YOLOv5Lite模型加载和初始化,其中model_file为导出的ONNX模型格 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] 
diff --git a/examples/vision/detection/yolov6/README.md b/examples/vision/detection/yolov6/README.md index 8bc9aa2fde..497778fe0f 100644 --- a/examples/vision/detection/yolov6/README.md +++ b/examples/vision/detection/yolov6/README.md @@ -2,9 +2,10 @@ ## 模型版本说明 -- YOLOv6 部署实现来自[YOLOv6 0.1分支](https://github.com/meituan/YOLOv6/releases/download/0.1.0),和[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)。 +- YOLOv6 部署实现来自[YOLOv6](https://github.com/meituan/YOLOv6/releases/tag/0.1.0),和[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)。 - - (1)[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/download/0.1.0)的*.onnx可直接进行部署; + - (1)[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)的*.onnx可直接进行部署; + - (2)自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 @@ -16,6 +17,8 @@ |:---------------------------------------------------------------- |:----- |:----- | | [YOLOv6s](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s.onnx) | 66MB | 43.1% | | [YOLOv6s_640](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6s-640x640.onnx) | 66MB | 43.1% | +| [YOLOv6t](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6t.onnx) | 58MB | 41.3% | +| [YOLOv6n](https://bj.bcebos.com/paddlehub/fastdeploy/yolov6n.onnx) | 17MB | 35.0% | @@ -23,3 +26,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[YOLOv6 0.1.0版本](https://github.com/meituan/YOLOv6/releases/download/0.1.0) 编写 diff --git a/examples/vision/detection/yolov6/cpp/README.md b/examples/vision/detection/yolov6/cpp/README.md index 5bfe49c24a..0d66f97646 100644 --- a/examples/vision/detection/yolov6/cpp/README.md +++ b/examples/vision/detection/yolov6/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolov6/python/README.md b/examples/vision/detection/yolov6/python/README.md index 214a076682..2dfd11a4b2 100644 --- a/examples/vision/detection/yolov6/python/README.md +++ b/examples/vision/detection/yolov6/python/README.md @@ -63,6 +63,8 @@ YOLOv6模型加载和初始化,其中model_file为导出的ONNX模型格式 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] diff --git a/examples/vision/detection/yolov7/README.md b/examples/vision/detection/yolov7/README.md index 96d7a9f8f9..266aeace2d 100644 --- a/examples/vision/detection/yolov7/README.md +++ b/examples/vision/detection/yolov7/README.md @@ -1,6 +1,6 @@ # YOLOv7准备部署模型 -- YOLOv7部署实现来自[YOLOv7 0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)。 +- YOLOv7部署实现来自[YOLOv7](https://github.com/WongKinYiu/yolov7/tree/v0.1)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)。 - (1)[预训练模型](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; - (2)自己数据训练的YOLOv7 0.1模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 @@ -18,8 +18,7 @@ python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt # 如果您的代码版本中有支持NMS的ONNX文件导出,请使用如下命令导出ONNX文件(请暂时不要使用 
"--end2end",我们后续将支持带有NMS的ONNX模型的部署) python models/export.py --grid --dynamic --weights PATH/TO/yolov7.pt -# 移动onnx文件到demo目录 -cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ + ``` ## 下载预训练ONNX模型 @@ -42,3 +41,8 @@ cp PATH/TO/yolov7.onnx PATH/TO/model_zoo/vision/yolov7/ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[YOLOv7 0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1) 编写 diff --git a/examples/vision/detection/yolov7/cpp/README.md b/examples/vision/detection/yolov7/cpp/README.md index c67689570d..27bfa86ae7 100644 --- a/examples/vision/detection/yolov7/cpp/README.md +++ b/examples/vision/detection/yolov7/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolov7/python/README.md b/examples/vision/detection/yolov7/python/README.md index b3a4f12a1b..f28bef0afd 100644 --- a/examples/vision/detection/yolov7/python/README.md +++ b/examples/vision/detection/yolov7/python/README.md @@ -63,6 +63,8 @@ YOLOv7模型加载和初始化,其中model_file为导出的ONNX模型格式 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 
114, 114] diff --git a/examples/vision/detection/yolox/README.md b/examples/vision/detection/yolox/README.md index b0c78536be..193089904b 100644 --- a/examples/vision/detection/yolox/README.md +++ b/examples/vision/detection/yolox/README.md @@ -2,7 +2,7 @@ ## 模型版本说明 -- YOLOX部署实现来自[YOLOX v0.1.1分支](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0),基于[coco的预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)。 +- YOLOX部署实现来自[YOLOX](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0),基于[coco的预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)。 - (1)[预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)中的*.pth通过导出ONNX模型操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; - (2)开发者基于自己数据训练的YOLOX v0.1.1模型,可按照导出ONNX模型后,完成部署。 @@ -14,7 +14,10 @@ | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | -| [YOLOX-s](https://bj.bcebos.com/paddlehub/fastdeploy/yolox_s.onnx) | 35MB | 40.5% | +| [YOLOX-s](https://bj.bcebos.com/paddlehub/fastdeploy/yolox_s.onnx) | 35MB | 39.6% | +| [YOLOX-m](https://bj.bcebos.com/paddlehub/fastdeploy/yolox_m.onnx) | 97MB | 46.4% | +| [YOLOX-l](https://bj.bcebos.com/paddlehub/fastdeploy/yolox_tiny.onnx) | 207MB | 50.0% | +| [YOLOX-x](https://bj.bcebos.com/paddlehub/fastdeploy/yolox_x.onnx) | 378MB | 51.2% | @@ -23,3 +26,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[YOLOX v0.1.1版本](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0) 编写 diff --git a/examples/vision/detection/yolox/cpp/README.md b/examples/vision/detection/yolox/cpp/README.md index f80aa4fe4d..2094eddd0e 100644 --- a/examples/vision/detection/yolox/cpp/README.md +++ b/examples/vision/detection/yolox/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake ..
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -73,6 +73,8 @@ YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式。 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/detection/yolox/python/README.md b/examples/vision/detection/yolox/python/README.md index 5fb526e6e4..dfa3058a19 100644 --- a/examples/vision/detection/yolox/python/README.md +++ b/examples/vision/detection/yolox/python/README.md @@ -62,6 +62,8 @@ YOLOX模型加载和初始化,其中model_file为导出的ONNX模型格式 > > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > >* **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > >* **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/facedet/README.md b/examples/vision/facedet/README.md index fb3b3dfdfe..cd062e26b1 100644 --- a/examples/vision/facedet/README.md +++ b/examples/vision/facedet/README.md @@ -1,10 +1,10 @@ 人脸检测模型 -FastDeploy目前支持如下人脸模型部署 +FastDeploy目前支持如下人脸检测模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [retinaface]() | PPYOLOE系列模型 | ONNX | | -| [ultraface]() | PicoDet系列模型 | ONNX || -| [yolov5face]() | Paddle版本的YOLOX系列模型 | ONNX | | - +| [retinaface](./retinaface) | RetinaFace系列模型 | ONNX | CommitID:b984b4b | +| [ultraface](./ultraface) | UltraFace系列模型 | ONNX |CommitID:dffdddd | +| [yolov5face](./yolov5face) | YOLOv5Face系列模型 | ONNX | CommitID:4fd1ead | +| [scrfd](./scrfd) | SCRFD系列模型 | ONNX | CommitID:17cdeab | diff --git a/examples/vision/facedet/retinaface/README.md b/examples/vision/facedet/retinaface/README.md index 
60172ce0a2..525b6cb2f2 100644 --- a/examples/vision/facedet/retinaface/README.md +++ b/examples/vision/facedet/retinaface/README.md @@ -2,7 +2,7 @@ ## 模型版本说明 -- [RetinaFace CommitID:b984b4b](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) +- [RetinaFace](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) - (1)[链接中](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的RetinaFace CommitID:b984b4b模型,可按照[导出ONNX模型](#导出ONNX模型)后,完成部署。 @@ -53,3 +53,8 @@ onnxsim FaceDetector.onnx Pytorch_RetinaFace_resnet50-640-640.onnx # resnet50 - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[RetinaFace CommitID:b984b4b](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) 编写 diff --git a/examples/vision/facedet/retinaface/cpp/README.md b/examples/vision/facedet/retinaface/cpp/README.md index d5b0123b0f..0b072c04d0 100644 --- a/examples/vision/facedet/retinaface/cpp/README.md +++ b/examples/vision/facedet/retinaface/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -58,7 +58,7 @@ RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格 #### Predict函数 > ``` -> RetinaFace::Predict(cv::Mat* im, DetectionResult* result, +> RetinaFace::Predict(cv::Mat* im, FaceDetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -68,11 +68,13 @@ RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格 > **参数** > > > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **result**: 检测结果,包括检测框,各个框的置信度, FaceDetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector&lt;int&gt;): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **variance**(vector&lt;float&gt;): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[0, 0, 0] diff --git a/examples/vision/facedet/retinaface/python/README.md b/examples/vision/facedet/retinaface/python/README.md index 4ac14f9ac1..815ddf234e 100644 --- a/examples/vision/facedet/retinaface/python/README.md +++ b/examples/vision/facedet/retinaface/python/README.md @@ -15,7 +15,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/retinaface/python/ +cd examples/vison/facedet/retinaface/python/ # CPU推理 @@ -61,9 +61,11 @@ RetinaFace模型加载和初始化,其中model_file为导出的ONNX模型格 > **返回** > -> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > 返回`fastdeploy.vision.FaceDetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > 
> * **variance**(list[float]): 通过此参数可以指定retinaface中的方差variance值,默认是[0.1,0.2], 一般不用修改. diff --git a/examples/vision/facedet/scrfd/README.md b/examples/vision/facedet/scrfd/README.md index a0e7a51513..d1694d2c1c 100644 --- a/examples/vision/facedet/scrfd/README.md +++ b/examples/vision/facedet/scrfd/README.md @@ -2,7 +2,7 @@ ## 模型版本说明 -- [SCRFD CID:17cdeab](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) +- [SCRFD](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) - (1)[链接中](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的SCRFD CID:17cdeab模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 @@ -28,9 +28,6 @@ - docker docker的onnx目录中已有生成好的onnx文件 - - # 移动onnx文件到demo目录 - cp PATH/TO/SCRFD.onnx PATH/TO/model_zoo/vision/scrfd/ ``` ## 下载预训练ONNX模型 @@ -67,3 +64,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[SCRFD CommitID:17cdeab](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) 编写 diff --git a/examples/vision/facedet/scrfd/cpp/README.md b/examples/vision/facedet/scrfd/cpp/README.md index b467857e89..5309542a77 100644 --- a/examples/vision/facedet/scrfd/cpp/README.md +++ b/examples/vision/facedet/scrfd/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -39,7 +39,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li ### SCRFD类 ``` -fastdeploy::vision::detection::SCRFD( +fastdeploy::vision::facedet::SCRFD( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), @@ -58,7 +58,7 @@ SCRFD模型加载和初始化,其中model_file为导出的ONNX模型格式。 #### Predict函数 > ``` -> SCRFD::Predict(cv::Mat* im, DetectionResult* result, +> SCRFD::Predict(cv::Mat* im, FaceDetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -68,11 +68,13 @@ SCRFD模型加载和初始化,其中model_file为导出的ONNX模型格式。 > **参数** > > > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **result**: 检测结果,包括检测框,各个框的置信度, FaceDetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector<float>): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/facedet/scrfd/python/README.md b/examples/vision/facedet/scrfd/python/README.md index 93bacc3aa6..efb1c863cb 100644 --- a/examples/vision/facedet/scrfd/python/README.md +++ b/examples/vision/facedet/scrfd/python/README.md @@ -15,7 +15,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/scrfd/python/ +cd examples/vison/facedet/scrfd/python/ # CPU推理 python infer.py --model scrfd_500m_bnkps_shape640x640.onnx --image test_lite_face_detector_3.jpg --device cpu @@ -32,7 +32,7 @@ python infer.py --model 
scrfd_500m_bnkps_shape640x640.onnx --image test_lite_fac ## SCRFD Python接口 ``` -fastdeploy.vision.detection.SCRFD(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.facedet.SCRFD(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) ``` SCRFD模型加载和初始化,其中model_file为导出的ONNX模型格式 @@ -60,9 +60,11 @@ SCRFD模型加载和初始化,其中model_file为导出的ONNX模型格式 > **返回** > -> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > 返回`fastdeploy.vision.FaceDetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/facedet/ultraface/README.md b/examples/vision/facedet/ultraface/README.md index f1dcca0b98..678fb771f7 100644 --- a/examples/vision/facedet/ultraface/README.md +++ b/examples/vision/facedet/ultraface/README.md @@ -2,7 +2,7 @@ ## 模型版本说明 -- [UltraFace CommitID:dffdddd](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) +- [UltraFace](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) - (1)[链接中](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd)的*.onnx可下载, 也可以通过下面模型链接下载并进行部署 @@ -21,3 +21,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[UltraFace CommitID:dffdddd](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) 编写 diff --git a/examples/vision/facedet/ultraface/cpp/README.md b/examples/vision/facedet/ultraface/cpp/README.md index dd9a5ed2b4..1610685c98 100644 --- a/examples/vision/facedet/ultraface/cpp/README.md +++ b/examples/vision/facedet/ultraface/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir 
build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -58,7 +58,7 @@ UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式 #### Predict函数 > ``` -> UltraFace::Predict(cv::Mat* im, DetectionResult* result, +> UltraFace::Predict(cv::Mat* im, FaceDetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -68,11 +68,13 @@ UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式 > **参数** > > > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **result**: 检测结果,包括检测框,各个框的置信度, FaceDetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 240] diff --git a/examples/vision/facedet/ultraface/python/README.md b/examples/vision/facedet/ultraface/python/README.md index be6c2b9300..d0406d65ac 100644 --- a/examples/vision/facedet/ultraface/python/README.md +++ b/examples/vision/facedet/ultraface/python/README.md @@ -15,7 +15,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/ultraface/python/ +cd examples/vison/facedet/ultraface/python/ # CPU推理 @@ -61,9 +61,11 @@ UltraFace模型加载和初始化,其中model_file为导出的ONNX模型格式 > **返回** > -> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > 返回`fastdeploy.vision.FaceDetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * 
**size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[320, 240] diff --git a/examples/vision/facedet/yolov5face/README.md b/examples/vision/facedet/yolov5face/README.md index 34828b1938..424a76bbed 100644 --- a/examples/vision/facedet/yolov5face/README.md +++ b/examples/vision/facedet/yolov5face/README.md @@ -2,9 +2,9 @@ ## 模型版本说明 -- [YOLOv5Face CommitID:4fd1ead](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) +- [YOLOv5Face](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) - (1)[链接中](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - - (2)开发者基于自己数据训练的YOLOv5Face CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + - (2)开发者基于自己数据训练的YOLOv5Face模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 @@ -31,12 +31,17 @@ | 模型 | 大小 | 精度 | |:---------------------------------------------------------------- |:----- |:----- | -| [YOLOv5s-Face](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx) | 30MB | - | +| [YOLOv5s-Face](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5s-face.onnx) | 30MB | 94.3 | | [YOLOv5s-Face-bak](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5face-s-640x640.bak.onnx) | 30MB | -| -| [YOLOv5l-Face](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5face-l-640x640.onnx ) | 181MB | - | +| [YOLOv5l-Face](https://bj.bcebos.com/paddlehub/fastdeploy/yolov5face-l-640x640.onnx ) | 181MB | 95.8 | ## 详细部署文档 - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[YOLOv5Face CommitID:4fd1ead](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) 编写 diff --git a/examples/vision/facedet/yolov5face/cpp/README.md b/examples/vision/facedet/yolov5face/cpp/README.md index 0f228aea0f..4c7368d594 100644 --- a/examples/vision/facedet/yolov5face/cpp/README.md +++ b/examples/vision/facedet/yolov5face/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget 
https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -58,7 +58,7 @@ YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格 #### Predict函数 > ``` -> YOLOv5Face::Predict(cv::Mat* im, DetectionResult* result, +> YOLOv5Face::Predict(cv::Mat* im, FaceDetectionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -68,11 +68,13 @@ YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格 > **参数** > > > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **result**: 检测结果,包括检测框,各个框的置信度, FaceDetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector&lt;int&gt;): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(vector&lt;float&gt;): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/facedet/yolov5face/python/README.md b/examples/vision/facedet/yolov5face/python/README.md index 91ccbbaf3e..31a1362b26 100644 --- a/examples/vision/facedet/yolov5face/python/README.md +++ b/examples/vision/facedet/yolov5face/python/README.md @@ -15,7 +15,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/yolov5face/python/ +cd examples/vision/facedet/yolov5face/python/ # CPU推理 python infer.py --model yolov5s-face.onnx --image test_lite_face_detector_3.jpg --device cpu @@ -60,9 +60,11 @@ YOLOv5Face模型加载和初始化,其中model_file为导出的ONNX模型格 > **返回** > -> >
返回`fastdeploy.vision.FaceDetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[640, 640] > > * **padding_value**(list[float]): 通过此参数可以修改图片在resize时候做填充(padding)的值, 包含三个浮点型元素, 分别表示三个通道的值, 默认值为[114, 114, 114] diff --git a/examples/vision/faceid/README.md b/examples/vision/faceid/README.md new file mode 100644 index 0000000000..4950463a1a --- /dev/null +++ b/examples/vision/faceid/README.md @@ -0,0 +1,10 @@ +人脸检测模型 + +FastDeploy目前支持如下人脸识别模型部署 + +| 模型 | 说明 | 模型格式 | 版本 | +| :--- | :--- | :------- | :--- | +| [arcface](./insightface) | ArcFace系列模型 | ONNX | CommitID:babb9a5 | +| [cosface](./insightface) | CosFace系列模型 | ONNX | CommitID:babb9a5 | +| [partial_fc](./insightface) | PartialFC系列模型 | ONNX | CommitID:babb9a5 | +| [vpl](./insightface) | VPL系列模型 | ONNX | CommitID:babb9a5 | diff --git a/examples/vision/faceid/insightface/README.md b/examples/vision/faceid/insightface/README.md index 17affb4cd3..cf3371247a 100644 --- a/examples/vision/faceid/insightface/README.md +++ b/examples/vision/faceid/insightface/README.md @@ -31,7 +31,7 @@ ## 下载预训练ONNX模型 -为了方便开发者的测试,下面提供了RetinaFace导出的各系列模型,开发者可直接下载使用。 +为了方便开发者的测试,下面提供了InsightFace导出的各系列模型,开发者可直接下载使用。 其中精度指标来源于InsightFace中对各模型的介绍,详情各参考InsightFace中的说明 | 模型 | 大小 | 精度 (AgeDB_30) | @@ -55,3 +55,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[InsightFace CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) 编写 diff --git a/examples/vision/faceid/insightface/cpp/README.md b/examples/vision/faceid/insightface/cpp/README.md index 512d9345a0..56af498c2d 100644 --- a/examples/vision/faceid/insightface/cpp/README.md +++ b/examples/vision/faceid/insightface/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz 
tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. -DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -34,14 +34,18 @@ wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_2.JPG 运行完成可视化结果如下图所示 - +
+ + + +
-## ArcFace C++接口 +## InsightFace C++接口 ### ArcFace类 ``` -fastdeploy::vision::detection::ArcFace( +fastdeploy::vision::faceid::ArcFace( const string& model_file, const string& params_file = "", const RuntimeOption& runtime_option = RuntimeOption(), @@ -50,6 +54,41 @@ fastdeploy::vision::detection::ArcFace( ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式。 +### CosFace类 + +``` +fastdeploy::vision::faceid::CosFace( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +CosFace模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +### PartialFC类 + +``` +fastdeploy::vision::faceid::PartialFC( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +PartialFC模型加载和初始化,其中model_file为导出的ONNX模型格式。 + +### VPL类 + +``` +fastdeploy::vision::faceid::VPL( + const string& model_file, + const string& params_file = "", + const RuntimeOption& runtime_option = RuntimeOption(), + const Frontend& model_format = Frontend::ONNX) +``` + +VPL模型加载和初始化,其中model_file为导出的ONNX模型格式。 **参数** > * **model_file**(str): 模型文件路径 @@ -60,7 +99,7 @@ ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 #### Predict函数 > ``` -> ArcFace::Predict(cv::Mat* im, DetectionResult* result, +> ArcFace::Predict(cv::Mat* im, FaceRecognitionResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -70,11 +109,13 @@ ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 > **参数** > > > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **result**: 检测结果,包括检测框,各个框的置信度, FaceRecognitionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector<int>): 
通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[112, 112] diff --git a/examples/vision/faceid/insightface/python/README.md b/examples/vision/faceid/insightface/python/README.md index a10e244bc5..f53e516355 100644 --- a/examples/vision/faceid/insightface/python/README.md +++ b/examples/vision/faceid/insightface/python/README.md @@ -18,7 +18,7 @@ wget https://bj.bcebos.com/paddlehub/test_samples/test_lite_focal_arcface_2.JPG #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/insightface/python/ +cd examples/vision/faceid/insightface/python/ # CPU推理 python infer_arcface.py --model ms1mv3_arcface_r100.onnx --face test_lite_focal_arcface_0.JPG --face_positive test_lite_focal_arcface_1.JPG --face_negative test_lite_focal_arcface_2.JPG --device cpu @@ -45,13 +45,13 @@ Detect Done! Cosine 01: 0.814385, Cosine 02:-0.059388 ``` -## ArcFace Python接口 +## InsightFace Python接口 ``` -fastdeploy.vision.detection.ArcFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) -fastdeploy.vision.detection.CosFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) -fastdeploy.vision.detection.PartialFC(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) -fastdeploy.vision.detection.VPL(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.faceid.ArcFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.faceid.CosFace(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.faceid.PartialFC(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) +fastdeploy.vision.faceid.VPL(model_file, params_file=None, runtime_option=None, model_format=Frontend.ONNX) ``` ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 @@ -79,9 +79,11 @@ ArcFace模型加载和初始化,其中model_file为导出的ONNX模型格式 > **返回** > -> >
返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > 返回`fastdeploy.vision.FaceRecognitionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[112, 112] diff --git a/examples/vision/matting/README.md b/examples/vision/matting/README.md new file mode 100644 index 0000000000..1fba41f3e8 --- /dev/null +++ b/examples/vision/matting/README.md @@ -0,0 +1,7 @@ +人脸检测模型 + +FastDeploy目前支持如下人脸识别模型部署 + +| 模型 | 说明 | 模型格式 | 版本 | +| :--- | :--- | :------- | :--- | +| [modnet](./modnet) | MODNet系列模型 | ONNX | CommitID:28165a4 | diff --git a/examples/vision/matting/modnet/README.md b/examples/vision/matting/modnet/README.md index fc3f7c0080..dbeb901fed 100644 --- a/examples/vision/matting/modnet/README.md +++ b/examples/vision/matting/modnet/README.md @@ -2,7 +2,7 @@ ## 模型版本说明 -- [MODNet CommitID:28165a4](https://github.com/ZHKKKe/MODNet/commit/28165a4) +- [MODNet](https://github.com/ZHKKKe/MODNet/commit/28165a4) - (1)[链接中](https://github.com/ZHKKKe/MODNet/commit/28165a4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的MODNet CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 @@ -40,3 +40,8 @@ - [Python部署](python) - [C++部署](cpp) + + +## 版本说明 + +- 本版本文档和代码基于[MODNet CommitID:28165a4](https://github.com/ZHKKKe/MODNet/commit/28165a4) 编写 diff --git a/examples/vision/matting/modnet/cpp/README.md b/examples/vision/matting/modnet/cpp/README.md index 4d31123905..64da2e9407 100644 --- a/examples/vision/matting/modnet/cpp/README.md +++ b/examples/vision/matting/modnet/cpp/README.md @@ -12,7 +12,7 @@ ``` mkdir build cd build -wget https://xxx.tgz +wget https://https://bj.bcebos.com/paddlehub/fastdeploy/cpp/fastdeploy-linux-x64-gpu-0.2.0.tgz tar xvf fastdeploy-linux-x64-0.2.0.tgz cmake .. 
-DFASTDEPLOY_INSTALL_DIR=${PWD}/fastdeploy-linux-x64-0.2.0 make -j @@ -60,7 +60,7 @@ MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式。 #### Predict函数 > ``` -> MODNet::Predict(cv::Mat* im, DetectionResult* result, +> MODNet::Predict(cv::Mat* im, MattingResult* result, > float conf_threshold = 0.25, > float nms_iou_threshold = 0.5) > ``` @@ -70,11 +70,13 @@ MODNet模型加载和初始化,其中model_file为导出的ONNX模型格式。 > **参数** > > > * **im**: 输入图像,注意需为HWC,BGR格式 -> > * **result**: 检测结果,包括检测框,各个框的置信度, DetectionResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > * **result**: 检测结果,包括检测框,各个框的置信度, MattingResult说明参考[视觉模型预测结果](../../../../../docs/api/vision_results/) > > * **conf_threshold**: 检测框置信度过滤阈值 > > * **nms_iou_threshold**: NMS处理过程中iou阈值 ### 类成员变量 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(vector&lt;int&gt;): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[256, 256] diff --git a/examples/vision/matting/modnet/python/README.md b/examples/vision/matting/modnet/python/README.md index 9faf718769..1b38262a80 100644 --- a/examples/vision/matting/modnet/python/README.md +++ b/examples/vision/matting/modnet/python/README.md @@ -16,7 +16,7 @@ wget https://raw.githubusercontent.com/DefTruth/lite.ai.toolkit/main/examples/li #下载部署示例代码 git clone https://github.com/PaddlePaddle/FastDeploy.git -cd examples/vison/detection/modnet/python/ +cd examples/vision/matting/modnet/python/ # CPU推理 python infer.py --model modnet_photographic_portrait_matting.onnx --image test_lite_matting_input.jpg --device cpu @@ -63,9 +63,11 @@ MODNet模型加载和初始化,其中model_file为导出的ONNX模型格 > **返回** > -> > 返回`fastdeploy.vision.DetectionResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) +> > 返回`fastdeploy.vision.MattingResult`结构体,结构体说明参考文档[视觉模型预测结果](../../../../../docs/api/vision_results/) ### 类成员属性 +#### 预处理参数 +用户可按照自己的实际需求,修改下列预处理参数,从而影响最终的推理和部署效果 > > * **size**(list[int]): 通过此参数修改预处理过程中resize的大小,包含两个整型元素,表示[width, height], 默认值为[256, 256] From 87cb228a198306983fef63633c9e3f1ed4a8c3f2
Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 12:04:56 +0000 Subject: [PATCH 84/94] modify all wrong expressions in docs --- examples/vision/detection/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/detection/README.md b/examples/vision/detection/README.md index 72e7072eeb..f87d72d916 100644 --- a/examples/vision/detection/README.md +++ b/examples/vision/detection/README.md @@ -1,6 +1,6 @@ 人脸检测模型 -FastDeploy目前支持如下人脸检测模型部署 +FastDeploy目前支持如下目标检测模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | From bc7b36278e5a2d5426dbd73515b80291f2c5fc5f Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 13:05:08 +0000 Subject: [PATCH 85/94] modify all wrong expressions in docs --- examples/vision/detection/README.md | 24 ++++++++++++------- .../vision/detection/nanodet_plus/README.md | 5 ++-- .../vision/detection/scaledyolov4/README.md | 2 +- examples/vision/detection/yolor/README.md | 2 +- examples/vision/detection/yolov5/README.md | 2 -- .../vision/detection/yolov5lite/README.md | 2 +- examples/vision/detection/yolov6/README.md | 5 ++-- examples/vision/detection/yolov7/README.md | 4 ++-- examples/vision/detection/yolox/README.md | 5 ++-- examples/vision/facedet/README.md | 10 ++++---- examples/vision/facedet/retinaface/README.md | 6 ++--- examples/vision/facedet/scrfd/README.md | 5 ++-- examples/vision/facedet/ultraface/README.md | 4 ++-- examples/vision/facedet/yolov5face/README.md | 4 +--- examples/vision/faceid/README.md | 10 ++++---- examples/vision/faceid/insightface/README.md | 4 +--- examples/vision/matting/README.md | 6 ++--- examples/vision/matting/modnet/README.md | 6 ++--- 18 files changed, 49 insertions(+), 57 deletions(-) diff --git a/examples/vision/detection/README.md b/examples/vision/detection/README.md index f87d72d916..79d1ae46f7 100644 --- a/examples/vision/detection/README.md +++ b/examples/vision/detection/README.md @@ -1,14 +1,20 @@ -人脸检测模型 +# 目标检测模型 
FastDeploy目前支持如下目标检测模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [nanodet_plus](./nanodet_plus) | NanoDetPlus系列模型 | ONNX | Release/v1.0.0-alpha-1 | -| [yolov5](./yolov5) | YOLOv5系列模型 | ONNX | Release/v6.0 | -| [yolov5lite](./yolov5lite) | YOLOv5-Lite系列模型 | ONNX | Release/v1.4 | -| [yolov6](./yolov6) | YOLOv6系列模型 | ONNX | Release/0.1.0 | -| [yolov7](./yolov7) | YOLOv7系列模型 | ONNX | Release/0.1 | -| [yolor](./yolor) | YOLOR系列模型 | ONNX | Release/weights | -| [yolox](./yolox) | YOLOX系列模型 | ONNX | Release/v0.1.1 | -| [scaledyolov4](./scaledyolov4) | ScaledYOLOv4系列模型 | ONNX | CommitID:6768003 | +| [PaddleDetection/PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PPYOLOE系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/PicoDet](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PicoDet系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/YOLOX](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | Paddle版本的YOLOX系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/YOLOv3](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | YOLOv3系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/PPYOLO](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PPYOLO系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/FasterRCNN](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | FasterRCNN系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [WongKinYiu/YOLOv7](https://github.com/WongKinYiu/yolov7) | 
YOLOv7、YOLOv7-X等系列模型 | ONNX | [v0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1) | +| [NanoDetPlus](./nanodet_plus) | NanoDetPlus 系列模型 | ONNX | [Release/v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) | +| [YOLOv5](./yolov5) | YOLOv5 系列模型 | ONNX | [Release/v6.0](https://github.com/ultralytics/yolov5/tree/v6.0) | +| [YOLOv5-Lite](./yolov5lite) | YOLOv5-Lite 系列模型 | ONNX | [Release/v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) | +| [YOLOv6](./yolov6) | YOLOv6 系列模型 | ONNX | [Release/0.1.0](https://github.com/meituan/YOLOv6/releases/download/0.1.0) | +| [YOLOR](./yolor) | YOLOR 系列模型 | ONNX | [Release/weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) | +| [YOLOX](./yolox) | YOLOX 系列模型 | ONNX | [Release/v0.1.1](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0) | +| [ScaledYOLOv4](./scaledyolov4) | ScaledYOLOv4 系列模型 | ONNX | [CommitID: 6768003](https://github.com/WongKinYiu/ScaledYOLOv4/commit/676800364a3446900b9e8407bc880ea2127b3415) | diff --git a/examples/vision/detection/nanodet_plus/README.md b/examples/vision/detection/nanodet_plus/README.md index a295e122fc..8ad107d9c5 100644 --- a/examples/vision/detection/nanodet_plus/README.md +++ b/examples/vision/detection/nanodet_plus/README.md @@ -1,11 +1,10 @@ # NanoDetPlus准备部署模型 -## 模型版本说明 - NanoDetPlus部署实现来自[NanoDetPlus](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) 的代码,基于coco的[预训练模型](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)。 - - (1)[预训练模型](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)的*.onnx可直接进行部署; - - (2)自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 + - (1)[官方库](https://github.com/RangiLyu/nanodet/releases/tag/v1.0.0-alpha-1)提供的*.onnx可直接进行部署; + - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 ## 下载预训练ONNX模型 diff --git a/examples/vision/detection/scaledyolov4/README.md b/examples/vision/detection/scaledyolov4/README.md index 5a0ba000f1..b86ab7f79f 100644 --- 
a/examples/vision/detection/scaledyolov4/README.md +++ b/examples/vision/detection/scaledyolov4/README.md @@ -2,7 +2,7 @@ - ScaledYOLOv4部署实现来自[ScaledYOLOv4](https://github.com/WongKinYiu/ScaledYOLOv4)的代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)。 - - (1)[预训练模型](https://github.com/WongKinYiu/ScaledYOLOv4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; + - (1)[官方库](https://github.com/WongKinYiu/ScaledYOLOv4)提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的ScaledYOLOv4模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolor/README.md b/examples/vision/detection/yolor/README.md index 7889eac9f6..859234ea06 100644 --- a/examples/vision/detection/yolor/README.md +++ b/examples/vision/detection/yolor/README.md @@ -2,7 +2,7 @@ - YOLOR部署实现来自[YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights)的代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)。 - - (1)[预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; + - (1)[预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的YOLOR模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolov5/README.md b/examples/vision/detection/yolov5/README.md index e83dcdd504..7195e0f975 100644 --- a/examples/vision/detection/yolov5/README.md +++ b/examples/vision/detection/yolov5/README.md @@ -1,7 +1,5 @@ # YOLOv5准备部署模型 -## 模型版本说明 - - YOLOv5 v6.0部署模型实现来自[YOLOv5](https://github.com/ultralytics/yolov5/tree/v6.0),和[基于COCO的预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0) - (1)[预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0)的*.onnx可直接进行部署; - (2)开发者基于自己数据训练的YOLOv5 v6.0模型,可使用[YOLOv5](https://github.com/ultralytics/yolov5)中的`export.py`导出ONNX文件后后,完成部署。 diff --git 
a/examples/vision/detection/yolov5lite/README.md b/examples/vision/detection/yolov5lite/README.md index 4b95967d1d..5398277a92 100644 --- a/examples/vision/detection/yolov5lite/README.md +++ b/examples/vision/detection/yolov5lite/README.md @@ -3,7 +3,7 @@ - YOLOv5Lite部署实现来自[YOLOv5-Lite](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) 代码,和[基于COCO的预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)。 - - (1)[预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; + - (1)[预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的YOLOv5Lite模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolov6/README.md b/examples/vision/detection/yolov6/README.md index 497778fe0f..925776d993 100644 --- a/examples/vision/detection/yolov6/README.md +++ b/examples/vision/detection/yolov6/README.md @@ -1,11 +1,10 @@ # YOLOv6准备部署模型 -## 模型版本说明 - YOLOv6 部署实现来自[YOLOv6](https://github.com/meituan/YOLOv6/releases/tag/0.1.0),和[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)。 - - (1)[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)的*.onnx可直接进行部署; - - (2)自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 + - (1)[官方提供的基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)的*.onnx可直接进行部署; + - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolov7/README.md b/examples/vision/detection/yolov7/README.md index 266aeace2d..8e98e7da6f 100644 --- a/examples/vision/detection/yolov7/README.md +++ b/examples/vision/detection/yolov7/README.md @@ -2,8 +2,8 @@ - YOLOv7部署实现来自[YOLOv7](https://github.com/WongKinYiu/yolov7/tree/v0.1)分支代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)。 - - 
(1)[预训练模型](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; - - (2)自己数据训练的YOLOv7 0.1模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 + - (1)[官方库](https://github.com/WongKinYiu/yolov7/releases/tag/v0.1)提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署;*.trt和*.pose模型不支持部署; + - (2)自己数据训练的YOLOv7模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 ## 导出ONNX模型 diff --git a/examples/vision/detection/yolox/README.md b/examples/vision/detection/yolox/README.md index 193089904b..84b4ebeee6 100644 --- a/examples/vision/detection/yolox/README.md +++ b/examples/vision/detection/yolox/README.md @@ -1,11 +1,10 @@ # YOLOX准备部署模型 -## 模型版本说明 - YOLOX部署实现来自[YOLOX](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0),基于[coco的预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)。 - - (1)[预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)中的*.pth通过导出ONNX模型操作后,可进行部署;*.onnx、*.trt和*.pose模型不支持部署; - - (2)开发者基于自己数据训练的YOLOX v0.1.1模型,可按照导出ONNX模型后,完成部署。 + - (1)[预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)中的*.pth通过导出ONNX模型操作后,可进行部署; + - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 ## 下载预训练ONNX模型 diff --git a/examples/vision/facedet/README.md b/examples/vision/facedet/README.md index cd062e26b1..b0716bb6ec 100644 --- a/examples/vision/facedet/README.md +++ b/examples/vision/facedet/README.md @@ -1,10 +1,10 @@ -人脸检测模型 +# 人脸检测模型 FastDeploy目前支持如下人脸检测模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [retinaface](./retinaface) | RetinaFace系列模型 | ONNX | CommitID:b984b4b | -| [ultraface](./ultraface) | UltraFace系列模型 | ONNX |CommitID:dffdddd | -| [yolov5face](./yolov5face) | YOLOv5Face系列模型 | ONNX | CommitID:4fd1ead | -| [scrfd](./scrfd) | SCRFD系列模型 | ONNX | CommitID:17cdeab | +| [RetinaFace](./retinaface) | RetinaFace 系列模型 | ONNX | 
[CommitID:b984b4b](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) | +| [UltraFace](./ultraface) | UltraFace 系列模型 | ONNX |[CommitID:dffdddd](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) | +| [YOLOv5Face](./yolov5face) | YOLOv5Face 系列模型 | ONNX | [CommitID:4fd1ead](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) | +| [SCRFD](./scrfd) | SCRFD 系列模型 | ONNX | [CommitID:17cdeab](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) | diff --git a/examples/vision/facedet/retinaface/README.md b/examples/vision/facedet/retinaface/README.md index 525b6cb2f2..f7855673de 100644 --- a/examples/vision/facedet/retinaface/README.md +++ b/examples/vision/facedet/retinaface/README.md @@ -1,10 +1,8 @@ # RetinaFace准备部署模型 -## 模型版本说明 - - [RetinaFace](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) - - (1)[链接中](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - - (2)自己数据训练的RetinaFace CommitID:b984b4b模型,可按照[导出ONNX模型](#导出ONNX模型)后,完成部署。 + - (1)[官方库中提供的](https://github.com/biubug6/Pytorch_Retinaface/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)自己数据训练的RetinaFace模型,可按照[导出ONNX模型](#导出ONNX模型)后,完成部署。 ## 导出ONNX模型 diff --git a/examples/vision/facedet/scrfd/README.md b/examples/vision/facedet/scrfd/README.md index d1694d2c1c..4a0c28eed0 100644 --- a/examples/vision/facedet/scrfd/README.md +++ b/examples/vision/facedet/scrfd/README.md @@ -1,10 +1,9 @@ # SCRFD准备部署模型 -## 模型版本说明 - [SCRFD](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) - - (1)[链接中](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - - (2)开发者基于自己数据训练的SCRFD CID:17cdeab模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + - (1)[官方库中提供的](https://github.com/deepinsight/insightface/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - 
(2)开发者基于自己数据训练的SCRFD模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 diff --git a/examples/vision/facedet/ultraface/README.md b/examples/vision/facedet/ultraface/README.md index 678fb771f7..34bac38137 100644 --- a/examples/vision/facedet/ultraface/README.md +++ b/examples/vision/facedet/ultraface/README.md @@ -1,9 +1,9 @@ # UltraFace准备部署模型 -## 模型版本说明 - [UltraFace](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) - - (1)[链接中](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd)的*.onnx可下载, 也可以通过下面模型链接下载并进行部署 + - (1)[官方库中提供的](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/)的*.onnx可下载, 也可以通过下面模型链接下载并进行部署 + - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 ## 下载预训练ONNX模型 diff --git a/examples/vision/facedet/yolov5face/README.md b/examples/vision/facedet/yolov5face/README.md index 424a76bbed..e907186d95 100644 --- a/examples/vision/facedet/yolov5face/README.md +++ b/examples/vision/facedet/yolov5face/README.md @@ -1,9 +1,7 @@ # YOLOv5Face准备部署模型 -## 模型版本说明 - - [YOLOv5Face](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) - - (1)[链接中](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库中提供的](https://github.com/deepcam-cn/yolov5-face/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的YOLOv5Face模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 diff --git a/examples/vision/faceid/README.md b/examples/vision/faceid/README.md index 4950463a1a..bbb6f23cec 100644 --- a/examples/vision/faceid/README.md +++ b/examples/vision/faceid/README.md @@ -1,10 +1,10 @@ -人脸检测模型 +# 人脸识别模型 FastDeploy目前支持如下人脸识别模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [arcface](./insightface) | ArcFace系列模型 | ONNX | CommitID:babb9a5 | -| [cosface](./insightface) | CosFace系列模型 | ONNX | CommitID:babb9a5 | -| [partial_fc](./insightface) | PartialFC系列模型 | ONNX | 
CommitID:babb9a5 | -| [vpl](./insightface) | VPL系列模型 | ONNX | CommitID:babb9a5 | +| [ArcFace](./insightface) | ArcFace 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [CosFace](./insightface) | CosFace 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [PartialFC](./insightface) | PartialFC 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [VPL](./insightface) | VPL 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | diff --git a/examples/vision/faceid/insightface/README.md b/examples/vision/faceid/insightface/README.md index cf3371247a..fd276b200b 100644 --- a/examples/vision/faceid/insightface/README.md +++ b/examples/vision/faceid/insightface/README.md @@ -1,9 +1,7 @@ # InsightFace准备部署模型 -## 模型版本说明 - - [InsightFace](https://github.com/deepinsight/insightface/commit/babb9a5) - - (1)[链接中](https://github.com/deepinsight/insightface/commit/babb9a5)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库中提供的](https://github.com/deepinsight/insightface/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的InsightFace模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 diff --git a/examples/vision/matting/README.md b/examples/vision/matting/README.md index 1fba41f3e8..bb3a7eedf0 100644 --- a/examples/vision/matting/README.md +++ b/examples/vision/matting/README.md @@ -1,7 +1,7 @@ -人脸检测模型 +# 抠图模型 -FastDeploy目前支持如下人脸识别模型部署 +FastDeploy目前支持如下抠图模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [modnet](./modnet) | MODNet系列模型 | ONNX | CommitID:28165a4 | +| [MODNet](./modnet) | MODNet 系列模型 | ONNX | [CommitID:28165a4](https://github.com/ZHKKKe/MODNet/commit/28165a4) | diff --git a/examples/vision/matting/modnet/README.md b/examples/vision/matting/modnet/README.md index dbeb901fed..e85cf8e58e 100644 --- a/examples/vision/matting/modnet/README.md +++ 
b/examples/vision/matting/modnet/README.md @@ -1,10 +1,8 @@ # MODNet准备部署模型 -## 模型版本说明 - - [MODNet](https://github.com/ZHKKKe/MODNet/commit/28165a4) - - (1)[链接中](https://github.com/ZHKKKe/MODNet/commit/28165a4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - - (2)开发者基于自己数据训练的MODNet CommitID:b984b4b模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 + - (1)[官方库中提供的](https://github.com/ZHKKKe/MODNet/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (2)开发者基于自己数据训练的MODNet模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 From c68da871e15e5245a705770b24f3bb5686ce78d7 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Fri, 12 Aug 2022 13:12:24 +0000 Subject: [PATCH 86/94] modify all wrong expressions in docs --- examples/vision/detection/yolor/README.md | 2 +- examples/vision/detection/yolov5/README.md | 2 +- examples/vision/detection/yolov5lite/README.md | 2 +- examples/vision/detection/yolov6/README.md | 2 +- examples/vision/detection/yolox/README.md | 2 +- 5 files changed, 5 insertions(+), 5 deletions(-) diff --git a/examples/vision/detection/yolor/README.md b/examples/vision/detection/yolor/README.md index 859234ea06..b7a4ff8513 100644 --- a/examples/vision/detection/yolor/README.md +++ b/examples/vision/detection/yolor/README.md @@ -2,7 +2,7 @@ - YOLOR部署实现来自[YOLOR](https://github.com/WongKinYiu/yolor/releases/tag/weights)的代码,和[基于COCO的预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)。 - - (1)[预训练模型](https://github.com/WongKinYiu/yolor/releases/tag/weights)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/WongKinYiu/yolor/releases/tag/weights)提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的YOLOR模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolov5/README.md b/examples/vision/detection/yolov5/README.md index 7195e0f975..79a1b6fef3 100644 --- a/examples/vision/detection/yolov5/README.md +++ 
b/examples/vision/detection/yolov5/README.md @@ -1,7 +1,7 @@ # YOLOv5准备部署模型 - YOLOv5 v6.0部署模型实现来自[YOLOv5](https://github.com/ultralytics/yolov5/tree/v6.0),和[基于COCO的预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0) - - (1)[预训练模型](https://github.com/ultralytics/yolov5/releases/tag/v6.0)的*.onnx可直接进行部署; + - (1)[官方库](https://github.com/ultralytics/yolov5/releases/tag/v6.0)提供的*.onnx可直接进行部署; - (2)开发者基于自己数据训练的YOLOv5 v6.0模型,可使用[YOLOv5](https://github.com/ultralytics/yolov5)中的`export.py`导出ONNX文件后后,完成部署。 diff --git a/examples/vision/detection/yolov5lite/README.md b/examples/vision/detection/yolov5lite/README.md index 5398277a92..8eafee619b 100644 --- a/examples/vision/detection/yolov5lite/README.md +++ b/examples/vision/detection/yolov5lite/README.md @@ -3,7 +3,7 @@ - YOLOv5Lite部署实现来自[YOLOv5-Lite](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) 代码,和[基于COCO的预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)。 - - (1)[预训练模型](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4)提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的YOLOv5Lite模型,按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)操作后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolov6/README.md b/examples/vision/detection/yolov6/README.md index 925776d993..fcef8d588a 100644 --- a/examples/vision/detection/yolov6/README.md +++ b/examples/vision/detection/yolov6/README.md @@ -3,7 +3,7 @@ - YOLOv6 部署实现来自[YOLOv6](https://github.com/meituan/YOLOv6/releases/tag/0.1.0),和[基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)。 - - (1)[官方提供的基于coco的预训练模型](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)的*.onnx可直接进行部署; + - (1)[官方库](https://github.com/meituan/YOLOv6/releases/tag/0.1.0)提供的*.onnx可直接进行部署; - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/detection/yolox/README.md 
b/examples/vision/detection/yolox/README.md index 84b4ebeee6..a392d5cebc 100644 --- a/examples/vision/detection/yolox/README.md +++ b/examples/vision/detection/yolox/README.md @@ -3,7 +3,7 @@ - YOLOX部署实现来自[YOLOX](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0),基于[coco的预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)。 - - (1)[预训练模型](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)中的*.pth通过导出ONNX模型操作后,可进行部署; + - (1)[官方库](https://github.com/Megvii-BaseDetection/YOLOX/releases/tag/0.1.1rc0)提供中的*.pth通过导出ONNX模型操作后,可进行部署; - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 From 2f80a40477015b9efb5310bd1729de695715294a Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Sun, 14 Aug 2022 15:32:52 +0000 Subject: [PATCH 87/94] modify docs expressions --- examples/vision/detection/README.md | 16 ++++++++-------- examples/vision/facedet/README.md | 8 ++++---- examples/vision/faceid/README.md | 8 ++++---- .../faceid/insightface/python/infer_arcface.py | 2 +- .../faceid/insightface/python/infer_cosface.py | 2 +- .../insightface/python/infer_partial_fc.py | 2 +- .../faceid/insightface/python/infer_vpl.py | 2 +- examples/vision/matting/README.md | 2 +- 8 files changed, 21 insertions(+), 21 deletions(-) diff --git a/examples/vision/detection/README.md b/examples/vision/detection/README.md index 79d1ae46f7..1d1a2ab2c8 100644 --- a/examples/vision/detection/README.md +++ b/examples/vision/detection/README.md @@ -10,11 +10,11 @@ FastDeploy目前支持如下目标检测模型部署 | [PaddleDetection/YOLOv3](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | YOLOv3系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | | [PaddleDetection/PPYOLO](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PPYOLO系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | | 
[PaddleDetection/FasterRCNN](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | FasterRCNN系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [WongKinYiu/YOLOv7](https://github.com/WongKinYiu/yolov7) | YOLOv7、YOLOv7-X等系列模型 | ONNX | [v0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1) | -| [NanoDetPlus](./nanodet_plus) | NanoDetPlus 系列模型 | ONNX | [Release/v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) | -| [YOLOv5](./yolov5) | YOLOv5 系列模型 | ONNX | [Release/v6.0](https://github.com/ultralytics/yolov5/tree/v6.0) | -| [YOLOv5-Lite](./yolov5lite) | YOLOv5-Lite 系列模型 | ONNX | [Release/v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) | -| [YOLOv6](./yolov6) | YOLOv6 系列模型 | ONNX | [Release/0.1.0](https://github.com/meituan/YOLOv6/releases/download/0.1.0) | -| [YOLOR](./yolor) | YOLOR 系列模型 | ONNX | [Release/weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) | -| [YOLOX](./yolox) | YOLOX 系列模型 | ONNX | [Release/v0.1.1](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0) | -| [ScaledYOLOv4](./scaledyolov4) | ScaledYOLOv4 系列模型 | ONNX | [CommitID: 6768003](https://github.com/WongKinYiu/ScaledYOLOv4/commit/676800364a3446900b9e8407bc880ea2127b3415) | +| [WongKinYiu/YOLOv7](https://github.com/WongKinYiu/yolov7) | YOLOv7、YOLOv7-X等系列模型 | ONNX | [Release/v0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1) | +| [RangiLyu/NanoDetPlus](./nanodet_plus) | NanoDetPlus 系列模型 | ONNX | [Release/v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) | +| [ultralytics/YOLOv5](./yolov5) | YOLOv5 系列模型 | ONNX | [Release/v6.0](https://github.com/ultralytics/yolov5/tree/v6.0) | +| [ppogg/YOLOv5-Lite](./yolov5lite) | YOLOv5-Lite 系列模型 | ONNX | [Release/v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) | +| [meituan/YOLOv6](./yolov6) | YOLOv6 系列模型 | ONNX | 
[Release/0.1.0](https://github.com/meituan/YOLOv6/releases/download/0.1.0) | +| [WongKinYiu/YOLOR](./yolor) | YOLOR 系列模型 | ONNX | [Release/weights](https://github.com/WongKinYiu/yolor/releases/tag/weights) | +| [Megvii-BaseDetection/YOLOX](./yolox) | YOLOX 系列模型 | ONNX | [Release/v0.1.1](https://github.com/Megvii-BaseDetection/YOLOX/tree/0.1.1rc0) | +| [WongKinYiu/ScaledYOLOv4](./scaledyolov4) | ScaledYOLOv4 系列模型 | ONNX | [CommitID: 6768003](https://github.com/WongKinYiu/ScaledYOLOv4/commit/676800364a3446900b9e8407bc880ea2127b3415) | diff --git a/examples/vision/facedet/README.md b/examples/vision/facedet/README.md index b0716bb6ec..cde8c71b80 100644 --- a/examples/vision/facedet/README.md +++ b/examples/vision/facedet/README.md @@ -4,7 +4,7 @@ FastDeploy目前支持如下人脸检测模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [RetinaFace](./retinaface) | RetinaFace 系列模型 | ONNX | [CommitID:b984b4b](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) | -| [UltraFace](./ultraface) | UltraFace 系列模型 | ONNX |[CommitID:dffdddd](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) | -| [YOLOv5Face](./yolov5face) | YOLOv5Face 系列模型 | ONNX | [CommitID:4fd1ead](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) | -| [SCRFD](./scrfd) | SCRFD 系列模型 | ONNX | [CommitID:17cdeab](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) | +| [biubug6/RetinaFace](./retinaface) | RetinaFace 系列模型 | ONNX | [CommitID:b984b4b](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) | +| [Linzaer/UltraFace](./ultraface) | UltraFace 系列模型 | ONNX |[CommitID:dffdddd](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) | +| [deepcam-cn/YOLOv5Face](./yolov5face) | YOLOv5Face 系列模型 | ONNX | [CommitID:4fd1ead](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) | +| [deepinsight/SCRFD](./scrfd) | SCRFD 系列模型 | ONNX | 
[CommitID:17cdeab](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) | diff --git a/examples/vision/faceid/README.md b/examples/vision/faceid/README.md index bbb6f23cec..3053c3e544 100644 --- a/examples/vision/faceid/README.md +++ b/examples/vision/faceid/README.md @@ -4,7 +4,7 @@ FastDeploy目前支持如下人脸识别模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [ArcFace](./insightface) | ArcFace 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | -| [CosFace](./insightface) | CosFace 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | -| [PartialFC](./insightface) | PartialFC 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | -| [VPL](./insightface) | VPL 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [deepinsight/ArcFace](./insightface) | ArcFace 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [deepinsight/CosFace](./insightface) | CosFace 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [deepinsight/PartialFC](./insightface) | PartialFC 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | +| [deepinsight/VPL](./insightface) | VPL 系列模型 | ONNX | [CommitID:babb9a5](https://github.com/deepinsight/insightface/commit/babb9a5) | diff --git a/examples/vision/faceid/insightface/python/infer_arcface.py b/examples/vision/faceid/insightface/python/infer_arcface.py index 2d725026e1..a9846b4cc8 100644 --- a/examples/vision/faceid/insightface/python/infer_arcface.py +++ b/examples/vision/faceid/insightface/python/infer_arcface.py @@ -18,7 +18,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of scrfd onnx model.") + "--model", required=True, 
help="Path of insgihtface onnx model.") parser.add_argument( "--face", required=True, help="Path of test face image file.") parser.add_argument( diff --git a/examples/vision/faceid/insightface/python/infer_cosface.py b/examples/vision/faceid/insightface/python/infer_cosface.py index 07f1a0b14b..7b45f7a402 100644 --- a/examples/vision/faceid/insightface/python/infer_cosface.py +++ b/examples/vision/faceid/insightface/python/infer_cosface.py @@ -18,7 +18,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of scrfd onnx model.") + "--model", required=True, help="Path of insightface onnx model.") parser.add_argument( "--face", required=True, help="Path of test face image file.") parser.add_argument( diff --git a/examples/vision/faceid/insightface/python/infer_partial_fc.py b/examples/vision/faceid/insightface/python/infer_partial_fc.py index b931af0dff..b1b2f3bf1d 100644 --- a/examples/vision/faceid/insightface/python/infer_partial_fc.py +++ b/examples/vision/faceid/insightface/python/infer_partial_fc.py @@ -18,7 +18,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of scrfd onnx model.") + "--model", required=True, help="Path of insightface onnx model.") parser.add_argument( "--face", required=True, help="Path of test face image file.") parser.add_argument( diff --git a/examples/vision/faceid/insightface/python/infer_vpl.py b/examples/vision/faceid/insightface/python/infer_vpl.py index 14c25bfb47..7618913f7d 100644 --- a/examples/vision/faceid/insightface/python/infer_vpl.py +++ b/examples/vision/faceid/insightface/python/infer_vpl.py @@ -18,7 +18,7 @@ def parse_arguments(): import ast parser = argparse.ArgumentParser() parser.add_argument( - "--model", required=True, help="Path of scrfd onnx model.") + "--model", required=True, help="Path of insightface onnx model.") parser.add_argument( "--face", 
required=True, help="Path of test face image file.") parser.add_argument( diff --git a/examples/vision/matting/README.md b/examples/vision/matting/README.md index bb3a7eedf0..1076d14b45 100644 --- a/examples/vision/matting/README.md +++ b/examples/vision/matting/README.md @@ -4,4 +4,4 @@ FastDeploy目前支持如下抠图模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [MODNet](./modnet) | MODNet 系列模型 | ONNX | [CommitID:28165a4](https://github.com/ZHKKKe/MODNet/commit/28165a4) | +| [ZHKKKe/MODNet](./modnet) | MODNet 系列模型 | ONNX | [CommitID:28165a4](https://github.com/ZHKKKe/MODNet/commit/28165a4) | From a57c430c8f7bce4ecb74c3c3ce0d89426a81eee6 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 02:08:50 +0000 Subject: [PATCH 88/94] fix expression of detection part --- examples/vision/detection/README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/examples/vision/detection/README.md b/examples/vision/detection/README.md index 25ceb7936b..b7efc2c13e 100644 --- a/examples/vision/detection/README.md +++ b/examples/vision/detection/README.md @@ -4,13 +4,13 @@ FastDeploy目前支持如下目标检测模型部署 | 模型 | 说明 | 模型格式 | 版本 | | :--- | :--- | :------- | :--- | -| [PaddleDetection/PPYOLOE](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PPYOLOE系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [PaddleDetection/PicoDet](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PicoDet系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [PaddleDetection/YOLOX](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | Paddle版本的YOLOX系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [PaddleDetection/YOLOv3](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | YOLOv3系列模型 | Paddle | 
[Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [PaddleDetection/PPYOLO](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | PPYOLO系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [PaddleDetection/FasterRCNN](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4/configs/ppyoloe) | FasterRCNN系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | -| [WongKinYiu/YOLOv7](https://github.com/WongKinYiu/yolov7) | YOLOv7、YOLOv7-X等系列模型 | ONNX | [Release/v0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1) | +| [PaddleDetection/PPYOLOE](./paddledetection) | PPYOLOE系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/PicoDet](./paddledetection) | PicoDet系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/YOLOX](./paddledetection) | Paddle版本的YOLOX系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/YOLOv3](./paddledetection) | YOLOv3系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/PPYOLO](./paddledetection) | PPYOLO系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [PaddleDetection/FasterRCNN](./paddledetection) | FasterRCNN系列模型 | Paddle | [Release/2.4](https://github.com/PaddlePaddle/PaddleDetection/tree/release/2.4) | +| [WongKinYiu/YOLOv7](./yolov7) | YOLOv7、YOLOv7-X等系列模型 | ONNX | [Release/v0.1](https://github.com/WongKinYiu/yolov7/tree/v0.1) | | [RangiLyu/NanoDetPlus](./nanodet_plus) | NanoDetPlus 系列模型 | ONNX | [Release/v1.0.0-alpha-1](https://github.com/RangiLyu/nanodet/tree/v1.0.0-alpha-1) | | [ultralytics/YOLOv5](./yolov5) | YOLOv5 系列模型 | ONNX | 
[Release/v6.0](https://github.com/ultralytics/yolov5/tree/v6.0) | | [ppogg/YOLOv5-Lite](./yolov5lite) | YOLOv5-Lite 系列模型 | ONNX | [Release/v1.4](https://github.com/ppogg/YOLOv5-Lite/releases/tag/v1.4) | From 9d14b11320b3ad362e111a338d4fe17ccdee2a09 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 02:19:29 +0000 Subject: [PATCH 89/94] fix expression of detection part --- examples/vision/detection/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/examples/vision/detection/README.md b/examples/vision/detection/README.md index b7efc2c13e..63851c076e 100644 --- a/examples/vision/detection/README.md +++ b/examples/vision/detection/README.md @@ -1,4 +1,4 @@ -人脸检测模型 +# 目标检测模型 FastDeploy目前支持如下目标检测模型部署 From 7000178dcf7fe13a8c25a9fd6f54e653fbd2c1bf Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 02:25:06 +0000 Subject: [PATCH 90/94] fix expression of detection part --- examples/vision/facedet/retinaface/README.md | 2 +- examples/vision/facedet/scrfd/README.md | 2 +- examples/vision/facedet/ultraface/README.md | 2 +- examples/vision/facedet/yolov5face/README.md | 2 +- examples/vision/faceid/insightface/README.md | 2 +- examples/vision/matting/modnet/README.md | 2 +- 6 files changed, 6 insertions(+), 6 deletions(-) diff --git a/examples/vision/facedet/retinaface/README.md b/examples/vision/facedet/retinaface/README.md index cb5b76ec3a..6aeb113ff0 100644 --- a/examples/vision/facedet/retinaface/README.md +++ b/examples/vision/facedet/retinaface/README.md @@ -1,7 +1,7 @@ # RetinaFace准备部署模型 - [RetinaFace](https://github.com/biubug6/Pytorch_Retinaface/commit/b984b4b) - - (1)[官方库中提供的](https://github.com/biubug6/Pytorch_Retinaface/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/biubug6/Pytorch_Retinaface/)中提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)自己数据训练的RetinaFace模型,可按照[导出ONNX模型](#导出ONNX模型)后,完成部署。 diff --git a/examples/vision/facedet/scrfd/README.md b/examples/vision/facedet/scrfd/README.md index 
f5a0122bba..8434a3942e 100644 --- a/examples/vision/facedet/scrfd/README.md +++ b/examples/vision/facedet/scrfd/README.md @@ -2,7 +2,7 @@ - [SCRFD](https://github.com/deepinsight/insightface/tree/17cdeab12a35efcebc2660453a8cbeae96e20950) - - (1)[官方库中提供的](https://github.com/deepinsight/insightface/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/deepinsight/insightface/)中提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的SCRFD模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 diff --git a/examples/vision/facedet/ultraface/README.md b/examples/vision/facedet/ultraface/README.md index e0b019326e..cd88f0ceff 100644 --- a/examples/vision/facedet/ultraface/README.md +++ b/examples/vision/facedet/ultraface/README.md @@ -2,7 +2,7 @@ - [UltraFace](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/commit/dffdddd) - - (1)[官方库中提供的](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/)的*.onnx可下载, 也可以通过下面模型链接下载并进行部署 + - (1)[官方库](https://github.com/Linzaer/Ultra-Light-Fast-Generic-Face-Detector-1MB/)中提供的*.onnx可下载, 也可以通过下面模型链接下载并进行部署 - (2)开发者自己训练的模型,导出ONNX模型后,参考[详细部署文档](#详细部署文档)完成部署。 diff --git a/examples/vision/facedet/yolov5face/README.md b/examples/vision/facedet/yolov5face/README.md index e907186d95..d9dc9f949d 100644 --- a/examples/vision/facedet/yolov5face/README.md +++ b/examples/vision/facedet/yolov5face/README.md @@ -1,7 +1,7 @@ # YOLOv5Face准备部署模型 - [YOLOv5Face](https://github.com/deepcam-cn/yolov5-face/commit/4fd1ead) - - (1)[官方库中提供的](https://github.com/deepcam-cn/yolov5-face/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/deepcam-cn/yolov5-face/)中提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的YOLOv5Face模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 diff --git a/examples/vision/faceid/insightface/README.md b/examples/vision/faceid/insightface/README.md index fd276b200b..981c898e57 100644 --- 
a/examples/vision/faceid/insightface/README.md +++ b/examples/vision/faceid/insightface/README.md @@ -1,7 +1,7 @@ # InsightFace准备部署模型 - [InsightFace](https://github.com/deepinsight/insightface/commit/babb9a5) - - (1)[官方库中提供的](https://github.com/deepinsight/insightface/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/deepinsight/insightface/)中提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的InsightFace模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 diff --git a/examples/vision/matting/modnet/README.md b/examples/vision/matting/modnet/README.md index e85cf8e58e..31c0718c8c 100644 --- a/examples/vision/matting/modnet/README.md +++ b/examples/vision/matting/modnet/README.md @@ -1,7 +1,7 @@ # MODNet准备部署模型 - [MODNet](https://github.com/ZHKKKe/MODNet/commit/28165a4) - - (1)[官方库中提供的](https://github.com/ZHKKKe/MODNet/)的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; + - (1)[官方库](https://github.com/ZHKKKe/MODNet/)中提供的*.pt通过[导出ONNX模型](#导出ONNX模型)操作后,可进行部署; - (2)开发者基于自己数据训练的MODNet模型,可按照[导出ONNX模型](#%E5%AF%BC%E5%87%BAONNX%E6%A8%A1%E5%9E%8B)后,完成部署。 ## 导出ONNX模型 From 5940f618016ad2ba143cc67337bc41ee5cfcb5f7 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 03:20:49 +0000 Subject: [PATCH 91/94] add face recognition result doc --- docs/api/vision_results/README.md | 5 ++-- .../vision_results/face_detection_result.md | 2 +- .../vision_results/face_recognition_result.md | 25 +++++++++++++++++++ 3 files changed, 29 insertions(+), 3 deletions(-) create mode 100644 docs/api/vision_results/face_recognition_result.md diff --git a/docs/api/vision_results/README.md b/docs/api/vision_results/README.md index 844388cca8..64ea4fc671 100644 --- a/docs/api/vision_results/README.md +++ b/docs/api/vision_results/README.md @@ -6,5 +6,6 @@ FastDeploy根据视觉模型的任务类型,定义了不同的结构体(`csrcs | :----- | :--- | :---- | :------- | | ClassificationResult | [C++/Python文档](./classification_result.md) | 图像分类返回结果 | ResNet50、MobileNetV3等 | | DetectionResult | 
[C++/Python文档](./detection_result.md) | 目标检测返回结果 | PPYOLOE、YOLOv7系列模型等 | -| FaceDetectionResult | [C++/Python文档](./face_detection_result.md) | 目标检测返回结果 | PPYOLOE、YOLOv7系列模型等 | -| MattingResult | [C++/Python文档](./matting_result.md) | 目标检测返回结果 | PPYOLOE、YOLOv7系列模型等 | +| FaceDetectionResult | [C++/Python文档](./face_detection_result.md) | 目标检测返回结果 | SCRFD、RetinaFace系列模型等 | +| FaceRecognitionResult | [C++/Python文档](./face_recognition_result.md) | 目标检测返回结果 | ArcFace、CosFace系列模型等 | +| MattingResult | [C++/Python文档](./matting_result.md) | 目标检测返回结果 | MODNet系列模型等 | diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md index 6c9c09f007..7fa83f164f 100644 --- a/docs/api/vision_results/face_detection_result.md +++ b/docs/api/vision_results/face_detection_result.md @@ -11,7 +11,6 @@ struct FaceDetectionResult { std::vector> boxes; std::vector> landmarks; std::vector scores; - ResultType type = ResultType::FACE_DETECTION; int landmarks_per_face; void Clear(); std::string Str(); @@ -32,3 +31,4 @@ struct FaceDetectionResult { - **boxes**(list of list(float)): 成员变量,表示单张图片检测出来的所有目标框坐标。boxes是一个list,其每个元素为一个长度为4的list, 表示为一个框,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 - **scores**(list of float): 成员变量,表示单张图片检测出来的所有目标置信度 - **landmarks**: 成员变量,表示单张图片检测出来的所有人脸的关键点 +- **landmarks_per_face**: 成员变量,表示每个人脸框中的关键点的数量。 diff --git a/docs/api/vision_results/face_recognition_result.md b/docs/api/vision_results/face_recognition_result.md new file mode 100644 index 0000000000..eacadc78bf --- /dev/null +++ b/docs/api/vision_results/face_recognition_result.md @@ -0,0 +1,25 @@ +# FaceRecognitionResult 人脸检测结果 + +FaceRecognitionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 + +## C++ 结构体 + +`fastdeploy::vision::FaceRecognitionResult` + +``` +struct FaceRecognitionResult { + std::vector embedding; + void Clear(); + std::string Str(); +}; +``` + +- **embedding**: 
成员变量,表示人脸识别模型最终的提取的特征embedding,可以用来计算人脸之间的特征相似度。 +- **Clear()**: 成员函数,用于清除结构体中存储的结果 +- **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) + +## Python结构体 + +`fastdeploy.vision.FaceRecognitionResult` + +- **embedding**: 成员变量,表示人脸识别模型最终的提取的特征embedding,可以用来计算人脸之间的特征相似度。 From ea7f1427f9e8459edbbf8580c43c6dee50d68372 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 03:27:30 +0000 Subject: [PATCH 92/94] modify result docs --- docs/api/vision_results/face_detection_result.md | 2 +- docs/api/vision_results/face_recognition_result.md | 7 +++---- docs/api/vision_results/matting_result.md | 2 +- 3 files changed, 5 insertions(+), 6 deletions(-) diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md index 7fa83f164f..1be8400315 100644 --- a/docs/api/vision_results/face_detection_result.md +++ b/docs/api/vision_results/face_detection_result.md @@ -1,6 +1,6 @@ # FaceDetectionResult 人脸检测结果 -FaceDetectionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 +FaceDetectionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明人脸检测出来的目标框、人脸landmarks,目标置信度和每张人脸的landmark数量。 ## C++ 结构体 diff --git a/docs/api/vision_results/face_recognition_result.md b/docs/api/vision_results/face_recognition_result.md index eacadc78bf..fb0e7786b5 100644 --- a/docs/api/vision_results/face_recognition_result.md +++ b/docs/api/vision_results/face_recognition_result.md @@ -1,7 +1,6 @@ -# FaceRecognitionResult 人脸检测结果 - -FaceRecognitionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 +# FaceRecognitionResult 人脸识别结果 +FaceRecognitionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明人脸识别模型对图像特征的embedding。 ## C++ 结构体 `fastdeploy::vision::FaceRecognitionResult` @@ -22,4 +21,4 @@ struct FaceRecognitionResult { `fastdeploy.vision.FaceRecognitionResult` -- **embedding**: 成员变量,表示人脸识别模型最终的提取的特征embedding,可以用来计算人脸之间的特征相似度。 +- **embedding**: 成员变量,表示人脸识别模型最终提取的特征embedding,可以用来计算人脸之间的特征相似度。 
diff --git a/docs/api/vision_results/matting_result.md b/docs/api/vision_results/matting_result.md index 3418400eca..7a09c9cd7f 100644 --- a/docs/api/vision_results/matting_result.md +++ b/docs/api/vision_results/matting_result.md @@ -1,6 +1,6 @@ # MattingResult 抠图结果 -MattingResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 +MattingResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明模型预测的alpha透明度的值,预测的前景等。 ## C++ 结构体 From 964606b8dcda2e80a013adbf3e504dee8af92fa5 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 03:46:02 +0000 Subject: [PATCH 93/94] modify result docs --- docs/api/vision_results/classification_result.md | 4 ++-- docs/api/vision_results/detection_result.md | 6 +++--- docs/api/vision_results/face_detection_result.md | 4 ++-- docs/api/vision_results/face_recognition_result.md | 4 ++-- docs/api/vision_results/matting_result.md | 8 ++++---- 5 files changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/api/vision_results/classification_result.md b/docs/api/vision_results/classification_result.md index 113db39608..bf94d0ff15 100644 --- a/docs/api/vision_results/classification_result.md +++ b/docs/api/vision_results/classification_result.md @@ -2,7 +2,7 @@ ClassifyResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像的分类结果和置信度。 -## C++ 结构体 +## C++ 定义 `fastdeploy::vision::ClassifyResult` @@ -20,7 +20,7 @@ struct ClassifyResult { - **Clear()**: 成员函数,用于清除结构体中存储的结果 - **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) -## Python结构体 +## Python 定义 `fastdeploy.vision.ClassifyResult` diff --git a/docs/api/vision_results/detection_result.md b/docs/api/vision_results/detection_result.md index e44a27b34c..a702d49899 100644 --- a/docs/api/vision_results/detection_result.md +++ b/docs/api/vision_results/detection_result.md @@ -2,7 +2,7 @@ DetectionResult代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明图像检测出来的目标框、目标类别和目标置信度。 -## C++ 结构体 +## C++ 定义 `fastdeploy::vision::DetectionResult` @@ -22,10 +22,10 @@ struct 
DetectionResult { - **Clear()**: 成员函数,用于清除结构体中存储的结果 - **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) -## Python结构体 +## Python 定义 `fastdeploy.vision.DetectionResult` - **boxes**(list of list(float)): 成员变量,表示单张图片检测出来的所有目标框坐标。boxes是一个list,其每个元素为一个长度为4的list, 表示为一个框,每个框以4个float数值依次表示xmin, ymin, xmax, ymax, 即左上角和右下角坐标 - **scores**(list of float): 成员变量,表示单张图片检测出来的所有目标置信度 -- **label_ids(list of int): 成员变量,表示单张图片检测出来的所有目标类别 +- **label_ids**(list of int): 成员变量,表示单张图片检测出来的所有目标类别 diff --git a/docs/api/vision_results/face_detection_result.md b/docs/api/vision_results/face_detection_result.md index 1be8400315..000b42a6be 100644 --- a/docs/api/vision_results/face_detection_result.md +++ b/docs/api/vision_results/face_detection_result.md @@ -2,7 +2,7 @@ FaceDetectionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明人脸检测出来的目标框、人脸landmarks,目标置信度和每张人脸的landmark数量。 -## C++ 结构体 +## C++ 定义 `fastdeploy::vision::FaceDetectionResult` @@ -24,7 +24,7 @@ struct FaceDetectionResult { - **Clear()**: 成员函数,用于清除结构体中存储的结果 - **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) -## Python结构体 +## Python 定义 `fastdeploy.vision.FaceDetectionResult` diff --git a/docs/api/vision_results/face_recognition_result.md b/docs/api/vision_results/face_recognition_result.md index fb0e7786b5..8316056184 100644 --- a/docs/api/vision_results/face_recognition_result.md +++ b/docs/api/vision_results/face_recognition_result.md @@ -1,7 +1,7 @@ # FaceRecognitionResult 人脸识别结果 FaceRecognitionResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明人脸识别模型对图像特征的embedding。 -## C++ 结构体 +## C++ 定义 `fastdeploy::vision::FaceRecognitionResult` @@ -17,7 +17,7 @@ struct FaceRecognitionResult { - **Clear()**: 成员函数,用于清除结构体中存储的结果 - **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) -## Python结构体 +## Python 定义 `fastdeploy.vision.FaceRecognitionResult` diff --git a/docs/api/vision_results/matting_result.md b/docs/api/vision_results/matting_result.md index 7a09c9cd7f..67bcbc79d2 100644 --- a/docs/api/vision_results/matting_result.md +++ 
b/docs/api/vision_results/matting_result.md @@ -2,14 +2,14 @@ MattingResult 代码定义在`csrcs/fastdeploy/vision/common/result.h`中,用于表明模型预测的alpha透明度的值,预测的前景等。 -## C++ 结构体 +## C++ 定义 `fastdeploy::vision::MattingResult` ``` struct MattingResult { - std::vector alpha; // h x w - std::vector foreground; // h x w x c (c=3 default) + std::vector alpha; + std::vector foreground; std::vector shape; bool contain_foreground = false; void Clear(); @@ -25,7 +25,7 @@ struct MattingResult { - **Str()**: 成员函数,将结构体中的信息以字符串形式输出(用于Debug) -## Python结构体 +## Python 定义 `fastdeploy.vision.MattingResult` From 81ca1c30de79cb19beca8ea4fb3e974c9bc17d75 Mon Sep 17 00:00:00 2001 From: ziqi-jin Date: Mon, 15 Aug 2022 04:03:07 +0000 Subject: [PATCH 94/94] modify result docs --- examples/vision/README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/examples/vision/README.md b/examples/vision/README.md index 9f05d2d7f6..d95a315d79 100644 --- a/examples/vision/README.md +++ b/examples/vision/README.md @@ -8,6 +8,7 @@ | Segmentation | 语义分割,输入图像,给出图像中每个像素的分类及置信度 | [SegmentationResult](../../docs/api/vision_results/segmentation_result.md) | | Classification | 图像分类,输入图像,给出图像的分类结果和置信度 | [ClassifyResult](../../docs/api/vision_results/classification_result.md) | | FaceDetection | 人脸检测,输入图像,检测图像中人脸位置,并返回检测框坐标及人脸关键点 | [FaceDetectionResult](../../docs/api/vision_results/face_detection_result.md) | +| FaceRecognition | 人脸识别,输入图像,返回可用于相似度计算的人脸特征的embedding | [FaceRecognitionResult](../../docs/api/vision_results/face_recognition_result.md) | | Matting | 抠图,输入图像,返回图片的前景每个像素点的Alpha值 | [MattingResult](../../docs/api/vision_results/matting_result.md) | ## FastDeploy API设计

MDHhbQR%S#ky+YMQ9#@WTu3(ar@=}P zRON~P`_%2Uw1xky2kS$*Y^4pmFfuKajjCZRLVb%D{Fk{7SwQJtp#6On63RjS+}0zR zHUsh^Z=H|mXZ@6k3$nNJxRI&vMlQn}VcESvtyeOM=MYCD;}_-+od`N>WRD@@p_eD= z3HfCb%JHCY#S~kkeeTF92istHk;jcy_cl&!VX}?i4FY|x6n-NT>Y6m&3Q6^%NXN!wFGy?s2|j9c>JAyd z=A{p^QpTrASVP*USIl00;h~rxX1x^*aWA|1A3)}nS31FfX@uXnDqdi&(T8L7P|$5` ze9R)A*9s(HRk9w7${~B};SoLv7~VZEaW%#}885Bp*Hys(Nev=G9*8Ao%9v;wRqQMK zsgeDfAg?u{v1JpEE@o;U#)*q)oLi|U;+E^@WW8T}=zI)eTvNolIYFJZ1*`5*vCdKKWmT$y#`rj&6D~1qMi`KE`D&G^jUNAT^+on;)fK#(?8w1-NW{ zh*7Un&h0qaWk>b4{Y?ojwz{#To+ljxC^TlzPtNjzAyE z(hX+O(T)}|3PCrhmRfsG+-(||m7h;1KC~1NE3t+7`SWOJ$~Mt{3($7$Naoo|+1=F~0QG4(N*>LH7oGCs|)>CJI+K?~pzj$?b* z`7Vv~SZ1PTE`PRPn$C>8=ZRa^2KL{(g~-S7J6HeG-DqGF3M%sp2LV4BY(y@K_+-?yKHd`fs+@tUQF5hour!D;cAq z5-SSMEfH1Rg8NXZnrF!&D_ul)w+w5C>-lzxd+(U300jWKN*;CfC1ph9gRSuK&P2ZL|`M;{Ou3%o?PW z!yUC!2cYZr`jaqwd`OhqgkZ&9=a2ZInKzI)(h>wIx)EYg)o{)!`Z{`PFwtD}$j-Sb zTyj}p@b6k$P@vIr&_C+4g?Xth#xaN|q_Hj`$&x7t5TnU9y%~~HIcf;|1veEqLaj*7 zvh|pK@g=nRP9s&XK^ek4RK-kO6z8NOcG|LRu|wZ9g#po%`1cKX!KjKQNZv(pZQhsZr{*Ox&%Pi=P0i zq*ukw4NPhz!{H$<%p~T1Frt&YauEPOH5T+osSnD$xb5yzNu@?n(!TC|7;;x<>um&s zp*@l1*zk(BaVSJM>BX;2>z**hNM7eog8J%cpJJUxAGP0cHA)}zW;vLKkBF0R+1I!` zJQs9+cdi=4tcc;G&a80>eB)r~tG2Q}h5SloSs=qJCcYZ|{68qzS%Lr)nQ$cZg(S8N z*d7k!fZ;If^*>Cb!>)3bcNA>3sNUU=|DyWhX>nM|2R{caTT^tf-S|Rqc)nRED-ERL zEFF#+hcm~FfvJf;hbZvBhdW9ZRl~*{F=E=JC<%v9I#{Ipl4mO;j3qWA!D#W|Lh)QL z;G)FfKL6E8c#_q}N-$QJBVZz4aHrs)kt2G}RL?T=o)A`#J=^c9Vd7P?DmMx)n|!|B z^SnStX1I;E%Ie;Y_$mliI*P``3ZUbiFY67LIs~~4O_K8@1EV~;*gqg*T_WQ&8{_S# z?UPw|S52~NwJQ^OuUiMX7?GrYEWFeGykED9u~efUA=gCN)zA>AKyuetU&pIDD&m`H zo*nY#h*x^mE|A<_Iegy1=FO1;I*X3kh8SS>Q>i&RhUs~D9^2EFdmGeWJh2cRD@OvI->%RVnOqC&;R4fM~T5s*I9@4PRA zJ$x6*_;+^>nkO3OUceVuGYJf6s37Cm@oioa`F{lRZN6|rswWPk`!PS5l17jfJV~8v z^Tul^>BXPinmN?dxfPz_4bLX-oR5k^myz&mTD(rO6z{e3R0hu)UC1c;Q`^qLM(!sX z5+@p_iD)AJghY-<^%q8&(p#8SE z3Ynj%Iq&-}Z>gdiE3-t$1;koqKBuwO-yw90%~wYqx?f2bl|EV1rO+|qP#2W2Eh0|W zID;WkQPXj9R#vB$twGTm&CFZFf67Sr)zRWp{_F@8i*uIuORYRdUXWhZ{WlWZSW4x1 
z32$cop=p2~K{oN!%KRONs=e`KQ)1-ZxcQvpJWh}Gp}K0uJ=3BkdW3r{U_fW#_yP6K zMM%bBz8sh4I zu#w9Qc;R}XO)ELVaE%J?IDXk`SGt+d0Px>Zx#(aK**g8t#+8wnWmU_PWKaIG>waT9 zVczk4)pB8A^g@Q8=;S}r--|=j9vqw9+U?a`Twt6Oei= zNC!@T)LrJV{?~kX{2;Id369*o9F`paJ|^!{BBuUodH$m=;k!J2ErY!$C{svVu3{k> zIb|4h#VH%b6V+R^BrqbvbN@%n1~_VCUE!oRsd@filXt33U>%yCr4x@%kcn?y-%F;- z+h_h9HWSRQ3AmI2Bw=Rp_BwCjkl+y&v;N|F-&cxY$GO-L8!6 zdcU<*eQ^uUG2m7XMG=;k&0(F>K;sJ@QFOC@Vn7=vNf5oCzZ3BMxrx^KSaw zl{h-edvLyXypigCBJBOZ7}v|5okN6jV(Xv48LSZ*`!ZJj1oQ?WkTHnEcmTS3_5z@H za%LormJ&hB+pM&y`lQGeg?BVS?zy;~!M-(6@8lxUU7X6Wv-cM(s3tP~5>Va|=-^dL zk?+3R{lk9dXz?=Akl7MJzl9v#FgcG9?^-|Eso+(`SZvme}*PI5M&~7V0oJpC^bU2n(AV zTf9~!m!?ssmBEupxqtWDzgj{RZh#EfM(hkf#p=&r^oYsZDm7H{TbXTu?Qyb{Q!XkF zT$5Gz{iFiI!JY67SG`a&S0t^BlZMsj!0@3Mw&D1U9V>wnsr7hMUVf(&U5A(W(rsMw zEWlCYHY;J@@24}$4ippzC|{EJaYVA%`pBG+Qi)dDsW#GIG~e1F^Oir~80iv77Ia@V z=9_LV1J!;>1e@L=b$r-LLX6j9=4AFT7DSWZRs6jE!7PL-yhRstaSy5ZB0oHYT{v^K z0kgc`c$iDL?FEfWg^Cb|`uVRq;9RD`RD>=9kuLj}8}1DOs&X=qEr!inUW=WI@M!+p zc`jU&6xSj^0Fz~AW&tGf`pjj>poM>YYZza;3aNP6YaDmlv;u-rWed?%v016=*f{&l!NO1a=ef1)$OR6~AxyNOWU%8_*%w zY`g`lH8lFGNFx4;!o#PbFpYDL(AR<7LN#T4(JR4w0{XccIO;g|343t;(PqvPT8_z$ zx#z86WtCk9(#^@)F{P=$e!$W&q}Vqgrbk0w(J3~YJKt}~1<&=S>1fkr_BL6fjT=t< zplmB^qff4mmNHKdF4hs*p7kl?kB^-FxrK7JJm7_yPGsJxw)9Fu&N?@P?UvYG_+oCe#{3rnntAyvZdp$iv zc<$WS=l8!aMZ}uaw-OdAzK!xxxxl6kcw7ZEy6)KoF-dt74oi%+lrIG>JZ1lnd%`yT9!m4QLG{Ibb)URK9dV0^@F!iguF^jc4#dvC_L zosq5$L>i8MfjZ861mWr*3}nlulMjvr^YifVNe#K@$KpBcBGiPkSQ5uut(kb>r7d)C zkmuEyQ9_M~I;l?y5cZumWli+uLPKzQFD+87Zyf;=d zzi_l#qgyx7p-GTh_f|H#TN+CYQ0ah!zyW5g9!C(#;Q=YLR?>>AH`79&E|$>?_Yzq&GL&t~=rUl*V#iAzbuPl_kaU z9yTU5FrM`|QaO!&k+Is4i?>ho^kDa`H-fxRIGW>Av+0dC>8R*Cx$ueGSQ0-Y!>k&6 zAlN!E;_QQiK`qZ3WnMnc9Wvoywt;qRvbZ=xY&FV%Hp5_bA9(0ktJ zNqOjI@6qj*gK=|;AD&}!KM`Vtb_0l+krFU7<@fI=R1Fl2@})&FUsPs$KL!GY!|3jR zT)q~>)dmhhKLq7{=JTz1Q-(AC{+xU(1ru%a?1z%TB*dif4siY%PHcY)3U%lyKmLSi zPdA58FNNQ`(6erCl5xd48?0}p5l5#KkXuP*2$`L?L5(zBMLn%GV=f!sF`Ze&90Uwj zsq(3ex5$w;23wO~P z&rwD*)Jp}%?r)U%!MUAOiER!P$YwTDBdf0kmXoN2CB*TF)wlY(;HA}C==bdSN~{zc 
zUq<}vpc)APAa2*bsIDZE5Q8&-{twX7Cwd+AtZ_?qKL7pNVx&n0!8WyK#fOAfI7E{> z741Z)k-d32Nl&{{mN*K>@z+$tMa3yLc0_}txP=lDJ}-w_G|5iMd;SmbkiZnolleaY zY%M9KCQiNe(SrQ7vjtgmz{`pUl>F5qUjYVskDtzM)HQnKB$!Cj-#8%}+~X2KX1U>| zza+t&kJbiRiOPurfJB;3fh>c;UV2(1>?n1S+W4jAy^6d4!`#?a*54vjQU*C zU{tSN*Iv{5wRU7xYh$I)2(BJ)t}FTaIE!565Jh(Iv`5bGt!Swu^}vMrDIMz{Clag@dS{3MW=w>5#8X4l zhKMD7)pSZq<)J+q+Qmtua~|eZd(E2+JA)sOK#D$Z#FahD@X2Ey%%+B!W?-Sh!b6Sl zp!`k%R=ECClE*!(R>@#1M!5nXefEno|NGnMKKJshhmkd7EO$~HIqH#WCEKsV7GPJj z9+7tzWR2nCX-1n}*(B8+@cF3F?B#NIYX(9mcJ--L`E^stN0~tn)&oT_#)!) zAk^{G)nngh`PFwGAUzE%%ioB^oz!`8f4!dQ#Rk0%M9(Kone0Ki|tP zcM@<&jvf!_OS34UFL_?hagP-Xfbj~PxpzM=3p7DHJ9oLDdZ>C77$EpE9So!jy9;D? zgm0AHg}_i<83B-c+c2KEYuQZ;e2slF(m?igTe1ux+@0y16a-}y3r~e^q$m37JwFuP zK|yQc$VPXSa8~eKOdMI*T}I$&Xr>neXJ7BHaS{n${WJ9=;acT&mSs1ZqHR@|aI`UB>N(wbG6xcqKLp%}_x|fDJwath z)VWCDKD2poXGlp%CpNzNlS^D5Fn|?@D|atvB@RvH>JJ~NH#y!a*2N{3q6_mmzIA!paJw6;O2pt)*qJ9y|QN!FvI;Jb|)3HBGHw22!_tJwK>DPz5y*}72J`Fru9 zy|x3>5kTDuwiE^k+Nl`@AEEg!Bi1p8@2(Vjj=Xh1S*xZ}HCn}_=cS-tM^P)#ALB+%JcIf)JS2h4IOf z@{k8hjt5=T5HI54gDTQ%b1REv!B$&l>qnVP{7Edd2EID2%H#ykC(d{hD@IBd=FqKg zeC=%ZLMi|$vzAVlQESp>B7}y8$-&m~m(^o?{uy4na_P0(WJ-2-cW+lcsb-`Q!=nMwS+1T=#u+2%#Gp#pb~ne_bPhE)=iEJN}!x9At3 zf4#ii_s#FH9k_D5xW1i0fN_)%cCfBN`F-^lTU>uv49`X0Rgwb>*SQf-@U~+YHC^~?Mhq-@>~8OGG+ zPgW3F9xZDtmFV&%kN78MEM}BWTE^uqHG4vt#gMq&*yV~4K2}+-w?|Cv8o%_7b$=M<9N?l0a3m%u^SD&kxX6g5|2su*3TS-L!wt1pP>ZLFG`JC2@bJ&vn{r zge_VxPck=NG5X*7D>Lpx|5i$50(42jS@?jEct#qfaYxgp#*x^2Q+{gk&J)cjy-+!U z^8h#9Fdrh~p#U)l+U05ljY1p85VL)MA|>2WJi4$j6xzZm5I73pBMUF>s<9%d)f=Aa z$qiO)i?{_vle;OEUxN8xb$IZB(Xvaxn4q935iQd6K^S$HD40nB@(6ZnN~5HYjj(6> z6wR~ty^iuf?P8d~{Fv{Z`nlNHhKY8uzz)$;zI+gjzRi*oAgvzV(x~j_ zpRWSwG#FOw_3_}WxcV%=ELJVL{XMA$PH#k3S#?HpIB4@4j1U29&h|qKE*$bI5E`hw z3{jd*Bdc4U*ug3;r@o;`nd)muxeSM#C|rekCqn;*w`!5V*ZuYfBq6e;)Og$(x%T14t z+O=;ykYhc-x>Yh{mA|BAJJoo47jUD>zf^JTHhjKl=_)(k;P^%XqYY{Q)a$2%Iygk& zUIc<-qA>WP;*R(?zk4)jLFpc*R!cR<>@V4~yjf;>J4h_7i_dn<;oQK~tZT3)tHa^H za@r+_EMaP)^X3>EtW~!})3K&G-2sUyb_V!U-oI?>U2C9S!u(40`^r 
z2n+BFb(Ypf0Gvgj#MA}n(ka`CS2Q+{GTy?HXi6JvnWqwpJ})^|2mTK*D(}t{Pm$d7 zK!4PBg+bh?4Usu82Obz0pVPmZrq(>PR&k9L=S5A0D_9_s+?RsO`*Pg?yT1}81y6QIYR`5?=Q5~kG+ABtDb!~M#? z^&)bka7aj+fdV zZcNzCsEc7pFGB3=?Q9Q3q=^jyyUuAfRf4V0W@ZF>Ooo7MJ}x(sssJbYFIP2OSG60j zjSB_QXI6*_+N4jUKFM(UKE@w~(JQ%Ur0HYk@gglj2-u`v0=*|wA5mwEwl=__dGSlQ zS&3Nr(grj2**qYIJI-cWlLf=UXy!Md5l@7ms~H~_!o38Y_zl+oqPSN2q;aT8Ab!TxpOQ^_1a(KCh>F=gt$ZUE zN546Yl!7nZL7Qzz)kpPZj*gCPQk`iq>brnYoVlp2(bt51K2`mqbUBRxh_e{wXK;HqhU|^WwfT3&s&&#wd3a%AYI2_QY2us+@BVg)gW3(w_5&){Vnz zP%w<;2w&+ggO359BAg|vnm|c6FgAD!%wp$FzP!zS1(G7iY=J0Dblz#zlFsjZyNYk@ zu^(Zn@42`l%(^{1zH)G_*yFBvK;}?%ID)eX-Dy*;#+071n{YSN z$hbWlvYfsPn!u+g>1`m##jfx==#jv^+mwn!F1i^7#~a z!Z)hPRvI&{yIT1oRsr9u+DJ18jn_&kB*J=c*7-KRLg-ze>*dcQWhlYU* ztT+mtG~KMV5oV4C@pB9&PW?etRW0740-Ud@*iM{hko{EP)Q@(-T@k4H=jJg?QLW)w zFLL<3l`>H$!;zAt{<(Ww77$^i4qM9ir++`E5$0@0L}&6#;j2f5!Ca3nC9d+DJ(RYJ z*R97)FzHS?Zt3fNgA(*OYmwG(lm8W|r%V~=jFi@>qLn632CyV*{2w2b?uEdlvKGeD0Py>jbga+wM*b9LpR8mSQ5s*DlATNC z4_Iv}0C+SFEhN^*uReg=J|ZVpZ$#%)_ZF zpq8~-&<-6~0>H1VD5)x;h@_{8Kgqu;h8&G1B zd01wwZpNR9LuB!h20{2?Nbn4$s_kguQyIpcd)ulr8VF`6M?`tNOL8l=E3wwQE6>`~ zJHtsh*r6$Ui#(KBFoDLVHaW}wYv5u#+0A2nZXDZSjc4fYN=3Q2%DoX7`qmj~!h9r& zuvljEld8k14Sy7syJo*!%n|I+lMo}U_v721=d5xHC2QCt@u_-O3(V4tL zkqXAx@sM2=yozf<%3l_Z<)RZ&@qsvl>tgbvX`SN3X~hKHLcj3-Q=Gu7Y7|LD}^ z1|C|IFm`UX7WeZ3B&g7wU`)j3?s zTOwytck*8t-Ps?2pY(ut3k64Hr9MjbS-CZ(6*I0?-zhTk2bg8FLdHV0nX7}j)Ee+K zZEySw^iIj34P9t^#*@I$9JAzbF5yL%n9M1)pm zx-soqy@!kWg{p^u%@L|~YvoM1LzyNpni>d?vH!{#sF}9yUfyT> z(j#S2_p95wNnZhr2vyiW%?Wc7tlG)lXtb?otg^dQHb`*Yd0rj)1HHa?SU zBjtrCAfE>>9~<+F+2%(v)yZ74R=0JX(;bX$#GYI?B)m3yrS+(_dCKnb-^YX#+B!DI z9xy}vxCJDp9~6?}HkTj8EE?~RN81*x(OdN&1;TQd#dkbbF^w(i_#rFA@)0hdM?)fo zxSGWC!LK_EDDhsJo1T>Vbmodh_2%*TbuZ(z7D-*`YyXKYGZq~wU)_6DXoaxlG6-#l zhx%(*Zeg);@a~$i{xZ~K22Jf{*B%@+Z%4@@antA=*07QMvY}FnCp(14BK+WysgH~3 zE>M`%{_}*Bp*hE`doGQn{ofmLsc65iMgX=SQ4!Bq-)#8GG@ZOIK?B86w{8|*=?_kd z<2uMmn?xxA1m$N8qj&Ico{J=uDZG<6&12Q4m@axPI*hdw@^+_F`p( z=|WiOB6~%g9_%rK6;aCk>6$I9Ti(ThFSm*E02Pi(w=Ctwqy~QVO1mVJxh>1g8OY1g 
z;Sq?7sIP$MgnQfdGqCEPnDg;5o$2KLlN(2bjSZhx|D?bPjr zskWxk&~r0ndG_S)Um#YOue$M)@?akKVcXj`#sT#OT|<;j?pYtqVLKz8U~feuH5e+JsQEM7tkmDn zVE=+$CpnK5Zl+>($$xE!yTXB_b77eaw{bjm3gsy|7g~#GDXnzgD2qQPSQS#+ce{pc z_2lrA!6PU_&qFFBoJ%eIQJq0auJSqEdOqUj*r(jCpz+r18w9CC%<`0k0y(|2KheKo zM5k2l@k8B_vc6$_zC&O+N|=Wm7J7D(nR<$11$x_DX7vnmm?tuT)^ux|OEgpKicy&~=A<{4Mxr>WB+={*pJF+Pe? z9bSLZSWUy6#>E@rX`ZC}5^_DR;Gl|v043J8~=UUNm z2rP=mMwTD#t;iuT<)JljQ8RWk&2JFBkfxGwnRP;MFe&iX7q%YqX zj0#{)0Vt(8V5AkwTLYW#YZ0*@R7oqZ`#)Dn8k{%TC`27mwI>QlN_ao1Gk(bmX?aVgSKnP_y z2o)7Z!3%yWfE9q=c*{gyaCP_RqwgvNQWG`j9n$MD@Ee-(&^)u(4WN&2K z>jGeqMT+>}izic{JjcLx#uJB{mAF#We zN!w|{*O%x|yDab0cT1|cvGhAYQiekKH(Rbsw%G$F{VWKMS|deY?7^q_+`xvUjZ1#u zr&Cg0f(LsX>k_7^hU*bx4xA-QNMsyK=$ zG-9!=EbZ=_Od5&ZKbfeQFr>aR`o6&4I)$jR|IU;SoK8)TX_hc;*|cDeIIx=LXM3i!{6)ARi zd1_3;C*1~Dy+&IdT$^ioq}2mGglp2pAdH$n1r&?L*ywpvZ}o&m=jBBr!o+iboeB!N z7)C~uw~}XXpU6922D?Uyx_w`F+ecU1ra7jKe~;(T1e5>jO>vGYxI9xM@$s6n4&90& zx_$CCy0|MWjK{|_>!3D?6T_D;RE%fXx`7boI&k=70PTCPqI17}u1*(|#^7Fa1Uhbh z5$V}dC!V30a%_L&&vT)QwoHX2^QJQu4NJxGo8fhgOaN$@*q>%Y^>Hb^DTz2?dQD?t zm*S9<(QJmv^-o`}^L#DyX~o0e&hW=8@-U8m)4Du2<*1R{rFr;h4@DeCN-R=`p=h<~ zEiG!H9F-=?QA&HR=X-$h$?HFx|Avus^*HfWRO8@@$KJ!Zd297%c$NBn+g2b|ax zq#4}&bg{Ur@`S^rRElPYCs?L=qy8;k#>y)rJYdeF+sr&aK+Vm>(NR8Ii)by1hO30i zLh$Y)xLP9!FMK>9>`GqpCprS>kqe?x&kBm~u(CXM7g;}5srGq+EN76GA_EJ0jysuV z9G|;NQFwXx-ZQG zYLE9OW8;2uW)>|7BfsdHmUxF!l=U40+|DWBF7+9*=}7wHE*>5Q$+C$Y$wrkd3l}!N zlzVT}s-~%jA4(Sgp?$4GSj|lEODECW__KYRWp3%XSc_q4!mYEn*Rpw7`;xD|c8)!usAzwBDaDf(s3?kS75 z6nX0UazXZx5~;7Ea;M0O;Wni)-G}FqP%^S{?~Vj<&BXW>q2bjFiDaX;_=&jP&Z1hk z1;%dP|BBd{6?f^~_R}-u?6cCw zg#lldWp$niP91HVty<4?Ah-5=7w{lNsSWb5of59tR@d>pNw6M!6*t3NeH`U{g~vFx z_wob1&(}fU!=_ji28=2DRZ;G4+{tMnM?v#poxN0y;L4Ee^M@wx!`Ju~UUxyM?db{! 
zC|9$TD=4NoDz&Y7sEAx_t{|(J-<^k=I3{JZ|G)g@{P=jaNgF$UrV+wa_<7Ib6P%IR z4>oj}w9~E>S4l2sHRorXf~rMkCfr?HQH`nYsfGFFA!)=);y2#Kc$TblHrGb|;4|1M z)5v@4&yV)!e)(thL!y)lnA0=eWR`Re4X)d!b#B7;P6Vc{%%0X#?IV}81@TReNLJ4% zxt!+!DNY=dt8(CS2C6iMj6L>TA^R#P=ua@`2=9)0sz5qJP63b%=x26QkD&y3^MO^}958w!R8&EoGWhr24f~9JB4JXWm-9dHEoEChZ#I2$%AfF& z=?CC}!oV7JOj*V(1=xq+@C%)dBHa5P5%E5gUo6q>)oRa+Ec%tGsR z;B=v`f+adc8t!i#3ID7KbSVSHgI`8P{vdb~NG z3C~5ZEZb{X6b5?--%!+wjtT_*ce+%u48_NWunC~^H^6<(vtU(SQU7D^pXn2SLFy+3 zTkOsErzz+t2*IDf%-df$^!ZvYiCZ=>sk2nvP-ibwV8Eg_uYvvCbBCe+3$P5?{{xUf zZ@&PkSLFT;>*mldBEM(qJk6=?Z&p7{Ge*s~Xc)jG00!U!jt9qAM+`|pBCmgYUJNft znv3e>wDnNX0U&lI5?HgaZ+1_1bC9kVaO5kx@)+g;!ygW*bgMsFRCtechAl%HJRRv# zbY~FTl175p+(y@B5t%BeU+{su++1WY2RTv4T>AKFRN5wlRffHJRlQ)=4v`s0z97|S zk4+PLt2?)$=|Ll~a$D0)go4B$NMz$G4+Qbk&f&_J{ltR9M;xBL9@@2`rR-q)efwU# z!n|R9dY3{wQJ2*%7~0HLwt{j*n~4}vkU%|AM+{n>M#@g1t5&1v8MQD)K`^NPrRk+h zYEjs)Yf#@}yv-uCEg1nqD(wWR!{oE)YAzR&da`&*rON>ca+ZB5&z~`=FoL&(6Tb94 zS53vL3=W}fq(q9-FcAO>M}{m!aj?KHSg;^ua!OtmX<1TI23NDvmIIsDrzkK*B5^y` z!#r#VYm2`^ALH+j_P{dYi$*% z&)rq6JhGVf7~^R-m3PYfnBqb=4pfi^+$ae~W^T_<3X9jPYSiTnR1IJb#MU(J4$J-K zn-f)|wl~w($E0LzhhZGUE$&^}!SFIqj-{NEsmPeIoD%wn7Gt)+=ucRTf>J;&4BPIi zmTYJsSyO(snBi-wc%7kxrqUI#5Ga!b5D6>u$4}YJOr?N=D0I6JLsIU~t@^=-i#DaL z$#YG%TGGVJT4uQ%!_^i6Z&;VkT2~taEo|8!GOh<8LWmZ{8Z~)=6mIs0f5TOf{>qrF*1=uRoaYi2@%t4LfKJ--$ zjW}wKwtYZoS(9lIyZiAG+3#P}3lq731b}c)Q^{JENJn)eyau3M!{3)s`$G`*fb=iq z8j!069Wl)FxUXQYq!J*H2#PQU#qt3FZ5ad39iOT325Jc_RRu$bG%jn_q`fVm5Q0G) z!=}mY#7OO_LFp?@ialDZfx`m4$$_|T1gR^=-k@BAHB6HAENo7l0CuN3yf7^UHoljw zoi!tM^`2;q6y{l_Nupe_VV_US4gnk!w47t27+G?bW>264YE{5(S;c)sIdUljN3DWs z)QUErOSI`Eps-15`vMs;fJoe-30!1G6pnh0a!yqo%%t088@9l;L+1UKs%KXphZCldaUsDLjZdi^fw>5!1n4nT4 zuF7`j8P6F~GMOhC_!#OCl@UO|oqg9^KR7Eytv|ceYDpwVedQKcu_vu)By-mFAJUjdBN8GD zzae(ut~Q9+k)Ez64zw>aLn%A#;8gDYO;|vPtamrk{YK#K7pqO0k*tMo*Oie~ho(bq zVtH08xDWWb#^6r27Z9mwLe<_0CFtR`sN`t&r$`{o)_9$vrdo?bX!SN(R^){uvmKF% zQ*kPCL1u_FYK`f(axl!NJF=z#liMLI8pLb38{Vz+VD4=PRI47D9Xj-?2;X|eHD`>j 
z$HEo}LYCY^F)Bs?Wy>+h6s8W@XE`tDO5VHts2T_gQXsXLkF=}TAA9T5HeMO7%F)Li zWd+E2RanTR?ZIM!dEgz(ojF)zj4l`w914BSvN zOi^Rgiwf`V$y~+%0OUlVor$+O1$Z4&kv|cGQS21uT9zkguj(R>r{_}!Kvh&B-6PS{ zbkkf@h6<@5S6L=R^w>k^lgE!uLwk?A3hl|TsY}L8PDQy^^sBk^zS=m@!zS=WT5E2t zeNuu?4xeg2U6S3_f-xe-#g4kYC=1YOR(dt8O<(~EE(ARh zsinO3maOpV(aN@Iv^E+jU;z*V7~q%og0dVHIAe@)1gt`k5)w@f-kRUSZA$0cx)t+#?eM4P(TdWR&Bv!9`hOZ7Ho8B%tvW5rRWVWKx?6TlR)iD zfgm*?TQEmdBsO46aj_E0Zu^mIl&vb&Nh(j!DnK|wX68OG}ZLGz<@6d zYaQDKIEIdC+@*LQymHX`Wg~Yp*fQ;1lActKKm?2ed0R=6W+bg_N96_eph#O_X9{!cF~-=W!BTQt7z{GG>VLz?lrt?=djY1vYD3ge zY*@nqT12n6x$oPDQPV9->19;lNf=Oe?oFyV+}l9#psogfD8J#+o?3&D^li_`c*KNe zp@!-9WmA3~Nh%69Y(qJBf;Rh~L6Ku80A#5JPa8ohKp{!!&Ng5MO4LeSH?zKNX!qXZ zR)GoHJFE6J1k!43o>KJbq$@r8%T7;885TKOBx8b00CD<@94RNueY; zcMg4qygJjCf+V2RE>MEjly3^r&t+_XRQ~`kdZnZ-8?%Vc<=V)GISxiMoyrGMcIZ){ zdN=t6FdXSuP$HfN5DFA9NVlzaR=YeC+$mQ1j?A?Yp0t?%0C^v&9BfDH1Ox8z{{RaN zSTjhLQ!J=Bu7LcldbKP8hh+=FDz~WWwkTNBsimN+9J4}Mr%aS$QPdIwulQRflz)T) zoTUUME>uc+fX!CE`-WpmmuViXL`c=wv)fj5iL6I1mbMy7@F|)IC9>iDg@|Qy>7P-H zo_Anl8S31msG-#PZp46GQ!s62&DXO00-0Ebz%{H3;Fida=d` z^#RK$PD2oQRmGx-i&BiVcTLG+ZJ{k~2x>Pti!%$snymp(OnVS($sU|xj@#UhF(a<$ zD1PH}6Aa4lC;*lR(_Nsc-PCxDB2nF!{lSzF0Bu=|S(%8{-%$<;HG!iQXvZRb^2Uyk z*NRCZa;(0av}@`v9H|CL6b;CB@(U6=%|Kd`%NKIo-HB})gMS8wI|v_J*K6C>ZNW}$ zJfy1!+t{rX)!p0$P!=aeFX{v&$QE-+2tE`ZRAT(LsfGj? 
zk+)hJEmKRgrfQcgtGseopu1)pVBDyr$+vI_t`W1l1xEmZYD|fRN?hO=FmqOSYYud^ zp&f()TTYIku)0*U+AUcmmds7%qeA&Wfo_C3+XpZRis*eSq^yP*E?gs-s45>t^NJ>X|Fn6F#=(yvNMV%085v#2GA2S~I3daL9yQ!C+RbQUu%_h*rQEl*-Dg(S_2{Wy zr(u5Gq^(tBdQ1}nMTr!V5e!!WRY+z4k_qckeT?-dwC zNiDla>>`dR5J4R~C)BE3Zo)e#+VgE31GHsiW?W=-PZcN-7Ldu@4)v&d9ZOb#L8Y~a zt?S0*G0z+@SF;MH)29Cb;a&Ekg=c0Y<(nr8pni}xl^s@48NkZG2GrHfdhcD!F^76M zfv420*V7=lU~M)H>nupK;hFt4R%aO}8%Q4k!#G^7Ixrn(T2%M>>CDvX#h$PvlM(Av zR`h)_6`=-WEX!Q8M;1wA8OIqVydRJ}XQ@!UHg{PX>#Z&|rtpA{)Y7jC^V}Mn3Tt53 z)7DfXX`un6_kailvkz%v0U!cNAY8;5WdkZ)3+Ylor$KY{5Cb-_?334z?M&{MwD_1> z0To=v0#qg2Hxr9r*j^k@DxhG=G>OEbcXmA>okJvq>_;LI4d#Q<(QmmrMi zAbtoAu}V8TgUPS3Z9H^na5seNeZf~ryK=M@7SKcz+Mf-{loXB#+&K;jQJ2}c5>y|a zr!fWYW}&Y-$sv)YVOhcv?<&AFL6l^SvonGI z9;~K(;Bu1h(xB`ZvGM>x01ZSvtVyQ`8X7d3geelE3&mxXrVF=djQ~jT?;%Fw2OQ** zb4-=Y3X=QZ#MOl!+?mV`P2#n$_MCSrX*P9DD)YlFt1+zlB(fPtrXw34}#@_|q$h9d{lIq$(~>)B4-G46J;ui03l)D`5} z5-<=bS6q9f&KEf#V;yWKh)PjRq#&HfLMS{)pgTLn2}yS(dd7d;{j@ZTZ8n{#oBC{d zs|4*;>I*nP%*u}2u)K^PBxH4@nbPx=Aiax%{(dnZ=Bz|kd1X#J@rKjFkxi&;r1mtfdo%@1}lFRA=o(}IBAoWgSlPyS8!9n!1dJ_edl+pv!dn)C9 zYfM{lG!-nnn53$BmNspL;w%EXue&@9ZNWWQUM^XZr3WlE13GPxm8gpVUQ7(j!7gR(m7tSm5i2|X<0X~CUZvL`LsJ57(Nc!leQ`tFWIYN z{1_%52J!#3oOA`XZYiId#wIK67q9W&TY7}M{n5?GJbrO1N zBYfbIO^hXvgQncGjp#fU}65=)giYeRiRq-W`qk5B$JsEQjVcw3(Bi* zJcIrJ06C8nB~smMB!fn4I4)l1pJM^i)QV91rBYSp0Q1QQ1Of-`j~;VmU<-u+ z%-sj=01QmY^>C< zZxR}0&|kfBI_slpuZMd-YRMgU3bLee#u=HM84F;k8@VLo2dBp+Myg4nVRNcnxTS|S zCXkXGMXr=5O6_VhQmJ-o@rfguC1?_6ELORAaD~bo`azjPXL5`&Vi1u!WWL%}0R{YZ z==wPiXNFNqOBk&}=Tg#rD%5`QEYB=1^Q3bq3c-Yk&5|V89Dp;BKyDkX@V~>YS(F6_ zBhyMd2d1sG6Fdzzp%#&%Ln1*snI%f*-0?@ha2_Ec_kp?4Fx!$9a&V_M!h1kbLiN^- z;MER#^DPSSO}@|5uEnF7hjnRl039sTk`;pFCEpXE2YRvDx16p~wqw-XB^Z@q5DJo$ zo}?FXqYW4u=uW?tq4K2D?FYeEPtaj|(Nev7cv?;RfRfRPV!#rJh>(!sAyK=F}DBD-y7J!s2~O?F6|k2fOyO18wx#2s2ksC6#2A za$KsP>TEeK8bCnmV5~93giI$cx)SZ6a6EZ|SdEQf-42DrsLu7J)M@F=#Zau!y}6-~ z?}b$va9Dsprm7KyTr_9Q#7RQRfc&Map*H^jmgKwFP+`&@iiPW%zw;5Mh-t>q!k&b+ 
z^yU%3WslQIA9hI(Aq#~n{um8`Krv09+o|RN0$HeBhO56vEI>K~Q#)R&>-NT9#A+BoV12 zU??237tj*m7bK7~F|13p6I!>Z*OtYasIko;rD)9Ru^dRzxG}r>U}0K$9b1k|V?9un zDL8^=OF+=NbZR?;H2(ld84;NzSsbB_ z9RlqnoD$LRP^+#oWI}sB`McDYt*K*Tz#Z)3*AT~YcS75@sZr4ltwuvt7HaD>lTT^z zo2*9)DI~`WF(ERsp$v8$p>$v-;s6CEmAxE0I3|y6_M{j}Q&);zFRNN3>Xxa`s8iCO zIDJK0Eb5b#7ve{67Bj+4(mIlk0n*bL@`hr#m7$7 zs$Q(AE!@d5B3l=16~uek5rl}HzwXsah5)NEz}wb3aKKDTe<-I%zczhe?#!x(iEixI zbmZ6XHOq&%Elo1&HQ!L3^%}PC1Q2LNVS2soHstoDy6Ps;=)vpPP$YN)?T#mC0FQCP#EQ}aPm&m`9sXPMkPN*@PwW-w%2c?SwuaATU1 zgOO#JH+Yuu@uwqph8D~rnc}HdM7WdDI+KhJCU0^Shux!5VYcu zqNWuh#1bn<-nJKEQfkDFB0%aDqtt6zv2L^#CA498mdmfLA^ZYZpk?+djlf`?zIu6S zRMg~9pdN#uHr$R?gzXU)JC3lcx@hfBt5qgMhQ!8cV`J)j04TUtW>FCcGs6LZM?}0R z6BH$|LW?+{YFEHjPhB9x8l51@l4<(hlN!MdX(}exY35aoN{T>PGEP6HBtQwvGbcNX z5!7nViMdqaR$z56LI$OcsMWMNNHHx`)TJ>AWwgn5JYQ!5ymD3B9_Uwj+#CWJU}1!e zNx(S>r4xW$!YWPl=D=IY$4a7dmJt`bPL9kiW=&6UWH;i0R>Rju*jX94D&a@?!Cq6G zwhKr=BzB{^4xQIXEa`f5`w9x27K2Ux-B}{Ba@y%~wMk@xS?SAU`U=}sH#@9ykV>%( z5S0=#vH+mm!%20ryQm6!F)l@b-L;?wguNll5dhDjM_)~nT{*N70)H-yjP~lbnW3$%+jy_zEAlT_2-Blq72b>Xa?quRJyMu{AkU3T+h|LbxzbV|rnk zPT?2;uGL-O@sRD3k`|&yP&7Jg!A zYEN746F#8~%g^Xl1`WGTNN)!gRKca7GOa+|wdeYs^%S_9)I^EWE$Z)fg$-qutxBP1 zzh-zVq>+Uv#xh;pp!Bdo-oaG{GtIPOO!>lG)wnHMS(&Oi5CwtXM#umpLpL1g~0fP#SDmG{X%j*wo z38iR}&tp*ae&WP!6ha#lSD5Y_+G9LyRyN-j)Q@otuG^6A&sun8NtGxmAT3)LcBstO zYW3EF1g5o!mAxLJr$b(znOY~QkoMKsNen*JP}>PrA=@TENN-mM00JsyPiC=@l{=F3 zBAOi>bsXXXr)UaGPiB^-97k<=g4q(Y5hNrMuBQM3k{$`;^bOpA-2f?;!6~hcz0*Km z^o5z>#9I*kygr-LjbRc^B#X@Q9n8T(ev(%>&zv_QusW&zrIjl)ojD2td$Iay1Wu5C zmb#voES6o+QBabj(BRwbr^nEsG5VHLs<RVY={YNFb) zYG;KN#3DQikEGGzMouJB2Q7iO8@c3247Dh!RIj7C{k(cWkl~;`OYJWzAH0HWlN5<& zAT2sdNFW0nNA0NvHKJjCV&h*54s$+Yx=V(kP;tXr+A*r?Md^4&4_M;4iqAlyof5^_lm{vp+AVaN+-ADH;`J$hcyY}JHomMI8aNvBPEH<9NytQC7E zatKj2z);44pV*6&yq03aJ!yelp!EYUjk`Z1J!39uNh=|Og=x~&cPz&4=SZF%O9EX()rvat%IZ=o)?+&g zs&ONb2ecI#S-qff zfLIjZ0&~>GNOnjx-JkbvjC>&%A^@TE_WqYcPYq@pyX%IM#TLRt5iA-h7jP>sH@W;M z2PZtwh{elM&KkA^xq9-mfH}DEa0|r5ld90E90%@aNEx0-w(|yIw2hfK3y{AA?-{|t 
z>oJz49OaVq3M1Si^MoKwq(eEvSOVMb8zW5{jc>s_D-O3R3dr}S!y>tg-OG;XuZ;8g|0i?!Gi@`9+P zvPc}{0L_i3HrK``CP0%cF)dvOfzsbg!---d`=jV&-Mf0t8pZ^dbWA4oRbW74&hp4# z+ku`5U<%|ZJ9^en6dkf<%{p~2PHgq%5M(Z5CXZsX+0=wGJ-ULV{`j&Al~6>>yO6k5 zet97O0E<07D;#7LrBqQO`hsoaXh|8tXs_xWruFNX>?E>UhDiS6D%+$gWFb@bak~Q` z`DGd8b$V7XO)j6yTAo+w?`T`WRs7H~k5$1b;~+4Bnnfxng#l@NvhF%MUztIGZh+9pKF)wXR#TkR+eXY)c;L zAc=O;XY&`!k4l7h298{M^sXP}8FbeJycaH1`QZKhb#Bac*uAEmb7 zHdhLKXFX@k#3?e$spF@&tQtgFbZh!`%i-s9p~%S5%>Ia!&}MC|A&-Lc7HoM3InDqF ztf{i*qq=+H{{Zp>N6-KtX6F(B4Ptg(HnLaKLg`Np8KIU+kiiQ=_QO8?%_$D2V})D= zz*PhG>Jx}r08F|~OZa+^dc(F_M56ark)&0iRQyJ5r4RTG3pavMf`QI7|(9QC4Z* zJ5KSuZ?J4q=%}!%44^Ug#!Dd^xa(x&P?cwx*-Z%KO-;#BxnWa7XkrbaaOiqneI6QG zooM5)X<`$!t?NY?^vX2wzSU=S0E6u-BH-sdiDyndqE#r6tu&*%9i0CF;(VZTxjRD7 zVQJbnr)C(h$9>>$ww+>KiC`=4Y1K(A9dN8heX4Wwj;JpiGHMGBV4K*6=z3K2hZKfv z{VJPSn#(QUR=t>HMaU^4mj*_1m?2Opj0qHx#z6OSN#e5b!6c@jk46At*OaQwTs%+&3+GaHmIfVO03|r#WJa8kr1)NyA~yI0P`C7u$xNaw4gUxr9mR9H*8tZ^OD0y7%oB-|PJP=%jv@zql_ zgc-8anMpP@OWnz#)}T~_M*zWu0$9aIP3&7bB#Eg_Jo8Nrg*2ew98xeWOhto|OE3cYNZh8C2;=TRjM)yE=)m(eC3&l{8{w>iX4hR>j$(uQYABW0c6s&@mu@ zKRZFl2lSj1oMWgHD4~#)m-kie|lNr>67CY`U0?AYrFx$HFKvogP>@zX%lI^y-B3L)>b&B0b-q|Fn!E<5eU^mLt)RN z?i>OUX4*3-#-0xnnJ8FXoaD1;+Mf-ppbrR)#0R88$)jqGPMYfT)48eWk@>j=SW*=n$;?a6R0boevZhF$uEohB=8sE$U;{NM6mCr*&R`xV ztLYKjwQc3MVyJQVHR&x@X*M8M%RJlOG}*MB#hDp~?g=8i<;s>zOQwVxRPtveJ2ub+ z6JraDQLjYQ)h`Vy_uNVYEsao%wj|DGlQICAEg~{5(z{gc-V24UC5*w6th%tEV8Bt$ zg-O(m%)!ixQ|DNV)jOUYDy1Z)rA=JR8=dzc5V`)FGD@rMATyKgCn^E?!Rj<&4CO1d zSS;2rs}OHtuBEhwBnt@px%)1*qeEXYgmg`I?k*7uUA1nmo%uA z;C=EC!~wkegAK7*$DSV;AoW6gq=W=6T3t6{=9(J-C|>qwX9mL?!kn#y5%qUKI5#mtxSP#!=x?vI2Iv$|wUIHto6Sntc3}dYvJlS#;ok-3!V?nrP zp&v+XS+&#Of>-Sov3(uuVsEM&swN1H}LI^2_X#({mruNwaV>Zwr* zOtE*SgORhdF*n+{O=jA)_A+Qff^?qw~wjifUf(^BU(8%NMKq3BH>i8g%*q#H*y zs|nh=gqqb2DK(u&)CQ&PLrT(887UP(=|El4RTw))G8LF2s}{&AwvdF)k|g${B?t;6 z+|;#dKqbLqOKRp71~!S!Lshffx(L;8$$sqhbNNao*blVTlStfMXBZtS(YbDN4Ap1nEr&g&o+bO$V4PP}DUV6VQWJrAegw z=p|Vsj-WFd!}auUExrS(e(O~ 
zX_TqSEr@krR$=I_qJ0IvxhCIXY>lkj9SkZ|mJ6NhxeDwE;mZjTgi2?sSuSoW z%t+iFDj06=8WAQVH3rnbmo!qvWi=|bWz;gW$ak3^(L@g626tthcE~@b#FZPfDZ)#Z zha_A(fEZk|YIzG9@`h6C1VjG-GVaY1DNS0JPNi1KG1ZaPfpY*XqDLnJHUJVpbtHr+ ztCJce;FP}lfN;(QyTywDDn*!(MN5*<;d{luP3~&;=xWvLY4D<6De5Po8d-?OcOxSY z7%oGQ+gO4~@wSkeHbkXKn99u6HS#8*_S93HF=wPPtoESR9j!2$Tu&qsLLC|(QMS7` zq@-_RuYy!<pnO&+t$W82RLC8`5&k3|KDXDhF{kJCmT*36}vCAx19X)vj z%&CPccNZJeC*eY=AY(JglRFd_QQZ!Ay(rFa;Zqdm0WB_lyjEk>;=5L@y3<`6ld=h< zhy+t3pd6rYgO72?E6#Z8C8VKhN}DSi@;Vmu=cE&c1VK>Tx@bgXuVN}>%29+(5Ru5n z!wUf#SC7QP%KFfGbapQ zSu=>YD3U_Oy~P!MxCf>ZCQVUMCB?QiX6LY`=C+9_)QH&R(_=Ed=KEbv8t}-J(Q_lp zu8PV@$VCyCVBC$-V{il$c2t0|F)LXBJ5q(np|cyGquMJoK-!x}r`nY%&o8KvNVa2^ zvY*u%Vqe_K+lwwFQ=SHKw;di(f)#gWeyqcg4?}*X;hq%@rlxf3)U8DRoWeq6fp@D1 z+5jvFkcQgZbF}9;{1Oyp5Qpb$AG3SiHn7Ax5>|aW7I|aTtH~UN>rRriEsVwn9LC## zR>P3qLC4I~T3O36vaC7ieqI)Yk+zUtth#erE%w&0sVY({2-RiwkQpRc+<>H^J4htw zoP0o)i3(CB(ISppUw3#SGgQD#WOjg1 z)|IH%_tK555ZV)3(m_P&bqu;X+gOL(!7BlbIEA&pa0(yvvTNTWy6!1u-K>}t|Rzz1FDIc_&|h5VkK66YmAGcJ|BXBD@g z*OVnNsI&$(8ajPcky<;V3D8CsBJwl4hxF8)xGK^GIbLzc9GfOw#T84S%qnyM27PV@ zo-nM@VyRxtm!p!r^4&_&zpmyG@gi<*Bte~)z)gThhyRc_^P*nP9 z2G6V%wJg|*fu6jBO|@CgNGnLrSRvRXe`Zm=ML<|>C2^7vjOigJT0zlH_NS@!fi-~X z%&@Dq*kqZaknR&$1xtb#oD;O<9Dq41j&L!ZGd&QKPL}EX+h}mw!FsKTj*hfosXP>t zIK4^Q#v}DTP2RI)Wka;xKLUzS!oXP9NrlG&X? 
zq>Y`;PcyA{fho#!2zJJhx^AG>HA_+^r4SPuq=$4<8Zxuskc?5)wlUmBdTn$FG0-i(-tSbZ?#t9PARh}wl&6hFHa?$jj#yuipSdo}wCJ`&M9oqn8%LeuX z7;`xZT9@8uVD1IRW)#)D4l>;oTm2KRC zjNot}!=ZE16F9LH4s2-Wp?4eT@M-}hv=6K4+DCCj`h!@i`ktW+QjQHPPy1&PIEq-> zFsiYWwN;2vIYXRo3~#D@)BvZliBL8fd$ny8Ngk!`5wK_0DAsfl$^;V2Cb+|5OoY{4N_UJ0#FrLinrL;hM^?F zy8YInqswBAda?n%`=w7fQURMW{ zO4RKB^4BD)buQS`wBe%t#*Ja2iW)3p^x05P87kXlcMOshZNqj-g&@_KtTq=9HFBmX zxk`(YNFPHfUUPmfvJTOgLS;0v3r-ZO&Vj zSb{pA|@=AI#d8{LWv`~DN8*~YAEpI^F-BOrxSLjp?65rq`6>Q zd5@uu?Fm=i>7=9DP7nrO2_&f9oOPV8Dx6{j`Jp2y9w3X6eP~Bg&}D@p%MWSV8g-_W zrHl3?0^9{)NFH*5ceE%-KwEh#TNy$?_!%q4r?d@3iyrYgUE48YSTF^hh!3m`Dz|S< z1Qx8LiD8N6sw&9~yeWX{7C5+4S)9mQcgO{BGECyfb24tl-u5NcdIkV`)(EzdCs0Wh z!#p$U5HMsbayt&M8vv*oiEIZ8fw@ZnP6K1AeY)kSE?Ev8x75+9&!J-4JTw|>+ls`n zw7Q0%=~y%dyxz1XHrp?>1|eCM!*brjNF#6~qr@d)lQ;*VxFiA|tnN1(-;ljvy*hh( zwxrfzj=g%1JZo`UH)O(%!AW3?+N%gQsjZSk201dgrCl`$of7@VMWE%~$uUD#cqXhmU)z1a=@Q4W%f z@>`sY8y{^m zNlw)49x(o#Vn7QOl@93r#AFn5SA}kYa#WA+B$6Jq)K>Qf;)TdzXNduLV%KIJ3sYMZ zHG6Z%6wsQ`q?09cD6SD59Z3G0#=(~jxT6L3o#(3)v6DoThL}60$)MD|L2;W$B9$#+ z4(8${*G1FmT9$EPY%Q4n?pdNnbhL!4AazMFWh1vTj)LTxbH0NVK*>9-{eoJkLO3fw~A<1<7RhgJ8ymY#l0OXKZ5>0D(BP<7V6|HZ!=y!9!@+n2b6gku5)9Ne{ylsYv2_=z? 
zvF?x^$8j49?pAH1a!xvAvOf}xwJsgVHF^OKW-y^@wl{#1cZTV(lN) z4FXpFX{zXmbrcFIO*Ep@fE5-=oq)`WN=b#_;fk{m0qTtWJn644QqEI<(_6KeXnAP{ zk^!iTRGP+<*~<+nwB1r0utXLMb41%GW&;gg7=eyNW}Dlza$6wbHnv*OtVF)(1xBW} zccQZz00myV-X;8|CTn*)OQ=%Q&3dLeXK00Vys<+H$&8(n1yxeTO78tEr*h<+V+jZd zQp>$7mCYY#a@^UiNFcIHKpJm#CX)n^G*(*Lys>GJKA8TT5k(*l%67Dy!@Pl%IU#UD zuKWwrBpIm+3hz)T<{F0r;8mztRl#@%A=9Kz=zZU%HEmk;hz74>f|_&3HCVk#tEx1v zkVv3~j54YAvo_aLv@Tn!D&h0xkdUmPl$Hup-JFKFW+dffN|z9M(gmIbyQn<%5h6`f zTGMNo;~Qi3Cx&ymp=FJ{5?66`3ZB9U&+8;%2v2B|60adwa7(!iIhy$iTbk5ER<){K z%9_5fJ-r~s7RXvwD_TaHI_3RnH=Mj^ibLT*8;Yhgk!KIXXUdieQbpK4gqw}rIAU~q zu!S*To#0AZ)|IMiI)sUOmANWXJL7_8j;V6!a9TBG%LkAaI|1~mulD$Es(M+%8Ux2om6EvlB!q^PE!oLB0EKP zV6_1k0JFOXH{^M16Ly6ntw<7mKCP`v?9)*zX41(TSg&qq+wOfK?-MeCA{1k>TalFo zh{sS#nxQLICe;Fg^#S!20iR&dE(v%CQrBNku`NLam7uL6{hfF)hxFGV?Krf8vH?jNxyq6OCXC^erh&X+X5}AFwQl!B)`pv8*~|-GeJ@b62k;2lSq$Fn_Rq)36dhf zw&hF+(O3k=TMnl?dFs@@$Sf&jbNFV_ zY@;@DeF(F?E=hXo1y~mXBeetvA9QQhYF(|V%TBFGWR| z@}QSey*J^cWI>ckB!U5}5pck$J-RUsV zg(=!^zSQ+AI&o_nhxAk>W%uD>GP^R!8FwT>&!`bou7u(+$iu2eW?%?4dI!n{8n^3K zq(%~6&~}@m>ND#r6fo1ZsjXP6($sECA&ePnyFppi<7mXB6@)Y8al0zNscYcyB6eA5 zAZIV5R;R?VYcaXj8B5CQIzgA!b<9j6bvJhKv809GnaV=-xMsz)(a-KqfuLda7{5KpRU`dQK>wJm)uY}#}ySM8vR zHS|eH_DJusu#W!LAim?sAWbz%#ATRoO-h`d+yMSo5YaEAcfEhNxkAOmH0ZIULRzn6 zhCA7!$xy|ga=38g3y|3)Vw#Xll2k9gR$+a}uGcSk6GI;!^Q%=RjuTr>)Hg<~Wlb$@vyk_;qLCXHsfOq7x;mb#dMfv!MU=ST zQwk+zDHv2xc-*_l-Nr%2TPLU$hntX1S~Ad4s5P%b&fYX=Fc{JYhq)Q)?`NLUMqsQV z`;<`307AZ|!Y~zpWl{+*9y62Cehe2;U}`~Q)!XFusnQAS1`D0P43gP4bqJnTh9pUF zjbe8!tWo1!D-UUGg#>a=KvOVD2|bm=(`116ee5c3+r=^gygs@$DwDi)tjA^sXo@>- zE`gcEh!!5SjAISeDa-D;HD~k;`V+VxIB+5tUx>pNt8%uD1!|B;hDn4zgz~hp z?v6AE+*!7>l5voPgOa^kloVEpa~5+EL2v*pNzsF_=5}~xSdnOihq)@D2XKiH5fT0fpA5S`)y8e>q>4S5`9Yko2b~@ zBZlp)OA^?l43Vr#fD^IvhHqh3Qa~6u!63{NDq=GbSu~*{r>5H$HxBIpkU@gC_ND=`tUccYkZloq9D#X)5g$S}@f+t{z7C_lC zhQn?k?N%K7N$U6DX6Fh4t6&_ka^phOXSg(|5X1vmhSjx+>Md`&wGD*U?;@5nRz(vSfpR$o3~a*+O9?pur$nqmF)CVt8C94w9N(4b`WI`4BIFoN zuCaCP=8I7kMYPl@=~3C0WVAQMw17rfA`-=qs4%334ng3zSwMDksI=xFbD`R=X1E!M 
zrKc+pZ z0S%CP)WPB;s#;9I`cp<~g$H`m`63xbi!~y@uR%Sit_pEk79P6GD6a;{s*IIl03>0! zlW710BLD#LOwCOBh?t-{R;TyWibFF*evjJAsu?u<`jp+brp+TtY)u@ta>~V{%s!z_ zyR#b>Tx>ZbBb`|@(J6hL^3-c;2DUEj+Jk63z^0}wCr+y#pWF6gmMXKwU)L{L9SmET zv~1^$20NT=9y8#D4U08Olmdb8D4JEn7Nr=_UHb0>o&dAlR&_+v=+}y~#aUT`$u8Cj z$#7hexm@MB8;SYB1E(yRB`c+9TYVcw_hDp`9f}N47&f-KeDzOdjbwyll zazG_tA(#`8Nh7FJF$-Q&Lk(X`7t~(54vh|T0Z53}?V3}|Pg9azKU6g0vmA{gS%qtb zz$MX$4%bs+AiZl#DkDrH*=7%px%1GXd}2LrORt$Tb6NmJsMuip9d7lJ{lRbsGA9 zolTbgaN@h9He*9#NIqv^+?`LlprC1aImYcv29-53ZG9 z;1(|AD1ulTi{*yQFCFbUTBz%X}?qsF9386~&O z+&7Cb1hK2VA=6vM)3?hev8UcL5|Y`BNo3e6k+uYjDFZu|inis){{WL5t~1tuW)^~g zKx$N3%U7m{pEx@l(GjSLp_fNW9*oo5V**x+ypoxT^wlJgIRyKoE3-T|yn9Z1vn?Wd zS~9aWk0RhR-QC5Cly(Ia!PP=4hCGgibi0N6k6|dKucJ?hdR<2uqfB7=$ zxO2FigWv(l$5iG>?3SMJ2?L$=V%BfsuxVmB#20bv^*ej)V@h{cd(*`%v}aY4$EM53 zaEw642v%SL!?s318uBpz0E!9;A(9I!pgzIdsRFhQ0%&3h`{zj6rGgeqH`X~~FqaJp zJ42aQXf7E{ZktOIp^iXd)w9EKBRQ~3yX&(Z2rdh?TVn`>krr(VrCl2II@}i?3)Gb< z2-GZYg{BfC6SvxcaJzsoTx3g)n8ITLxw6g1vn*hwMb^3M`Eq#q5GF*@$`c%wyq^c~;CW)~XXr_opY=RUSP^R_)9|d;-fDUqm9MZIA zDq1ctR|51lxGg|j>|#oUurSi9TeT&ccH@pf(IZU_L=hneVG5vNXWHSs9PU1Nb*P{O zOiR$vn(uy94SgYnOT@euV!w6+cKk8XLnF=TxiFw0F@uHL0QVkS%M1~>nzIs6laVf> z%}VQ5CErR5Uhyz4YGHM|iqS_e7MrQbV8|Hh^3NV5P*o6mg;g;51dv&GhLs7}ffV@_8-s;q%qe8_kAv<_U3QaDLS{b2KNYsXk6#z1-Te!`rM@elj z-4=hg7tDK^A!J4{p~OcGBNC%50U-4fQi+L53sAe|eRr{H0QPn;#b-#a>ph_&Yr1`E zTAW%u`fK_x9-j&b$dLm~VI>eVz9jcB<&YJ?0ClpQQgG^0Ry#qyW1*!@Sug9Y;wL85 zh}ruqx$MJoB-NwDQ%hYF?_-VRXTv~}xQH}NQPm3tE9w*JgYN5IULy#lcLPgzsHktv ztwkvQFf4BodWU8AH+2~6>R0M%x>mqz*VL@ox``L3nALZyNb7*HpBOAl1p}cxR|aO4 zO9TTPt3j>8gJ)iC%Q5k0NX@yWEkfG6LD!cH}c)fC1{P zzB@H(A!&9ras@!=KzFUG3&Zwd!3HIC&!x2`xedtfS+KENX(#l&s(zTZ%;PU3NYP{{ z&cMVlI8XzLXGu<0rN3vO6>fSFSUjKyiET4NvlnXJvNcJnBoIel-D+l2>x2a$^(!tB zq1+?dNZq-Z0gj;UCS+GqYZlX z=HDw8Eaq~m$7{13CndD2nw-F6z>0 z%QCceYq8($3ANruTsS9TJ6Ah~KqQVS%Eu^63GDAzZgv;5inR`cB@EYtDrgH+Q5I?U z1+67)BhNj0*A8Z2g>YG9G6epr$Z}hG3>0MU>SAj`ox`Ok8e|lw7Mj=wy)124uRjP(OSjHD 
zJjp$CtgNxe7MRiL%N_ZriYnonK^0^qNbTu*dzqxcWeLs{j;>-QryDOiO9Q3No|XhL zEa#wjK}1#~rMB_UENg8nLX~7eBDQPNE|x){H7H z_G?3xbBSF#+H2Zv3J|`db)eqSfez_RWn9Rm8BSp$H6_j*ENpN*n{uQrWXnXn$Cibj zjA}-;AbguR8pIs)B$CK%S(fh95${^QqB$*>k%NX-OgI=~HxjEpctDH9ma(nDW|Kvw*rx?}p)Xk~TWo2oU3HMOd?^wBJKAeU9C8L*8c z7%J-+kx(lMA|pE(scfe89aSpAAfCnBl{I}!lc+r@48{sIs7qm})wZZ&WQpOcO(3%j zPBMxVh5?CV?gJP(!OliJ#Vi3aNhRv!k_i`fA;8=CCeTG48Z=v%G~3qV(<~OJ9CFxC zRy8{gSRd)TJ{t*=2Emt8jN3%ScYAi`*ZZ&<`cej>(b_%ClD(zV)~puoFWQ>LT1v>H z1SpK*f|lFv+yKgxv;)<1C;Nl644dr!HLve@g{erDw?*}sr-k3qwO$8nU(2hNTFx3M zgbEJg;ze-k!0sxnHk@-2$P?Qj0_0q<{Z-lAS4S|}YS7Kz)Tu*Nv8F9uKKr`QKJ8-I zj+|ya*VJu=upQT6_&h}tDCpQV!oAtlWY?Q2LuJ40Kf2(bI3T3Xp2EuAKUSS;EM)?yaL6iBVEh?O~D|a2s20+%U-}1`JZW0A_@^qlTjw9*uDC zLugr$krk*fS15RfFyz>UCvmQelRz6wJu^Mv-Wz}_;>nPp zsV0HKgOujV4UMxg1E>zsAW@Qmok=qmO;iM3l?dG-KcG4p5lj12 zsQ?npROf-7S859+I1b{P_%XKkv%@VxCLFt^Sc^%OVz!Dc&01J1y(E?r7z~fN4opV@ z2;PNoO0&kw-AOMCAg4UiM8GJ{Y}ByOQNnu?@!zpgVl^T50fk_vo4-O;*X)AEacQZ zHi-K|p!SFzOX>y0mKgPID_hd%w_PUF?b*~77I#Kf0zmmJzMk+lgD6)#zC{~aak_2T~+i)ZYQpy($MmJ;RV;JhV z!}g&`4i2=YALhPRPd| z(5Er6hqRDe1{P6ThJ3s})Bm zBmzcP7*T~*VS>EQWgy9z5?C$$4btzM_AK{I0Bnu>_R zG~CWc2w+vWFHfT--;Od@t5{rNl%CC5RzP9k3lPm}u@#|ryg;Oa4L@j& zt*ytW$$HVT8(6Zdlykf8L~L1Ok}y=`(+YNhk-Lu^1j$RAl?_4Q&!{#1Pgl~g;IuQN z_Z;%aZED(Ah3;&SyKLHwHz9GsKEaNDSKzSdr(u9>!Kvm&3twHg3R`$~q)OYXQCSiy z?3TiU)> zHP*58fH4TnOrb-%s}FzV5(($0RJEyHttm|nhWfVPJ>XJA0S>NpLwZ?6wd9GdS#MUr zERhU3U>n-tD=tD4$sXJsh(8Kk2~0yL?AjN5IQcPI_K515lof4R-6)VouN;?DhWmDA z+ND)UCk4ZoP(e~YPe!>)T2n@LAZ`G1V9Xb2Rvvg6BHW3IQpB>7cE+tm1?KO;eP=bO2`oj*~%2A?hQYK5OcqNX>)5<<+Lq|Vp@gKk*2s)h~^ zw1Lq}nVw#0jK;MWdedEsnaGA?q)c{~X3KxkSoGK)g`fN^htv-B(PP^jp%H^QiB=X0 z3kNw|9F5Z?ElSR(A@vt}onEeL!Ku4MsjcCvouNV+FvY1Gm$c}t5}F+BV@PBWY#D3| z4Oq4|BLJ()?s0_?&&9O9+ED^w?v*$shj#<=g#h5|$QLkrF%dgQwx2;wK&^`RG|JJW z)-BkHi>rWRCE5K$vz5kKqX*bm13gxqFwSX+1hS_Wbk*c-I#C)0E1Ksl8^!v$|MIQuy@o` zdJK{_tNmdUT1fpBE6gEdAJpF&A(W~D9kGmKqLn!ril&Uq zRVWQ=*|rRI1lI7x5Ie9nyZVQ3%^Hnbt;sCa`>INmy*KpQMmuCPMzVWnZV$EiQLux8 
z7^ype>YCD(BxFqi-i`%`)LgrV4B#<`N~>6&hg;G{ops&TIqPBUfnqo$ffxsf#-&L< znyTdcxC+DpyR82J!(s~3);6IapUS7N>Ll%`4@i)dfbl7((5tKBD;hY|!Xb*a3#uc; zWeF>n7?GTkJ;aiD<2^$2WlREE@{(MUsePV}$PP|n0tj*;Omq@9of8wso8ohXn z!!X3824{K3@)!*J6@0$+J5(vW1hFkkm`-O6W^3v|I{{BK2$zVBS68zw>oQM&Pt}rd zNjKM*C=`CR9-Dae0Aj##w}3ZbM>8=|1;VQI724sgVa~Rz0`-As)wQcOUVr5W?WQJ7 z%UW-8uvbQTgKgU~Zf;ZHh1_@!Fmh$ca-!jZG-0I%=`=cVq zV-$C3%VJpM`+ZD^{RRcq8e$}3gbz`;8@7dVc7g?olGcJHD7kHev145-);eh9Kvn_M z>*_NBD&A;Cr&p@9?#GEhf!_=P!UaCU4?D{M7d;4&gba!z)$zB9^cQmo9KdCQHEnNF z(5za6`DKlXGc;81L%IcpeX@p6c|;5uN;4dKV5Zz@|Eo?Z@g`I%6bBsi|(zgif4 z)T}E+?m2ZTM&&r6tu0PBJgSkRGP1{lR}2AS0+1AjINCFqie}212~jQg*1vYx=u{$+ zC1Td6s8o);I+YqQSgAZVl50`UB7s9lwYH)xez1@gP@X-5&g@RSB)-r&1wdvMYWVId z!tX{fNSUM3D)eT#N-a$dOFCYT&I7&S(xi}LR`#I{kLGh&io_B$vAh)^fv0f^53z%!ajS+3jI39McFAJI z^;&efJ*iBj1cE5))BsQ0oeq%Mm{u#jy*-UGN*acz70Px5jO%6#5zQhl3aczAyO3Zh zB2rre2nQLyp(qrIYJa|$=d}&Y5VfLaN7ijt(zN-(i|t{NVT{}j9C7c(SvTN1l1DL0 z01vR_Asq-9Wn@rRWf4~t2T%ZiJwwBgpkwPpR@LPoA&z@gaVd!ukc+7CH2!%YcTYKI^@HQLTt4qrk5s5p7ZrEYrA|gm+Sz=#MLn``3*cKu{GK}D!rN$vL6WOHqRZwheKnNDq>rGye$wH5L z;4L>y(XHRNCbe@xS6=-Vn(2hA4OMZ;s(qwR>e&G8i$OTvdwQZ1!UFBvSXJwHv(2xU7|`L8h< zNtB}H=x;;O>Mn1ncyYWFq?ltC+L~B_?9K^irvCtOakqs)tTF8XVUp^~i6rpHxR+&MS0PYmaLYRJw|*y(lBH$^l3X*= z*6Uw%h~Gg4K$Gh?%=IFR5X$nqEN44cCm7r_lvjN4IOk|RP?Lw5FchIKSX-l?m$ME| zkyF(kJrh&abo!9`aM7V&J>ppelP^3wh6E4lV+d4X6k`LO92opil`?H}sYRGD=o^-= zbG$(?AOt2I9QK0;8*Y(coeexiu?#PP1aP zR%Cy-hB%gK5A`r&fseRjZ@>X?02lyd=SF54T2+);{{HLsh53V+Z%&3!U5K$LRj#%X zJ-bp!5?zi4Fc0+96Y@Abby<@#iW`pwWpUE9*w*kXERshk8s47{r`uY-qc-nSzKK1^ zp$+{cW+9nEocl>$SI0lw8#p9oUuFro>vs7>S0*%_2Bw`qWXGzukwi~s#r-a15n&z} z^q6fX?s8l`nE-N2Ds5FDC>>Z_pd_S%ZhR_F&b4^4kRK!TjNj^qWmD|ENWPIhim}VS zm!Nh$ajQ%>G6qw%&KKN2;UB(eU@=RYoU;HoE^b&@^1qw~rZh>go_fnG%w>`*PoyRSFBL>pJ6_hW1S=Gj#8B6+tlpGt(@wsp zoyFC6C@jPP$B!7CfVl@Xh!m?-6KiBNdz&4R#?P9 zF~=pzP{p&7Nk2VQlQiPgCM*CzNvAPim8}Q>@Gok24e4N}?I`u`24|co-lZiAmHciF z!M{2Ehf5VH63W07A5c%yB^79yr>;BMpH4L@n$q1VgBdJ}0CEtMmE2pBIrb(9#(Jy} 
z0zupZ>fT(f2AGqhLA#&4b?qv9Qq_Sy7HO&^anC80q_6ImX5tYMb|@iKf?{{V+?T4Rag zCSda@?@a>-p!Z0G2UA7bKEcqnKJls})1aUZq;e}nm^_${cZS`SPw{8<9Ak`gzv@%i zrRc(rc^XykTfqRzan2uuwDj9m*5r`;qqbsLRPBf=qJRpH7-Ci!Y>Z=%%{Ag?rd46GjLq4ZK zPs~4@M!FhA{{Yj|4V(93`$%G|3^6iRk8n{NF;fjN_)@czn!6unL1;nt;%QuJI3c$f4hz*`jH+>*;!Cm#A2w6?*WiB#O3u zF(iUb!Q|TQ?mjri2OUSnvf=S5?9v@B0o8z3z8=hL;S{CHa(#kF(klM|%Am3JCXkVH zC8v^ECQuSm(EB4L)B=1KAd|SU&OygPDMeUH*ip2tO$|9xz)~#&ND+NSn{~*ou7%HE?Y}k4h6cXH!;Y(NDERXHOL|CyVWY`@Wy|c z7O!SmhUa9C6)FK33P^ILfXNu=fO=wa_@e=SEMNcz>R@STfAALiD!fbgb?^3PL$>;Tcp|YQ zeXN!qKT=1DqiI61Y~-s9D<5)#aDIBBg_JQ<(k7BXs1CpoLUi9zVswWiD0r!9^ZmxB zGD#Fq6I7!rs~{y9rZOD+zI+l#13AZ9^F~QSDg8?xyIaUaX7O**ZdAQ6i-N<_rW2C9Ds1J`n0`aBul9@dr`9iv*!O7YdRB#lm4 zk(r)I+1ERZ6Ou47_{MyaN|K{Cc$T_=AH)OUu8qzF&^Ur4tt4_ys7G?tHj_?*$~-cd z$}wo;!j-{MwU2@hetNSlAt+N1%6_`Sd&M_e)MTNmY8UWqO$}C<3FMH1QXSYNFl;eU zTLcr2Jzm4h6Vi!Fqp7Ico?wbS98wrUHG4r*N|CG9lkRi_tTMa`K#=M7BuKn-5jMwI(4a5QGrySe;Fs^sAaPa)_3qGd7Vq*m@KfHEz_V(oN~= zH-%Z*pVTD2_uopti&0At;pwKJN`tfGxS< zj}X@Nnh2ppU;ICzEQu76xf|9%AVObuNiClzsu;v2GgXu^v+`hDW-nS+0Va?=D>BWY z>4Zq})@sn(YD(?sJhHy$cT!IBNXNfCapSDX;7OiHse7BSu;e;5YZfit(cj2m7#xsKkQ+G}>m^dhX6CQGYn9)q)0H6w5~himYMY&~wEp5~Vw%*= zA%(Uxx+Cq7@yiXr+m_=eJwj%xQ>a_JO+2|hH5x$<_23Y*QjP9IZgsa7j>b9JgSvWk z<5j_6eizT4P6+5q`$c71K?Q6^?xXB!0p^B_H4I_JiGamVSGIK`(@1BhJXNBwFeD2kxqd+W zwm|SOFaYZGu$7L`iwA8ie%HR{1QI}pE$h;2^))O-K+N@b8Mk^+M1t=F!Q2*V>9g7~*Rl-9)UR?Cu?(p!;Taud-I0kH zgXA0>L>x(TCckkzT7jYgbtMCvL^6hHLF`~kZ@ zbNY|%jY(-vAw9Z-6_N{B4qaPUcJ6X%|*ttmN@px|hIyW3w% zL4!!MRGqt%we-DG(x8?rSIC4Qc$q|sy9&5(Z@iojkPqiIX2?z!SxO0@PhB=>Fax|+ zJGWcZw0eEDE}f`PLiLR*YXmaXDBcq zs)xaY_0{*~NM@u)_p?bZnz5wv!Bb9>GfI*YlPaTnbaP}Mt+vnY2asjJ%5FWo{MTUe%8!pCE0rp|@bs@IJJ2Hm$R zNgfA?&Q^8HHDHvvy9R=4JQqW`wJkz}qgnLgnt$q|#;v@paRp+`3uoG=InNmINI6)U zt4afxwf*ve075mllS=J+H)Eb>wLQzV9o|;Qrww|{Fra~!Y2BFPISY-WAl>{dqk~Q% zyUJ7=O?~o+t2(u}yQNqYyj-+mI0=bWLc4&wFjw3M=_9}%I?F7ytPm+t;zs`fSefe+ zow;2vcV+E1bq^F3H^(jjkyynOme1!df9v|W=;g~wq?H{(K17eF($He1pnmA?Yn`{C 
zt-9Wu`Zba@g_NQnOsEWY6=F}8=iGTYJrswVJRlsd)~23cUU0;)h4yr3P>N@1WrS3$ zwj)?1WX!5%b#`EP95Bu^x6VP&K{aIngzEl)cs*};5wsUptWqwer!r+HnKn(5B!I|^ zB#ei~;?V=SgAuv5;O!!LGjiP^&~NwV+fo>snCw)l+e`OsSNn+txL4DbX<9wY7DI(C zfY~|7z{v5-XEPCqn3bUkpeb^|Rmr~aS{RMfG}?NL=@F$!^!YU#FCCi{GYqbg#_Sg$ z{GToQv4sp205qmz7MTR!AfwlBzOaA~5}GGrX_Pg3@A4z;5mUhfkrAP9=RDAIHdS5~haVoG+ZdR1L_z$BgAJi88m zsueWiE(cCg6AQ&>yZgrehpRr7Ut=Ww#&lM*D?3cEMHFs@ zPV6h?j#vSop0sdyL`=db6is_P?M8B2XKxrvQ&XHuce)PQuNBHTNu|>3>Ojb%MoA{8 z>d3p;7G%iVhb(sSo(2TSN~WSwv1k7P$+V#GYQXC8l56^lRqnfUtT8;b(pceX6t|}y z@;xvK#^36kdDpIA%I+|_WMQv!0R<@ z%OsSBEIxkx%uJ98V+1{`8ihNiSyO#2ja|iPOCv}G4B04heKb|)Amfm7dL-i~&QLQH z_8tL%_BE~IRHZ3N${O((R-tECoDQ(+EsU?FXIT*tb`6R1M?x2zspJ)Rh-kwl^20&ed zwQ18*!OptCB$DPKdwW5tHo1F9buC_`d!dlv$c)5$fg{Mz9C6ihwJ0WOy05h&Km>pP E*$X~lF#rGn diff --git a/examples/vision/detection/yolov6/python/visualized_result.jpg b/examples/vision/detection/yolov6/python/visualized_result.jpg deleted file mode 100644 index d8a7eea96ec7c3a963ba5ae892be14c51f4c60f1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 205296 zcmbTdd0dj&|2}Nfo|)5PmR5c$%PF_XwcPkrlV)a4HsXTFH0~0T`-U*nqNS;$rX)@} z<(49n3!;E9ipwaXxsRYAxReV5T7t3oyXX6Ty`JZv=a1)kZeQFlms>dRbKd8i>$=|8 z+4^T|M&sLC&aTcH+qP+FYy-YDwzwMCHNM&Q@AdEfn}4qz+yA}q+_7W(j$J!^Xa_zFo(2NMdODOZ20?`;O`! 
zJFa)`ywP_TE?QaJ*j~P3cm0PO&Mr4yZ{7RJ3+(OV>lYXl91GR~ z)3o$w&olD!UltT%UlrlrmX%jjR=umPX=rRBHIrLfDILttuI`?{d;5k*M#sh{{+Z+m zgtK$=3m-o%E=kwcKY!Vf$v40L8`n0C?f)AVaQ}ad>mU%XDvM?I=@5@?K^5{B{(PjH?;qW?EhV0kN$r} z_J0NTf5*kq*uQ-nFnHS!YJfE439jEyg-hGi<<@#-%{k^c^#vg`Zy@y4?&{}F)a6a4;qsx-Tk8T0ksS)afJ{Pd15wXnKkGp-4-#^SY zy!Qhk>cntT#E|(kml}b`;MJn!^p02JFT_LsiEi$3KaB7eMS*#+0^HFlZbIZ{5gMu5 zi=VB?Vg7ReJKj`{xHs13?dWW!LYZypEx#_U3K)^elb3YhK85BMh`Y4ECt_^IHoap| zV?9j8DW#YEwJxe&u(&ljuGfxty5nM0EBt8%Q7y6=__X9&Nlc1S**Exdm zLEq8Weg@`nbL`2|uOtLRHpmL-nq?ig%FJ}r)Y{!)XPvQ>K^edbpxP^#uuYtG%j+`O z=YlPb!b}n!R=lbl8%PXT_Zg$PCke#6m%NYa-E(VRtc^6xHoaQ5u4f$#RtdYQbST5p zW`XE;tlvbyK1M}KVOtsq*zeWdP4r=rgezIV6tTiU_2|cMmUZMbH@Ok))*@638isSV zAx^_^tzypEjZV2#`e@*zi)4x|(6oO>tCXlqvRccKo1xZyHBFi1h)wu>O^u3=jZ3=x zMOTGYsI87WNe8f!n?(WpgdK-V+40qILtExlJjFh zYhf>8z6GYq^LW>*M`Tm9NtVW#33o<34bsEkr#=CPX`+w>rJ1VSns_P^%cQvf(eR0W zQjSbIoF(?Fqt_9u#WCMqo-bBeA3Ef%is|YT3fZK`=p5mtmMKHE2Idw=^07!MViVs53W7b| z(%5Hh&fyDP9xJ`>2d3;!=Ln%YiA|zD_J*k8W_=@yF#k6-o`B-%W-kgY`k}r*HkKXh z8(J2B_hl&ZHIIYqFdcPm8$2yp&6y3S4}3Lx{>7u78D&t6j||Q}uD_6lDw>4IAqho5 zuW|Cb0*=e)D;I4_PRm7t(t|!>GH=K)upoT;A-!4X8jIYNFMS8fx4p~van z69$uCsR4#rNtP}<&=K8u`*4ZXDpvlc?_;IDha2h4$TX!*RyP9eUq{ z0JA=eRE{i`N9u}`;yRR;lF}D~4@y&_`|LZE&iz^KgY0tN64>+{ufF>FxsKj*bKuPN zq)Uydul*dU<++UXI5hD@fVRDWlUQNhYqZ)1iT9S}Q5W1@t%x;OW=DXDb7^-?(`z-AZ~g{k3BvXhRC zG(ufBJe($j6zsXb@$;hf8y3P6J8E)ZM5L6tzr4c#xX&yhOHY)T_9O*`zawh zH@+v}%?xP&KXASIk7-{R+abh_69=2X8_nRFk&C@2qMfM;#@98?`=~`^3l+Zz0`ZZ@ zl=W)ypNk!AlXTkJYE*ayh&e<}dcdbC2P%;G?FCr4Nd_-e7Qc!VY&LIc#KAG0CDvoHumju(N>{(7At=}l zpEAhntf^tCi3{pAwG@+{Fs&f9l`#cb2f9ecj^N9tB#47l7&59V5s()bX{$jUB*SGxbCkl!uL#PoEl7G+1S>+wiH)WjkM)FfFQF zore%`E@OI5=OP7ueH0U_y6fOFv!(IvBK;KZc!X5_Emi?3RZ1 z_3Cg+g|IE{&)z80zW(~nXcM#xFZ%|P#H{+YdZR$Z5tTZaUi5VKVc%7G#QWe)QGT*pvgsZp_J@Ep!rO{)c{ROu7dgUS+ z%&B}_?MLH9bW?G>8(`Heil7G88%e3$$h1dk$Zt#uvsE7>Z)seeu;KI=-Wv9YOu3xw z1Oc5m{m}yW{o#O*(v<5GUb+O{<^2`(T)B8!GXAIfcCKyFd9E)V+Fx74u$5uu_+G|+ zhUTZQp-=B!8x7D8ZIQ>!y!N&AAV;&|s9G*W^==FeR_pbc5y05J!l~G%#a?)$ccx;K 
zGWBFB#k;QAO^W>Nl(?nwsLF@ytGp#itw_t!Dnjm?GO3@eMc>0z+7@yHfefJ|g~r%1=?%$cjup>)S^q3Efy0BJ*COm$hu0ajstT_-1n>iX>p1 zvPyOHk+d+USb`c)bTB#v8e_0bsI4jo&ak~_!Y)P z?`>{|CL1)lr&xAbzs64DKvP5jP>5|UQEa8xdX?ygK;@=Bh}oZ5ZL*-y`NJD5iqb@{ z@|<%az$81WUdJRLFm4~%;Zp4b!!;9U3J7?G1-WqJpUsHNSME3N%BKcE6`MHz0riO` znB-SEGN5tnfv3Lc*3otIyfB>Oum$neP>qt`ij;YgQy0pm^hab)VMWC!r*G=h2Cq^= zM@fhSYT-h6$T}##F}2be!ICxiP}ROlhINXO^6*C{=m6akW$$sA2lbnVpVG~6+c`4# zUg5EkN}oKxPHQz>+h8lYr|@f{HT9BmI&?)PFqHN@X;L{aIG%@J{)H?JD1%pf%w2sK z!Rf@_gP2ggWl`Ml_x~O0W?byGdcco^OaDkXG%N8@{@^#9-$*YeBLgxN@YK6s;98y! zJt}f`459Z1xW=xbej3q(EAX&-pYUc%`Ni=@^!h^HjZN=)ue{6LgrepfAz&zpDR*Ng zYd>=^Klggh0s`Y~=KA})R-;8_n~>-ZRx6EBJU>;#Pp0?Cf2*1bSd6adH<)bRpd?J9 zKyj-4{fr33HR9K2=W}GGmM)s^>$YNBGky_%ez|nVl3zkkR?J_6q3^;g*PYelkGh&j zzLU6Pli=?PFwcvcm5dI8dMw~x=jGZsySk><;NWwJJa8M`V{k6m^N+H#uj=K=C1<2R z@w62|(flRPMSOWvg0vjIkvUHe&P5|P7xfX~DaPtJD4S-45EwiItB<|MkTz4$Lsmm3 zr8Hl|tV&>4%b8zyV5s&{LC^U6QLX~Vn0DMog#gnCS@x4{#AB_;KTQ_*r`-ra)YMQh zi3EMyK7#*3bvX~t(6<6W-6DnQW;$Mt{8`$oAX@gfaAn0RKP8uJ#;1`suEg7UeM$)% z^-MS3v=UJmh!^)zeKvMtov?V$9WLVMh;bR#+tbS1udEhQ(A=|N(XMY%w*YLj@ghz? 
zi6J4kzn5oxT^d;L0^48$$xurAvpV$O>$YDD!;WK;0FGB2&ljTzegWqt`z?^!6}1k4iQwXVO~7`cXdilgQnQ0hy)(;uwTH2d7P94Ke>=^ zxOl$Wpm2z@8Um$9uX^RjAcFIw8xh3&sxqgO%kiMbW%}cpUP5e}liJ;RVCgbrcrM$q znQ&@eHTSS)Gvz+A0F5jv!q=*vpta=bvlA=in2-{k;=A?a@yMp>0C=ymf{b%5E1u5^ z3|_~bqy}vULHTA&UtmW@(yrClIj~A|ITNcT17Bm6H&*5^kmizLE6WnqRZ~3 z6DY@RJXRImJ>e#4YkPvti^%tPW}liY3|3fl4~as85%^Xz0$k@s>H*KcnQq6cGF)xsHV21=DqKuS_&*|BC1-6(SEY1qziX z$TFxn5MIqiDx1RYXI17a2y?3nRsiY5a7`IR-_P~Orr@JvuUnDk5Z_PFvuy8jzWZ$O z1XM51QQKa}CzjrlR;C~Ltzz0g$$Ns&ITX3(cX?b;Xb%qd)sKxJkg&ZW!CoBpr_w`P z8Z!kLeVV>NF?b4!N|B$DawpxBk^*CdTDSJfiPMw) zd6p8}PV?tSWhG}OOqAwPmCJsgv2NAYpV=pj&9O3XR)}gU%hHy;)=kB)|54m&5+7AW zy7+s=E|Jc6v%FlB9|4p%(-xa!W(Q{@-Gl5OQ2IL~C%%0N3Y8F|tDmfFS{C%kOh zw54&S@UpVIH8|UPibZjC60=(iBCuWXRPPOpyceH%e$cBC3;q!2aWU1+g~C6P@@Cp7 z*FxFgVi;|ll)2VG<`LagC1Qbmw-nzAuqUD@y&@nn(){p^6MLiH`EsW=EhCW)?jKY6 zrUgZ1E!oalL5e;WHPTjjp`1(w)ngY9Kc0*1B2QF_ls zx}}jxyAUmM@}=(^%1{zZE2L`uFj}WD$h!7 zMV1m|Cq~!dD$zKczXyHNCVd7T?=zkvx;7m`&+NZ3-3q>97_!Mo9Cik%&Q}Il0)Qa% z$`Q1QJYV3F9%ryUv0-3fnWgw46Y+M*dtB1H;CiN)^dPi2T7)$3#kun>de`JdGJWR7 z&YB~`F_}>~NLA2iV8cRFzW2)T#DAl}qYFyo7)5kI;)XWoaC?c3{Eg7#P@)G*0dBQ@ zexFLMCy(mc`|6;&L&5-j2ptmUg95?$TMhPi+R9Ow{dw zaa=2>62V6y`l-K!*BMfB3kMUhXZ@@)&sthqS)ll4oX^C^rCRD!?Uiz(0Q)6OtI$&4bid1hmU zoP2%4%)#Bzdm`Tij!du;={q;LE?8Qo^1ong0JW^yK5Hph`=FlWR)ux{TU_)@h$c_6} zxbZh<{f)2$=VpK&UZneNX|Qx);>dMmCg@uSty%WHc90|obw&;sY_s%h$JB%+^Zs1t zN0M`5u~Abz|LeQPLsWB@x`%BRykG_JmBC;6&9+@gJ-Mw=*2&nR^qnxw@fkKZ_`&_! 
z((zyDEnefK9J9o^rvBh8kZj-w8cQx$Pr_l1Rls_uCqKGy#65<2>NMrUm%p!l_pK}9 z;fH>oo;ADLT!f0Nv%k3g0vp|{yf)-;eM@8KHPx$Ft+N{mQs;u%wq3KXEs}0xtsL|h z=lq|s?0DnF$T6+YH_JSnZp=SV3hA7&)t(uZ;qkl}Qv# zEC(U$f<31keSGx!u6IW-13dSi`gcvOV|91MPJL%A@Vvp???|}2OTkkvzjgf_F%kQ5 z&v3@e)`F&}h4@b^WZqzXZP~?qHKTvVTKgY8MGsm}9&=wn%y8Nv;dg4K+-__hQSPpT z>y3Ub&G_ONUZ}W{!SS(*spK?mco%Nlj~#dxay`!ajm3=vF71xGg4fODYDSiWkx*Ml zem^wL*k#D!D!eMf!hOEyW0OZhU=)0<(N*?lnpEILU-zD57;I?-^0N#Mf+8{*^6x-@ z5sdCiFifJhf8x>Q_L9p(G(9rM`!|A>mW?sy+oiW%9o=Pa$OF*-nrx)CoVhH_z8Zu4 zm|R(}xPk&nOP`vlFMi@T*rq#}zV%;@?mRt=I$l|B_rup>7HZ>QfA#h$w+D5-Cs=w- zcYE{-JghFv4kWTn`o4d;G+=?XUdcl=l^wqlEAz}7i#y%z{wcAG-(ETin~^=Hh0Z|T zgyv~h=Fh?4gu=Z7k7E%DGF?6B7uvv`S0NtS=UyZX(W00e%l7TtVt;oQ?O(V?Kcae# z3E>}A52$segWVu*K3Ed^9Cf!cV2NY!m;~Dc<1uw!CRfa7wdSpk4@)P{<@L?%8G9S+ zwdqw5y`>QyuIJH1)aeqIK7}>A{Dvfn)BGqA-lxo3nK#Fw!hakKfO+{>zS4PMNC8Qm)fqpFR^R-fYJ>&P2bRN2Ymxwj^17DS-7R~ z_ZY&|044iw#9HVQW%7HuUHt|$r_W>Wz%dm|Yuxl#kW-c^DEXgRY z{$sW~#h~T6oB`sqaX@E4T@kWVZcC0(h|lV_G)O`9BQL&Y`dvlmHq}?OXSgLspC|}p zp0P8mN+|s4&GLUFIV&IEMSL5EGs;#5S0=_wPeWb&$7s8eI@3g~qV;TcZ=7>hSum7k z#L~oSsw~Eg)VneGs2Om@Rmo{UWnBSV;lbUnhC4$$% z69h~XvM6MYv)sQX^S5*!PP)sa*no2uf{D&FRS}>ELF`2a9X~3~=rr7fCp;@ep*|W< z&|^l@L$R99Lk*my{G^AqF9_;7XJi4{yWo_GV9Mk|UmGmjfi;cMM8JZ_E|HDIXFr0D z%NuV=JRW_9v$VDSmPaBA6<=;phQ(*H2c+oWb$yn?@oPh)RRlp+hO}G=E|KA6hz2oLK5b>dIVN4 zUKvY}7<6&4?WMcq%QvJ>`Q^=ovH*8!FiG=f5D8}IVWGVu_d~Fbly*{m$>v`vvp}G* zf0k#&ojH?(?E$96OoKxPp5qS2CsnKUZD%(!5E5PHIIitW)}Y8%5|DbH2W|dZ*jNJ} zj~|zkXb&1(4LNpAQL8GhrP+Kif^Qs2AU-o_5O;{v0`%3q;%>B+4TiIE$sU$Sb}#Aa z#VsA1^aT%GIqG3r?iB*XpXV1~NG=Mf$Q?w)IqQe zuE&Js{S&$edla`mf#NXp{Ad!TN6`iL)j@j!va=WZnDVrQ*oox8J>`Ky&@)O^dhDka zv*XqxU8eCn|JBB@4N=63+pz z;Ayfvj==x8N=S{wbZ!QCWmes(ylBPEC}{n&?)8_Z{AAh4-@?9Gm&rSxu1jBCw=@<_ z&D{rgj$rQdIhs%jV`EF>1t<*kE6-Hw@()-HrnRW5o7mUy``7Fiu|+1w>&+9<4>Bs9&>(C zU-q!^CZiJ%gDn{}82oj!B5w^T|4#e40Q|h71up(|0!9l)unm+?$(E@LoErYcX3i#u z<=iTVE1=56QM#Ub!mb09MzgM1WJq%TIUQQJ;6~)s!;;I0YPmh)>T2lIDY`r^%kf%LG8x@eV=jPAUoeD 
z9CQeMFfpZ_(#T0nlPBr9bPZ}v-x^;wL__Q_;TP*B&*XvHs4*xO6!U^HQvz!`H_`b- z3QBL9Hveg4)xqUdmSgv0p`$hLc-`?gzoF8K5=d-BzL&naB$exDc**zfZ4hx#_&8>wZ-Mn01)hZYJ$ zV8YD=^J%19_c8hdtR5&u1z zTtJjxx0zR;I0Cn2hbjXTf9lIsy)t z9L+dZOTaV+K1VD-x#cjvMV&C9{5sa1Tdu?ut|4sQZM?lDQ&2xbXAIY7 z-YXY^WRd&T27)b(-@xke>Am;)Ob#-AXOpSConfs(^KqLxb+D@c0WPZDNK#ra}G6S7;J!-E&~ zctr4K5jIjRgKORxH(8lQYJEz^rmdLD$OUj!8+=CuTSf<}7jj=b(qoL~YMvh2NRxV} z-ES=#_gX_;)r7d{->j(_K?JWKXFTca?Q zJ$l6gG|cH>_Bty}6qvry(wZ+1L>AA(c&?-ZOk2Qn#5*`&`~EsDW>)(^A1<#KE)X3> ze(Q2X_mJ7-d7Q7G&f0w_!zz=SFbyk4B-%sdH4__kk39jf73PutwYyp<-V6JIGiO(h z2#@&CyO3l`F_?->pU=WP4-B3T$lkph_j5EZW8!t`#c_^oXfYKr0WZD~-FxkjE18Z< zGEGQHI+ax+$Ggj`19fv8l8j-?_pA!PzRX|N0JW!pGwVwk4NhC{ z99qq)%=Insy04&Oxj+34osrY3}V@11CUUpYu zJp~Fye#tuVuz4)E5$=G)SM5d#m@w3He&Q z>xS+^whhu4$txZprLBPkirNoA1yv_tq(Z)E&31xb@1Q%#Eq-Ul8L{GolY}~sBiHXn zQ_1ByB()VYImKVyywj+-%tQU6`m=Z=V+4aRcgqutVu^FH&X-=_?6Uwb*Z84UQm`9P z;ncAQD`t6o^kAVW!(3&>VN7qL#Zs@0kuT3JHj*7j`5i;zfB2RN4qMjT0AwIIZ*gG^ zRXoUnY$O%(SGF{kL@4AL6n~Y-BKgu+7ZiLZDS|NGCMYB~rv}av+r3}EMKqYTS!ML` zr-2#_XvXK@miyr-QVV!DU5sWD!0GcQUIfkVbmOO`N@>oiIs{7tA%|Pj99$l9cYs+bk7F zWp~9n+E$V2I-WfunCegvbxMsZy#KEs^S9gmy&dM&t;$10j%Zw#FZER%04?+QMT z#hLFn`F+XH-}d(iy_B1^EK-n7*@kjQB-N|8U&9stTWf$7)N%^3{+s{LjTp?6^M{{a$`wTfYgGuuf!JBG!~_ z*4k#Nd}C~rF1kuY1vL+OD{R`WjT4J}I`O(#V_P1At#2dZo3qw{Ip7gDJE#?w{N$^V z(iW2*uxBRg?dWLBN=&5m;`?}ap^K!j zzLK4~dvcXTvxM`ul77wm?n6ozW=QMr{Ab z7tO25Ln1*d>TkyiZKfEOF3lc5wGSwp$1i>P^mAjs#hoDvXZ$DkH)V6SbKB zMWv>2d2r8U`wYG`osw3ttF)4NvSz7}pof4`i^{za{R~SBB_o*#KXrLr&x-P1GS;Gd z0Kb#zdwUpfi5hku>fi%~ONOn&dVYf->2YmyD@Jm){Y&=d`av=cc23)3f!^0Gils87 zVk-E-YEyOH8(R9tU!$gHa)YL$jGmaE&5u^r))TpgNUAIWL9}tO&%vKxy7LE;L8qts zPVT>PrEkC$H3VP-9n#>p%Zy!0*NX6Ft%rGu-e6}R#A+FstT1`Orrdz?#h8=?o`STV z(_RY9zXNJrIiwwS*x|nLU{$ybvuAGTi%x-h$gyiY=iNo-iEZeRys*$qpLfIX&TyjV+E+*_pX!zKpsyC_31bW5v|9AA^ZTx#R^PTgZyB&yyYpf{!#T3hG;)S=+oWJJ;ZjOc-a| z$1X>|lnl79S5(Sixohb?gBcS*FR9Jon8T!B-qY)oDC0brsSn(qHRb`a*d&6A{{)j8 zg0ieq3^?0=lGxPOFdvkhk(6BxJuy=2Zarst-RdDxR)l45Vlt63y6pNa<5y6^m 
zKOM))$+spn$*M8+>XL#skmPKi9$Yy5$Nd9dI$~p&!f{0{gWz(l`LGsvwfq5bTr5G9 zqLz3Djjb(DS3Xag>Y%!?p7i&*K~K?M!I|^WewKP;njz5xl7Z_qFktX>BN@wGJ>)!g zmXu+ignCy-_L)O|!SG<_V$8mYCvv2}gz)kCX-2Vdy!ifG)#56)!~4)w-B`%x&yXy( zdSkwh>@x)D0y&4mZQ{rI%~s>%@D!_tRk_0NT zR6WOMq@S}*fSapY#mzlls6K+-p`Q%j&eeV)jqqeZhRuirD^%p8TN;Yp9x3cl#Vz>r zpdgx$Lj|;t8t4@Q0A`>{lz>115Bvx}%0aHcOpB*#y(f51B(j@V$n+c&Y28^WW0|BB zt7<;OGLt|4R;Se7S^BHm!auP?-=2Rag_X0zCVjr4gBrLtmy2^}ttVvylh}K=^xj_> zP5SvnTlZo5IptlcZ8~nymy+TW3)2MF;7)F#b)KtDuR$S;KjO@9I zi6BwPQ+Vovf*us(Vk9A7W_;qiBdJz46v&4h>+Ws&8VH!s0Zt>AQBPi4T!_Omg&3SB~uKAJFJ)bYjH+BZb z_OIeN6ihEDarHl}YSpbDpQdyTLWey(tcr)XG~S4ZHgX^5lrDct#Rf_yn1zj2$wq0` z&k=LaYv)I?Zn#x1Qka+DWy9ixNm>`Is0?t+abdRT;gWCD1!B=OP?R~UX9dvh7lfc; z$UuWHcsw}#w9hQGpRhL7qnc)zO_L%9d-$3-H%yb9+vS(bRaDeBG=?bnaX2?Nw|HR_ zE61)*JW)@ca*`C3yqOUNGW7QmO6xYq(=+F=+;B!sU&O>Cj3?c^1lydy6#hv{^ z2Qxd5gPtcEp9n?9${)jhVdc2w!*^Yy*IlZ`Ajyw8X*N#!^0Q^~g_AJPacfh9uCH#9 z*_hAziF0{3xF^ZV_fsrA<-hJNGK#c@dC}Asctce|@0tBOq;U)+xL6ODi%tFZnogEy z6}scsof$SFmTGzo@J^9>x`;dE)R_RPfT0~Ae?CXcm{>v7nBrIS2Yj7YqH2LG+=k#= zsHRCXm?NjWrO^Ayvz9{E+hw{IQQ zcrNil$K8oiNL|f|YB(GQH~`HSESaMlqHa^C9xer&HoV+(^to`_z!b4Y zCD-jxlj7znE7Ma8S%Dd@fX&A;8gCi7wp=T@L&sq^&gDE;A6oYQUR5&y+4*6mj+P0D z&n?S>MD)y)#h|BvsSGgVL`WIwm_tr`7bP`t-ZA-`(4?rS{SCc>Y0oRBWkt=KB{41$ z!>(rmT{dz{BiJi`!u>o!a@cbix9;*@-kO4mGC}UhgAcVIwt7&@x~O{U6^ImLlIS9o z7y2N3J|f|9#v~s`4Xiunf8Cq>QSIWba8CsJkX5f}_W?Y!Pv-W%fh5`W#&os5)?=eO z!Ne&Dapd7moROt;5?(-mIGZtdhF0pD`>^nwq|VZW;5qzmDftJRc)ft;jjv8G2!~yf@7yV~ zf9K1hT@@`{xOpKrzClR{0KDuwa;55eVV38~TZ`iyu3(^;VD|c>%_x1$;YPK|ge6)8 zfv0Bo9g7zQbE$wo#ayaJK6d(Eo?9dTd1~1MQLa2k$KHhFw=`bIrM38$g@m65hoRc| zahOc8_YkOIaHUOQraVWLH!7@`7`pTeaLf_Z$#-yVp}xDp?e`y)PDT<{ML(tKdzf|C zSl4I}F%Qa+osDt<0a}di}>QiMCv7` z`_O!eUwmM!%-GwUG8gKq3IKAzBE@J6*sXvh77NYZC_K=E^t}?oKOglN#IPJcX#RY5(D|wnd!6LcU^=zhx|TmSinIC z4U*Y0cGpe`hPy`p6MFhZO7`Bnc`d}dQpO5damH^^eO4M&h-m_s3dK(X4AQ-!+q?Bl z>FFtvSY2Fd(akw{ar5&n4dMMGGZ0uUuWAr+B{|kmmmZlXC+R;3D z8V3?W*v^#S+hrQ*Gg9XJ%?WwEDUl|$5$hrrg5r=GV 
z8n8141{PMMvtp|Lo5nf1ZMy&2JLs&UWM}WC*8QWGbdG%LQoNqM;A_vkBqVRx&REQf}XgoG3KC1$oa1o(t+G<26n;HHDJfRRgDTg}ri{8yhMmV@q# z<<}j;6^Gt7^-Db!~DiRq2S!NtpwCiKH!TvaV)%RU-JHAbAU z@nM*`NW2}jUzHzA+_9v-JWLeRE(l8Z*L30e@|@1^dv=gVu0E9dQ5KUO*+YfF%`LeV zYcONve^8b`5PQ4-&_BVy^zO~QLa*RE;T4Tff_KjoM#QnNK6Q9zaYm36>SG_(TJp?W z60ztUMslwcZAGRqU^{#^DFt04H#tx2QfEyoK7MV64u_MYOfvZK?l!6Y)Od0>%6r5? zUuvn|i%*?Z1%ewJi#r1x=o409f)QK_>v@0EaToHxk@E%K*^!^Qpay;JYdfJ9rR`# zZ~P~3Y(Q_X=e=SD1Z5_7lj<@9bYMHtV-y&m=cg4~1}p}gDixZJ6AAj$*S^8pf{)gJ z!Xf(nGVLKM2dfe1%9!MR4%v++kof?En&YY~t)$C+e{h@C1E#SYSx(BiNJ{9v$$NiR zegS`8<_ET_vK@5FlxO9ev!%P>2FFDxkC@aM&>nu5P#-c$@2xQz>gNTMU*XR}dKO$x zz|W6wCLq)Rtro#;9?z5l`{_>gm9Vc2J6IOv>O`fl?>V?V05KCA%z0eu0D?eO1t(y% zY1+WrD3u(@n!#wG0c3N?Y}LWzUE<*zqt?WepU{_DNgv{l)iupA8VmQ7y4nhlBO?E! z7+>mJ`MdAWqsL&9yQRNwX}qt@QJ95mD~y@7i8;WLjE2WvlE~^%>8aJVf$#n^7DQ@E zx=$YwZF7dhzrZeBH-`6YP&Lc=v(*SQ6f!=v)Fz>2- z%|>S3m8UQnhv^3srA`C*N~KrMO$u&YZJo1{T#(g8TN*}i@C!G#+x7!Q9%x7VC8c?V zKgxGp2@yi9Q;b0eARW}Yg2Sqdo2b8)%~Nd3<#N~t8YbihJXPHBLARq#RQWVFjD!gI zAqJnKexX3y)LALDD3`RanvU1(p9SUmX`6zb*T&;GjFk9!DBnVA@V>2Qr6P2OsN=65 zDgDNps5>yRaVZ@0kJ`Rv_-HqOWzEv0#2?X$bB4(Qa>Iunb2{=BtM$wG8I_H9AEW9P zFSHU4Px!hGuiU+3iL6<}R*p07^rQl#uq1&oQ(%*!9hB~hIK9eN{1UFB6NL0i@$p+E z2(!EwkcHzrRUdhq3?A{4e43Dm`KuZ84b8C{#vy$CO>Gen#$k66hW}oJEUrVCT5}nM zwVreM|8g1PQfhOX8g>xVZWgOwMIFKXJg9r7fygO`z2H27#Q{gK^y_rJl4Ia z3-Wfw(Ov;Gv=hf)39WXIA|^6^9DQJwLd>94F6$o+h8Wv$s8wOJ&l6l*$yVHipY95l zL%gCI-c<~q-}Db5u#Y=W%qM^gnxNLC_U=3TOcC?u0rgOiR}L^ErpSEbavN=wV2%-( zN_9Ih={Bd1(Qymr^$_+>-lO^Xsn&naWCwQ~PRo@MS-FdR$t~hDPx;v%Ti%W#2SnK- z#p5~sP*;p_neE~HzLn}$9TzsQm&~@Xl6Gh0{*9PTECl~#xqB@i$#OEmXv#kVjyA1{ zzfNhTbrF$(#JPTZh&-p^~60%D?umGzNJro{O%M1$X{ zyTA#X1M;&%-6yE+6^ZGdq}DN7l)??_LEzuaWyMq6T#$3FM(kFvqB;m`6OG_oqo3J6 zqn$%yjC-U<1x9&b)ilL~0URed>te`HkYm#S<KRYUCeFWy zx0y3_>F36(@_(FSpFjuUZRXRXo9Dfz_<$fmWVI~+T3?iHldYGPSof2tzhZ;bCOB@G z=L$z0Y9l!&lfJu0E#EA}mCKs(kvZ-OpTd-J@_97TKod!rjkG#z^|C z92(utqJeA9A_e9|j(Kes(Z%M9?-F}D7`srGwTXD)#7et>e?q?8jho_Bwh7&@ciIkJ 
zJ>lfHY4g3y11{>7cRz3f7I3j6^GM5f-l8gnQ`5n#LXUs)1x>nFy()^;huH|7vk1N| zd;Q|Sr&kzM)slS5Wyh>UZvFT#ptsA@^ z`dpbP-C0Rg#G-bj?IpR9>#_c0bw`C>p|ELXEnz)!YRI<#UwQejRO)r4eUG!gCgIjJ z*-|F)2%68=rh~34E`-^+JW`s^;8~8I zeFldkur`OF{-(1ohU?wzsO>Hk$C?_vYJp3R!~&t+MfcuQ_Xk{QmU}DCDX$EF5nY?& zE^?~UXpkC@cfP$=#lbCEA-REw_nRwyTrVuZIO)ZPf(#$Kmy&-d6EV!6m{rGqiZAfX z&o61(^uCxke8=8qr8dyd^VV?Nh$lA%4SSw53)4-6cN$3Uf%cv+7V3Ay4K$YYPL8+( z`^$Lt8i;IFhzKj=!@L0j6V2-3=`$$94iI75Xf4l%@-$d+ zJEQ(@mPIFPt)`)8Z52VqbF4AX?Z9uOZV*+a(|_OzCErRz+N2ll!<1)dJa#U|ERH*! zB!^8tec2dbl4ntSm)nGfiO=G0gKl?G3=Z{ph87h|xgKL;~lEZPiDNo}7 zw&bwHq<%5ktfZfcqMmg0dee|Dr@d;fTbL%t{6qo#noG!2?CQky_AL$l|HILD$0fD@ z|M$J^X-h3l?Uv)_yyd#(`ZTP}%odW8aw|6?InhM84K*|Ks;McZ*IX$gIe-*}n=?f+ z7bXa#rshCE3sA=Wz5D&|;~^Z*Iq&D|xn9kCUVG1LQ?t8^Og+`*E!mX43=%5K;TvnmUa71?HRR}E0v!og7 zVJU#yR}O^~iM@*CzfB1{sBP)zq)Bgwvyf+Ph&ck z%^IO1sGv|A*svMf-BG~6EdS`_mTUCz%20K{4PlL8hU}QH4fc|dNR1suPNGIo&&5yELs#l=5Ww^EwJ)#d?eJ69XNnZlj zg4E&YmcC7)m-PP1Ooic`Va{giRp^uKr;SkC>P1mNg0ONprG4Nv{yB*(9ZpDJ~(@hC`&UPezeH)>EIk9LG;*df9Sy7JZrOj{-nxMP^UBw56RNko;)5vgn=b zNwFZfZ_KAgSqf!+W0#|Ey(`^Z8Cd1yp^UX!Xst>^vTf~qucT&fM{9;>=AU~M@ZnB_#vQwbToKvHh= zX%as!Y61uB0Z$4|nyET2?VH2?TT{143_sk2AAZ-3Myx?cKM_+lO4Z5O#TIbvzSD9nDVeeay@6-)mw(rZ`iXkN%p zRdlW3#~dU&WcDi!m)5&H$_T+FX%{@ojm6rfSeOqmQ85)B8>Uw1UfC!fhp=qLl;thf z%q3DgMM<$te;MEI6VF_v9$ps(>(cqquk5c6PeJ#9Z;~jIBHj&PjZYT)w5ieq#&eT3 zgLC}liN(OiA;>8}Iu&@*l<=6)AEb`HJzdeuAzu9bvFWliEU}<%eySd+U`Q;+7zv9b zeS;lhm4ofoX6zN^*xzb$G=r($JbAA|xK*4UW3|O2NOI2A4cgA0H_wB@_A~lXCChZw zIPVxqg5;@&->9uIk@Aol%rQxLL6Nr2tE-9E+U#?sJ~6K*$gH9C@E*#1lN0}rwW*K? 
z)Z0@eY`t71iR?pvo*o#CrYBT8Z~AoGm-&-Ph*tFuGu8bD2k1xSgfHkP&1N7xb$52v znOvIq#4F=;eJZBg&?_DC57~=nm^P&rQZa%yKe})Fc!9iXZzY7p>dm@v6dewQZxhid zsWfEe$U}mom+@Y6(GeqRqQmzwY>{|ly9FWuTQHT8m$@< zH#47}8#GiR@16V6IQnAp_q~XYKfg}t-|%h7Yw2lhxTl03C&|5b;qH)`j{GR7~hw^QW{s%Z?NXgsYn7$#9}_eq7q5 zfB$2_M08-V+8>d&0cC+3Rn0I}0z6vn)KqoTs)KmXzgc3?-eNb8q`^wFZd|10;1}!N ztT7Brfm}0o+ChyimkUMi2w6Q<=j@aak@`PGvsUYGv>4pr;5U3fMSuLD4fCrGA+K;{H;S76^)0) z3UA1))J|#~hg3v(d4U*zeTKY9@xA!GVYqPP9WlD~GSBMOKZ4E1O9TAlM)`-|!D(c) z73;q>iHQOkqPzK)!cZAVm$-gx!4_QHIhVzXxP_wlFB4^0VcwLYiqWy0$Wffc>MY)g zw@2qW?910U2*_9P^uePr2Nr^~g9}Kg0u#q1IcwmCXsi#+!3m=S zHj>(KyW@^=PY#(-hOHJv&aLz)cf(u-G&)kqTM~LL4Fj641tp&T*#D9X_dC`=v>df8 za1B%&bAcdL55As-N15{Yr|4I1F}`-Ke}w0F(}x)#fQspj@-qsp!Rt{0BesCeJ;F3q#(&e)9#4zZ?m1vlISdOg6A%4jBk z$KPl^n#v#t{)&Y+gC7CNP6EhpRJ?<6eohV=BZ$vaO_A~nJx)caZA2BEMDieO?s~~t zj)Bc*HFMDA19o#bhq1&NnaZTgSWOpxptfV%iniLgYzV#wf^>1VPWsF3vBw0nY`+8Y zgbIP_ztn5oiTI!)i!HtWlpKobu@&SV)L3t-fcUh(?a{KHHzVQDYjBF7l|sNTHjy5l zm;u=dB>Z9F|4F)9t@J=B7@7OZGBqlG((K_s4rYAj8>8=+Ll5l_4=dC=>y+KL_dbUo zb9u{l-XHq=+SoJCU(R`39C%g35r*f@^S*CtwF-!u6mWvnn_o>5-Mg;JH*7zBX}}6& zTPX+`(LQnJuKV_}+P*2#RiWN zm2dRTb#Jw=#ufeR)2qWS3DN7IYES;WH&u%@1GQ6+I=Cyd+PRoDAZCinsm`cdDs?hW}7PH%VZlr$g9s%)y;rbKp(x|b+W?G=@q$s@pfav zxOwg~>4522;>!*DjIK|etBU)mu~T`*H)=AuvHDx?h)%^5f9u)UrY9>)9ud=H=lfx~ zA%~MT?a%yZedssK(?aPD5~=Hntx)%h{9)-BM*H29@31(Or#$7>TFab$);_uk#yMr~ zU;0lUgr@q1+=nIWM=#uEM&8_RZ)6$bOR(do3ZQ}24bXc_QuN-C!Gx0Wq6Y%8+Yzjl zyr-?LAf;oxVm41FJ+~;H&`UBsY1IDO=IOyT%^%TWk9VFw7kK1A#G%Evy4x z)It~hZN@6)A$oCS+E;*%RB;p)(ik~j+~+Q_{|j?;0=_lW&Z8o(u19u_{f{I}D>qMj zEBeLaVsRF0Gn? 
zy%$!(G9TUiW#b>+A8ULcR3ugWc0C-5W6=3Nx}I9{qKPUw+Ox4ScL4lISM7H@ea|#R zoj6FLHvV}f@KM(I$>npoumU zUG}C(sE?fuAP%Pdl2q0A1JcsSxm_e}vR#~HTlaA6kaPMHvl z4*SgC9^dz#X7%q3_869l+ej9yVNQ^=l%`@<#uKH*WQfvbBGLX-|HAiax6S9CcPJFS zd40=5War?~+O&}Ky?>n*96L8%@UN;C>lBwVX_bM*TC00;I~~^&G5f_c&y>e+O}1RS zkR&R)+{lh$S-Uq5<1bT|O5QdV@DofB*K6Nxwr!|ehsS(gO&F)PTaDi4{zMShz6j5C zT77Y|VeI684i?L?dyn4Sequa`?%r5)vCjT8MiF;AOVgwL@S**XDcni_cRuk?Dvz7_ zcG#EVsTMzcZD`QuT>NZ!k@=5UrG|c!DzkaB;$(<=o22gnU0-fw`C~rc2DkIWp)|KT z>d@pn{hL@HQ&@>psWhw>X69y+<4_$oe^_7RU&eauX79LI#e{aMS}V9{)b6|_q$#34 zc+iKj`&`=H>nWQ-PW?jnQ`6FfiG;^~Ee_+5(w$*8_C^&nUB`Y?JD`^k-eeIJcRt;0YhJJN#8JL84X z>Gu%+@lTMJTJ~#d|Ih7@kD7_=<86Zj+&T#lS~y6<#es~Lz??MR4|J~}XG2oPcVxeIqQ7~=6$WU9o<4eiB4_j8CFW1H zr=R_=$=Omq7$rG8k@wg!M&fXe&aSqdE;LKnPfkimHy*9&7`oy;#@d^A7q-Q8O#e)tO$woY~ve{2h4%9U_)mwOLKTZ zJbU6;$4q=Etvq-V4tk!(1iVU@+eKUDCSCmf5D< z3%TfcL?bRO6f_is|0Y_%y`4@DDQQ$_4wz>$m(7t&b&EXOv~Ld=3vU6K=@#s`p|~&A zAaEns94-s(Va$D15LE(omx2YyrDEuMt!AD;4vBw}(0p@hW>ueN@LupB0T(6qYz?9K zav9?~wGg${Y7l^q+W~pg_x^Z3Kpkyzi#*U}AZ{rV=_*)_xbHgO)SbeE*9(v|MsqJ0 z1|kERhqy|Ds$+$U(^hy;7)L$xLTv8cTvRZcXLP^xz296Mj2k~6XBj;1LC}r!>!8D1 zsOUUuxCqQ3lMRZ#`@jgXczEZSa-G<5RtM>t-KrHvgNcwlz3AV$W8L-|`ae925ELPe zk(8%is}?%zB3}3lmIx@-T($c$j@*=0X>Z~G{yJXaDGJl5cyWIY zf@;l*mES)w4EyU6Mb`hV(O;qTAMp+C5{9;XRZF7=FuL;NHn*pj$Kfdf@p@S4D}!E| zH&!Z;R%T=N%Pry!vSY+;Zn-W;UcciZWO?de1fW=g@P@pCzc)tMtS!G5I}H%L*eW-p zwu$ti$g*$TD_{i=thV5KRHeHal@dW=1y z{M!l4YGK(MFn07AQrPUd0f+Ge^|v&i?cE*m*XD_qT!SQdJ-ov}?utV^7HVG*^B;~o zSx$dFheMaj2^V#?KaC~nQS!|*Lx1`443D=xzrSqR;-w1JXRkBy;-$!TiYY&jtK-^XpB(r@n1ZPzg=I%cVIu> zTnSyAJyb!LZye8C;08$)fR22Sj``Jj59LH|%bPfc`%rYzo-#TqqLSzStCoGN+g<9> zi`h4w=*0+|i03y@w;lh|{bKdmM7?FB!U}U#el1>M6Gxczs{N;|>ly5F{a_3Mpa0#> z_+9#Kulqv{c8}>^?Bz#y?Dt)(j};W-{NH6+CEcssv1?NPYVv$fb$Y#v`1#z+8F})) zFS6r$1lrN0utsJP2VP)t+>O9UfRr=q5^e3;Pu&Q8c3_0+_f!hsqHcG2Yn2?0S|gvF zM76?&=bPam4wPCES2OW>HutVTH^|zj>p)%4Q_V5=1CRSsTsPg}tJ5W2(wGJ+u(Lj+ zZ&VN6d+K~p{OpAwMBE9K!JUshLU6m{qG!z;UW6Ad#CGaZw}$0Z`a;@cySIB-`R=NOxDU+@OC 
zqPACe-skam$P$MIru2J^5)zg^7JrhAsn{j2;KIoK%e}f_}DZ>Mzs7D zfPgerx_Y;|kD#!T%cNd~dWQx(ILSo758vchj25$v>L2vH7cL(Dt9#dQ_(UDq*IsCR z&0H(g{)0r+1%B7P*pHu-8%ET-+r+eXwG}Y9A!E{`PKooLg|DNK%J}p5w)dF#8+FGY zY>Ll$dPgS>%8Yur#`2n}8~)VsfE)X@tHglN+pK2m|H{54FO%3`5_gt;G0=1$2QRVW z3cjy%<@(;8i1f;{xv}({7hygH8ufWs@S2HVJSbLB{qdAHW&8bZ(uLDmQ^>Qj5aUH2 zp26|mHRzLTvQ<)E#EM`y|fc&VqaI(Wo-lEl2IBZ-b?|o)M0<5%D$~Q1ZOj9f#KE+E+@pc34ejX!GNq&ax0e z_IW#A-8#I&3<}z7w!NiBxz_me)jwo!$JE!X15$016r!WAMq=SJE1HayDbzBceMdg_ z*%pl$UENc3z2If`6|bp!zSM`>BE2~AD#XI7xL+BlN_O5OADp-|y9#Yh-SN)R=+2v) zahm_Ud**9=>er_sK8ekjMw2&f+kGuzQ~3@Z#T-yJub$%sPji02G!P zZegf12rf8u45>)3N8ly}v2}(1+&5pozLd%EFuo85or$IyHfW&i>v8eZ1K2LW=IAZh z8!vyEE=nuA-W2%m9q+KEC?~I10r>mLA)$-(j4EI0U@idN6-S?S1O_at+p77l(@3>U zWJeiB64b1F?!Vthr+n6wc2MOE2@nN*!ouba-+W^&*9jFp47q4!1xLGSw#VBd)TJd; zSQVY^xB^1ze;pm$+AcnjbwbI~k!M_nAI(#6!<~JHfLvXdu^2as}bRTDGmK2a;(2ct1S;y6!GljIDbBL^rN%pn0%0U+~w^?XL; z`Qu)2u?Ok1iJN3S^+`j~X!dDJOI=I?43EGkEj?Cbty`vO-PE#Z*p<{4zCs}2rS7Vi zrSN)R%x|^P*;A@1P{7C#TD@AvNo~K?HZW0irw^=UOVI>qD-JBn_!z95Nd!?sf~5%m zleHsUy3u^SAhOZ~Wv}*E_+$vJnbkOuh#G}^4DGp^^2PX>@v_&A^5yXTxvmP3dk5Kf z`Vu>bv5=>*;qsiOsQ-Fyt>(%cndJbN$(Cb9;|enWYz_C<(b`e02R9F`T#psn9sl+E z@9k;i{N^{KSO6S@z~A00uQ4t2j=GLVv3P2uBNt4&U%r>`7h?r#|5wgOsUi%M8im_2 z>e2$dx*eB&u^(O58K+y6%aMT-F1bd_{wp|%Xku)5q&F}H_43YHUXyVqZgd5U%6;|t z+-epdDz2&#GTH#D3alOu%xNs*>+?ymcduaq*s13YGv~F7wuLPk1@W_OhdV(|vUw2q zFYdc3a%2Iou%Fi&41pGHY5>j5Y2Fr7NoAQi#R1$yAWx$!)E>b-TIb^?rKrJ* zMeE@$TPffjDqh1({$yqj51lBo$j^aJD;TOADBE04(nRx%Pq(3+$;ZbHR}st+{mvi(Zv{%q6>^`8NGQB@T^0wIp3w2kPT z2 zLA7{8|65b5vG6|rjmI00FB&@&%jdjxV2`oC&R@1EjAdt|^P*`~Bm^Zw!WW768e+w5 z_N(QsO1jS?T!!PQWuV#f%6UB9BTa_&5X68a*qc?UsclOY!N`f!E2=1*x=Qj%+CNZX zTx=F)R8JX!x#WMniv5>S`<3Iba{;3jvJfURY$De-OL7~epT^vDx(u0n;7ITNw~~B3 zs(Nc{KoFaZ>NEhKu|%@f3x7kNB);R$MuqFjs3V+@|14n$GN{UI3Md0rKpGe-A2oa` zDbeEh5M?lh0Ca?dgohi&ly%IEX2M)nfhf*IaOI0hQ%JfSJNtRGX8{z-|0sjbBYU48 zPAHRGEjj9fd-p*Qwr!PPq>rz}e)RD%j4;I~a_3QejzE++H(alJgbVWB 
zI9^yLKJ6jqzjf5E3jKTTL`G70GXx_=Yk#Wwyr|>EEeP-m@Z!l}$OQY*el+U*C8IKmBjhS0#YfjI-W7wx~3jy|Sj>a#()5sIVK7Te~KMjLp$|0oWCER8^$ zAsKS4@4(Nu1*muME{6DuJ5#Bs#ZeHc%q&UD&pHo9%OK7&1*H2a2x{*5qp~TLqh$3+ zxd{sm>oxTbG1jjh&c~12KjLU6BfKBcC@X0M4;eNvz)Nkg48|sdN2^gr^)=ixA8K=OeN#zv*dTWdnu@*HbuSG zV9C_djaOe_uvw}9DKR6U&0qyVy_yNh1LM+vYov(7KB|26mbgNm4J+Yfb7N|FF0QVd z}0AdPM!uZRWJyS9fG>x6FmFp2wj@I8EOJ zRP#30rSv~KYATJE2Ra+(hs&;9e^ih)S`pZyGgLFOi6SH~;jvz*CP9Y``@@7&T;NK1 z<{_%T*IC1bDXM`=v#UW)n>_BEST==!1mS?jKc@S}PETFi{$=uSqqvN)3w7p2XjBgu z_XVzI#brDeTlr!qHMJ7_&9gR~i@TxZvTYQM1(G`Hl2@`V0>yhR93vX=kW8w2{j#N? zpxG3a&OiR_{IvULjuRJ?e=r>nxVi);il&|$x`qjoL<}1Ql7eh*LsU#yRVjih3in!p zsYpLF+ZbYy++XxK+ zT!ppt+S~oN#+>Fgf$;4iG}7S(9t?YyT6kV?z;>Q>-gP_q-F7X((FRdYEYQ`aL1LwGnyx;1X}TEYrj*yPGxw#F`lyI=EKpBUPd2Mk>w z%J%Xy;$d%mc)0$OF##@CB-aqD1TA2?CSS^ud4%ZRJKKfh`LBRmB|t4xErUybUp`Lc zR(WV9z5LWbkm9mjJs1m%YCUkkbW7?x?Jrk+K9p>m1@R@I8lfr?9`FT46Q%Np8}&(x zx0pt?i=7g~5IdV-Mm&Zwfm&2D6qa+LV}~_X?qqxk1er)in;Ms$A53bY;#fFa#P#*ygg1n+kAYmnSoKX0)i4UL1Q)M8ZsV}3H6qf9 zuKUL43}xxo|3O9PfZnK~W-jAmo9CrQL*wq1RR}64$ZB4PEOwLf(^a^I_4#;(B-NY0uxhdHR2(W=?)QHA2F~ z$D~Yjy^mOTmo<%XYsx)t5$^c8JiXb96d!qtiwz#d2B0R?Rv;17B@tbd>}Z*QH~57@ zp&mgosDxt;3>);5y7+7}QxZ!dm_W%8fg=-_UN2z|Q52_I3ezON#HPj2Y@XE1aW_#` z;m}CYa;pl#B^4;b&N*HBJBQzLa23h>mP;DoZ-BSQHBag?eU7@BXa)q8E%1r(0YMJ^ zBjOcNfe-fPf>LDADM7v3Bd~7939VibfgYCd6R-03+AyqFQy0DCdK-79fngT@B`ls$nsc~$7qCLUuufq4?J7Iak1=O{=9M5oq54w0fgFNpaDoEk%B~LcaC=;J3qFI)#lp z2niGO%%DbX9bsqf-pDtD-kDc;r;{@8FD*Al!9(8Xr)c!)S_w^ij$WAeKD#&oD=o~U zKa)}2^C41k$-OPhO=O{z{aAQh0&hh<%H{8q%%oJ~$c7Sqa%E`lsv&jvXg(^& zERAyGYZ0Qn87# zkzm)a&%!;|-K~A#dBxXT3zz-pyDEs;^{cPToM9D4EfWjbR5!TCRhy1m0e+AE;gOIn z*ARmxj-JvhDx$OLtPbN5BQUQn#Ru>$Pq3zPpXu`z>9~gRgv7wk>+bQTci-%5-)mlE zd6RPX817bi?mp@%syec(QLi`=6i&%<7^#RQgfv0*1bZY-w-cmLiYBe}f-^_uQSIy# z=`F8X9u4?;IJFFEU$~=j>>%{_eazio%<4CN{);L5j%&zUy~FhgqwcXBK0AmE%yqHI z!Nv6wz=@&&?*0Qo@sHsbw3`fiIEO1pD&|H)jf-Q4-ewXl*c-R_B1al^?;7Bz)5Y@5`dod~; zG?ehUKgZpU#PGtgId!RGzvY|nf`(I7ONpPUN*a!)5{Rx9oOu=?X!NEBcyYmb2aX{p 
za15>fx8@@y#ZO^TK$#k2Q#~lQs5~azNok)S<4bMfyQ70_ghtYsz-b(b`A zhGCu_VIaYgsi4*;^c&+(T8fUrlXJ{vU!MhEpx7(GnKSYT%MmmPS{bQ)AU;dbdp#jk83om ziNf1JNhHK0C{DfAy}(@XY>^8m1ion6=e(if5#g%ypev$+zgese&gh(NG z32c@CE)QG$IC@%Xq;@sE_~TokYSO9h+_JGKhYsN|;wydpGdox4sKzCx45liQYsT9M zG|9B%HMfSUEf2XT5~nX_o8km8XjGzzMpLha>Q2NG%4TPyqRLr{4(O7P3MxW!h`PuC zR6bV5D=x$;?LIK-PA%2 zg2bDL6TNdi7%{K2Tv2%_5Ro4gepy`kyX9$VjlFa@>$uAuQrg8SH-l}q;(@POo}W!R z&wrT#0;S?A$n5?iKPic7r#q=iolnFy#(-em1^NT^vg?3wsjgMBE&PWC+^Q6H-My3B0=Ae|`3*!~ z;?jO=$on|5PF6+3dk=(3KmAbc`h&~DP(y_Rf+FpuRLvX^Pym=O;%7lCtHtT%oE&e z@pJBtH*<6-%={XQ>UrIyZqjSaaZP)Id2%dP9_e)ap(3(gIstn9#g>9D@?m491&A%bf(n!+Z zO=V-uW0(*`y1|XcMqg(MDeP;|6So3A&nsO88M^q)-!{$e&zLIOA@7+34&{h1?_9=r z4}Gm4YFz~w&~+i>j2`*p>3%Hc^;`Yv_fJw~S>%~{*S zLJ5l*$0gHs#2M1UG~}DEnNXdSr@fh*tajhnx|b?wd9JyuZU%9JvHuH-+tr0@ND&%+ z5sfNZ;oHNo`j``+7G{QjJpK`o-L8|w#f6;HQ_XM}CMoXw_(MMDbal-#b-hbA&4gTV z*{(RL?)Td!cUQzR~{1uAcj5a*(jugGV{rFrE{^f*mnJm`43a&~olS3ZWCZvMdO z_Nvx$@#hJl$?w&F8^zo@ws0nD^QqFwz7M(S%XR||ltGxWh~DJK9ItCaSh3y=by&q7 zds<3u^01HLdNjQf2?Hb=uKkYN#U;`L)hp-2p#_(SQ8l5fiTwLzvq$di$f=pF&ys~8 z!!kdg0dnn7^w$mSHQC|$0vYLuQ;~9y!K6Wlr@pW9^Z=0BG`>*_W>TF^Zk)OX6;pCz zY^O%WeJR4WRLkPT+Mb@eZNEQ7@j53wJ6ty5P`LlYU{qwR-YoE?>L#GXZSPlOh}2}| zF3=eo>i))M^oHORopYdMSAj&HE*@Onr=cZ|C$QacOQfODcoXW{mVu76#Ud@W&THs>CL>S&?bI& zTYd#P$Sk;lE;LM>(T%W!6i(UP{p`?l9R`Y}1T znopHNa&t<*{W8F8zlVNUz5P{5n{9JsDAmsL*VNpYH!b&fx6}PxA3yi-;joXs7OrobcktYX|t(D^Ut=+zXoP!6VJTJiWper8B?? 
zp8{oQ5S{lbkM{pyX}hb(PS8n65(3AuG8)OP{kNwQx#eF?#{uu_`C0+|(en>5EjK5( zLpT2MvHE&h><={KSZI%uLiJk78iWDck7ab=aQ^F&zdscB4{O=4xFnG2sKqMKamC>M zwq?&zJaA=C|e?VfMDW<2h`p(9Kdqmy&;M2W-R_Q(EPLsLzu zby)bxpcLW_BbKV&`k2{@MQQi6sY=MA{@mDdAGa0yq<+hhQC8x-@9NEx;VAUuirOJqF%lFEg++W>Mh9{ClEj9Kyl~QHpsJm5I{+X# zG6Ww549DPvS6h%zC@=RLHvcY=7wQH#vymfSRrK47ke`ehBLysO(Lv~4Kuu=wPn8(< zoj>v9^Z^Dv$a9<*qef%6mFW>%x-3;4P_=h;H8&41?crd<03e-SjWfu`FAIOKEeN43 zz@pPZE7-Cb5`e*J1&dPLBqzRxb!l^Lf19E-H7^}{msxVdFk23bo#s>%Wwr7$L}`f=LSe`~bZD~ONW3CN?|!J91G zDp!<)>s>);5?FT7M1#qSI@BbzBwTIUa%a+gE6a)W-x`WdT#4dL#Idew&dT2_MKJ{~ zNAq)HmAzFx3`=3duv<6HMt%F4PT5C}$Jx>1>k0#w$;Ww}m8XIQ@H4o?mME@Tuz$Ge zhGG_I6t;10OuCOt!#6SPW13Y?6HACHflC?a!#Ffb;Q?xEZcmJ~|4uj&a@B+4eJIC^ z$3P#dnfBJ5f+z&`Y9_3+(9+FzPtafOHU(!}S%|47P{ zYHf!b%pL!(VkAs^=AmVI3P808Pq_U$7Hcf*CJ)nnnMxOcRAb+%+$T&@8it#jheXrC zVJLM+OvE-ne=pfKT61pp6eK&8%9Js_xY&B7OxQhq1M5>|?5M27CvqpB_4Rmh8yQnH zOF<4UO))&v3_aXXSdQ(v66;WDS^c#sI$86OXI`%_!;-DGQlix>Z_3}f{<0fSPh>Qv zMb={ktD2F>$?dz5mIgb!WBXxlh?)0b5gJwq@y4U_r>#~yR&X3IpErU%bUp96RA53c z+-3NrzQZt=Scl4|Lo(54>k@_ii(?GYwOk8}aQb(B)(?lM=t6f?;}qQ+7yVIyRw{?L zq1ior_tJDKfV<~c7+>IG0})PO+ly-r8P9l2FI}x3;EGlZ>$!*B$P}S(o!#^bUBp)~ ztoW5=l2%BQUV0~?8ivqf$I^Y6@pkDEQ{hW8Qy>S8!^c;ks-g)$e&i+ha`mguO84SV zl;Ulsq#4sx=gEuAC6aiEf)0u9llsB1pPQd6Xg_n1r)u0UN~p=UVf37;`euMy?m58u zV06ldfZ_^(RjokNmL%<8CgziX7n&PPnVXfSTfBbYidqRI(xUA7V`LZDllp0|A7_5E zLI3S*)Tzqtc~4Y(yQCrP6#`&4pF>GsBT*-&Aj2|HiUOv|cyIhP@8&6GgxuH0CB$>* zd2YC!qjd-xkDpfb5S6A?+yXGc&paZIKU+eE>)I5o4=&I2hh(|rU^;j*sy$z=$MT$S zSoLrzFuIm~B7VPc@_JFP_bps@*hLyBD3UQ+JiIHs;A6dgE#AP13aYOlOj5hXy7E2~ z-z3LQ?w+vhC~F*`vVYn@6GRZ9iT7Yk5^e!UfyW;WkEITlznX+kpe)dKf))jf22~7S zmpHR;HgHhguZNG@zqaK}sA6AZ?q5^E@k`+0{cCZhQc;5xIkTdsPXo%e&=$CI7U2Ly zpr@)mVC@B#_&2a%nJ6M8{%)_7V(ooCBZB&K&QSL$hPTL>?;VI(0kO4+qgybd(h69n zBNUVif|@4>UXq+&6HWx&t0U!;vXjc!JAl zl~vSxt7e0V{lek~-(4@ptXAJG=J?ryk&wMY|KS-w{wl#OxO!S_JIFLNXe!WL)*AQ5 zC3|s~arazWC@qDGbA#UH@x?sjA70PgAan@GB{+aJm<846#$gFUO)DENVmdQ>Ci$a< 
z0w!c9Kt3zU|jvHpxy#s>dYktb(QTf(uG%7h+(Uxo1XRUe-gQby55|SPN$AFo!NA!g{odaItUs^kEDa7Ak}U_zLX6BOlumIQd?-Y&`2OuNtAJ88%wma$-bjI|_GTjhytuXP`Qp*)*O zZ%#U0`ARL^%yu67bM560DNi8owm60i>bbq$u z(7TkWto_>40f7N4I8iRa8=@PU{C016yo22A#*kI%K+Ub%=%&~ZpYdTB6E1}+1Xil= z#H*y%BHehPLTkmwM%o0^77dpVEE~ATe8um6OFFE^GK5_g-nqIwP^K9sju8x(Z6Ew(uD%=L}g%i z$t4rG90a)op5R2C9H9Jb>IV>x|@?o>qt>|XxUOr!s=~!+}Sr`WA%KF{%0F!o44!To)q+dO(A>E#wSJwCS zvsuUO+OBN~lNW**A2&^~4wxq+%xP?wJ9tvXRtsI_@_VZY;W2@?dqTqN%-YNeaXH24 z;)Xv-(`V=FW3N9raUYI**=Ol(to7=|mFee(QvpPt2}sonzDE2hkJ|XCeD1`N(1Lru zA_AgYNTKP4;kIC0Q5C9M?Q{3PasY+7=Qnhh0e^);5|elV3Z{D4djPBRt>Dq1-@G;~ zC%X@pwCpIb26?vWvpoQ=fCfy$fQ8zC3lrj}AH~xGvUHg-5Y)s9qlKm*`*`eFpI?&| zU(_9&B`J-~j8%#Fi9CQ%TGT=;)ZB8jt&#S!ljfr`?dXi(`;}|sQ87RQGx}PPQ<^&E zZbZr@(qgAFu}jrBdzLIemUt2&UY!2$d2AHHDPbNXw{#R1Au3i7`qL|ncpIcikKhD zNT`x)w^J{>?a0|xm(C9k9M+FFvES3RyE@I(^}xsXhwQhSza%|&)(W9J$UA^yyMO*f zhq@vnUi?*_eY1aR@L)r>!mI0i=2vZFyXEur!akwL?@RpiwcW5FSUvo9xjcJ{n#}Ib_c;q*r8jy7b86pe zaD)Iyci#BTPqV_3&&ZF`yajtL+WtH0If~#X=h)07d(qWz2P~By3Wsm^WDlJW%k!j| z6X6xo7H{VRel5K^#H8V`l!~iO^^GsipRwQ7g_?GCTt*&rbiP79+9&>He>B+YKG6){ zH{vChJ&7sOq7W)Vc7|rpZbq)P9iw_deQJul!oj+xb#;-;;9@2K@PyP1h^0 zK6Rqxn2VT;gicR}-F%LpS-v1I-*{JR8JbzU0${`@OA2j)f5eyqd(7s~64B-nAXEwq zXT#;YNiu9GN@>P}M6U?iPdGKP?1st9dX%a~1+g)2N)2{|$_R@BZr{H;6_2A)YcXf) z28!GpNAy*@7Ms0Y9xooQr#$&Nrb#zIK?EK-`pN5mwQhxC8AT{z)x)^au6un=~mU}+ z=!U+t7G=2BN&5As-1C{?Ho<;3V4;fvp!cD<8k{do8IDI1E2JSQNB<-(>`7)hWo8Kx@u!7}(4w=3*BIoWD^SG<%Er{n_LY%Dj2 zYLk%fO&GJ)NO3++&ku*U&)?3%E_xppe~+r+vB+pQV>Wm@1?Z|Vy~BD03DZacTE%(Uy1(-dj5Ff*L^{a4I-dK+%rM>*?06Hi98W;h zMNTRBLmhg=q13P4v?-ox)k?aIYFbBz*AB$Y6MaCWuYiwmZx`&fHYKu}NUS9SW&p{R z<%JK!Tb9W8l8WwQE{$we;V0cIYaEARuyokjXPyoA3g?>$ntJ6YC4wf1Jr~ASTiC=2 zQYz=i>>$RXxJAfL2`U-ojLiii2nmqKH*XsCoOr)cI3e4tEQCZjsnIjXBzX5MR0+(VaJ?z@R=hER8Q4B-LC(mC zR&2$G{F5#O81eMkcMtZB7W1gRUJ$qiY#IRfFg2V8KLu8=VLnZW^)n^9;}5z^f=6_Q z*{KmBHRpovveB2{ubab;Y%gnCYZJL}ex1L1>a)%L2W_OcWg*W3 z_d;DIjLlzuAjh>!W2Kz1D4+6|+#CyU&J9O0hwcO3GProq&3p~{-DUgdF}s%PIOFkl 
z^SWlX-S*ge+z$4$vp4=9M`s=gb^ibH_S?2wNlHa1wj7nSoLf0|D|b?47}spMjWJt} zag3SWgb+f^B5XN|apV|-VPSWvtsZq6AxK5lv^A?Itx4R8)Fs1AJZ8g@ zQIFT#A#YU28SwOGtr9Nuq+$~&(r9`9YMN)z#4*~+B)@_2Ynb6N`$vA)Up_l=mTr3j zZD{`%qNsBF{G2MO|-8Pyp{8ue-J_q&eQE-{} zZMh#~H8~rfDl+Fs3m%O~m+zT5Mcx_Q+M?LwQx6wFR?rqgw?Jjp;@Ka&M&U>EwSBm9 z)oL%=ODps*d|t}jXFYq$MF_6ivsJ}3`fMa#_$&JOxFG{F?_kteFvM=wEscz9PXE~# zQGKQWV&s<{%0jk*nE@|ON$7>XHZHm+)JEG5w_TsZQ`uHKbBlF|vd>c|B8WgB zKKuoB^~K*(r`2%#ju67kO~r31zQXmxE7w%gfN}3#xX0kN`QEM{uuo@HiPlY78m>@- z@_i-J|Mcy9{5Yx>ruD(+@{kNnT7K|wny`*EneB(zilqEimhjw*(`XJ6KNBf&nH?Cj z_F;2b(f#Z$`Fx8fG>MnSSB`{#iA?u`%fHczorT+NtT%u;TWoSq=#sb`(Y}P;Wf(URIHk9Eu9)QNEmG z8jXFoG6D_a$5bbE3>}~T72$U}3+iLxKgzO-DCl>6=!}_udTJZDmOYDbbQYL}Rj(-e zSHJDHNHcZB2W``u63}IXsNi2@id|VVSyu zlWiX|aFAG?n+DFupY2B1M$>Fci$hb_P51w^=UrVZ=~CV|u^l^jhtHAuKFWWjul5A{ zk}ToXBM&=J?p1*BDSG5ylAca3l2J?}f)jztEqT8f=Dj2Y+$SLN(s<-Qt zon|{w#W`jF&?m@F+#F&TQ(3U0BM%*IbA1%Y8E=?dV8S`jU8ycknMu?Uv33Uv0m;uL}h7 z*MleuMtV<_O-CtnCx#^^G&!vC9>&D=zvM<9uGq*-NuT?f!H8?2tkpy|z#1_XX z(ySRV1sk+5t6+Y4xe1c>wMp;nzfDDRge%6iej&LRn?9sCYJt?EWASMp^QO&PuUbK2 zN@pw;(K&CbQE?>m)uFKY&1)`a#%+z%8WA2F*6mPC=T32bZSPQmB6u*R-vznztB)2` zdU@Tdi%VaAE#BZ{fSy~m5Cn0f`stKJ@@s)c$d<*k;!{pEllO^^$*HDXot5(?)H(Kf zsM_&=R~7%)of`1n%>%nP{!h zA{FJ+qd}*lyA)i{0gcdLE6L)Cxy*{Hb`*AO$)Rk?GVmuesSj8t<5UttY&+0AEN^?3NQ+>Hd9)8fd2|8hib*zyx-BqEq7{6Em$Yn*d$RX#OB{QB>=iJY>0H*b%g(FXyHmL>_GjaeT6Gl>=` zDzqx&q-RuT*X1`36*qb;?+aZzq5tixfExcxzOAFA%VH1;SVCq@^U}+EPs6j3qHA%> z^u68YTsfNCrpMX+-aKCul_?se`Z0H?egp0S)4UEr;zF03*t=L(5ia5WwP76#S1G ziiN}#+>+c(qQ(8C*doN+<-O}siE$<_Hi&#C959M@9~%}Bt?0f9F1}vF_1{GJDy@~I zcloQgpsZ1t-GsJUn!65NRO(et+R5xBA$N!%@K0~uD8^+mB_FXh#6M8z0k_xxNcYcX z`7>H_u(Ng)PUY~UGQpGUlWh`eos{8WQYjA5Ul@Mh?5>kZyt54mxw7#FzcB=zvMf zmlxB4leDLgY6^^BKW-uiYoqQB37cqDBewzk6g%;LfVh-f2SQ0I&!A zNW5MW+kZHJCGPR&z<>LkmGn4lc`Bq}pg;&ROM=&G4DYuQ<20lOjVDgCecehL-lvr& zn$dAo2q?QjZw4hrGD(}El{oiEqOtdo6R}R&`FtM|*=n&4V0NIl1|lNAs*2yttM7eu zEWijw`SgftQhB4+3TC-4*jrCfVQTWCysX;nRW8N9Xr4(;9Z0~j(js@^=j++KzCg#k 
zHUUXBNkwaw50+0&6Ebjm1t*P>kBE=Oi+&mFnjF7fjQ=hGU$`Qo=h|({fnCI91wSZT zn`d~B(_GG;1NdPrC)dA-KTG2qskSUs4Sq=bLlk1cz~{92TFzsQX#{J}H4uH}D-E8>CI;f%87qinRSOu6 zMkH*qKUi$Yi^o39^%IJ1=n+TT~z1%dcC!X4bxx{m7^(`H?g(hwQd~ zKCsXiSpWxF-;i%Wrg59bzuXlFsZ#J*@L3V%XJDN10@kkUfJ?e#`=-9Aio)=#g=9F4 z)V{XyT3go{cM%a%htj|lr4*KH$q;#vA+U9)Dr5Ov{|-3+>A%IRI_`mQiPID>fzI+K+l+UIsF?~7b+G2MQ%tee(|m$gN*D(h<&r|WLi>sQwAVgeZcZv(HH z5tAGHBHVN|lGZpwrYPBtkxu>ko__pJDPoYsg0)gQC^|76%T3a%9-eI&tD$X+-`tui zi8Ba*%#*B$8p~JhvR0IQ^JhH7&->JJE9}>;>!(Lnvi<#qq$~x!a|Gd8t=aR~?3^Ov$ zDQnelK>t^r+Pqe!cZyf#%$R!p)tgq}6oO-W%0g)04Po=CCasA!YYT2AZo0=yGa@Iv zhKVzhtq^c+uEx>UbkSF5@eDf$Ssgj0)TylNczm*Hi>?Q9uPHZTgXRbhNej^AwN^s~ z7wm}Ky(oXlCi}JbRgPkyDdn02Jr~hWZ|g?j!6tBrl2V^`$vOKa@@)=5*!4c0Z@O+h zZhAAKbbwppD7{6(?U0?LGl`rX;|V$us3!h#aTQeHCe1ITd^{0Tt=bp!F3V`-TQgq{ z!!5#gFgw(yb|*lcY}#>**7pp}Iw+!Jjo!G>NvLd4z)Tyog<`G8OFlB zGHbphUeC~5Tk9v~VQK&EgHX5{U^~}TXMd}}+pkI$;kgk_SeLavLe!gEjg3T9)-k*s zh+)=?=z8r~mq>t3|E60|Tn#Tgpu6Sa<>Mll;_^Qfgzw0qid4sQ7mXmcPS5GUo#S+t)4`cYz)GdVXH2>yuC8 zWf9pge^*9P)KAaBK&B}(j1BR4&9sJQ&V}H=BJ`kk7G}zYwo$gQ*Pp$j0(y;GyL;(I zUi=-|`hO;x{@Zumgck~gnOwd07RpK_4c!H}|48l? 
zebMvL)eywB59kTYTM`|R#Uf_*SIxkH^Xx>?ccjg+NY3Q0$=8BQ&1DTHoNA~buj&wFMZJ(Twj^s9|S_=b?R**VZx9g^u_3mPRXh7QV8SV`Hv*#fs&6dUMJlT z@prIr6UGW+0<$JrkzU{s6LncM5F-NYZtFT;T;{WLV|vQst>|2KO)1o4(2Z!4x0COv z1I2>J7KMm(igK@SYs6Y?h<-fXci%`~0#W zU;u|QTPd5vP$B}nZ$npNR+KHsL=fzynuD?$6=ygQcX6Opf^OjTLb-r#(+O zgs`xeu=yr;lV-v`31mEqX682r7)WTOsy=c36frb#++z`z1B9~$lKo8uDqu#Tq&a9L zDT9Z+@9+O_9|guBaz&LOO9MRon&V@o?}?{4;U-!#k3rc5z4)xjfSOj7X*t&DN?H?89P~&P!J931* zt9sS>X+*%;h*^(!td@LwukpD#8eW>!@lU<2WRhp{tyD`1-mLvh=_H>Zic`%`!j(-0 z_|j#F9P_ZD)@pAvWm-o^u0zH+$Tjryu>9}pL?7gid;6lWn_esFjh^!|3RPsFg4mUY z+?ysd860-dq!=KW#{s)>eQH#4_$ro*>?l6erd1YYHKX#qz{HWe{dvSR+K?8`4k|Y< zv4h#a(B(l{!znXTnPWS#u&PDJ{ zz$X7%7hI5E0}E+sO^lvooT7yGg9nwOeLdJGI`_1PwI@v!;kT*$#ZvUy-*KFy?wxz;Z6Dg20y&e8p% z{21Li=epJ>j21ZaS#{z@%+FDz57J=~HPb6(H3tKaA`cR&XQXy(Rljb~=ZHeebw>OH zh5G^DO+Obpcl&NHyDn8Incau9UTiKaf!i!2^nQDL_`as|TL+WVy*Wh8a&6%#xxnJ) z8h+@Y07-3WDsW1_MTA7t*Xpgm^zNWQjA3OJrHkGqYNQV(at4;)c-;KbM-cT85o?rg z;^Uh2cF-zOov|JNOcD;svGleNc{9bB&!;j=KzZ-VYCsQ?1i&vYY$UpVTE*E`3Q}kj zC(|rO-8VEgbr$?xkUHq00u(UY07DX3kGEKzAWS67mXwt3YyHWc2#AmxrfE$=hcSa+llIRoA#y_(kBr zYVp~1DR^8f-eMZXj6D{$YUNnQ?SPAN(CpRuM$?;o3m|+hwB~OJ*16X6=0ufe=4S$r zeQib4j6OD|N~d+R(#pi`3_^R=T~BY%s^!?wfxYKaXWXC1%kF!aJwk!k(%{{c?~v6P z4G^z9-qv&B=n`ymSFXN( z0`3BFCeEu>Ou+`*rW5(m6T%M;X4K{6-%kS#Y`KBLX|-43y)k@jYLPr|sV8GeL}oq6wEuM$mN;V$vQXIH z#m?syO)xu|C((z>0I}oz)It=8<&mZ+aU~ga^&NRZRX#h{l;``Lc5rczucY#PSY-c6 zVzF!?_l75vNWLU8^W%9JBj&v>pq^MX`sGi|tH{uyf$XLB+*fj1>SG+W8%OD>5ic~~ zeyL5gn2rB$ACdFcqA>2llkf`mQkp4g==5&=?J7EY@KXnsfTj6;d$zXi=k)&hZ(mw* z!%zqIN6{4l>A~FbdAF_R`R3*k1SHn%#>ClAy%FEnRUuTP;ff&fhcvLM%Gfj^7Jk|6 zyF6BS`^^0qZ++YA%Wr}-THP6_pY>mbN~tJV=Uvq^y27@^TW`T3dn?IMunF`leUr(b$#cyV?-$!}O4Sldkbhk(~H7Z{{F|(=|@T{zb zD~dG!wL}LV@aTK@S8GHkLg(!f`xcvc>b^@ox(y3JNi9;~c5nBt_0HpmWCcmkQH>D~ z2c8o%@GcG7*X(tBgkHvN!Q%SteGXSDJbfMCQBd!jMf({$53dd4^LKJ@PvS-_316~< zCA?C97aE?B(^SuawbJwJIig=OmlqKx^C-HkeRLMIe6l3XzZurrPrjXMA|$!8yaq)^ zskN5xi5nM}CnX^bXmyfaol+f=< zlcHFj+@xLO2;0>4-5um?MzHsX--*U|wXfI_ADt@>YrC_*QtpNoqnM55jj~@{#|$0o 
ze&f9ixcDnPPeEpt;z4iE+mr7DrBB@IQ?gyE+d$W=ZK~e=ya9VYqunAH1;UUAqg7~& z`F7nE;T#U!_3wk6>MKFD-r-VhGBxiKeacm=#O4i*fJ@gQTS#CRD~lrL%sl2KwL&w| zPrR`!-@f*WaYD#M$cLKhxl!88K^JW5<6h z@+sfyJ^7~zb7{1ZU(PvAqQ!}bv(KUCko#VzuN*dh^;;p`u4p{QzLpkak7^s297UVh ziZ+}^R_BkFo#_+(@(v*rZM6+lQ!y#a<_i5gFlmt)Hef+)vX)<-R5ooHHOZ}?zA^Er zzM~&Zi>C1)k)0@{SFXVgO-5|X;4w!Gb~mEs<#D(FrGhfs_$yuFt`8cD4LpJlOMHvl zpt)Uhz)r&?UKPTv*9Ql?)z*N=bpY5!Wd-s4k`&HC=h=)u9~j&~8w8{p%s@Qfk^4TwovV|;AuedaR6K3V1222R9CYy0YS(U0 zX0_D^j^2=y3i@B3leOP3&^Q?o5_ft6#!dE)DmrWXg(x(Ym%ElTR{eV=y&!lO;|bp( z0w*{j+M?6(`C-zk|>;AW| zAZRH@W$Hy({)drJ=AdhJZhvjSNRRW=QzZxhiA;0sg*ADDjCC2wkjhc44@F8)-r4P~ z+6b~hP$g)mKy8Gs`%UB#XO9({WxHfG+$8J@fsj{fKjt=&bql0tNiJY$#f07`mep6-`x^V@Ftge-dJ zJFP--8D+K+cDxbjRcLSH$^ZL0DuCY{S=V^);B<)Qv6@J1^@CDIAbv@l%H3EDhXp1B zXc>Nr;7{((y!;2sVFA9w+hBA(!pwG2GE&!SX+LNThrsl0K~sCiMEct9b&tgj5L7_j zB{%5)GxwgOw{o&p$K>5w{^q-p&89tOSa*@Zj}ZMwv4~haY1e4O!kH{9nJR9S>BfT< zqUbQUxWi&!RCZ@%Yj`hIFzCZOA7yIxF4z%%hG^p8BgTm<9hU50nqk)bWp%N_9^yQ_Pj|-HM=I9E;d#pcwE&h%bKv^nA{%j!LvF> zZGZ+fltBP$0{mBtKNX4pESu|z!Y2t{azb}YL4of?X5mlPO1&xxrsnpCE^MmVnk20j zgE>IEx0U`?jBMkA#{ny2FF%*qin4EVc_Q-WNLq-Ii-I%CadW(=ZlO4xLNF>$dq}Di z=yC14rYWgcftOE z4X9*j<}4}hMLo>RkvQNdB)j+E2}FGk$ah7~zuWS%?~Dm;oRlaDkhAm)`B1q@#LIzE z!0lPwpu^Dvl~v4W3}Vn55z=|Fs@PaL)oFNYf>V?>)XE8dZM*WC2thKVrL!TcXImSLU zQ^8a7C_jZdAsjrbl<-lB41|wHv4xWwrKMlr+iDC3 ze_(per~O0VS;ZTs<970&PNqpJSLXYz0x^hTPM$vPWH3*>)47#MUyo#4?ef%7+a2^7XQ$wpl}x?`SIi)scBNKTxT~LbYhQD_4e93 zgD3c@Z!x2NhVnSfApusl*U(;l741xZx**W`jO{Rmh7m9un;Y!zNEr8(M-;Y`&-CYM z08hK|Qe^Y{GzL!4)S%y>mq+3Nc4Rc_zM^B-;15HNMQ2U!`;(A)>uS?<41|7RZ*jxL z1scgg&h3(eN1*99^8efC`!=Gs+ZA-fE9yUg&4MNKde{Ej_bzJD@c@Qe8_ga4AP>QE ztJ^G&5lMpL=p(L~rN8v-NFmFkb8r9i$LjICoEMnQq9|sDE3gCX1Bbw-24lX$JluG$ zjL=^67B$P15%48`&lv)2PrqOK%F-SEt1t}-_R5<~ABLjAdsLt#Rh%TzNB_jTur|!K z_;5LDy>oPR&!nU9npT?PN6yt+1m_1_u&dZj|A!5iQeP)yUG_h&Pp}{Ekbn9t_{N8x z?`>_~d<|%K2O4Nr!>xwEA)a&0;p5TsdgnvWXq~%y)@IM0z!iAjzSL%9$Wo4rdlUJX zP`DDZL%>nrT1*BF9d3bSMq(l-_f|iPDr^h`s~#q->$+Ri*VxuO8M9$~LEa0(?<9Ts 
zZ`qDK@=}M2jOoKx2(diMd(|az>F$_x$`1t6Xxsb0-nUK9_;NmA#o!pzrxqg>hwp#em%`@Q-po?P}q3;BEAl132@(Lk5SAMz0<~7Dp_`9p|0k{|J`$ ze~H4m5liuHu{?^^*cvAwab7aP-?4jUGu*FXmDZa$y2f6#dV9Tx)jx|rv$yo2_9?)N zB>j_W7Bd4&DpKGjmdzJFituPhby>(Qn51b#B3onDgmTHqlfma3o+br$9&c{A@(?~z zB?3!K_!nSNUg`GP{3+$*P$w0nw*u>tR~?&%Uy5V4k%)1lcbi)%*}k+31t+zumyF{# z4Yc9JFx!?qq8=|4sOwimD^0O4sYffscM3_dd{<&{k==Tq@|aJOW1b~!N#cLDsZbcO zdW{NOX4bEFo~|ysD49Y8@iaJ)?9dpyR(Q|J$K5wxeTncHa^#-)Ra_RKmoam2#QZ1p z5$^w@BG-FjBFf98i!(T(ui;2SkNUm+%jf(xzf(H<$>1EsPl3i{56M_Zm=OrCuHC=# zrqxR)+iDVK%&tCiOhj@;g50$A{7s8=tgLB---1e_w*v4LgI(YUSF#3gAU8jeFsCHt zLb8tvk8{0yD~+?pN>E}$AZ!v<*XpuswwrdZdIjXtTt&4<`^wh%@(^q4Z~V{ijHp0U zmB+yzHGL#>7r73j;j@(VeD{^#-~Opri#DGqG95PxPl7fEhW?}vc4=3Fv!k7$$@81O zGU3N8FTPyJ5f_GFgOqAhoSDJQ{cQ#hY1pTP)%J5kU8#L)4Py^RkjkTnX}A1Nd!9dj z4l#Vp*K7UQO+1e|zmy_T6~rucP^7j;xvdV4&?Kkvh}wOD(XH+2$FFWEnz`udydi8V zsA-+CqpDp#_RDiEXI@R&JRW%*Z6e6z?(ma&R)vUJ*kLyh!AGhSm!HX(Ipqc1Zk|-E z7Tt7!`1^p)TQ|QZS4%&e>X&a46nAy?PROra>bJ$iq3v7u#0!YkI3(R(HK+ef^bjXO|D&^Q$`Fu;}Q;++C?C5u$*A26OjrI=>7|r!Ex0Sus z>^7qCw4-l4_lg|vON?H(1&uft*<#)@i%oLJWITElJlq>shIX6Kx%a2rrfi=$@rk9Y z8}-ObVf@v@@E+`JxdqqhzkR3OKYuMcEl`T65~x^>L{JkZM8D384|)c+=9Xz6f2JK8 zFn*4l;?eB$Br^TyFOE9u1ybi8JO2ZdNTmrO-k8yAsBYLnL25@U@~NR1?ocG0(@iK} zb`Sk)HTXRu5n;!fSX^e;K+fx)UG#_gBMVRVP`C8wuCX<1Wo+#oJgUBsSCYkSiUaO@ z19XeShZ{RSBhqzV^-!*CTCcdd5BD-K9>X(~+l@Sb*i&O{6HlfzeE6nQSpHeV#wmb;HY=&G$Yz zrq{@Rco}!J{@+%mo>km*){(M*ygpnOz)H-GM2@_p)wA%gqEo;h*tFy_KFQvrfEMPE zb=aebLwTigVcpf1(FjDKuh2B@{Eb9|BM