diff --git a/.flake8 b/.flake8 index 32c8384..2120283 100644 --- a/.flake8 +++ b/.flake8 @@ -1,11 +1,11 @@ -[flake8] -extend-ignore = E203, E266, E501 -# line length is intentionally set to 80 here because black uses Bugbear -# See https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length for more details -max-line-length = 80 -max-complexity = 18 -select = B,C,E,F,W,T4,B9 -# We need to configure the mypy.ini because the flake8-mypy's default -# options don't properly override it, so if we don't specify it we get -# half of the config from mypy.ini and half from flake8-mypy. +[flake8] +extend-ignore = E203, E266, E501 +# line length is intentionally set to 80 here because black uses Bugbear +# See https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length for more details +max-line-length = 80 +max-complexity = 18 +select = B,C,E,F,W,T4,B9 +# We need to configure the mypy.ini because the flake8-mypy's default +# options don't properly override it, so if we don't specify it we get +# half of the config from mypy.ini and half from flake8-mypy. mypy_config = mypy.ini \ No newline at end of file diff --git a/.github/workflows/python-app.yml b/.github/workflows/python-app.yml index 9ced7ee..3c22fe5 100644 --- a/.github/workflows/python-app.yml +++ b/.github/workflows/python-app.yml @@ -1,45 +1,45 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions -# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions - -name: Python package - -on: - push: - branches: [ master ] - pull_request: - branches: [ master ] - -jobs: - build: - - runs-on: ubuntu-latest - strategy: - matrix: - python-version: ['3.7.1', '3.8', '3.9'] - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install poetry - poetry install - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics - # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide - poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics - #- name: Check type hints with mypy - #run: | - #poetry run mypy --ignore-missing-imports . - - name: Check whether black formatter was run - run: | - poetry run black --check . 
-      - name: Run tests and check coverage
-        run: |
-          poetry run pytest tests/
+# This workflow will install Python dependencies, run tests and lint with a variety of Python versions
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
+
+name: Python package
+
+on:
+  push:
+    branches: [ master ]
+  pull_request:
+    branches: [ master ]
+
+jobs:
+  build:
+
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ['3.7.1', '3.8', '3.9']
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v2
+        with:
+          python-version: ${{ matrix.python-version }}
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install poetry
+          poetry install
+      - name: Lint with flake8
+        run: |
+          # stop the build if there are Python syntax errors or undefined names
+          poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
+          # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
+          poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
+      #- name: Check type hints with mypy
+      #  run: |
+      #    poetry run mypy --ignore-missing-imports .
+      - name: Check whether black formatter was run
+        run: |
+          poetry run black --check .
+      - name: Run tests and check coverage
+        run: |
+          poetry run pytest tests/
diff --git a/.gitignore b/.gitignore
index 6a44bd5..8eb014c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,17 +1,20 @@
-# Virtualenv
-.venv
-
-# Raw data, too large to use Git.
-data_raw
-data
-osrm
-osrm.zip
-brazil-201110.osm.pbf
-
-# Testing
-.coverage
-__pychache__
-tests/results
-
-# Bytecode
-*.pyc
\ No newline at end of file
+# Virtualenv
+.venv
+
+# Raw data, too large to use Git.
+data_raw
+data
+osrm
+osrm.zip
+brazil-201110.osm.pbf
+
+# Testing
+.coverage
+__pycache__
+tests/results
+
+# Bytecode
+*.pyc
+
+# Output
+output/*
\ No newline at end of file
diff --git a/.mypy b/.mypy
index d51eff9..bca4c82 100644
--- a/.mypy
+++ b/.mypy
@@ -1,32 +1,32 @@
-[mypy]
-# Specify the target platform details in config, so your developers are
-# free to run mypy on Windows, Linux, or macOS and get consistent
-# results.
-python_version=3.8
-platform=linux
-
-# flake8-mypy expects the two following for sensible formatting
-show_column_numbers=True
-
-# show error messages from unrelated files
-follow_imports=normal
-
-# suppress errors about unsatisfied imports
-ignore_missing_imports=True
-
-# be strict
-disallow_untyped_calls=True
-warn_return_any=True
-strict_optional=True
-warn_no_return=True
-warn_redundant_casts=True
-warn_unused_ignores=True
-disallow_any_generics=True
-
-# The following are off by default. Flip them on if you feel
-# adventurous.
-disallow_untyped_defs=True
-check_untyped_defs=True
-
-# No incremental mode
-cache_dir=/dev/null
+[mypy]
+# Specify the target platform details in config, so your developers are
+# free to run mypy on Windows, Linux, or macOS and get consistent
+# results.
+python_version=3.8 +platform=linux + +# flake8-mypy expects the two following for sensible formatting +show_column_numbers=True + +# show error messages from unrelated files +follow_imports=normal + +# suppress errors about unsatisfied imports +ignore_missing_imports=True + +# be strict +disallow_untyped_calls=True +warn_return_any=True +strict_optional=True +warn_no_return=True +warn_redundant_casts=True +warn_unused_ignores=True +disallow_any_generics=True + +# The following are off by default. Flip them on if you feel +# adventurous. +disallow_untyped_defs=True +check_untyped_defs=True + +# No incremental mode +cache_dir=/dev/null diff --git a/LICENSE b/LICENSE index d39774c..25cce7b 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,21 @@ -MIT License - -Copyright (c) 2021 Loggi - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. +MIT License + +Copyright (c) 2021 Loggi + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/README.md b/README.md index 01de1c2..f244d78 100644 --- a/README.md +++ b/README.md @@ -1,146 +1,146 @@ -Loggi Benchmark for Urban Deliveries (BUD) -================== - -The Loggi Benchmark for Urban Deliveries contains datasets and benchmark -scripts for large-scale problems using as a testbed some of Brazil's largest -cities. The data is synthesized completely from public sources and -representative of the challenges we face on real world deliveries for the -problems outlined in the repository. - -To get started with the development using LoggiBUD, check out our -[quickstart tutorial](./docs/quickstart.md). 
- -# Dataset - -The dataset simulates the challenge of a large delivery company in the -last-mile step of the supply chain. Worldwide, metropolitan areas like Rio de -Janeiro witness the delivery of tens or even hundreds of thousands of parcels -and mail. Deliveries on those areas are unexpectedly very unevenly -distributed. Rich and densily populated areas drive most of the deliveries. -Besides economic factors, our data also do consider the geography of the -represented locations and real street travel distances, as the reality of -complex cities is not well modelled by euclidian distances. - -On our first version, We provide 90 train instances and 30 evaluation instances -for the end-to-end problem on Rio de Janeiro (RJ), Brasília (DF) and Belém (PA). -The instance sizes range from 7k to 32k deliveries. The challenge is to position -consolidation hubs across the city and split demands into delivery vehicles. - -
- -While our goal is to solve the problem on an end-to-end manner, we also provide -a baseline distribution of demands into 11 hubs obtained using a p-hub -assignment. This leads to 990 train instances and 330 evaluation instances on -the CVRP challenge. - -
- -# Tasks - -### Task 1 - Last-Mile Capacitated Vehicle Routing Problem - -This first task is a classic Capacitated Vehicle Routing Problem (CVRP) with -minimizing travel distance as objetive. Although solving such large CVRPs with -complete information is often impractical, these solutions are good targets for -Task 2. - -### Task 2 - Last-Mile Incremental Capacitated Vehicle Routing Problem - -This is considered a particular case of Stochastic and Dynamic Capacitated -Vehicle Routing Problems (SD-CVRP). The objetive is to sort deliveries into -routes one by one as they arrive. You may use only historical information and -the previously routed deliveries from the same instance. No recombination is -allowed after assignment. - -From an operations point of view, this solution class is a lot more powerful as -it doesn't require full information before start routing. Instead, hub -operators can route deliveries as they arrive. - -### Task 3 - End-to-end last-mile problem - -This is an extension of the task 1.2, but without fixed dispatch locations. -Therefore, locations and allocation of requests to dispatch locations is -considered part of the solution, but should be the same accross multiple -instances. This can be considered an end-to-end optimization of last-mile -delivery operations. - -# Motivation - -This benchmark is an effort to make new operations research solutions closer to -real-world applications faced by delivery companies. We believe this work can -help both practitioners and academics to reduce the gap between -state-of-the-art and practice. - -We identify that several promising solutions in academic literature have -evaluation issues. Several papers only include experiments for small or -unrealistic problems. We also expected real-world instances to help researchers -to come up with new ideas for solving large routing and location problems. - -### Why a new dataset - -There are several famous datasets for the Vehicle Routing Problem (VRP). -However, there are limitations to these instances that make them hard to apply -to real-world last-mile problems. Some of them include: - -* Small instances -* Ignore streets, use only euclidean distances -* No discussion on aggregation levels - -Most instances also fail to model real challenges of dynamic and stochastic -instances, where the stochastic information is usually presented as -probabilities on deterministic points. We instead present stochastic -information as historical deliveries performed under the same distribution. - -### Why a GitHub benchmark - -We want to make publishing results as easy as opening a pull-request. We also -want to provide you with code to evaluate your solution and make sure it's -ready for publication. If you're not familiar with making open-source -contributions, don't worry. Open an issue, and we'll be happy to guide you with -your submission. - - -# How to submit - -To include your method to the benchmark leaderboards, you must submit the -solution data into a pull request. - -While submitting the code is not required, we strongly suggest making your -research code available to make reproducing the results more accessible. You -can publish the code on your repository and include a link on your submission. -We also have some baselines on this repository. If you want to include your -code as a baseline, we'll be happy to revise it. - -Don't forget to acknowledge the literature and code you have used on your -solution. 
-
-# Instance generation pipeline
-
-If you want more details on how we generate our instances and how they relate
-to actual deliveries, please check our [generation pipeline
-documentation](./loggibud/v1/instance_generation/). It also includes the steps for reproducing the
-provided instances.
-
-
-# Citing this work
-
-If you use this data in academic work, please cite our repository. A full paper
-describing it should be available soon.
-
-```@misc{Charles2013,
-  author = {Loggi},
-  title = {loggiBUD: Loggi Benchmark for Urban Deliveries},
-  year = {2021},
-  publisher = {GitHub},
-  journal = {GitHub repository},
-  howpublished = {\url{https://github.com/loggi/loggibud}}
-}
-```
-
-# Disclaimer
-
-This repository is not an official Loggi product. Use it at your own risk under
-the terms of the MIT license.
+Loggi Benchmark for Urban Deliveries (BUD)
+==================
+
+The Loggi Benchmark for Urban Deliveries contains datasets and benchmark
+scripts for large-scale problems, using some of Brazil's largest cities as a
+testbed. The data is synthesized entirely from public sources and is
+representative of the challenges we face in real-world deliveries for the
+problems outlined in this repository.
+
+To get started developing with LoggiBUD, check out our
+[quickstart tutorial](./docs/quickstart.md).
+
+# Dataset
+
+The dataset simulates the challenge of a large delivery company in the
+last-mile step of the supply chain. Worldwide, metropolitan areas like Rio de
+Janeiro witness the delivery of tens or even hundreds of thousands of parcels
+and mail. Deliveries in those areas are, perhaps unexpectedly, very unevenly
+distributed. Rich and densely populated areas drive most of the deliveries.
+Besides economic factors, our data also considers the geography of the
+represented locations and real street travel distances, as the reality of
+complex cities is not well modelled by Euclidean distances.
+
+In our first version, we provide 90 train instances and 30 evaluation instances
+for the end-to-end problem on Rio de Janeiro (RJ), Brasília (DF) and Belém (PA).
+The instance sizes range from 7k to 32k deliveries. The challenge is to position
+consolidation hubs across the city and split demands into delivery vehicles.
+
+
+
+While our goal is to solve the problem in an end-to-end manner, we also provide
+a baseline distribution of demands into 11 hubs obtained using a p-hub
+assignment. This leads to 990 train instances and 330 evaluation instances for
+the CVRP challenge.
+
+
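+Each instance is distributed as a JSON file that can be loaded through the
+Python API described in the quickstart. A minimal inspection sketch (the file
+path below is hypothetical; point it at any instance you have unpacked):
+
+```python
+from loggibud.v1.types import CVRPInstance
+
+# Hypothetical path; adjust to your local copy of the dataset.
+instance = CVRPInstance.from_file("data/cvrp-instances-1.0/train/rj-0/cvrp-0-rj-0.json")
+
+print(instance.name)              # e.g. "rj-0-cvrp-0"
+print(len(instance.deliveries))   # between 7k and 32k deliveries
+print(instance.vehicle_capacity)  # capacity shared by all vehicles
+```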
+
+# Tasks
+
+### Task 1 - Last-Mile Capacitated Vehicle Routing Problem
+
+This first task is a classic Capacitated Vehicle Routing Problem (CVRP) with
+minimizing travel distance as the objective. Although solving such large CVRPs
+with complete information is often impractical, these solutions are good
+targets for Task 2.
+
+### Task 2 - Last-Mile Incremental Capacitated Vehicle Routing Problem
+
+This is considered a particular case of Stochastic and Dynamic Capacitated
+Vehicle Routing Problems (SD-CVRP). The objective is to sort deliveries into
+routes one by one as they arrive. You may use only historical information and
+the previously routed deliveries from the same instance. No recombination is
+allowed after assignment.
+
+From an operations point of view, this solution class is a lot more powerful,
+as it doesn't require full information before routing starts. Instead, hub
+operators can route deliveries as they arrive.
+
+### Task 3 - End-to-end last-mile problem
+
+This is an extension of Tasks 1 and 2, but without fixed dispatch locations.
+Therefore, the locations and the allocation of requests to dispatch locations
+are considered part of the solution, but should be the same across multiple
+instances. This can be considered an end-to-end optimization of last-mile
+delivery operations.
+
+# Motivation
+
+This benchmark is an effort to bring new operations research solutions closer
+to the real-world applications faced by delivery companies. We believe this
+work can help both practitioners and academics to reduce the gap between
+state-of-the-art and practice.
+
+We identify that several promising solutions in the academic literature have
+evaluation issues. Several papers only include experiments for small or
+unrealistic problems. We also expect real-world instances to help researchers
+come up with new ideas for solving large routing and location problems.
+
+### Why a new dataset
+
+There are several famous datasets for the Vehicle Routing Problem (VRP).
+However, these instances have limitations that make them hard to apply to
+real-world last-mile problems. Some of them include:
+
+* Small instances
+* Ignoring streets and using only Euclidean distances
+* No discussion on aggregation levels
+
+Most instances also fail to model the real challenges of dynamic and stochastic
+instances, where the stochastic information is usually presented as
+probabilities on deterministic points. We instead present stochastic
+information as historical deliveries performed under the same distribution.
+
+### Why a GitHub benchmark
+
+We want to make publishing results as easy as opening a pull request. We also
+want to provide you with code to evaluate your solution and make sure it's
+ready for publication. If you're not familiar with making open-source
+contributions, don't worry. Open an issue, and we'll be happy to guide you
+through your submission.
+
+
+# How to submit
+
+To include your method in the benchmark leaderboards, you must submit the
+solution data in a pull request.
+
+While submitting the code is not required, we strongly suggest making your
+research code available to make reproducing the results more accessible. You
+can publish the code on your own repository and include a link in your
+submission. We also have some baselines in this repository. If you want to
+include your code as a baseline, we'll be happy to review it.
+
+Don't forget to acknowledge the literature and code you have used in your
+solution.
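+
+As a loose illustration of the Task 2 protocol above: a solver is invoked once
+per arriving delivery and may not revisit earlier assignments. The sketch below
+is only schematic; `route_one` is a hypothetical placeholder for your method,
+not part of the LoggiBUD API:
+
+```python
+def solve_incrementally(instance, route_one):
+    """Assign deliveries to routes one by one, in arrival order."""
+    routes = []
+    for delivery in instance.deliveries:  # arrival order
+        # route_one may look at historical data and at deliveries already
+        # routed in this instance, but earlier assignments stay frozen.
+        routes = route_one(routes, delivery)
+    return routes
+```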
+
+# Instance generation pipeline
+
+If you want more details on how we generate our instances and how they relate
+to actual deliveries, please check our [generation pipeline
+documentation](./loggibud/v1/instance_generation/). It also includes the steps for reproducing the
+provided instances.
+
+
+# Citing this work
+
+If you use this data in academic work, please cite our repository. A full paper
+describing it should be available soon.
+
+```
+@misc{loggibud2021,
+  author = {Loggi},
+  title = {loggiBUD: Loggi Benchmark for Urban Deliveries},
+  year = {2021},
+  publisher = {GitHub},
+  journal = {GitHub repository},
+  howpublished = {\url{https://github.com/loggi/loggibud}}
+}
+```
+
+# Disclaimer
+
+This repository is not an official Loggi product. Use it at your own risk under
+the terms of the MIT license.
diff --git a/docs/osrm.md b/docs/osrm.md
index 7ea7876..7c5c46c 100644
--- a/docs/osrm.md
+++ b/docs/osrm.md
@@ -1,40 +1,40 @@
-Computing Street distances
-======
-
-Some of our instances are provided with precomputed distance matrixes. However, for some problems providing these matrixes is unfeasible due to the quadratic cost of computing them. For these problems we recommend solvers to run their own street distance calculator. We recommend it to be consistent with our own evaluation pipeline.
-
-Our pipeline for computing street distances is based on OpenStreetMaps and OSRM. Two amazing open source projects that make this work possible.
-
-# Running a distance server
-
-The easiest way to use to reproduce the distances computed on the benchmark is thorough an OSRM server using docker. This can be done with the following steps:
-
-1. Download and install docker according to your operational system.
-2. Download our [precompiled distance files](https://loggibud.s3.amazonaws.com/osrm/osrm.zip) (5.3Gb compressed, 12.6Gb decompressed).
-3. Extract the files into an `osrm` directory.
-3. Run an OSRM backend container with the following command:
-
-```
-docker run --rm -t -id \
-  --name osrm \
-  -p 5000:5000 \
-  -v "${PWD}/osrm:/data" \
-  osrm/osrm-backend osrm-routed --algorithm ch /data/brazil-201110.osrm --max-table-size 10000
-```
-
-# I have no resources to run my own server
-
-Don't worry, OSRM provides a test instance at `http://router.project-osrm.org`. It may not be 100% equal to our distances, but it should be broadly consistent. It is probably ok to evaluate your solution using OSRM public server and just obtain the final results with our version of the maps.
-
-# Recompiling map files
-
-We provide the precompiled files to ease development. We recommend using them for further devolopment.
-
-This section describes how we compile the maps, in case you want to reproduce the provided files or generate them with the latest version of the maps. Our pipeline is based on Brazil OpenStreetMaps version 201110. You can download it [from Geofabrik](http://download.geofabrik.de/south-america/brazil-201110.osm.pbf) or through our [S3 mirror](https://loggibud.s3.amazonaws.com/osrm/brazil-201110.osm.pbf).
-
-We use the basic configuration from OSRM using contraction hierarchies. This may take about 14Gb of RAM and 3h on an i7 CPU. Double chek your RAM and swap space before proceeding.
-
-```
-docker run --name osrm --rm -t -v "${PWD}:/data" osrm/osrm-backend osrm-extract -p /opt/car.lua /data/brazil-201110.osm.pbf
-docker run --name osrm --rm -t -v "${PWD}:/data" osrm/osrm-backend osrm-contract /data/brazil-201110.osrm
-```
+Computing Street distances
+======
+
+Some of our instances are provided with precomputed distance matrices. However, for some problems providing these matrices is unfeasible due to the quadratic cost of computing them. For those problems, we recommend that solvers run their own street distance calculator, kept consistent with our own evaluation pipeline.
+
+Our pipeline for computing street distances is based on OpenStreetMaps and OSRM, two amazing open-source projects that make this work possible.
+
+# Running a distance server
+
+The easiest way to reproduce the distances computed on the benchmark is through an OSRM server using Docker. This can be done with the following steps:
+
+1. Download and install Docker according to your operating system.
+2. Download our [precompiled distance files](https://loggibud.s3.amazonaws.com/osrm/osrm.zip) (5.3Gb compressed, 12.6Gb decompressed).
+3. Extract the files into an `osrm` directory.
+4. Run an OSRM backend container with the following command:
+
+```
+docker run --rm -t -id \
+  --name osrm \
+  -p 5000:5000 \
+  -v "${PWD}/osrm:/data" \
+  osrm/osrm-backend osrm-routed --algorithm ch /data/brazil-201110.osrm --max-table-size 10000
+```
+
+# I have no resources to run my own server
+
+Don't worry, OSRM provides a test instance at `http://router.project-osrm.org`. It may not be 100% equal to our distances, but it should be broadly consistent. It is probably ok to evaluate your solution using the OSRM public server and just obtain the final results with our version of the maps.
+
+# Recompiling map files
+
+We provide the precompiled files to ease development, and we recommend using them for further development.
+
+This section describes how we compile the maps, in case you want to reproduce the provided files or generate them with the latest version of the maps. Our pipeline is based on Brazil OpenStreetMaps version 201110. You can download it [from Geofabrik](http://download.geofabrik.de/south-america/brazil-201110.osm.pbf) or through our [S3 mirror](https://loggibud.s3.amazonaws.com/osrm/brazil-201110.osm.pbf).
+
+We use the basic configuration from OSRM with contraction hierarchies. This may take about 14Gb of RAM and 3h on an i7 CPU. Double check your RAM and swap space before proceeding.
+
+```
+docker run --name osrm --rm -t -v "${PWD}:/data" osrm/osrm-backend osrm-extract -p /opt/car.lua /data/brazil-201110.osm.pbf
+docker run --name osrm --rm -t -v "${PWD}:/data" osrm/osrm-backend osrm-contract /data/brazil-201110.osrm
+```
diff --git a/docs/quickstart.md b/docs/quickstart.md
index 91590e0..fef9f09 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -1,207 +1,207 @@
-
-# Getting started
-
-## Contact information
-
-If you have any issues during development, you can contact our team using github issues, or alternativelly through bud@loggi.com.
-
-## Dataset
-
-The full dataset is available to [download here](https://loggibud.s3.amazonaws.com/dataset.zip). Make sure you unzip the dataset into the data file. It should contain two directories named `cvrp-instances-1.0` and `delivery-instances-1.0`.
-
-Alternativelly, you can generate the instances yourself from public data using the [generation pipeline](../loggibud/v1/instance_generation/README.md).
- -## OSRM Server - -To correctly evaluate distances, you should use OpenStreetMaps distances provided by the OSRM server. Our recommended way of running OSRM is Docker. To run it, please follow these steps. - -1. Download and install docker, follow the instructions according to your operational system. -2. Download our [precompiled distance files](https://loggibud.s3.amazonaws.com/osrm/osrm.zip) (5.3Gb compressed, 12.6Gb decompressed). -3. Extract the files into an `osrm` directory. -4. Run an OSRM backend container with the following command: - -``` -docker run --rm -t -id \ - --name osrm \ - -p 5000:5000 \ - -v "${PWD}/osrm:/data" \ - osrm/osrm-backend:v5.24.0 osrm-routed --algorithm ch /data/brazil-201110.osrm --max-table-size 10000 -``` - -For more information, check our [OSRM detailed documentation](./osrm.md). - -## Python API - -We provide an API for loading and running Python solvers. It currently supports any Python version >= 3.7.1, which is natively available in most up-to-date operating systems. - -### Repository setup -This project uses [Python Poetry](https://python-poetry.org/docs/) to manage dependencies. You can follow its docs to install it, but a simple - -```bash -pip install poetry -# Or with sudo to install it system-wide -# sudo pip install poetry -``` - -normally suffices. Check if it worked with `poetry --version`. - -Then, at the root of the project install the dependencies with - -```bash -poetry install -``` - -With everything in place, any Python command can be executed by preceding it with `poetry run` (e.g., `poetry run pytest tests/`). This is usually enough for executing the code in this project, but the user who demands more information can check the Poetry's website. - -To implement a new method, we suggest you to create a Python `solve` function that takes an instance and outputs the solution to a file. - -### Task 1 - -```python -from loggibud.v1.types import CVRPInstance, CVRPSolution - - -# Implement your method using a solve function that takes an instance and returns a solution. -def solve(instance: CVRPInstance) -> CVRPSolution: - return CVRPSolution(...) - - -# Loading an instance from file. -instance = CVRPInstance.from_file("path/to/instance.json") - -# Call your method specific code. -solution = solve(instance) - -# Saving your solution to a file. -solution.to_file("path/to/solution.json") -``` - -To evaluate your solution inside Python, you can do: - -```python -from loggibud.v1.eval.task1 import evaluate_solution - -distance_km = evaluate_solution(instance, solution) -``` - -## JSON schemas - -If you don't use Python, you should implement your own IO functions. The JSON schemas for reading and writing solutions are described below. - -**CVRPInstance** - -```javascript -{ - // Name of the specific instance. - "name": "rj-0-cvrp-0", - - // Hub coordinates, where the vehicles originate. - "origin": { - "lng": -42.0, - "lat": -23.0 - }, - - // The capacity (sum of sizes) of every vehicle. - "vehicle_capacity": 120, - - // The deliveries that should be routed. - "deliveries": [ - { - // Unique delivery id. - "id": "4943245fb66541edaf54f4e3aaed188a", - - // Delivery destination coordinates. - "point": { - "lng": -43.12589115884953, - "lat": -22.89585186478512 - }, - - // Size of the delivery. - "size": 2 - } - // ... - ] -} -``` - -**CVRPSolution** - - -```javascript -{ - // Name of the specific instance. - "name": "rj-0-cvrp-0", - - // Solution vehicles. - "vehicles": [ - { - // Vehicle origin (should be the same on CVRP solutions). 
- "origin": { - "lng": -43.374124642209765, - "lat": -22.790683484127058 - }, - // List of deliveries in the vehicle. - "deliveries": [ - { - "id": "54b10d6d-2ef7-4a69-a9f7-e454f81cdfd2", - "point": { - "lng": -43.44893966650845, - "lat": -22.742762573031424 - }, - "size": 8 - } - // ... - ] - } - // ... - ] -} -``` - -### Evaluation scripts - -```bash -poetry run python -m loggibud.v1.eval.task1 \ - --instance tests/results/cvrp-instances/train/rj-0-cvrp-0.json \ - --solution results/rj-0-cvrp-0.json -``` - - -## Contributing - -First of all, thanks for the interest in the project. If you found a bug or have any question, feel free to open an issue. - -If you prefer to contribute with code or documentation, first fork the repository, make the appropriate changes and open a pull request to our `master` branch. - -Notice we use Python Poetry to manage dependencies. So if you need to add, remove or update any dependency make sure to use the proper [`poetry`](https://python-poetry.org/docs/) commands to write the changes in the `pyproject.toml` and `poetry.lock` files. - -Moreover, before opening a pull request, make sure the following were taken care: - -- The `black` formatter was run: -```bash -poetry run black . -``` - -- The code is conformant with `flake8`: -```bash -poetry run flake8 . -``` - -- The tests are still passing: -```bash -poetry run pytest tests/ -``` - -### Note to Windows users - -In some cases, Windows uses CRLF as end of line instead of LF, which is the norm in Unix-based systems. This erroneously makes git thinks that a whole file was changed when saved in different operating systems. - -To alleviate this issue, we recommend Windows users to do one of the following: - -- When installing Git for Windows, choose the option "Checkout Windows-style, commit Unix-style line endings" [(see this StackOverflow answer)](https://stackoverflow.com/questions/1889559/git-diff-to-ignore-m) - -- If Git is already installed, write the following in the LoggiBUD repository before making any commit: - -```bash -git config core.whitespace cr-at-eol -``` + +# Getting started + +## Contact information + +If you have any issues during development, you can contact our team using github issues, or alternativelly through bud@loggi.com. + +## Dataset + +The full dataset is available to [download here](https://loggibud.s3.amazonaws.com/dataset.zip). Make sure you unzip the dataset into the data file. It should contain two directories named `cvrp-instances-1.0` and `delivery-instances-1.0`. + +Alternativelly, you can generate the instances yourself from public data using the [generation pipeline](../loggibud/v1/instance_generation/README.md). + +## OSRM Server + +To correctly evaluate distances, you should use OpenStreetMaps distances provided by the OSRM server. Our recommended way of running OSRM is Docker. To run it, please follow these steps. + +1. Download and install docker, follow the instructions according to your operational system. +2. Download our [precompiled distance files](https://loggibud.s3.amazonaws.com/osrm/osrm.zip) (5.3Gb compressed, 12.6Gb decompressed). +3. Extract the files into an `osrm` directory. +4. Run an OSRM backend container with the following command: + +``` +docker run --rm -t -id \ + --name osrm \ + -p 5000:5000 \ + -v "${PWD}/osrm:/data" \ + osrm/osrm-backend:v5.24.0 osrm-routed --algorithm ch /data/brazil-201110.osrm --max-table-size 10000 +``` + +For more information, check our [OSRM detailed documentation](./osrm.md). 
+
+## Python API
+
+We provide an API for loading and running Python solvers. It currently supports any Python version >= 3.7.1, which is natively available in most up-to-date operating systems.
+
+### Repository setup
+This project uses [Python Poetry](https://python-poetry.org/docs/) to manage dependencies. You can follow its docs to install it, but a simple
+
+```bash
+pip install poetry
+# Or with sudo to install it system-wide
+# sudo pip install poetry
+```
+
+normally suffices. Check if it worked with `poetry --version`.
+
+Then, at the root of the project, install the dependencies with
+
+```bash
+poetry install
+```
+
+With everything in place, any Python command can be executed by preceding it with `poetry run` (e.g., `poetry run pytest tests/`). This is usually enough for executing the code in this project, but users who need more information can check Poetry's website.
+
+To implement a new method, we suggest creating a Python `solve` function that takes an instance and outputs the solution to a file.
+
+### Task 1
+
+```python
+from loggibud.v1.types import CVRPInstance, CVRPSolution
+
+
+# Implement your method using a solve function that takes an instance and returns a solution.
+def solve(instance: CVRPInstance) -> CVRPSolution:
+    return CVRPSolution(...)
+
+
+# Loading an instance from a file.
+instance = CVRPInstance.from_file("path/to/instance.json")
+
+# Call your method-specific code.
+solution = solve(instance)
+
+# Saving your solution to a file.
+solution.to_file("path/to/solution.json")
+```
+
+To evaluate your solution inside Python, you can do:
+
+```python
+from loggibud.v1.eval.task1 import evaluate_solution
+
+distance_km = evaluate_solution(instance, solution)
+```
+
+## JSON schemas
+
+If you don't use Python, you should implement your own IO functions. The JSON schemas for reading and writing solutions are described below (a standard-library parsing sketch is shown further down, after the Contributing introduction).
+
+**CVRPInstance**
+
+```javascript
+{
+  // Name of the specific instance.
+  "name": "rj-0-cvrp-0",
+
+  // Hub coordinates, where the vehicles originate.
+  "origin": {
+    "lng": -42.0,
+    "lat": -23.0
+  },
+
+  // The capacity (sum of sizes) of every vehicle.
+  "vehicle_capacity": 120,
+
+  // The deliveries that should be routed.
+  "deliveries": [
+    {
+      // Unique delivery id.
+      "id": "4943245fb66541edaf54f4e3aaed188a",
+
+      // Delivery destination coordinates.
+      "point": {
+        "lng": -43.12589115884953,
+        "lat": -22.89585186478512
+      },
+
+      // Size of the delivery.
+      "size": 2
+    }
+    // ...
+  ]
+}
+```
+
+**CVRPSolution**
+
+
+```javascript
+{
+  // Name of the specific instance.
+  "name": "rj-0-cvrp-0",
+
+  // Solution vehicles.
+  "vehicles": [
+    {
+      // Vehicle origin (should be the same on CVRP solutions).
+      "origin": {
+        "lng": -43.374124642209765,
+        "lat": -22.790683484127058
+      },
+      // List of deliveries in the vehicle.
+      "deliveries": [
+        {
+          "id": "54b10d6d-2ef7-4a69-a9f7-e454f81cdfd2",
+          "point": {
+            "lng": -43.44893966650845,
+            "lat": -22.742762573031424
+          },
+          "size": 8
+        }
+        // ...
+      ]
+    }
+    // ...
+  ]
+}
+```
+
+### Evaluation scripts
+
+```bash
+poetry run python -m loggibud.v1.eval.task1 \
+    --instance tests/results/cvrp-instances/train/rj-0-cvrp-0.json \
+    --solution results/rj-0-cvrp-0.json
+```
+
+
+## Contributing
+
+First of all, thanks for your interest in the project. If you find a bug or have any questions, feel free to open an issue.
+
+If you prefer to contribute with code or documentation, first fork the repository, make the appropriate changes and open a pull request to our `master` branch.
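+
+Returning to the JSON schemas above: a solution file can be parsed with nothing beyond a standard JSON library. A minimal sketch, reusing the file path from the evaluation example:
+
+```python
+import json
+
+# Load a CVRPSolution file without importing LoggiBUD.
+with open("results/rj-0-cvrp-0.json") as f:
+    solution = json.load(f)
+
+for vehicle in solution["vehicles"]:
+    load = sum(d["size"] for d in vehicle["deliveries"])
+    print(len(vehicle["deliveries"]), "deliveries, total size", load)
+```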
+
+Notice we use Python Poetry to manage dependencies. So if you need to add, remove or update any dependency, make sure to use the proper [`poetry`](https://python-poetry.org/docs/) commands to write the changes to the `pyproject.toml` and `poetry.lock` files.
+
+Moreover, before opening a pull request, make sure the following were taken care of:
+
+- The `black` formatter was run:
+```bash
+poetry run black .
+```
+
+- The code is conformant with `flake8`:
+```bash
+poetry run flake8 .
+```
+
+- The tests are still passing:
+```bash
+poetry run pytest tests/
+```
+
+### Note to Windows users
+
+In some cases, Windows uses CRLF as the end of line instead of LF, which is the norm in Unix-based systems. This erroneously makes git think that a whole file was changed when saved in different operating systems.
+
+To alleviate this issue, we recommend that Windows users do one of the following:
+
+- When installing Git for Windows, choose the option "Checkout Windows-style, commit Unix-style line endings" [(see this StackOverflow answer)](https://stackoverflow.com/questions/1889559/git-diff-to-ignore-m)
+
+- If Git is already installed, run the following in the LoggiBUD repository before making any commit:
+
+```bash
+git config core.whitespace cr-at-eol
+```
diff --git a/execute_task1.py b/execute_task1.py
new file mode 100644
index 0000000..abccc9a
--- /dev/null
+++ b/execute_task1.py
@@ -0,0 +1,92 @@
+# Runs every Task 1 baseline over a set of instances, timing each run and
+# recording the number of routes produced in a CSV report.
+import json
+import os
+import time
+from csv import writer
+
+python_execute = "python3 -m "
+caminho_para_task1 = "loggibud.v1.baselines.run_task1 "
+solves_task1 = [
+    "lkh_3",
+    "kmeans_partition_ortools",
+    "kmeans_aggregation_ortools"
+]
+
+solution = "data/output/results.csv"
+way_method = "loggibud.v1.baselines.task1."
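+# Root directory holding the training instances, one sub-directory per city.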
+way_to_dirs_not = "data/cvrp-instances-1.0/train"
+
+def main():
+    cities = [
+        "df-0"
+    ]
+
+    output = solution
+    with open(output, "w+") as f:
+        w = writer(f)
+        w.writerow(["cidade","entrada","metodo","qtd_rotas","tempo_exec"])
+        for city in cities:
+            pasta = way_to_dirs_not+"/"+city
+            caminhos = [os.path.join(pasta, nome) for nome in os.listdir(pasta)]
+            instances = [arq for arq in caminhos if os.path.isfile(arq)]
+            execute_methods(w,city,instances)
+
+def extract_routes_output(instance,method):
+    # Count how many routes (vehicles) the solver wrote to the solution file.
+    print(instance)
+    with open(instance) as data_file:
+        json_obj = json.load(data_file)
+
+    qtd_rotas = len(json_obj["vehicles"])
+    return qtd_rotas
+
+def execute_methods(w,cite,instances):
+    create_csv(w,cite,instances)
+
+# def execute_methods_test(cite,instances):
+#     create_csv_test(cite,instances)
+
+def create_csv(w,cite,instances):
+    for intance in instances:
+        for method in solves_task1:
+            name_file_with_extension = intance.split("/")
+            output_file = "output/" + method +"/"
+            awaymethod = way_method + method
+            # start the timer right before launching the solver
+            print(python_execute + caminho_para_task1 + " --instances "+ intance +" --module "+ awaymethod +" --output "+ output_file)
+            time_init = time.time()
+            os.system(python_execute + caminho_para_task1 + " --instances "+ intance +" --module "+ awaymethod +" --output "+ output_file)
+            time_finish = time.time()
+            # stop the timer once the solver finishes
+            time_run = time_finish - time_init
+            name_output = output_file+name_file_with_extension[len(name_file_with_extension)-1]
+            qtd_rotas = extract_routes_output(name_output,method)
+            w.writerow([cite,intance,method,qtd_rotas,time_run])
+
+def create_csv_test(instances,solves):
+    for intance in instances:
+        for method in solves:
+            name_file_with_extension = intance.split("/")
+            output_file = "output/" + method +"/"
+            awaymethod = way_method + method
+            # start the timer right before launching the solver
+            print(python_execute + caminho_para_task1 + " --instances "+ intance +" --module "+ awaymethod )
+            time_init = time.time()
+            os.system(python_execute + caminho_para_task1 + " --instances "+ intance +" --module "+ awaymethod )
+            time_finish = time.time()
+            # stop the timer once the solver finishes
+            time_run = time_finish - time_init
+            print("exec time = ")
+            print(time_run)
+
+
+if __name__ == "__main__":
+    main()
+
diff --git a/generateCsv.py b/generateCsv.py
new file mode 100644
index 0000000..2fe0217
--- /dev/null
+++ b/generateCsv.py
@@ -0,0 +1,180 @@
+import csv
+from statistics import mean
+from loggibud.v1.distances import OSRMConfig
+from loggibud.v1.eval.task1 import evaluate_solution
+from loggibud.v1.types import *
+
+def convertToSolution(solutionOPT: CVRPSolutionOPT, inst: CVRPInstance):
+    # Convert a CVRPSolutionOPT (which references deliveries by index) into a
+    # plain CVRPSolution, dropping empty placeholder deliveries.
+    namee = solutionOPT.name
+    timee = solutionOPT.time_exec
+    vehicless = []
+    for v in solutionOPT.vehicles:
+        deliveriess = []
+        for d in v.deliveries:
+            if d.size != 0:
+                delivery = Delivery(
+                    id = inst.deliveries[d.idu].id,
+                    point = d.point,
+                    size = d.size
+                )
+                deliveriess.append(delivery)
+        originn = inst.origin
+        vehicle = CVRPSolutionVehicle(
+            origin = originn,
+            deliveries = deliveriess
+        )
+        vehicless.append(vehicle)
+    solution = CVRPSolution(
+        name = namee,
+        vehicles = vehicless,
+        time_exec = timee
+    )
+    sol = {"": solution}
+    return solution, sol
+
+def rowCreateOptimizate(inst_path, sol_path, w, method, osrm_config):
+    line = [method]
+    inst = CVRPInstance.from_file(inst_path)
+    instance = {"": CVRPInstance.from_file(inst_path)}
+    solutionOPT = CVRPSolutionOPT.from_file(sol_path)
+    solution, sol = convertToSolution(solutionOPT, inst)
+    line.append(solution.name)
+    stems = instance.keys()
+    results = [
+        evaluate_solution(instance[stem], sol[stem], osrm_config) for stem in stems
+    ]
+    line.append(sum(results))
+    line.append(solution.time_exec)
+    line.append(len(solution.vehicles))
+    line.append(mean([sum([d.size for d in v.deliveries]) for v in solution.vehicles]))
+    w.writerow(line)
+
+def rowCreateBasicsMethods(inst_path, sol_path, w, method, osrm_config):
+    line = [method]
+    instance = {"": CVRPInstance.from_file(inst_path)}
+    solution = CVRPSolution.from_file(sol_path)
+    sol = {"": CVRPSolution.from_file(sol_path)}
+    line.append(solution.name)
+    stems = instance.keys()
+    results = [
+        evaluate_solution(instance[stem], sol[stem], osrm_config) for stem in stems
+    ]
+    line.append(sum(results))
+    line.append(solution.time_exec)
+    line.append(len(solution.vehicles))
+    line.append(mean([sum([d.size for d in v.deliveries]) for v in solution.vehicles]))
+    w.writerow(line)
+
+
+def rowCreateKpprrf(inst_path, sol_path, w, method, osrm_config):
+    # pathsa = sol_path.split('/')
+    # path_s = pathsa[0]+"/"+pathsa[1]+"/kmeansp/"+pathsa[3]+pathsa[4].split(".")[0] + "-kmeans.json"
+    # solKmeans = KmeansSolution.from_file(path_s)
+    line = [method]
+    instance = {"": CVRPInstance.from_file(inst_path)}
+    solution = CVRPSolutionKpprrf.from_file(sol_path)
+    sol = {"": CVRPSolutionKpprrf.from_file(sol_path)}
+    line.append(solution.name)
+    stems = instance.keys()
+    results = [
+        evaluate_solution(instance[stem], sol[stem], osrm_config) for stem in stems
+    ]
+    line.append(sum(results))
+    line.append(solution.time_execution) # + solKmeans.time_execution
+    line.append(solution.total_vehicles)
+    line.append(mean([sum([d.size for d in v.deliveries]) for v in solution.vehicles]))
+    w.writerow(line)
+    # instance_broke = inst_path.split('.')
+    # print(instance_broke)
+    # name_broke = instance_broke[1].split('-')
+    # day = name_broke[len(name_broke)-1]
+    # generateEspecificDay(city, day, root)
+# Generate one general CSV with comparative data, read file by file:
+# Algorithm | input | distance | time | n_veiculos | VAR(difference between max and min packages per vehicle)
+def generateGeneralCsv(
+    path_outcsv: str,
+    city: str,
+    output: str,
+    path_input: str,
+    methods: list,
+    osrm_config: OSRMConfig,
+    num_days: int
+):
+    path_input = path_input + city + "/"
+    name = "cvrp-"+city.split("-")[1]+"-"+city.split("-")[0]+"-"
+    head = ["Algorithm", "input", "distance", "time", "n_veiculos"]
+    f = open(path_outcsv, 'w', newline='', encoding='utf-8')
+    w = csv.writer(f)
+    w.writerow(head)
+    for method in methods:
+        outputx = output + method + "/" + city + "/"
+        outputs = [
+            outputx+name+str(i)+".json"
+            for i in range(90,90+num_days)
+        ]
+        for sol_path in outputs:
+            path_broke = sol_path.split('/')
+            name_instance = path_broke[len(path_broke)-1]
+            inst_path = path_input + name_instance
+            # pick the row builder that matches the method's solution format
+            print(sol_path)
+            try:
+                if method == "kpprrf":
+                    rowCreateKpprrf(inst_path, sol_path, w, method, osrm_config)
+                elif method == "krs" or method == "krso" or method == "krsof":
+                    rowCreateOptimizate(inst_path, sol_path, w, method, osrm_config)
+                else:
+                    rowCreateBasicsMethods(inst_path, sol_path, w, method, osrm_config)
+            except Exception as e:
+                print(e)
+    f.close()
+
+
+# Generate one specific per-day file describing the vehicles of an instance (name-day):
+# id_vehicle | capacity_used | n_deliveries
+def generateEspecificDay(city: str, day: int, root):
+    path_out = "output/csvs/"+city+"/especificDay-"+str(day)+".csv"
+    head = ["id_vehicle", "capacity_used", "n_deliveries"]
+    f = open(path_out, 'w', newline='', encoding = 'utf-8')
+    w = csv.writer(f)
+    w.writerow(head)
+    for id in range(len(root['vehicles'])):
+        line = [id]
+        line.append(computeCapacityRoute(root['vehicles'][id]))
+        line.append(len(root['vehicles'][id]['deliveries']))
+        w.writerow(line)
+    f.close()
+
+def computeCapacityRoute(vehicle):
+    # Total size of all deliveries assigned to this vehicle.
+    s = 0
+    for delivery in vehicle['deliveries']:
+        s += delivery['size']
+    return s
+
+def main():
+    osrm_config = OSRMConfig(host="http://ec2-34-222-175-250.us-west-2.compute.amazonaws.com")
+    path_outcsv = "output/csvs/"
+    cities = ["rj-0"]
+    num_days = 30
+    output = "data/results/"
+    path_input = "data/cvrp-instances-1.0/dev/"
+    methods = ["krsof"]
+    for city in cities:
+        pathcsv = path_outcsv + city + '/generalCity.csv'
+        generateGeneralCsv(
+            pathcsv,
+            city,
+            output,
+            path_input,
+            methods,
+            osrm_config,
+            num_days
+        )
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file
diff --git a/generateCsvVehicles.py b/generateCsvVehicles.py
new file mode 100644
index 0000000..d23f824
--- /dev/null
+++ b/generateCsvVehicles.py
@@ -0,0 +1,168 @@
+import csv
+import pandas as pd
+import matplotlib.pyplot as plt
+
+from itertools import zip_longest
+
+from loggibud.v1.types import *
+from loggibud.v1.distances import OSRMConfig, calculate_distance_matrix_m
+
+def calculate_distance_vehicle(
+    instance: CVRPInstance,
+    vehicle: CVRPSolutionVehicle,
+    method: str,
+    matrix_distance): # matrix[dep->idu][dep->idu]
+    route = [0]
+    distance = 0
+    dicio = {}
+    if method == 'krsof' or method == "kpmipo":
+        for d in vehicle.deliveries: # alternatively, the instance could be used to recover the order
+            route.append(d.idu) # idu or id??
+    else:
+        for i in range(len(instance.deliveries)):
+            dicio[instance.deliveries[i].id] = i+1
+        for d in vehicle.deliveries: # alternatively, the instance could be used to recover the order
+            route.append(dicio[d.id]) # idu or id??
+    for origem in range(0,len(route)-1):
+        destino = origem+1
+        distance += matrix_distance[route[origem]][route[destino]]
+    return round(distance/1_000, 4)
+
+def calculate_distance_vehicle_per_packet(
+    instance: CVRPInstance,
+    vehicle,
+    method,
+    matrix_distance):
+    distance = calculate_distance_vehicle(instance, vehicle, method, matrix_distance)
+    packets = len(vehicle.deliveries)
+    if packets != 0:
+        return round(distance/packets, 4)
+    else:
+        return 0.0
+
+def createCapacities(method, solution):
+    line = [method]
+    line.append("Capacities")
+    for v in solution.vehicles:
+        if len(v.deliveries) != 0:
+            line.append(sum([d.size for d in v.deliveries]))
+        else:
+            line.append(0)
+    return line
+    # w.writerow(line)
+
+def createDistances(
+    instance: CVRPInstance, method, matrix_distance, solution):
+    line = [method]
+    line.append("Distances")
+    for v in solution.vehicles:
+        line.append(calculate_distance_vehicle_per_packet(instance, v, method, matrix_distance))
+    return line
+    # w.writerow(line)
+
+def selectSolution(method: str, solution_path: str):
+    # Each method family serializes its solutions with a different type.
+    if method == "kpprrf" or method == "kpmip":
+        return CVRPSolutionKpprrf.from_file(solution_path)
+    elif method == "krsof" or method == "kpmipo":
+        return CVRPSolutionOPT.from_file(solution_path)
+    else:
+        return CVRPSolution.from_file(solution_path)
+
+def generateCsvVehicles(
+    path_outcsv: str,
+    dir_city: str,
+    nameInstance: str,
+    output: str,
+    path_input: str,
+    methods: list,
+    osrm_config: OSRMConfig):
+    pathcsv = path_outcsv + dir_city + '/'+nameInstance+'.csv'
+    input_path = path_input + dir_city + '/' +nameInstance + '.json'
+    instance = CVRPInstance.from_file(input_path)
+    points = [instance.origin]
+    for d in instance.deliveries:
+        points.append(d.point)
+    matrix_distance = calculate_distance_matrix_m(
+        points, osrm_config
+    )
+    lines = []
+    for method in methods:
+        output_path = output + method + '/' + dir_city + '/' + nameInstance + ".json"
+        solution = selectSolution(method, output_path)
+        lines.append(createCapacities(method, solution))
+        lines.append(createDistances(instance, method, matrix_distance, solution))
+    columns_data = zip_longest(*lines)
+    f = open(pathcsv, 'w', newline='', encoding='utf-8')
+    w = csv.writer(f)
+    w.writerows(columns_data)
+    f.close()
+    return pathcsv
+
+def createImages(path_outimgs, name, methods, dados, chave):
+    values = []
+    x = [i+1 for i in range(len(methods))]
+    for method in methods:
+        if chave == "Distances":
+            ext = ".1"
+            key = method+ext
+        else:
+            key = method
+        values.append(dados[key][chave])
+    plt.figure(figsize = (11,6))
+    plt.boxplot(values, vert = 1, patch_artist=False)
+    plt.title("Boxplot of "+chave, loc="center", fontsize=18)
+    plt.xlabel("Methods")
+    plt.ylabel(chave)
+    plt.xticks(x,methods)
+    plt.savefig(path_outimgs+'/'+name+chave+".png")
+    plt.close()
+
+def generateImage(path_outimgs, name, pathcsv, methods):
+    # read the csv back and group the values by method and metric
+    header = pd.read_csv(pathcsv)
+    dados = {}
+    for chave in header:
+        dados[chave] = {}
+        dados[chave][header[chave][0]] = [
+            float(header[chave][i]) for i in range(1,len(header[chave]))
+            if header[chave][i] != '0' and pd.isnull(header[chave][i]) != True]
+
+    createImages(path_outimgs, name, methods, dados, "Capacities")
+    createImages(path_outimgs, name, methods, dados, "Distances")
+
+def main():
+    osrm_config = OSRMConfig(host="http://ec2-34-222-175-250.us-west-2.compute.amazonaws.com")
+    path_outcsv = "output/vehicles/"
+    path_outimgs = "output/imgs/vehicles/"
+    cities = ["pa-0"]
["pa-0"] + num_days = 30 + output = "data/results/" + path_input = "data/cvrp-instances-1.0/dev/" + methods = ["kpmip", "kpmipo"] + for dir_city in cities: + for day in range(90,90+num_days): + try: + mc = dir_city.split("-") + month = mc[1] + city = mc[0] + nameInstance = "cvrp-"+month+"-"+city+"-"+str(day) + print(nameInstance) + pathcsv = generateCsvVehicles( + path_outcsv, + dir_city, + nameInstance, + output, + path_input, + methods, + osrm_config) + generateImage( + path_outimgs + dir_city, + nameInstance, + pathcsv, + methods) + except Exception as e: + print(e) + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/generate_task1.py b/generate_task1.py new file mode 100644 index 0000000..0e8d50c --- /dev/null +++ b/generate_task1.py @@ -0,0 +1,97 @@ +import csv +import json +from matplotlib import pyplot as plt +import time +import pandas as pd + +from loggibud.v1.distances import OSRMConfig +from loggibud.v1.plotting.plot_solution import plot_cvrp_solution +from loggibud.v1.types import CVRPInstance, CVRPSolution, CVRPSolutionVehicle, Point, Delivery +from loggibud.v1.baselines.task1 import lkh_3, kmeans_aggregation_ortools, kmeans_partition_ortools +from pathlib import Path + +def ploter_map(number): + solJson = open("../cvrp-0-pa-"+str(number)+".json") + data = json.load(solJson) + vehicles = [] + for v in data['vehicles']: + deliveries = [] + for d in v['deliveries']: + point = Point(lat = d['point']['lat'], lng = d['point']['lng']) + delivery = Delivery(id = d['id'], point = point, size = d['size']) + deliveries.append(delivery) + pointOrigin = Point(lat = v['origin']['lat'], lng = v['origin']['lng']) + vehicle = CVRPSolutionVehicle(origin = pointOrigin, deliveries=deliveries) + vehicles.append(vehicle) + solution = CVRPSolution(name = data['name'], vehicles = vehicles) + plot_cvrp_solution(solution) + +def solve_with_lkh3(osrm_config, input:str, output: str): + instance = CVRPInstance.from_file(input) + start = time.time() + lkh_params = lkh_3.LKHParams(osrm_config=osrm_config) + solution = lkh_3.solve(instance, params=lkh_params) + finish = time.time() + output_dir = Path(output or '.') + output_dir.mkdir(parents=True, exist_ok=True) + solution.time_exec = finish-start + solution.to_file(output_dir / f"{instance.name}.json") + return solution + +def solve_partition(osrm_config, input:str, output: str): + instance = CVRPInstance.from_file(input) + start = time.time() + solution = kmeans_partition_ortools.solve(instance, osrm_config = osrm_config) + finish = time.time() + output_dir = Path(output or '.') + output_dir.mkdir(parents=True, exist_ok=True) + solution.time_exec = finish-start + solution.to_file(output_dir / f"{instance.name}.json") + return solution + +def solve_aggregation(osrm_config, input:str, output: str): + instance = CVRPInstance.from_file(input) + start = time.time() + solution = kmeans_aggregation_ortools.solve(instance, osrm_config = osrm_config) + finish = time.time() + output_dir = Path(output or '.') + output_dir.mkdir(parents=True, exist_ok=True) + solution.time_exec = finish-start + solution.to_file(output_dir / f"{instance.name}.json") + return solution + +def solve_loggibud(alg: str, osrm_config, input: str, output: str): + if alg == "lkh3": + return solve_with_lkh3(osrm_config, input, output) + if alg == "kmeans-aggregation": + return solve_aggregation(osrm_config, input, output) + if alg == "kmeans-partition": + return solve_partition(osrm_config, input, output) + return "Not implemented" + +def execute_methods_loggibud(): 
+ methods = ["kmeans-partition"] + cities = ["rj-0"] + num_days = 30 + input_dir = "./data/cvrp-instances-1.0/dev/" + output = "../output/" + + osrm_config = OSRMConfig(host="http://ec2-34-222-175-250.us-west-2.compute.amazonaws.com") + for method in methods: + for city in cities: + for i in range(90,90+num_days): + output_complement = output + method + '/' + city + '/' + cit = city.split("-") + instance = "cvrp-"+str(cit[1])+"-"+str(cit[0])+"-"+str(i)+".json" + input = input_dir + city + "/" + instance + print(input) + try: + solution = solve_loggibud(method, osrm_config, input, output_complement) + except Exception as e: + print(e) + +def main(): + execute_methods_loggibud() + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/includeVehicles.py b/includeVehicles.py new file mode 100644 index 0000000..b01d74b --- /dev/null +++ b/includeVehicles.py @@ -0,0 +1,58 @@ +import json +from unicodedata import name +from loggibud.v1.types import * +from collections import defaultdict + +def constructVehicles(instance : CVRPInstance, params: ParamsVehicles): + vehicles = [] + id_v = 1 + for v in range(len(params.types)): + for num in params.num_types: + for i in range(num): + vehicle = Vehicle( + id = id_v, + type_vehicle = params.types[v], + capacity = params.capacities[v], + cust = params.custs[v], + origin = instance.origin + ) + vehicles.append(vehicle) + id_v += 1 + return vehicles + + + +def instanceToHeterogene( + instance : CVRPInstance, paramsVehicles : ParamsVehicles): + name = instance.name + region = instance.region + origin = instance.origin + vehicles = constructVehicles(paramsVehicles) + deliveries = instance.deliveries + return CVRPInstanceHeterogeneous( + name, + region, + origin, + vehicles, + deliveries + ) + +def recreate(dayStart, dayFinish, cities): + nameDirIn = "data/cvrp-instances-1.0/dev/" + nameDirOut = "data/cvrp-instances-2.0/dev/" + nameParams = "data/cvrp-instances-2.0/params/" + for city in cities: + for day in range(dayStart, dayFinish): + instanceDir = nameDirIn + city + "/" + nameInstance = "cvrp-0"+city+"-"+day + fileDir = instanceDir + nameInstance + instance = CVRPInstance.from_file(fileDir) + instance_heterogeneoun = instanceToHeterogene(instance) + + return + +if __name__ == "__main__": + cities = ["pa-0","df-0","rj-0"] + dayStart = 90 + dayFinish = 119 + recreate(dayStart, dayFinish, cities) \ No newline at end of file diff --git a/loggibud/v1/baselines/run_task1.py b/loggibud/v1/baselines/run_task1.py index 4a483ed..5e29b81 100644 --- a/loggibud/v1/baselines/run_task1.py +++ b/loggibud/v1/baselines/run_task1.py @@ -1,58 +1,55 @@ -import logging -import os -import importlib -from argparse import ArgumentParser -from pathlib import Path -from multiprocessing import Pool - -from tqdm import tqdm - -from loggibud.v1.types import CVRPInstance - - -if __name__ == "__main__": - - logging.basicConfig(level=logging.INFO) - parser = ArgumentParser() - - parser.add_argument("--instances", type=str, required=True) - parser.add_argument("--module", type=str, required=True) - parser.add_argument("--method", type=str, default="solve") - parser.add_argument("--output", type=str) - parser.add_argument("--params", type=str) - parser.add_argument("--params_class", type=str) - - args = parser.parse_args() - - # Load method from path. 
-    module = importlib.import_module(args.module)
-    method = getattr(module, args.method)
-    params_class = (
-        getattr(module, args.params_class) if args.params_class else None
-    )
-
-    # Load instance and heuristic params.
-    path = Path(args.instances)
-    path_dir = path if path.is_dir() else path.parent
-    files = [path] if path.is_file() else list(path.iterdir())
-
-    if args.params and not args.params_class:
-        raise ValueError(
-            "To use custom settings, you must provide both params and params_class."
-        )
-
-    params = params_class.from_file(args.params) if args.params else None
-
-    params = None
-
-    output_dir = Path(args.output or ".")
-    output_dir.mkdir(parents=True, exist_ok=True)
-
-    def solve(file):
-        instance = CVRPInstance.from_file(file)
-        solution = method(instance, params)
-        solution.to_file(output_dir / f"{instance.name}.json")
-
-    # Run solver on multiprocessing pool.
-    with Pool(os.cpu_count()) as pool:
-        list(tqdm(pool.imap(solve, files), total=len(files)))
+import logging
+import os
+import importlib
+from argparse import ArgumentParser
+from pathlib import Path
+from multiprocessing import Pool
+
+from tqdm import tqdm
+
+from loggibud.v1.types import CVRPInstance, CVRPSolution
+
+if __name__ == "__main__":
+
+    logging.basicConfig(level=logging.INFO)
+    parser = ArgumentParser()
+
+    parser.add_argument("--instances", type=str, required=True)
+    parser.add_argument("--module", type=str, required=True)
+    parser.add_argument("--method", type=str, default="solve")
+    parser.add_argument("--output", type=str)
+    parser.add_argument("--params", type=str)
+    parser.add_argument("--params_class", type=str)
+
+    args = parser.parse_args()
+
+    # Load method from path.
+    module = importlib.import_module(args.module)
+    method = getattr(module, args.method)
+    params_class = getattr(module, args.params_class) if args.params_class else None
+
+    # Load instance and heuristic params.
+    path = Path(args.instances)
+    path_dir = path if path.is_dir() else path.parent
+    files = [path] if path.is_file() else list(path.iterdir())
+
+    if args.params and not args.params_class:
+        raise ValueError(
+            "To use custom settings, you must provide both params and params_class."
+        )
+
+    params = params_class.from_file(args.params) if args.params else None
+
+    output_dir = Path(args.output or ".")
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    def solve(file):
+        instance = CVRPInstance.from_file(file)
+        solution = method(instance, params)
+        solution.to_file(output_dir / f"{instance.name}.json")
+
+    # Run solver on multiprocessing pool.
+    with Pool(os.cpu_count()) as pool:
+        list(tqdm(pool.imap(solve, files), total=len(files)))
\ No newline at end of file
diff --git a/loggibud/v1/baselines/shared/ortools.py b/loggibud/v1/baselines/shared/ortools.py
index 441019b..b9bfc32 100644
--- a/loggibud/v1/baselines/shared/ortools.py
+++ b/loggibud/v1/baselines/shared/ortools.py
@@ -1,146 +1,148 @@
-"""
-Implements a CVRP solver based on Google OR-tools.
-""" - -import logging -from datetime import timedelta -from dataclasses import dataclass -from typing import Optional - -import numpy as np -from ortools.constraint_solver import pywrapcp -from ortools.constraint_solver import routing_enums_pb2 - -from loggibud.v1.distances import calculate_distance_matrix_m, OSRMConfig -from loggibud.v1.types import ( - CVRPInstance, - CVRPSolution, - CVRPSolutionVehicle, - JSONDataclassMixin, -) - - -logger = logging.getLogger(__name__) - - -@dataclass -class ORToolsParams(JSONDataclassMixin): - first_solution_strategy: int = ( - routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC - ) - local_search_metaheuristic: int = ( - routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH - ) - max_vehicles: Optional[int] = None - solution_limit: Optional[int] = None - time_limit_ms: Optional[int] = 60_000 - - osrm_config: Optional[OSRMConfig] = None - """Config for calling OSRM distance service.""" - - -def solve( - instance: CVRPInstance, - params: Optional[ORToolsParams] = None, -) -> Optional[CVRPSolution]: - """Solves a CVRP instance using ORTools""" - - # Initialize parameters if not provided. - params = params or ORToolsParams() - - # Number of points is the number of deliveries + the origin. - num_points = len(instance.deliveries) + 1 - - logger.info(f"Solving CVRP instance of size {num_points}.") - - # There's no limit of vehicles, or max(vehicles) = len(deliveries). - num_vehicles = params.max_vehicles or len(instance.deliveries) - - manager = pywrapcp.RoutingIndexManager( - num_points, - num_vehicles, - 0, # (Number of nodes, Number of vehicles, Origin index). - ) - model = pywrapcp.RoutingModel(manager) - - # Unwrap the size index for every point. - sizes = np.array( - [0] + [d.size for d in instance.deliveries], dtype=np.int32 - ) - - def capacity_callback(src): - src = manager.IndexToNode(src) - return sizes[src] - - capacity_callback_index = model.RegisterUnaryTransitCallback( - capacity_callback - ) - model.AddDimension( - capacity_callback_index, 0, instance.vehicle_capacity, True, "Capacity" - ) - - # Unwrap the location/point for every point. - locations = [instance.origin] + [d.point for d in instance.deliveries] - - # Compute the distance matrix between points. - logger.info("Computing distance matrix.") - distance_matrix = ( - calculate_distance_matrix_m(locations, config=params.osrm_config) * 10 - ).astype(np.int32) - - def distance_callback(src, dst): - x = manager.IndexToNode(src) - y = manager.IndexToNode(dst) - return distance_matrix[x, y] - - distance_callback_index = model.RegisterTransitCallback(distance_callback) - model.SetArcCostEvaluatorOfAllVehicles(distance_callback_index) - - search_parameters = pywrapcp.DefaultRoutingSearchParameters() - search_parameters.first_solution_strategy = params.first_solution_strategy - - search_parameters.local_search_metaheuristic = ( - params.local_search_metaheuristic - ) - - if params.solution_limit: - search_parameters.solution_limit = params.solution_limit - - search_parameters.time_limit.FromTimedelta( - timedelta(microseconds=1e3 * params.time_limit_ms) - ) - - logger.info("Solving CVRP with ORTools.") - assignment = model.SolveWithParameters(search_parameters) - - # Checking if the feasible solution was found. - # For more information about the type error: - # https://developers.google.com/optimization/routing/routing_options - if not assignment: - return None - - def extract_solution(vehicle_id): - # Get the start node for route. 
- index = model.Start(vehicle_id) - - # Iterate while we don't reach an end node. - while not model.IsEnd(assignment.Value(model.NextVar(index))): - next_index = assignment.Value(model.NextVar(index)) - node = manager.IndexToNode(next_index) - - yield instance.deliveries[node - 1] - index = next_index - - routes = [ - CVRPSolutionVehicle( - origin=instance.origin, - deliveries=list(extract_solution(i)), - ) - for i in range(num_vehicles) - ] - - # Return only routes that actually leave the depot. - return CVRPSolution( - name=instance.name, - vehicles=[v for v in routes if len(v.deliveries)], - ) +""" +Implements a CVRP solver based on Google OR-tools. +""" + +import logging +from datetime import timedelta +from dataclasses import dataclass +from typing import Optional + +import numpy as np +from ortools.constraint_solver import pywrapcp +from ortools.constraint_solver import routing_enums_pb2 + +from loggibud.v1.distances import calculate_distance_matrix_m, OSRMConfig +from loggibud.v1.types import ( + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, + JSONDataclassMixin, +) + + +logger = logging.getLogger(__name__) + + +@dataclass +class ORToolsParams(JSONDataclassMixin): + first_solution_strategy: int = ( + routing_enums_pb2.FirstSolutionStrategy.PATH_CHEAPEST_ARC + ) + local_search_metaheuristic: int = ( + routing_enums_pb2.LocalSearchMetaheuristic.GUIDED_LOCAL_SEARCH + ) + max_vehicles: Optional[int] = None + solution_limit: Optional[int] = None + time_limit_ms: Optional[int] = 60_000 + + osrm_config: Optional[OSRMConfig] = OSRMConfig(host="http://ec2-34-222-175-250.us-west-2.compute.amazonaws.com") + """Config for calling OSRM distance service.""" + + +def solve( + instance: CVRPInstance, + params: Optional[ORToolsParams] = None, +) -> Optional[CVRPSolution]: + """Solves a CVRP instance using ORTools""" + + # Initialize parameters if not provided. + params = params or ORToolsParams() + + # Number of points is the number of deliveries + the origin. + num_points = len(instance.deliveries) + 1 + + logger.info(f"Solving CVRP instance of size {num_points}.") + + # There's no limit of vehicles, or max(vehicles) = len(deliveries). + num_vehicles = params.max_vehicles or len(instance.deliveries) + + manager = pywrapcp.RoutingIndexManager( + num_points, + num_vehicles, + 0, # (Number of nodes, Number of vehicles, Origin index). + ) + model = pywrapcp.RoutingModel(manager) + + # Unwrap the size index for every point. + sizes = np.array( + [0] + [d.size for d in instance.deliveries], dtype=np.int32 + ) + + def capacity_callback(src): + src = manager.IndexToNode(src) + return sizes[src] + + capacity_callback_index = model.RegisterUnaryTransitCallback( + capacity_callback + ) + model.AddDimension( + capacity_callback_index, 0, instance.vehicle_capacity, True, "Capacity" + ) + + # Unwrap the location/point for every point. + locations = [instance.origin] + [d.point for d in instance.deliveries] + + # Compute the distance matrix between points. 
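+    # Note: OR-Tools expects integer arc costs, so the matrix in meters is
+    # scaled by 10 and truncated to int32 below (i.e. 0.1 m resolution).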
+ logger.info("Computing distance matrix.") + distance_matrix = ( + calculate_distance_matrix_m(locations, config=params.osrm_config) * 10 + ).astype(np.int32) + + def distance_callback(src, dst): + x = manager.IndexToNode(src) + y = manager.IndexToNode(dst) + return distance_matrix[x, y] + + distance_callback_index = model.RegisterTransitCallback(distance_callback) + model.SetArcCostEvaluatorOfAllVehicles(distance_callback_index) + + search_parameters = pywrapcp.DefaultRoutingSearchParameters() + search_parameters.first_solution_strategy = params.first_solution_strategy + + search_parameters.local_search_metaheuristic = ( + params.local_search_metaheuristic + ) + + if params.solution_limit: + search_parameters.solution_limit = params.solution_limit + + search_parameters.time_limit.FromTimedelta( + timedelta(microseconds=1e3 * params.time_limit_ms) + ) + + logger.info("Solving CVRP with ORTools.") + assignment = model.SolveWithParameters(search_parameters) + + # Checking if the feasible solution was found. + # For more information about the type error: + # https://developers.google.com/optimization/routing/routing_options + if not assignment: + return None + + def extract_solution(vehicle_id): + # Get the start node for route. + index = model.Start(vehicle_id) + + # Iterate while we don't reach an end node. + while not model.IsEnd(assignment.Value(model.NextVar(index))): + next_index = assignment.Value(model.NextVar(index)) + node = manager.IndexToNode(next_index) + + yield instance.deliveries[node - 1] + index = next_index + + routes = [ + CVRPSolutionVehicle( + origin=instance.origin, + deliveries=list(extract_solution(i)), + ) + for i in range(num_vehicles) + ] + + # Return only routes that actually leave the depot. + return CVRPSolution( + name=instance.name, + vehicles=[v for v in routes if len(v.deliveries)], + ) diff --git a/loggibud/v1/baselines/shared/p_hub.py b/loggibud/v1/baselines/shared/p_hub.py index 0ec10e1..adbfb9e 100644 --- a/loggibud/v1/baselines/shared/p_hub.py +++ b/loggibud/v1/baselines/shared/p_hub.py @@ -1,81 +1,81 @@ -from dataclasses import dataclass - -import numpy as np -from ortools.linear_solver import pywraplp as linear_solver - - -@dataclass -class PHubProblem: - p: int - """Number of hubs in the solution.""" - - demands: np.ndarray # shape (n,) - """Demand units for every demand region.""" - - transport_costs: np.ndarray # shape (n, n) - """Transportation costs from node i to node j.""" - - -def solve_p_hub(problem: PHubProblem) -> (np.array, np.array): - """ - Solves p-hub location problems. - """ - - n_demands = problem.demands.shape[0] - n_candidates = problem.transport_costs.shape[1] - - solver = linear_solver.Solver( - "p_hub", - linear_solver.Solver.CBC_MIXED_INTEGER_PROGRAMMING, - ) - - # Facility decision variable. - x = {i: solver.BoolVar(f"x{i}") for i in range(n_candidates)} - - # Allocation decision variable. - y = { - (i, j): solver.NumVar(0, 1, f"j{i},{j}") - for i in range(n_candidates) - for j in range(n_demands) - } - - # Number of active hubs must be smaller than p. - solver.Add(sum([x[i] for i in range(n_candidates)]) <= problem.p) - - # All demands must be satisfied. - for j in range(n_demands): - solver.Add(sum(y[i, j] for i in range(n_candidates)) == 1) - - # Only active hubs can be selected. - for i in range(n_candidates): - for j in range(n_demands): - solver.Add(y[i, j] <= x[i]) - - objective = solver.Objective() - - # Objective is to minimize the cost per unit. 
-    for i in range(n_candidates):
-        for j in range(n_demands):
-            objective.SetCoefficient(
-                y[i, j], problem.transport_costs[i, j] * problem.demands[j]
-            )
-
-    solver.set_time_limit(120_000)
-    status = solver.Solve()
-
-    assert status == linear_solver.Solver.OPTIMAL
-
-    # Retrieve the solution decision vars.
-    x_sol = np.array(
-        [x[i].solution_value() for i in range(n_candidates)], dtype=np.bool
-    )
-
-    y_sol = np.array(
-        [
-            [y[i, j].solution_value() for j in range(n_demands)]
-            for i in range(n_candidates)
-        ],
-        dtype=np.bool,
-    )
-
-    return x_sol, y_sol
+from dataclasses import dataclass
+
+import numpy as np
+from ortools.linear_solver import pywraplp as linear_solver
+
+
+@dataclass
+class PHubProblem:
+    p: int
+    """Number of hubs in the solution."""
+
+    demands: np.ndarray  # shape (n,)
+    """Demand units for every demand region."""
+
+    transport_costs: np.ndarray  # shape (n, n)
+    """Transportation costs from node i to node j."""
+
+
+def solve_p_hub(problem: PHubProblem) -> (np.array, np.array):
+    """
+    Solves p-hub location problems.
+    """
+
+    n_demands = problem.demands.shape[0]
+    n_candidates = problem.transport_costs.shape[1]
+
+    solver = linear_solver.Solver(
+        "p_hub",
+        linear_solver.Solver.CBC_MIXED_INTEGER_PROGRAMMING,
+    )
+
+    # Facility decision variable.
+    x = {i: solver.BoolVar(f"x{i}") for i in range(n_candidates)}
+
+    # Allocation decision variable.
+    y = {
+        (i, j): solver.NumVar(0, 1, f"j{i},{j}")
+        for i in range(n_candidates)
+        for j in range(n_demands)
+    }
+
+    # Number of active hubs must be smaller than p.
+    solver.Add(sum([x[i] for i in range(n_candidates)]) <= problem.p)
+
+    # All demands must be satisfied.
+    for j in range(n_demands):
+        solver.Add(sum(y[i, j] for i in range(n_candidates)) == 1)
+
+    # Only active hubs can be selected.
+    for i in range(n_candidates):
+        for j in range(n_demands):
+            solver.Add(y[i, j] <= x[i])
+
+    objective = solver.Objective()
+
+    # Objective is to minimize the cost per unit.
+    for i in range(n_candidates):
+        for j in range(n_demands):
+            objective.SetCoefficient(
+                y[i, j], problem.transport_costs[i, j] * problem.demands[j]
+            )
+
+    solver.set_time_limit(120_000)
+    status = solver.Solve()
+
+    assert status == linear_solver.Solver.OPTIMAL
+
+    # Retrieve the solution decision vars (plain bool: np.bool is removed in modern NumPy).
+    x_sol = np.array(
+        [x[i].solution_value() for i in range(n_candidates)], dtype=bool
+    )
+
+    y_sol = np.array(
+        [
+            [y[i, j].solution_value() for j in range(n_demands)]
+            for i in range(n_candidates)
+        ],
+        dtype=bool,
+    )
+
+    return x_sol, y_sol
diff --git a/loggibud/v1/baselines/task1/HOW_TO_EXECUTE.md b/loggibud/v1/baselines/task1/HOW_TO_EXECUTE.md
new file mode 100644
index 0000000..031982b
--- /dev/null
+++ b/loggibud/v1/baselines/task1/HOW_TO_EXECUTE.md
@@ -0,0 +1,37 @@
+# Run Task 1
+
+To run a Task 1 baseline you need an input instance file and the module implementing the strategy. Each strategy module exposes a solver function, and `--method` defaults to `"solve"`. You can also choose a directory for the output with `--output`, and pass solver parameters through `--params` and `--params_class`, as sketched below.
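+
+Internally, `run_task1` resolves the solver dynamically from these flags. A minimal sketch of that dispatch (illustration only, using the module path from the LKH example below):
+
+```python
+import importlib
+
+module = importlib.import_module("loggibud.v1.baselines.task1.lkh_3")
+solve = getattr(module, "solve")  # --method defaults to "solve"
+```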
+
+## LKH
+
+As a test, once you have an OSRM server running and reachable, run:
+```
+python -m loggibud.v1.baselines.run_task1 --instances data/cvrp-instances-1.0/train/rj-0/cvrp-0-rj-0.json --module loggibud.v1.baselines.task1.lkh_3 --output output/lkh_3/
+```
+
+With the output written to `output/lkh_3/`, you can then run the [benchmark](../../../../docs/quickstart.md).
+
+## KMEANS AGGREGATION ORTOOLS
+
+```
+python -m loggibud.v1.baselines.run_task1 --instances data/cvrp-instances-1.0/train/rj-0/cvrp-0-rj-0.json --module loggibud.v1.baselines.task1.kmeans_aggregation_ortools --output output/kmeans_aggregation_ortools/
+```
+
+With the output written to `output/kmeans_aggregation_ortools/`, you can then run the [benchmark](../../../../docs/quickstart.md).
+
+## KMEANS PARTITION ORTOOLS
+
+```
+python -m loggibud.v1.baselines.run_task1 --instances data/cvrp-instances-1.0/train/rj-0/cvrp-0-rj-0.json --module loggibud.v1.baselines.task1.kmeans_partition_ortools --output output/kmeans_partition_ortools/
+```
+
+With the output written to `output/kmeans_partition_ortools/`, you can then run the [benchmark](../../../../docs/quickstart.md).
\ No newline at end of file
diff --git a/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py b/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py
index 638ed2d..2d7b31c 100644
--- a/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py
+++ b/loggibud/v1/baselines/task1/kmeans_aggregation_ortools.py
@@ -1,148 +1,144 @@
-"""
-This baseline is a simple partioning followed by a routing problem.
-
-It uses pure K-Means to partition the problem into K regions and them uses the ORTools solver to solve
-each subinstance. It's similar to the method proposed by Ruhan et al [1], but without the balancing
-component, as we observed that most instances are already well balanced and far beyond vehicle capacity.
-
-Refs:
-
-[1] R. He, W. Xu, J. Sun and B. Zu, "Balanced K-Means Algorithm for Partitioning Areas in Large-Scale
-Vehicle Routing Problem," 2009 Third International Symposium on Intelligent Information Technology
-Application, Shanghai, 2009, pp. 87-90, doi: 10.1109/IITA.2009.307. Available at
-https://ieeexplore.ieee.org/abstract/document/5369502.
- - -""" -import logging -from dataclasses import dataclass -from typing import Optional - -import numpy as np -from sklearn.cluster import MiniBatchKMeans - -from loggibud.v1.types import ( - CVRPInstance, - CVRPSolution, - CVRPSolutionVehicle, - Delivery, -) -from ..shared.ortools import solve_cvrp as ortools_solve, ORToolsParams - - -logger = logging.getLogger(__name__) - - -@dataclass -class KmeansAggregateORToolsParams: - fixed_num_clusters: Optional[int] = None - variable_num_clusters: Optional[int] = None - seed: int = 0 - - cluster_ortools_params: Optional[ORToolsParams] = None - aggregate_ortools_params: Optional[ORToolsParams] = None - - @classmethod - def get_baseline(cls): - return cls( - variable_num_clusters=100, - cluster_ortools_params=ORToolsParams( - solution_limit=300, - time_limit_ms=10_000, - ), - ) - - -def solve( - instance: CVRPInstance, - params: Optional[KmeansAggregateORToolsParams] = None, -) -> Optional[CVRPSolution]: - - params = params or KmeansAggregateORToolsParams.get_baseline() - - num_deliveries = len(instance.deliveries) - num_clusters = int( - params.fixed_num_clusters - or np.ceil(num_deliveries / (params.variable_num_clusters or 1)) - ) - - logger.info(f"Clustering instance into {num_clusters} subinstances") - clustering = MiniBatchKMeans(num_clusters, random_state=params.seed) - - points = np.array( - [[d.point.lng, d.point.lat] for d in instance.deliveries] - ) - clusters = clustering.fit_predict(points) - - delivery_array = np.array(instance.deliveries) - - deliveries_per_cluster = [ - delivery_array[clusters == i] for i in range(num_clusters) - ] - - def solve_cluster(deliveries): - if len(deliveries) < 2: - return [deliveries] - - cluster_instance = CVRPInstance( - name=instance.name, - deliveries=deliveries, - origin=instance.origin, - vehicle_capacity=instance.vehicle_capacity, - ) - - cluster_solution = ortools_solve( - cluster_instance, params.cluster_ortools_params - ) - - return [v.deliveries for v in cluster_solution.vehicles] - - def aggregate_deliveries(idx, deliveries): - return Delivery( - id=str(idx), - point=deliveries[0].point, - size=sum([d.size for d in deliveries]), - ) - - subsolutions = [ - deliveries - for group in deliveries_per_cluster - for deliveries in solve_cluster(group.tolist()) - if group.any() - ] - - aggregated_deliveries = [ - aggregate_deliveries(idx, s) for idx, s in enumerate(subsolutions) - ] - - aggregated_instance = CVRPInstance( - name=instance.name, - deliveries=aggregated_deliveries, - origin=instance.origin, - vehicle_capacity=instance.vehicle_capacity, - ) - - aggregated_solution = ortools_solve(aggregated_instance) - - vehicles = [ - CVRPSolutionVehicle( - origin=v.origin, - deliveries=[ - d - for v in solve_cluster( - [ - d - for groups in v.deliveries - for d in subsolutions[int(groups.id)] - ] - ) - for d in v - ], - ) - for v in aggregated_solution.vehicles - ] - - return CVRPSolution( - name=instance.name, - vehicles=vehicles, - ) +""" +This baseline is a simple partioning followed by a routing problem. + +It uses pure K-Means to partition the problem into K regions and them uses the ORTools solver to solve +each subinstance. It's similar to the method proposed by Ruhan et al [1], but without the balancing +component, as we observed that most instances are already well balanced and far beyond vehicle capacity. + +Refs: + +[1] R. He, W. Xu, J. Sun and B. 
Zu, "Balanced K-Means Algorithm for Partitioning Areas in Large-Scale +Vehicle Routing Problem," 2009 Third International Symposium on Intelligent Information Technology +Application, Shanghai, 2009, pp. 87-90, doi: 10.1109/IITA.2009.307. Available at +https://ieeexplore.ieee.org/abstract/document/5369502. + + +""" +import logging +from dataclasses import dataclass +from typing import Optional + +import numpy as np +from sklearn.cluster import MiniBatchKMeans + +from loggibud.v1.types import CVRPInstance, CVRPSolution, CVRPSolutionVehicle, Delivery +from loggibud.v1.distances import * +from ..shared.ortools import solve as ortools_solve, ORToolsParams + + +logger = logging.getLogger(__name__) + + +@dataclass +class KmeansAggregateORToolsParams: + fixed_num_clusters: Optional[int] = None + variable_num_clusters: Optional[int] = None + seed: int = 0 + + cluster_ortools_params: Optional[ORToolsParams] = None + aggregate_ortools_params: Optional[ORToolsParams] = None + + @classmethod + def get_baseline(cls): + return cls( + variable_num_clusters=100, + cluster_ortools_params=ORToolsParams( + solution_limit=300, + time_limit_ms=10_000, + ), + ) + + +def solve( + instance: CVRPInstance, + osrm_config: OSRMConfig, + params: Optional[KmeansAggregateORToolsParams] = None, +) -> Optional[CVRPSolution]: + + params = params or KmeansAggregateORToolsParams.get_baseline() + + num_deliveries = len(instance.deliveries) + num_clusters = int( + params.fixed_num_clusters or + np.ceil(num_deliveries / (params.variable_num_clusters or 1)) + ) + + logger.info(f"Clustering instance into {num_clusters} subinstances") + clustering = MiniBatchKMeans(num_clusters, random_state=params.seed) + + points = np.array([[d.point.lng, d.point.lat] for d in instance.deliveries]) + clusters = clustering.fit_predict(points) + + delivery_array = np.array(instance.deliveries) + + deliveries_per_cluster = [ + delivery_array[clusters == i] for i in range(num_clusters) + ] + + def solve_cluster(deliveries): + if len(deliveries) < 2: + return [deliveries] + + cluster_instance = CVRPInstance( + name=instance.name, + region=instance.region, + deliveries=deliveries, + origin=instance.origin, + vehicle_capacity=instance.vehicle_capacity, + ) + + params.cluster_ortools_params.osrm_config = osrm_config + + cluster_solution = ortools_solve( + cluster_instance, params.cluster_ortools_params + ) + + return [v.deliveries for v in cluster_solution.vehicles] + + def aggregate_deliveries(idx, deliveries): + return Delivery( + id=str(idx), + point=deliveries[0].point, + size=sum([d.size for d in deliveries]), + ) + + subsolutions = [ + deliveries + for group in deliveries_per_cluster + for deliveries in solve_cluster(group.tolist()) + if group.any() + ] + + aggregated_deliveries = [ + aggregate_deliveries(idx, s) for idx, s in enumerate(subsolutions) + ] + + aggregated_instance = CVRPInstance( + name=instance.name, + region=instance.region, + deliveries=aggregated_deliveries, + origin=instance.origin, + vehicle_capacity=instance.vehicle_capacity, + ) + + aggregated_solution = ortools_solve(aggregated_instance) + + vehicles = [ + CVRPSolutionVehicle( + origin=v.origin, + deliveries=[ + d + for v in solve_cluster( + [d for groups in v.deliveries for d in subsolutions[int( + groups.id)]] + ) + for d in v + ], + ) + for v in aggregated_solution.vehicles + ] + + return CVRPSolution( + name=instance.name, + vehicles=vehicles, + ) diff --git a/loggibud/v1/baselines/task1/kmeans_partition_ortools.py 
b/loggibud/v1/baselines/task1/kmeans_partition_ortools.py index 6109f17..05c7910 100644 --- a/loggibud/v1/baselines/task1/kmeans_partition_ortools.py +++ b/loggibud/v1/baselines/task1/kmeans_partition_ortools.py @@ -1,99 +1,98 @@ -""" -This baseline is a simple partioning followed by a routing problem. - -It uses pure K-Means to partition the problem into K convex regions and them uses the ORTools solver -to solve each subinstance. It's similar to the method proposed by Ruhan et al. [1], but without the -balancing component. - -Refs: - -[1] R. He, W. Xu, J. Sun and B. Zu, "Balanced K-Means Algorithm for Partitioning Areas in Large-Scale -Vehicle Routing Problem," 2009 Third International Symposium on Intelligent Information Technology -Application, Shanghai, 2009, pp. 87-90, doi: 10.1109/IITA.2009.307. Available at -https://ieeexplore.ieee.org/abstract/document/5369502. -""" - -import logging -from dataclasses import dataclass -from typing import Optional - -import numpy as np -from sklearn.cluster import KMeans - -from loggibud.v1.types import CVRPInstance, CVRPSolution -from loggibud.v1.baselines.shared.ortools import ( - solve as ortools_solve, - ORToolsParams, -) - - -logger = logging.getLogger(__name__) - - -@dataclass -class KmeansPartitionORToolsParams: - fixed_num_clusters: Optional[int] = None - variable_num_clusters: Optional[int] = None - seed: int = 0 - - ortools_params: Optional[ORToolsParams] = None - - @classmethod - def get_baseline(cls): - return cls( - variable_num_clusters=500, - ortools_params=ORToolsParams( - time_limit_ms=120_000, - solution_limit=1_000, - ), - ) - - -def solve( - instance: CVRPInstance, - params: Optional[KmeansPartitionORToolsParams] = None, -) -> Optional[CVRPSolution]: - - params = params or KmeansPartitionORToolsParams.get_baseline() - - num_deliveries = len(instance.deliveries) - num_clusters = int( - params.fixed_num_clusters - or np.ceil( - num_deliveries / (params.variable_num_clusters or num_deliveries) - ) - ) - - logger.info(f"Clustering instance into {num_clusters} subinstances") - clustering = KMeans(num_clusters, random_state=params.seed) - - points = np.array( - [[d.point.lng, d.point.lat] for d in instance.deliveries] - ) - clusters = clustering.fit_predict(points) - - delivery_array = np.array(instance.deliveries) - - subsinstance_deliveries = [ - delivery_array[clusters == i] for i in range(num_clusters) - ] - - subinstances = [ - CVRPInstance( - name=instance.name, - deliveries=subinstance.tolist(), - origin=instance.origin, - vehicle_capacity=instance.vehicle_capacity, - ) - for subinstance in subsinstance_deliveries - ] - - subsolutions = [ - ortools_solve(subinstance, params.ortools_params) - for subinstance in subinstances - ] - - return CVRPSolution( - name=instance.name, - vehicles=[v for sol in subsolutions for v in sol.vehicles], - ) +""" +This baseline is a simple partioning followed by a routing problem. + +It uses pure K-Means to partition the problem into K convex regions and them uses the ORTools solver +to solve each subinstance. It's similar to the method proposed by Ruhan et al. [1], but without the +balancing component. + +Refs: + +[1] R. He, W. Xu, J. Sun and B. Zu, "Balanced K-Means Algorithm for Partitioning Areas in Large-Scale +Vehicle Routing Problem," 2009 Third International Symposium on Intelligent Information Technology +Application, Shanghai, 2009, pp. 87-90, doi: 10.1109/IITA.2009.307. Available at +https://ieeexplore.ieee.org/abstract/document/5369502. 
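+
+Schematically, the pipeline is (a sketch, not the exact signatures):
+
+    clusters = KMeans(num_clusters).fit_predict(points)              # partition
+    subsolutions = [ortools_solve(s, params) for s in subinstances]  # route
+    vehicles = [v for sol in subsolutions for v in sol.vehicles]     # merge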
+""" + +import logging +from dataclasses import dataclass +from typing import Optional + +import numpy as np +from sklearn.cluster import KMeans + +from loggibud.v1.types import CVRPInstance, CVRPSolution +from loggibud.v1.distances import OSRMConfig +from loggibud.v1.baselines.shared.ortools import ( + solve as ortools_solve, + ORToolsParams, +) + + +logger = logging.getLogger(__name__) + + +@dataclass +class KmeansPartitionORToolsParams: + fixed_num_clusters: Optional[int] = None + variable_num_clusters: Optional[int] = None + seed: int = 0 + + ortools_params: Optional[ORToolsParams] = None + + @classmethod + def get_baseline(cls): + return cls( + variable_num_clusters=500, + ortools_params=ORToolsParams( + time_limit_ms=120_000, + solution_limit=1_000, + ), + ) + + +def solve( + instance: CVRPInstance, + osrm_config: OSRMConfig, + params: Optional[KmeansPartitionORToolsParams] = None, +) -> Optional[CVRPSolution]: + + params = params or KmeansPartitionORToolsParams.get_baseline() + + num_deliveries = len(instance.deliveries) + num_clusters = int( + params.fixed_num_clusters + or np.ceil(num_deliveries / (params.variable_num_clusters or num_deliveries)) + ) + + logger.info(f"Clustering instance into {num_clusters} subinstances") + clustering = KMeans(num_clusters, random_state=params.seed) + + points = np.array([[d.point.lng, d.point.lat] for d in instance.deliveries]) + clusters = clustering.fit_predict(points) + + delivery_array = np.array(instance.deliveries) + + subsinstance_deliveries = [ + delivery_array[clusters == i] for i in range(num_clusters) + ] + + subinstances = [ + CVRPInstance( + name=instance.name, + region=instance.region, + deliveries=subinstance.tolist(), + origin=instance.origin, + vehicle_capacity=instance.vehicle_capacity, + ) + for subinstance in subsinstance_deliveries + ] + + subsolutions = [ + ortools_solve(subinstance, params.ortools_params) + for subinstance in subinstances + ] + + return CVRPSolution( + name=instance.name, + vehicles=[v for sol in subsolutions for v in sol.vehicles], + ) \ No newline at end of file diff --git a/loggibud/v1/baselines/task1/lkh_3.py b/loggibud/v1/baselines/task1/lkh_3.py index e7b1e55..c3f2e2f 100644 --- a/loggibud/v1/baselines/task1/lkh_3.py +++ b/loggibud/v1/baselines/task1/lkh_3.py @@ -1,170 +1,159 @@ -"""Implements the Lin-Kernighan-Helsgaun (LKH) solver -The solver used here is the LKH-3 version [1], which is able to solve CVRP -instances. 
- -References ----------- - [1] https://github.com/cerebis/LKH3 -""" -import logging -import os -import subprocess -from dataclasses import dataclass -from itertools import groupby -from math import ceil -from typing import Dict, List, Optional - -import lkh -import numpy as np - -from loggibud.v1.types import ( - CVRPInstance, - CVRPSolution, - CVRPSolutionVehicle, - JSONDataclassMixin, -) -from loggibud.v1.distances import OSRMConfig -from loggibud.v1.data_conversion import to_tsplib, TSPLIBConversionParams - - -logger = logging.getLogger(__name__) - - -@dataclass -class LKHParams(JSONDataclassMixin): - - time_limit_s: int = 60 - """Time limit in seconds to step the solver.""" - - num_runs: int = 1 - """Number of runs (as in a multistart heuristic).""" - - osrm_config: Optional[OSRMConfig] = None - """Config for calling OSRM distance service.""" - - -def solve( - instance: CVRPInstance, params: Optional[LKHParams] = None -) -> CVRPSolution: - """Solve a CVRP instance using LKH-3""" - - params = params or LKHParams() - - conversion_params = TSPLIBConversionParams(osrm_config=params.osrm_config) - tsplib_instance = to_tsplib(instance, conversion_params) - - # LKH solution params, for details check the LKH documentation. - lkh_params = dict( - mtsp_objective="MINSUM", - runs=params.num_runs, - time_limit=params.time_limit_s, - vehicles=_get_num_vehicles(instance), - ) - - current_path = os.path.dirname(os.path.abspath(__file__)) - lkh_solution = lkh.solve( - f"{current_path}/LKH", tsplib_instance, **lkh_params - ) - - solution = _unwrap_lkh_solution(instance, lkh_solution) - - return solution - - -def _unwrap_lkh_solution( - instance: CVRPInstance, lkh_solution: List[int] -) -> CVRPSolution: - """Read the files generated by the solver - - The output is stored in a TSPLIB-like format. Here is a typical example. - - Suppose a problem with depot at node 1 and deliveries at 2, 3, 4, 5 and 6. - Now, suppose the solution has two routes such as: - - Route 1: [1, 2, 3] - - Route 2: [1, 4, 5, 6] - - The output would be written as a sequence like: - 1 - 2 - 3 - 7 <--- - 4 - 5 - 6 - - - The first node is 1, the depot, and the following are deliveries in the - first route. Then, we reach a node 7, which is greater than all nodes in - the problem. This actually marks the start of another route, and if we had - more routes, it would be split with an 8, and so on. - - The reading goes on until a -1 is obtained, thus marking the end of all - routes. - """ - - num_deliveries = len(instance.deliveries) - - # To retrieve the delivery indices, we have to subtract two, that is the - # same as ignoring the depot and reindexing from zero. - delivery_indices = np.array(lkh_solution[0]) - 2 - - # Now we split the sequence into vehicles using a simple generator. - def route_gen(seq): - route = [] - - for el in seq[1:]: - if el < num_deliveries: - route.append(el) - - elif route: - yield np.array(route) - route = [] - - # Output last route if any - if route: - yield np.array(route) - - delivery_indices = list(route_gen(delivery_indices)) - - # To enable multi-integer indexing, we convert the deliveries into an - # object np.array. 
- np_deliveries = np.array(instance.deliveries, dtype=object) - - def build_vehicle(route_delivery_indices): - deliveries = np_deliveries[route_delivery_indices] - - return CVRPSolutionVehicle( - origin=instance.origin, deliveries=deliveries.tolist() - ) - - routes = [build_vehicle(indices) for indices in delivery_indices] - - return CVRPSolution(name=instance.name, vehicles=routes) - - -def _get_num_vehicles(instance: CVRPInstance) -> int: - """Estimate a proper number of vehicles for an instance - The typical number of vehicles used internally by the LKH-3 is given by - - ceil(total_demand / vehicle_capacity) - - Unfortunately, this does not work in some cases. Here is a simple example. - Consider three deliveries with demands 3, 4 and 5, and assume the vehicle - capacity is 6. The total demand is 12, so according to this equation we - would require ceil(12 / 6) = 2 vehicles. - Unfortunately, there is no way to place all three deliveries in these two - vehicles without splitting a package. - - Thus, we use a workaround by assuming all packages have the same maximum - demand. Thus, we can place `floor(vehicle_capacity / max_demand)` packages - in a vehicle. Dividing the total number of packages by this we get an - estimation of how many vehicles we require. - - This heuristic is an overestimation and may be too much in some cases, - but we found that the solver is more robust in excess (it ignores some - vehicles if required) than in scarcity (it returns an unfeasible solution). - """ - - num_deliveries = len(instance.deliveries) - max_demand = max(delivery.size for delivery in instance.deliveries) - return ceil(num_deliveries / (instance.vehicle_capacity // max_demand)) +"""Implements the Lin-Kernighan-Helsgaun (LKH) solver +The solver used here is the LKH-3 version [1], which is able to solve CVRP +instances. +References +---------- + [1] https://github.com/cerebis/LKH3 +""" +import logging +import os +import subprocess +from dataclasses import dataclass +from itertools import groupby +from math import ceil +from typing import Dict, List, Optional + +import lkh +import numpy as np + +from loggibud.v1.types import ( + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, + JSONDataclassMixin, +) +from loggibud.v1.distances import OSRMConfig +from loggibud.v1.data_conversion import to_tsplib, TSPLIBConversionParams + + +logger = logging.getLogger(__name__) + + +@dataclass +class LKHParams(JSONDataclassMixin): + + time_limit_s: int = 60 + """Time limit in seconds to step the solver.""" + + num_runs: int = 1 + """Number of runs (as in a multistart heuristic).""" + + osrm_config: Optional[OSRMConfig] = None + """Config for calling OSRM distance service.""" + + +def solve( + instance: CVRPInstance, params: Optional[LKHParams] = None +) -> CVRPSolution: + """Solve a CVRP instance using LKH-3""" + + params = params or LKHParams() + + conversion_params = TSPLIBConversionParams(osrm_config=params.osrm_config) + tsplib_instance = to_tsplib(instance, conversion_params) + + # LKH solution params, for details check the LKH documentation. 
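+    # mtsp_objective=MINSUM asks LKH-3 to minimize the summed length of all
+    # routes, and `vehicles` seeds it with the fleet-size estimate from
+    # _get_num_vehicles below.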
+ lkh_params = dict( + mtsp_objective="MINSUM", + runs=params.num_runs, + time_limit=params.time_limit_s, + vehicles=_get_num_vehicles(instance), + ) + + current_path = os.path.dirname(os.path.abspath(__file__)) + lkh_solution = lkh.solve( + f"{current_path}/LKH", tsplib_instance, **lkh_params + ) + + solution = _unwrap_lkh_solution(instance, lkh_solution) + + return solution + + +def _unwrap_lkh_solution( + instance: CVRPInstance, lkh_solution: List[int] +) -> CVRPSolution: + """Read the files generated by the solver + The output is stored in a TSPLIB-like format. Here is a typical example. + Suppose a problem with depot at node 1 and deliveries at 2, 3, 4, 5 and 6. + Now, suppose the solution has two routes such as: + - Route 1: [1, 2, 3] + - Route 2: [1, 4, 5, 6] + The output would be written as a sequence like: + 1 + 2 + 3 + 7 <--- + 4 + 5 + 6 + The first node is 1, the depot, and the following are deliveries in the + first route. Then, we reach a node 7, which is greater than all nodes in + the problem. This actually marks the start of another route, and if we had + more routes, it would be split with an 8, and so on. + The reading goes on until a -1 is obtained, thus marking the end of all + routes. + """ + + num_deliveries = len(instance.deliveries) + + # To retrieve the delivery indices, we have to subtract two, that is the + # same as ignoring the depot and reindexing from zero. + delivery_indices = np.array(lkh_solution[0]) - 2 + + # Now we split the sequence into vehicles using a simple generator. + def route_gen(seq): + route = [] + + for el in seq[1:]: + if el < num_deliveries: + route.append(el) + + elif route: + yield np.array(route) + route = [] + + # Output last route if any + if route: + yield np.array(route) + + delivery_indices = list(route_gen(delivery_indices)) + + # To enable multi-integer indexing, we convert the deliveries into an + # object np.array. + np_deliveries = np.array(instance.deliveries, dtype=object) + + def build_vehicle(route_delivery_indices): + deliveries = np_deliveries[route_delivery_indices] + + return CVRPSolutionVehicle( + origin=instance.origin, deliveries=deliveries.tolist() + ) + + routes = [build_vehicle(indices) for indices in delivery_indices] + + return CVRPSolution(name=instance.name, vehicles=routes) + + +def _get_num_vehicles(instance: CVRPInstance) -> int: + """Estimate a proper number of vehicles for an instance + The typical number of vehicles used internally by the LKH-3 is given by + ceil(total_demand / vehicle_capacity) + Unfortunately, this does not work in some cases. Here is a simple example. + Consider three deliveries with demands 3, 4 and 5, and assume the vehicle + capacity is 6. The total demand is 12, so according to this equation we + would require ceil(12 / 6) = 2 vehicles. + Unfortunately, there is no way to place all three deliveries in these two + vehicles without splitting a package. + Thus, we use a workaround by assuming all packages have the same maximum + demand. Thus, we can place `floor(vehicle_capacity / max_demand)` packages + in a vehicle. Dividing the total number of packages by this we get an + estimation of how many vehicles we require. + This heuristic is an overestimation and may be too much in some cases, + but we found that the solver is more robust in excess (it ignores some + vehicles if required) than in scarcity (it returns an unfeasible solution). 
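+
+    For instance, in the example above (demands 3, 4 and 5, capacity 6):
+    floor(6 / 5) = 1 package fits per vehicle, so we estimate
+    ceil(3 / 1) = 3 vehicles, which is indeed feasible.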
+ """ + + num_deliveries = len(instance.deliveries) + max_demand = max(delivery.size for delivery in instance.deliveries) + return ceil(num_deliveries / (instance.vehicle_capacity // max_demand)) \ No newline at end of file diff --git a/loggibud/v1/baselines/task1/relaxfix.py b/loggibud/v1/baselines/task1/relaxfix.py new file mode 100644 index 0000000..3f14470 --- /dev/null +++ b/loggibud/v1/baselines/task1/relaxfix.py @@ -0,0 +1,31 @@ +import pyomo.environ as pyo +from pyomo.environ import * +from pyomo.opt import SolverFactory + +model = pyo.ConcreteModel() + +model.casa = pyo.Var(bounds=(0,None), domain=Integers) +model.predio = pyo.Var(bounds=(0,None), domain=Integers) + +casas = model.casa +predios = model.predio + +model.obj = pyo.Objective(expr = 3000*casas+5000*predios, sense=maximize) + +model.pedreiro = pyo.Constraint(expr = 2*casas+3*predios <= 30) +model.servente = pyo.Constraint(expr = 4*casas+8*predios <= 70) + +objetive = model.obj +pedreiros = model.pedreiro +serventes = model.servente + +opt = SolverFactory('gurobi') +opt.solve(model) + +model.pprint() +print('================================================') +print('Nº casas', pyo.value(casas)) +print('Nº predios', pyo.value(predios)) +print('Lucro', pyo.value(objetive)) +print('Nº pedreiros', pyo.value(pedreiros)) +print('Nº serventes', pyo.value(serventes)) \ No newline at end of file diff --git a/loggibud/v1/baselines/task2/kmeans_greedy.py b/loggibud/v1/baselines/task2/kmeans_greedy.py index 8c8129a..ebf2c4b 100644 --- a/loggibud/v1/baselines/task2/kmeans_greedy.py +++ b/loggibud/v1/baselines/task2/kmeans_greedy.py @@ -1,240 +1,240 @@ -""" -Splits deliveries into regions using a K-Means algorithm. Greedly insert deliveries into -vehicles within a region always assigning the demand to the most constrained vehicle from -the region. 
-""" - -import logging -import os -from dataclasses import dataclass -from typing import Optional, List, Dict -from multiprocessing import Pool -from argparse import ArgumentParser -from pathlib import Path - -import numpy as np -from sklearn.cluster import KMeans -from tqdm import tqdm - -from loggibud.v1.types import ( - Delivery, - CVRPInstance, - CVRPSolution, - CVRPSolutionVehicle, -) -from loggibud.v1.baselines.shared.ortools import ( - solve as ortools_solve, - ORToolsParams, -) - -logger = logging.getLogger(__name__) - - -@dataclass -class KMeansGreedyParams: - fixed_num_clusters: Optional[int] = None - variable_num_clusters: Optional[int] = None - seed: int = 0 - ortools_tsp_params: Optional[ORToolsParams] = None - - @classmethod - def get_baseline(cls): - return cls( - fixed_num_clusters=150, - ortools_tsp_params=ORToolsParams( - max_vehicles=1, - time_limit_ms=1_000, - ), - ) - - -@dataclass -class KMeansGreedyModel: - params: KMeansGreedyParams - clustering: KMeans - subinstance: Optional[CVRPInstance] = None - cluster_subsolutions: Optional[Dict[int, List[CVRPSolutionVehicle]]] = None - - -def pretrain( - instances: List[CVRPInstance], params: Optional[KMeansGreedyParams] = None -) -> KMeansGreedyModel: - params = params or KMeansGreedyParams.get_baseline() - - points = np.array( - [ - [d.point.lng, d.point.lat] - for instance in instances - for d in instance.deliveries - ] - ) - - num_deliveries = len(points) - num_clusters = int( - params.fixed_num_clusters - or np.ceil( - num_deliveries / (params.variable_num_clusters or num_deliveries) - ) - ) - - logger.info(f"Clustering instance into {num_clusters} subinstances") - clustering = KMeans(num_clusters, random_state=params.seed) - clustering.fit(points) - - return KMeansGreedyModel( - params=params, - clustering=clustering, - ) - - -def finetune( - model: KMeansGreedyModel, instance: CVRPInstance -) -> KMeansGreedyModel: - """Prepare the model for one particular instance.""" - - return KMeansGreedyModel( - params=model.params, - clustering=model.clustering, - cluster_subsolutions={ - i: [] for i in range(model.clustering.n_clusters) - }, - # Just fill some random instance. - subinstance=instance, - ) - - -def route(model: KMeansGreedyModel, delivery: Delivery) -> KMeansGreedyModel: - """Route a single delivery using the model instance.""" - - cluster = model.clustering.predict( - [[delivery.point.lng, delivery.point.lat]] - )[0] - - subsolution = model.cluster_subsolutions[cluster] - - def is_feasible(route): - return ( - route.occupation + delivery.size - < model.subinstance.vehicle_capacity - ) - - # TODO: We could make this method faster by using a route size table, but seems a bit - # overkill since it's not a bottleneck. - feasible_routes = [ - (route_idx, route) - for route_idx, route in enumerate(subsolution) - if is_feasible(route) - ] - - if feasible_routes: - route_idx, route = max(feasible_routes, key=lambda v: v[1].occupation) - - else: - route = CVRPSolutionVehicle( - origin=model.subinstance.origin, deliveries=[] - ) - subsolution.append(route) - route_idx = len(subsolution) - 1 - - route.deliveries.append(delivery) - subsolution[route_idx] = route - - return model - - -def finish(instance: CVRPInstance, model: KMeansGreedyModel) -> CVRPSolution: - - subinstances = [ - CVRPInstance( - name="", - region="", - deliveries=vehicle.deliveries, - origin=vehicle.origin, - vehicle_capacity=3 * instance.vehicle_capacity, # More relaxed. 
- ) - for idx, subinstance in enumerate(model.cluster_subsolutions.values()) - for vehicle in subinstance - ] - - logger.info("Reordering routes.") - subsolutions = [ - ortools_solve(subinstance, model.params.ortools_tsp_params) - for subinstance in subinstances - ] - - return CVRPSolution( - name=instance.name, - vehicles=[ - v for subsolution in subsolutions for v in subsolution.vehicles - ], - ) - - -def solve_instance( - model: KMeansGreedyModel, instance: CVRPInstance -) -> CVRPSolution: - """Solve an instance dinamically using a solver model""" - logger.info("Finetunning on evaluation instance.") - model_finetuned = finetune(model, instance) - - logger.info("Starting to dynamic route.") - for delivery in tqdm(instance.deliveries): - model_finetuned = route(model_finetuned, delivery) - - return finish(instance, model_finetuned) - - -if __name__ == "__main__": - - logging.basicConfig(level=logging.INFO) - parser = ArgumentParser() - - parser.add_argument("--train_instances", type=str, required=True) - parser.add_argument("--eval_instances", type=str, required=True) - parser.add_argument("--output", type=str) - parser.add_argument("--params", type=str) - - args = parser.parse_args() - - # Load instance and heuristic params. - eval_path = Path(args.eval_instances) - eval_path_dir = eval_path if eval_path.is_dir() else eval_path.parent - eval_files = ( - [eval_path] if eval_path.is_file() else list(eval_path.iterdir()) - ) - - train_path = Path(args.train_instances) - train_path_dir = train_path if train_path.is_dir() else train_path.parent - train_files = ( - [train_path] if train_path.is_file() else list(train_path.iterdir()) - ) - - # params = params_class.from_file(args.params) if args.params else None - - params = None - - output_dir = Path(args.output or ".") - output_dir.mkdir(parents=True, exist_ok=True) - - train_instances = [CVRPInstance.from_file(f) for f in train_files[:240]] - - logger.info("Pretraining on training instances.") - model = pretrain(train_instances) - - def solve(file): - instance = CVRPInstance.from_file(file) - - logger.info("Finetunning on evaluation instance.") - model_finetuned = finetune(model, instance) - - logger.info("Starting to dynamic route.") - for delivery in tqdm(instance.deliveries): - model_finetuned = route(model_finetuned, delivery) - - solution = finish(instance, model_finetuned) - - solution.to_file(output_dir / f"{instance.name}.json") - - # Run solver on multiprocessing pool. - with Pool(os.cpu_count()) as pool: - list(tqdm(pool.imap(solve, eval_files), total=len(eval_files))) +""" +Splits deliveries into regions using a K-Means algorithm. Greedly insert deliveries into +vehicles within a region always assigning the demand to the most constrained vehicle from +the region. 
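+
+E.g. with open routes at occupations 7 and 5 (capacity 10), a size-2 delivery
+goes to the route at 7, the most constrained vehicle that still fits it.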
+""" + +import logging +import os +from dataclasses import dataclass +from typing import Optional, List, Dict +from multiprocessing import Pool +from argparse import ArgumentParser +from pathlib import Path + +import numpy as np +from sklearn.cluster import KMeans +from tqdm import tqdm + +from loggibud.v1.types import ( + Delivery, + CVRPInstance, + CVRPSolution, + CVRPSolutionVehicle, +) +from loggibud.v1.baselines.shared.ortools import ( + solve as ortools_solve, + ORToolsParams, +) + +logger = logging.getLogger(__name__) + + +@dataclass +class KMeansGreedyParams: + fixed_num_clusters: Optional[int] = None + variable_num_clusters: Optional[int] = None + seed: int = 0 + ortools_tsp_params: Optional[ORToolsParams] = None + + @classmethod + def get_baseline(cls): + return cls( + fixed_num_clusters=150, + ortools_tsp_params=ORToolsParams( + max_vehicles=1, + time_limit_ms=1_000, + ), + ) + + +@dataclass +class KMeansGreedyModel: + params: KMeansGreedyParams + clustering: KMeans + subinstance: Optional[CVRPInstance] = None + cluster_subsolutions: Optional[Dict[int, List[CVRPSolutionVehicle]]] = None + + +def pretrain( + instances: List[CVRPInstance], params: Optional[KMeansGreedyParams] = None +) -> KMeansGreedyModel: + params = params or KMeansGreedyParams.get_baseline() + + points = np.array( + [ + [d.point.lng, d.point.lat] + for instance in instances + for d in instance.deliveries + ] + ) + + num_deliveries = len(points) + num_clusters = int( + params.fixed_num_clusters + or np.ceil( + num_deliveries / (params.variable_num_clusters or num_deliveries) + ) + ) + + logger.info(f"Clustering instance into {num_clusters} subinstances") + clustering = KMeans(num_clusters, random_state=params.seed) + clustering.fit(points) + + return KMeansGreedyModel( + params=params, + clustering=clustering, + ) + + +def finetune( + model: KMeansGreedyModel, instance: CVRPInstance +) -> KMeansGreedyModel: + """Prepare the model for one particular instance.""" + + return KMeansGreedyModel( + params=model.params, + clustering=model.clustering, + cluster_subsolutions={ + i: [] for i in range(model.clustering.n_clusters) + }, + # Just fill some random instance. + subinstance=instance, + ) + + +def route(model: KMeansGreedyModel, delivery: Delivery) -> KMeansGreedyModel: + """Route a single delivery using the model instance.""" + + cluster = model.clustering.predict( + [[delivery.point.lng, delivery.point.lat]] + )[0] + + subsolution = model.cluster_subsolutions[cluster] + + def is_feasible(route): + return ( + route.occupation + delivery.size + < model.subinstance.vehicle_capacity + ) + + # TODO: We could make this method faster by using a route size table, but seems a bit + # overkill since it's not a bottleneck. + feasible_routes = [ + (route_idx, route) + for route_idx, route in enumerate(subsolution) + if is_feasible(route) + ] + + if feasible_routes: + route_idx, route = max(feasible_routes, key=lambda v: v[1].occupation) + + else: + route = CVRPSolutionVehicle( + origin=model.subinstance.origin, deliveries=[] + ) + subsolution.append(route) + route_idx = len(subsolution) - 1 + + route.deliveries.append(delivery) + subsolution[route_idx] = route + + return model + + +def finish(instance: CVRPInstance, model: KMeansGreedyModel) -> CVRPSolution: + + subinstances = [ + CVRPInstance( + name="", + region="", + deliveries=vehicle.deliveries, + origin=vehicle.origin, + vehicle_capacity=3 * instance.vehicle_capacity, # More relaxed. 
+ ) + for idx, subinstance in enumerate(model.cluster_subsolutions.values()) + for vehicle in subinstance + ] + + logger.info("Reordering routes.") + subsolutions = [ + ortools_solve(subinstance, model.params.ortools_tsp_params) + for subinstance in subinstances + ] + + return CVRPSolution( + name=instance.name, + vehicles=[ + v for subsolution in subsolutions for v in subsolution.vehicles + ], + ) + + +def solve_instance( + model: KMeansGreedyModel, instance: CVRPInstance +) -> CVRPSolution: + """Solve an instance dinamically using a solver model""" + logger.info("Finetunning on evaluation instance.") + model_finetuned = finetune(model, instance) + + logger.info("Starting to dynamic route.") + for delivery in tqdm(instance.deliveries): + model_finetuned = route(model_finetuned, delivery) + + return finish(instance, model_finetuned) + + +if __name__ == "__main__": + + logging.basicConfig(level=logging.INFO) + parser = ArgumentParser() + + parser.add_argument("--train_instances", type=str, required=True) + parser.add_argument("--eval_instances", type=str, required=True) + parser.add_argument("--output", type=str) + parser.add_argument("--params", type=str) + + args = parser.parse_args() + + # Load instance and heuristic params. + eval_path = Path(args.eval_instances) + eval_path_dir = eval_path if eval_path.is_dir() else eval_path.parent + eval_files = ( + [eval_path] if eval_path.is_file() else list(eval_path.iterdir()) + ) + + train_path = Path(args.train_instances) + train_path_dir = train_path if train_path.is_dir() else train_path.parent + train_files = ( + [train_path] if train_path.is_file() else list(train_path.iterdir()) + ) + + # params = params_class.from_file(args.params) if args.params else None + + params = None + + output_dir = Path(args.output or ".") + output_dir.mkdir(parents=True, exist_ok=True) + + train_instances = [CVRPInstance.from_file(f) for f in train_files[:240]] + + logger.info("Pretraining on training instances.") + model = pretrain(train_instances) + + def solve(file): + instance = CVRPInstance.from_file(file) + + logger.info("Finetunning on evaluation instance.") + model_finetuned = finetune(model, instance) + + logger.info("Starting to dynamic route.") + for delivery in tqdm(instance.deliveries): + model_finetuned = route(model_finetuned, delivery) + + solution = finish(instance, model_finetuned) + + solution.to_file(output_dir / f"{instance.name}.json") + + # Run solver on multiprocessing pool. + with Pool(os.cpu_count()) as pool: + list(tqdm(pool.imap(solve, eval_files), total=len(eval_files))) diff --git a/loggibud/v1/baselines/task2/qrp_sweep.py b/loggibud/v1/baselines/task2/qrp_sweep.py index 03aacac..b7a09d8 100644 --- a/loggibud/v1/baselines/task2/qrp_sweep.py +++ b/loggibud/v1/baselines/task2/qrp_sweep.py @@ -1,327 +1,327 @@ -"""qRP-Sweep: Capacitated Region Partitioning with a Sweep method -This method is based on [1] and [2], and has the following simple structure. - -Algorithm ---------- -There is a "planning" phase in which we divide the delivery region into -`num_clusters` sub-regions. Then, in the "execution" phase, we route each -incoming package to its closest sub-region, and add it to a vehicle. This is -repeated until each vehicle is full, when a TSP is solved. - -Besides being an old reference, it became the basis for many recent methods. In -fact, the difference among them is in how we define the sub-regions. According -to [1], the only constraint is that each one has the same probability of having -a new incoming package. 
- -To achieve that, we follow here the sweep method of [2]. Basically, think about -a circle centered at the centroid of the historical deliveries. We can convert -the demands into polar coordinates, each one with angles in the interval -[-pi, pi]. The sweep method consists in spliting this interval into -`n_clusters` sub-intervals, each one having almost the same number of packages. - -Assumptions ------------ -The number `n_clusters` to divide the region may be provided. If not, we choose -as default the maximum number of vehicles we required among each training -instance. This number is estimated as the ratio of total demand and an -individual vehicle's capacity. - -References ----------- -[1] Bertsimas, Dimitris J., and Garrett Van Ryzin. "Stochastic and dynamic -vehicle routing with general demand and interarrival time distributions." -Advances in Applied Probability (1993): 947-978. - -[2] Gillett, Billy E., and Leland R. Miller. "A heuristic algorithm for the -vehicle-dispatch problem." Operations research 22.2 (1974): 340-349. NBR 6023 -""" - -import logging -import os -from argparse import ArgumentParser -from copy import deepcopy -from dataclasses import dataclass -from multiprocessing import Pool -from pathlib import Path -from typing import Dict, List, Optional - -import numpy as np -from tqdm import tqdm - -from loggibud.v1.types import ( - Delivery, - CVRPInstance, - CVRPSolution, - CVRPSolutionVehicle, -) -from loggibud.v1.baselines.shared.ortools import ( - solve as ortools_solve, - ORToolsParams, -) - - -logger = logging.getLogger(__name__) - - -@dataclass -class QRPParams: - num_clusters: Optional[int] = None - ortools_tsp_params: Optional[ORToolsParams] = None - - @classmethod - def get_baseline(cls): - return cls( - ortools_tsp_params=ORToolsParams( - max_vehicles=1, - time_limit_ms=1_000, - ) - ) - - -@dataclass -class QRPModel: - params: QRPParams - cluster_subsolutions: Optional[Dict[int, List[CVRPSolutionVehicle]]] = None - subinstance: Optional[CVRPInstance] = None - # Center of all historical packages, used to translate new deliveries and - # compute their equivalent angles - center: np.ndarray = np.zeros((0, 2)) - # Angle intervals describing each sub-region. They are described by a - # `n_cluster` x 2 array with the form - # [[-pi, angle_1], [angle_1, angle_2], ..., [angle_n, pi]] - angle_intervals: np.ndarray = np.zeros((0, 2)) - - def predict(self, delivery: Delivery) -> int: - """Predict the best subregion for a given delivery - Given a set of subregions as angle intervals of the form - - [[-pi, angle_1], [angle_1, angle_2], ..., [angle_n, pi]] - - this method gets the equivalent angle of an incoming delivery and finds - its appropriate interval. - """ - - point_translated = ( - np.array([delivery.point.lng, delivery.point.lat]) - self.center - ) - angle = np.arctan2(point_translated[1], point_translated[0]) - - # Find the interval where `angle` is greater than the lower limit and - # smaller than the upper one - return int( - np.nonzero( - (angle >= self.angle_intervals[:, 0]) - & (angle < self.angle_intervals[:, 1]) - )[0] - ) - - -def pretrain( - instances: List[CVRPInstance], params: Optional[QRPParams] = None -) -> QRPModel: - """ - Divide the interval [-pi, +pi] in a number of sub-intervals such that each - one has the same number of deliveries. - - The number of slices will be defined by default as the maximum number of - required vehicles among each training instance. - - Notes - ----- - Given a set of coordinates P: - 1. Compute the center of P; - 2. 
Translate the coordinates with respect to this center; - 3. Compute the angle in [-pi, +pi] of each translated coordinate with - respect to a polar system; - 4. Sort the points according to their angle, and divide the final array - in equal slices. The starting angles in each slice represent the - subregions. - """ - params = params or QRPParams() - - points = np.array( - [ - [d.point.lng, d.point.lat] - for instance in instances - for d in instance.deliveries - ] - ) - - # Compute coordinate angles - center = points.mean(axis=0) - points_translated = points - center - angles = np.arctan2(points_translated[:, 1], points_translated[:, 0]) - - # Get number of subregions as the maximum number of vehicles among all - # training instances if no value is provided - def _get_number_of_vehicles(instance: CVRPInstance) -> int: - """Compute required number of vehicles in instance""" - total_demand = sum(delivery.size for delivery in instance.deliveries) - return int(np.ceil(total_demand / instance.vehicle_capacity)) - - num_clusters = params.num_clusters or min( - _get_number_of_vehicles(instance) for instance in instances - ) - - # Determine angle intervals in the form - # [[-pi, angle_1], [angle_1, angle_2], ..., [angle_n, pi]] - # Notice we need to split into `n + 1` stop-points to get `n` clusters - split_indices = np.linspace( - 0, angles.size - 1, num_clusters + 1, dtype=int - ) - sorted_angles = np.sort(angles) - sorted_angles[0] = -np.pi - sorted_angles[-1] = np.pi - angle_intervals = np.vstack( - (sorted_angles[split_indices[:-1]], sorted_angles[split_indices[1:]]) - ).T - - params.num_clusters = num_clusters - return QRPModel( - params=params, - center=center, - angle_intervals=angle_intervals, - ) - - -def finetune(model: QRPModel, instance: CVRPInstance) -> QRPModel: - """Prepare the model for one particular instance.""" - - model_finetuned = deepcopy(model) - model_finetuned.cluster_subsolutions = { - i: [] for i in range(model.params.num_clusters) - } - model_finetuned.subinstance = instance # fill a random subinstance - - return model_finetuned - - -def route(model: QRPModel, delivery: Delivery) -> QRPModel: - """Route a single delivery using the model instance.""" - - cluster = model.predict(delivery) - subsolution = model.cluster_subsolutions[cluster] - - def is_feasible(route): - return ( - route.occupation + delivery.size - < model.subinstance.vehicle_capacity - ) - - # TODO: We could make this method faster by using a route size table, but - # seems a bit overkill since it's not a bottleneck. - feasible_routes = [ - (route_idx, route) - for route_idx, route in enumerate(subsolution) - if is_feasible(route) - ] - - if feasible_routes: - route_idx, route = max(feasible_routes, key=lambda v: v[1].occupation) - else: - route = CVRPSolutionVehicle( - origin=model.subinstance.origin, deliveries=[] - ) - subsolution.append(route) - route_idx = len(subsolution) - 1 - - route.deliveries.append(delivery) - subsolution[route_idx] = route - - return model - - -def finish(instance: CVRPInstance, model: QRPModel) -> CVRPSolution: - - subinstances = [ - CVRPInstance( - name="", - region="", - deliveries=vehicle.deliveries, - origin=vehicle.origin, - vehicle_capacity=3 * instance.vehicle_capacity, # More relaxed. 
- ) - for idx, subinstance in enumerate(model.cluster_subsolutions.values()) - for vehicle in subinstance - ] - - logger.info("Reordering routes.") - subsolutions = [ - ortools_solve(subinstance, model.params.ortools_tsp_params) - for subinstance in subinstances - ] - - return CVRPSolution( - name=instance.name, - vehicles=[ - v for subsolution in subsolutions for v in subsolution.vehicles - ], - ) - - -def solve_instance(model: QRPModel, instance: CVRPInstance) -> CVRPSolution: - """Solve an instance dinamically using a solver model""" - logger.info("Finetunning on evaluation instance.") - model_finetuned = finetune(model, instance) - - logger.info("Starting to dynamic route.") - for delivery in tqdm(instance.deliveries): - model_finetuned = route(model_finetuned, delivery) - - return finish(instance, model_finetuned) - - -if __name__ == "__main__": - - logging.basicConfig(level=logging.INFO) - parser = ArgumentParser() - - parser.add_argument("--train_instances", type=str, required=True) - parser.add_argument("--eval_instances", type=str, required=True) - parser.add_argument("--output", type=str) - parser.add_argument("--params", type=str) - - args = parser.parse_args() - - # Load instance and heuristic params. - eval_path = Path(args.eval_instances) - eval_path_dir = eval_path if eval_path.is_dir() else eval_path.parent - eval_files = ( - [eval_path] if eval_path.is_file() else list(eval_path.iterdir()) - ) - - train_path = Path(args.train_instances) - train_path_dir = train_path if train_path.is_dir() else train_path.parent - train_files = ( - [train_path] if train_path.is_file() else list(train_path.iterdir()) - ) - - # params = params_class.from_file(args.params) if args.params else None - - params = None - - output_dir = Path(args.output or ".") - output_dir.mkdir(parents=True, exist_ok=True) - - train_instances = [CVRPInstance.from_file(f) for f in train_files[:240]] - - logger.info("Pretraining on training instances.") - model = pretrain(train_instances) - - def solve(file): - instance = CVRPInstance.from_file(file) - - logger.info("Finetunning on evaluation instance.") - model_finetuned = finetune(model, instance) - - logger.info("Starting to dynamic route.") - for delivery in tqdm(instance.deliveries): - model_finetuned = route(model_finetuned, delivery) - - solution = finish(instance, model_finetuned) - - solution.to_file(output_dir / f"{instance.name}.json") - - # Run solver on multiprocessing pool. - with Pool(os.cpu_count()) as pool: - list(tqdm(pool.imap(solve, eval_files), total=len(eval_files))) +"""qRP-Sweep: Capacitated Region Partitioning with a Sweep method +This method is based on [1] and [2], and has the following simple structure. + +Algorithm +--------- +There is a "planning" phase in which we divide the delivery region into +`num_clusters` sub-regions. Then, in the "execution" phase, we route each +incoming package to its closest sub-region, and add it to a vehicle. This is +repeated until each vehicle is full, when a TSP is solved. + +Besides being an old reference, it became the basis for many recent methods. In +fact, the difference among them is in how we define the sub-regions. According +to [1], the only constraint is that each one has the same probability of having +a new incoming package. + +To achieve that, we follow here the sweep method of [2]. Basically, think about +a circle centered at the centroid of the historical deliveries. We can convert +the demands into polar coordinates, each one with angles in the interval +[-pi, pi]. 
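A minimal sketch of that polar conversion, assuming only NumPy and made-up `(lng, lat)` pairs in place of the patch's `Delivery` objects:

```python
import numpy as np

# Hypothetical historical deliveries as (lng, lat) pairs.
points = np.array([[-43.20, -22.90], [-43.30, -22.80], [-43.10, -22.95]])

# Translate to the centroid, then take the polar angle of each point.
center = points.mean(axis=0)
translated = points - center
angles = np.arctan2(translated[:, 1], translated[:, 0])  # all in [-pi, pi]
```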
The sweep method consists in splitting this interval into
+`n_clusters` sub-intervals, each one containing approximately the same number
+of packages.
+
+Assumptions
+-----------
+The number `n_clusters` of sub-regions may be provided. If not, we default to
+the maximum number of vehicles required among the training instances. This
+number is estimated as the ratio of the total demand to an individual
+vehicle's capacity.
+
+References
+----------
+[1] Bertsimas, Dimitris J., and Garrett Van Ryzin. "Stochastic and dynamic
+vehicle routing with general demand and interarrival time distributions."
+Advances in Applied Probability (1993): 947-978.
+
+[2] Gillett, Billy E., and Leland R. Miller. "A heuristic algorithm for the
+vehicle-dispatch problem." Operations Research 22.2 (1974): 340-349.
+"""
+
+import logging
+import os
+from argparse import ArgumentParser
+from copy import deepcopy
+from dataclasses import dataclass
+from multiprocessing import Pool
+from pathlib import Path
+from typing import Dict, List, Optional
+
+import numpy as np
+from tqdm import tqdm
+
+from loggibud.v1.types import (
+    Delivery,
+    CVRPInstance,
+    CVRPSolution,
+    CVRPSolutionVehicle,
+)
+from loggibud.v1.baselines.shared.ortools import (
+    solve as ortools_solve,
+    ORToolsParams,
+)
+
+
+logger = logging.getLogger(__name__)
+
+
+@dataclass
+class QRPParams:
+    num_clusters: Optional[int] = None
+    ortools_tsp_params: Optional[ORToolsParams] = None
+
+    @classmethod
+    def get_baseline(cls):
+        return cls(
+            ortools_tsp_params=ORToolsParams(
+                max_vehicles=1,
+                time_limit_ms=1_000,
+            )
+        )
+
+
+@dataclass
+class QRPModel:
+    params: QRPParams
+    cluster_subsolutions: Optional[Dict[int, List[CVRPSolutionVehicle]]] = None
+    subinstance: Optional[CVRPInstance] = None
+    # Center of all historical packages, used to translate new deliveries and
+    # compute their equivalent angles
+    center: np.ndarray = np.zeros((0, 2))
+    # Angle intervals describing each sub-region. They are described by a
+    # `n_clusters` x 2 array with the form
+    # [[-pi, angle_1], [angle_1, angle_2], ..., [angle_n, pi]]
+    angle_intervals: np.ndarray = np.zeros((0, 2))
+
+    def predict(self, delivery: Delivery) -> int:
+        """Predict the best subregion for a given delivery
+        Given a set of subregions as angle intervals of the form
+
+            [[-pi, angle_1], [angle_1, angle_2], ..., [angle_n, pi]]
+
+        this method gets the equivalent angle of an incoming delivery and finds
+        its appropriate interval.
+        """
+
+        point_translated = (
+            np.array([delivery.point.lng, delivery.point.lat]) - self.center
+        )
+        angle = np.arctan2(point_translated[1], point_translated[0])
+
+        # Find the interval where `angle` is greater than the lower limit and
+        # smaller than the upper one
+        return int(
+            np.nonzero(
+                (angle >= self.angle_intervals[:, 0])
+                & (angle < self.angle_intervals[:, 1])
+            )[0]
+        )
+
+
+def pretrain(
+    instances: List[CVRPInstance], params: Optional[QRPParams] = None
+) -> QRPModel:
+    """
+    Divide the interval [-pi, +pi] in a number of sub-intervals such that each
+    one has the same number of deliveries.
+
+    The number of slices is defined by default as the maximum number of
+    required vehicles among the training instances.
+
+    Notes
+    -----
+    Given a set of coordinates P:
+        1. Compute the center of P;
+        2. Translate the coordinates with respect to this center;
+        3. Compute the angle in [-pi, +pi] of each translated coordinate with
+        respect to a polar system;
+        4. Sort the points according to their angle, and divide the final array
+        in equal slices.
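A self-contained sketch of steps 1-4, mirroring the splitting logic that `pretrain` implements below (the helper name `sweep_intervals` is ours, not the patch's):

```python
import numpy as np

def sweep_intervals(angles: np.ndarray, n_clusters: int) -> np.ndarray:
    """Split [-pi, pi] into n_clusters intervals with ~equal point counts."""
    sorted_angles = np.sort(angles)
    # n + 1 boundaries taken at evenly spaced ranks of the sorted angles.
    split_indices = np.linspace(0, angles.size - 1, n_clusters + 1, dtype=int)
    bounds = sorted_angles[split_indices]
    bounds[0], bounds[-1] = -np.pi, np.pi  # cover the whole circle
    return np.vstack((bounds[:-1], bounds[1:])).T  # shape (n_clusters, 2)
```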
The starting angles in each slice represent the subregions.
+    """
+    params = params or QRPParams()
+
+    points = np.array(
+        [
+            [d.point.lng, d.point.lat]
+            for instance in instances
+            for d in instance.deliveries
+        ]
+    )
+
+    # Compute coordinate angles
+    center = points.mean(axis=0)
+    points_translated = points - center
+    angles = np.arctan2(points_translated[:, 1], points_translated[:, 0])
+
+    # Get number of subregions as the maximum number of vehicles among all
+    # training instances if no value is provided
+    def _get_number_of_vehicles(instance: CVRPInstance) -> int:
+        """Compute required number of vehicles in instance"""
+        total_demand = sum(delivery.size for delivery in instance.deliveries)
+        return int(np.ceil(total_demand / instance.vehicle_capacity))
+
+    num_clusters = params.num_clusters or max(
+        _get_number_of_vehicles(instance) for instance in instances
+    )
+
+    # Determine angle intervals in the form
+    # [[-pi, angle_1], [angle_1, angle_2], ..., [angle_n, pi]]
+    # Notice we need to split into `n + 1` stop-points to get `n` clusters
+    split_indices = np.linspace(
+        0, angles.size - 1, num_clusters + 1, dtype=int
+    )
+    sorted_angles = np.sort(angles)
+    sorted_angles[0] = -np.pi
+    sorted_angles[-1] = np.pi
+    angle_intervals = np.vstack(
+        (sorted_angles[split_indices[:-1]], sorted_angles[split_indices[1:]])
+    ).T
+
+    params.num_clusters = num_clusters
+    return QRPModel(
+        params=params,
+        center=center,
+        angle_intervals=angle_intervals,
+    )
+
+
+def finetune(model: QRPModel, instance: CVRPInstance) -> QRPModel:
+    """Prepare the model for one particular instance."""
+
+    model_finetuned = deepcopy(model)
+    model_finetuned.cluster_subsolutions = {
+        i: [] for i in range(model.params.num_clusters)
+    }
+    model_finetuned.subinstance = instance  # kept for origin and capacity
+
+    return model_finetuned
+
+
+def route(model: QRPModel, delivery: Delivery) -> QRPModel:
+    """Route a single delivery using the model instance."""
+
+    cluster = model.predict(delivery)
+    subsolution = model.cluster_subsolutions[cluster]
+
+    def is_feasible(route):
+        return (
+            route.occupation + delivery.size
+            < model.subinstance.vehicle_capacity
+        )
+
+    # TODO: We could make this method faster by using a route size table, but
+    # it seems a bit overkill since it's not a bottleneck.
+    feasible_routes = [
+        (route_idx, route)
+        for route_idx, route in enumerate(subsolution)
+        if is_feasible(route)
+    ]
+
+    if feasible_routes:
+        route_idx, route = max(feasible_routes, key=lambda v: v[1].occupation)
+    else:
+        route = CVRPSolutionVehicle(
+            origin=model.subinstance.origin, deliveries=[]
+        )
+        subsolution.append(route)
+        route_idx = len(subsolution) - 1
+
+    route.deliveries.append(delivery)
+    subsolution[route_idx] = route
+
+    return model
+
+
+def finish(instance: CVRPInstance, model: QRPModel) -> CVRPSolution:
+
+    subinstances = [
+        CVRPInstance(
+            name="",
+            region="",
+            deliveries=vehicle.deliveries,
+            origin=vehicle.origin,
+            vehicle_capacity=3 * instance.vehicle_capacity,  # More relaxed.
+        )
+        for idx, subinstance in enumerate(model.cluster_subsolutions.values())
+        for vehicle in subinstance
+    ]
+
+    logger.info("Reordering routes.")
+    subsolutions = [
+        ortools_solve(subinstance, model.params.ortools_tsp_params)
+        for subinstance in subinstances
+    ]
+
+    return CVRPSolution(
+        name=instance.name,
+        vehicles=[
+            v for subsolution in subsolutions for v in subsolution.vehicles
+        ],
+    )
+
+
+def solve_instance(model: QRPModel, instance: CVRPInstance) -> CVRPSolution:
+    """Solve an instance dynamically using a solver model."""
+    logger.info("Fine-tuning on evaluation instance.")
+    model_finetuned = finetune(model, instance)
+
+    logger.info("Starting dynamic routing.")
+    for delivery in tqdm(instance.deliveries):
+        model_finetuned = route(model_finetuned, delivery)
+
+    return finish(instance, model_finetuned)
+
+
+if __name__ == "__main__":
+
+    logging.basicConfig(level=logging.INFO)
+    parser = ArgumentParser()
+
+    parser.add_argument("--train_instances", type=str, required=True)
+    parser.add_argument("--eval_instances", type=str, required=True)
+    parser.add_argument("--output", type=str)
+    parser.add_argument("--params", type=str)
+
+    args = parser.parse_args()
+
+    # Load instance and heuristic params.
+    eval_path = Path(args.eval_instances)
+    eval_path_dir = eval_path if eval_path.is_dir() else eval_path.parent
+    eval_files = (
+        [eval_path] if eval_path.is_file() else list(eval_path.iterdir())
+    )
+
+    train_path = Path(args.train_instances)
+    train_path_dir = train_path if train_path.is_dir() else train_path.parent
+    train_files = (
+        [train_path] if train_path.is_file() else list(train_path.iterdir())
+    )
+
+    # params = params_class.from_file(args.params) if args.params else None
+
+    params = None
+
+    output_dir = Path(args.output or ".")
+    output_dir.mkdir(parents=True, exist_ok=True)
+
+    train_instances = [CVRPInstance.from_file(f) for f in train_files[:240]]
+
+    logger.info("Pretraining on training instances.")
+    model = pretrain(train_instances)
+
+    def solve(file):
+        instance = CVRPInstance.from_file(file)
+
+        logger.info("Fine-tuning on evaluation instance.")
+        model_finetuned = finetune(model, instance)
+
+        logger.info("Starting dynamic routing.")
+        for delivery in tqdm(instance.deliveries):
+            model_finetuned = route(model_finetuned, delivery)
+
+        solution = finish(instance, model_finetuned)
+
+        solution.to_file(output_dir / f"{instance.name}.json")
+
+    # Run solver on multiprocessing pool.
+    with Pool(os.cpu_count()) as pool:
+        list(tqdm(pool.imap(solve, eval_files), total=len(eval_files)))
diff --git a/loggibud/v1/data_conversion.py b/loggibud/v1/data_conversion.py
index 059d44f..e154365 100644
--- a/loggibud/v1/data_conversion.py
+++ b/loggibud/v1/data_conversion.py
@@ -1,64 +1,66 @@
-"""This module is used to convert the instances to and from known formats
-Currently, only TSPLIB is implemented.
-"""
-
-from typing import Optional
-from dataclasses import dataclass
-
-import tsplib95
-import numpy as np
-
-from loggibud.v1.distances import calculate_distance_matrix_m, OSRMConfig
-from loggibud.v1.types import CVRPInstance, JSONDataclassMixin
-
-
-@dataclass
-class TSPLIBConversionParams(JSONDataclassMixin):
-
-    osrm_config: Optional[OSRMConfig] = None
-    """Config for calling OSRM distance service."""
-
-    distance_scaling_factor: int = 10
-    """
-    Scaling factor for distance matrixes. Scaling is required for solvers that
-    operate with integer or fixed point distances.
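The scaling remark above matters because many solvers take integer edge weights only; multiplying before the integer cast keeps some fractional precision. A tiny illustration with made-up distances:

```python
import numpy as np

distance_matrix_m = np.array([[0.0, 123.4], [123.4, 0.0]])

# A factor of 10 preserves one decimal place through the int cast.
scaled = (distance_matrix_m * 10).astype(np.int32)
# -> [[   0, 1234], [1234,    0]]
```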
- """ - - -def to_tsplib( - instance: CVRPInstance, params: Optional[TSPLIBConversionParams] = None -) -> tsplib95.models.StandardProblem: - - params = params or TSPLIBConversionParams() - - num_deliveries = len(instance.deliveries) - - demands_section = { - i: delivery.size - for i, delivery in enumerate(instance.deliveries, start=2) - } - - # Depot demand is always zero. - demands_section[1] = 0 - - locations = [instance.origin] + [ - delivery.point for delivery in instance.deliveries - ] - - distance_matrix = calculate_distance_matrix_m( - locations, config=params.osrm_config - ) - scaled_matrix = distance_matrix * params.distance_scaling_factor - - problem = tsplib95.models.StandardProblem( - name=instance.name, - type="ACVRP", - dimension=num_deliveries + 1, - edge_weight_type="EXPLICIT", - edge_weight_format="FULL_MATRIX", - edge_weights=scaled_matrix.astype(np.int32).tolist(), - demands=demands_section, - capacity=instance.vehicle_capacity, - ) - - return problem +"""This module is used to convert the instances to and from known formats +Currently, only TSPLIB is implemented. +""" + +from typing import Optional +from dataclasses import dataclass + +import tsplib95 +import numpy as np + +from loggibud.v1.distances import calculate_distance_matrix_m, OSRMConfig +from loggibud.v1.types import CVRPInstance, JSONDataclassMixin + + +@dataclass +class TSPLIBConversionParams(JSONDataclassMixin): + + osrm_config: Optional[OSRMConfig] = None + """Config for calling OSRM distance service.""" + + distance_scaling_factor: int = 10 + """ + Scaling factor for distance matrixes. Scaling is required for solvers that + operate with integer or fixed point distances. + """ + + +def to_tsplib( + instance: CVRPInstance, params: Optional[TSPLIBConversionParams] = None +) -> tsplib95.models.StandardProblem: + + params = params or TSPLIBConversionParams() + + num_deliveries = len(instance.deliveries) + + demands_section = { + i: delivery.size + for i, delivery in enumerate(instance.deliveries, start=2) + } + + # Depot demand is always zero. 
+    demands_section[1] = 0
+
+    locations = [instance.origin] + [
+        delivery.point for delivery in instance.deliveries
+    ]
+
+    distance_matrix = calculate_distance_matrix_m(
+        locations, config=params.osrm_config
+    )
+    scaled_matrix = np.asarray(distance_matrix) * params.distance_scaling_factor
+
+    problem = tsplib95.models.StandardProblem(
+        name=instance.name,
+        type="ACVRP",
+        dimension=num_deliveries + 1,
+        edge_weight_type="EXPLICIT",
+        edge_weight_format="FULL_MATRIX",
+        edge_weights=scaled_matrix.astype(np.int32).tolist(),
+        demands=demands_section,
+        capacity=instance.vehicle_capacity,
+    )
+
+    return problem
\ No newline at end of file
diff --git a/loggibud/v1/distances.py b/loggibud/v1/distances.py
index 15f22f3..8f8f09e 100644
--- a/loggibud/v1/distances.py
+++ b/loggibud/v1/distances.py
@@ -1,133 +1,133 @@
-from dataclasses import dataclass
-from typing import Iterable, Optional, Any
-
-import requests
-import numpy as np
-
-from .types import Point
-
-
-EARTH_RADIUS_METERS = 6371000
-
-
-@dataclass
-class OSRMConfig:
-    host: str = "http://localhost:5000"
-    timeout_s: int = 600
-
-
-def calculate_distance_matrix_m(
-    points: Iterable[Point], config: Optional[OSRMConfig] = None
-):
-    config = config or OSRMConfig()
-
-    if len(points) < 2:
-        return 0
-
-    coords_uri = ";".join(
-        ["{},{}".format(point.lng, point.lat) for point in points]
-    )
-
-    response = requests.get(
-        f"{config.host}/table/v1/driving/{coords_uri}?annotations=distance",
-        timeout=config.timeout_s,
-    )
-
-    response.raise_for_status()
-
-    return np.array(response.json()["distances"])
-
-
-def calculate_route_distance_m(
-    points: Iterable[Point], config: Optional[OSRMConfig] = None
-):
-    config = config or OSRMConfig()
-
-    if len(points) < 2:
-        return 0
-
-    coords_uri = ";".join(
-        "{},{}".format(point.lng, point.lat) for point in points
-    )
-
-    response = requests.get(
-        f"{config.host}/route/v1/driving/{coords_uri}?annotations=distance&continue_straight=false",
-        timeout=config.timeout_s,
-    )
-
-    response.raise_for_status()
-
-    return min(r["distance"] for r in response.json()["routes"])
-
-
-def calculate_distance_matrix_great_circle_m(
-    points: Iterable[Point], config: Any = None
-) -> np.ndarray:
-    """Distance matrix using the Great Circle distance
-    This is an Euclidean-like distance but on spheres [1]. In this case it is
-    used to estimate the distance in meters between locations in the Earth.
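Since the docstring cites the third computational formula of the Wikipedia article, here is a scalar sketch of that formula for a single pair of points (illustrative only; the patch's vectorized version follows):

```python
import math

EARTH_RADIUS_METERS = 6371000

def great_circle_m(lat1, lng1, lat2, lng2):
    """Great-circle distance in meters between two (lat, lng) pairs."""
    phi1, phi2 = math.radians(lat1), math.radians(lat2)
    dlambda = math.radians(lng2 - lng1)
    delta_sigma = math.atan2(
        math.sqrt(
            (math.cos(phi2) * math.sin(dlambda)) ** 2
            + (
                math.cos(phi1) * math.sin(phi2)
                - math.sin(phi1) * math.cos(phi2) * math.cos(dlambda)
            )
            ** 2
        ),
        math.sin(phi1) * math.sin(phi2)
        + math.cos(phi1) * math.cos(phi2) * math.cos(dlambda),
    )
    return EARTH_RADIUS_METERS * delta_sigma
```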
- - Parameters - ---------- - points - Iterable with `lat` and `lng` properties with the coordinates of a - delivery - - Returns - ------- - distance_matrix - Array with the (i, j) entry indicating the Great Circle distance (in - meters) between the `i`-th and the `j`-th point - - References - ---------- - [1] https://en.wikipedia.org/wiki/Great-circle_distance - Using the third computational formula - """ - points_rad = np.radians([(point.lat, point.lng) for point in points]) - - delta_lambda = points_rad[:, [1]] - points_rad[:, 1] # (N x M) lng - phi1 = points_rad[:, [0]] # (N x 1) array of source latitudes - phi2 = points_rad[:, 0] # (1 x M) array of destination latitudes - - delta_sigma = np.arctan2( - np.sqrt( - (np.cos(phi2) * np.sin(delta_lambda)) ** 2 - + ( - np.cos(phi1) * np.sin(phi2) - - np.sin(phi1) * np.cos(phi2) * np.cos(delta_lambda) - ) - ** 2 - ), - ( - np.sin(phi1) * np.sin(phi2) - + np.cos(phi1) * np.cos(phi2) * np.cos(delta_lambda) - ), - ) - - return EARTH_RADIUS_METERS * delta_sigma - - -def calculate_route_distance_great_circle_m(points: Iterable[Point]) -> float: - """Compute total distance from moving from starting point to final - The total distance will be from point 0 to 1, from 1 to 2, and so on in - the order provided. - - Parameters - ---------- - points - Iterable with `lat` and `lng` properties with the coordinates of a - delivery - - Returns - ------- - route_distance - Total distance from going to the first point to the next until the last - one - """ - - distance_matrix = calculate_distance_matrix_great_circle_m(points) - - point_indices = np.arange(len(points)) - - return distance_matrix[point_indices[:-1], point_indices[1:]].sum() +from dataclasses import dataclass +from typing import Iterable, Optional, Any + +import requests +import numpy as np + +from .types import Point + + +EARTH_RADIUS_METERS = 6371000 + + +@dataclass +class OSRMConfig: + host: str = "http://localhost:5000" + timeout_s: int = 600 + + +def calculate_distance_matrix_m( + points: Iterable[Point], config: Optional[OSRMConfig] = None +): + config = config or OSRMConfig() + + if len(points) < 2: + return 0 + + coords_uri = ";".join( + ["{},{}".format(point.lng, point.lat) for point in points] + ) + + response = requests.get( + f"{config.host}/table/v1/driving/{coords_uri}?annotations=distance", + timeout=config.timeout_s, + ) + + response.raise_for_status() + + return np.array(response.json()["distances"]) + + +def calculate_route_distance_m( + points: Iterable[Point], config: Optional[OSRMConfig] = None +): + config = config or OSRMConfig() + + if len(points) < 2: + return 0 + + coords_uri = ";".join( + "{},{}".format(point.lng, point.lat) for point in points + ) + + response = requests.get( + f"{config.host}/route/v1/driving/{coords_uri}?annotations=distance&continue_straight=false", + timeout=config.timeout_s, + ) + + response.raise_for_status() + + return min(r["distance"] for r in response.json()["routes"]) + + +def calculate_distance_matrix_great_circle_m( + points: Iterable[Point], config: Any = None +) -> np.ndarray: + """Distance matrix using the Great Circle distance + This is an Euclidean-like distance but on spheres [1]. In this case it is + used to estimate the distance in meters between locations in the Earth. 
+ + Parameters + ---------- + points + Iterable with `lat` and `lng` properties with the coordinates of a + delivery + + Returns + ------- + distance_matrix + Array with the (i, j) entry indicating the Great Circle distance (in + meters) between the `i`-th and the `j`-th point + + References + ---------- + [1] https://en.wikipedia.org/wiki/Great-circle_distance + Using the third computational formula + """ + points_rad = np.radians([(point.lat, point.lng) for point in points]) + + delta_lambda = points_rad[:, [1]] - points_rad[:, 1] # (N x M) lng + phi1 = points_rad[:, [0]] # (N x 1) array of source latitudes + phi2 = points_rad[:, 0] # (1 x M) array of destination latitudes + + delta_sigma = np.arctan2( + np.sqrt( + (np.cos(phi2) * np.sin(delta_lambda)) ** 2 + + ( + np.cos(phi1) * np.sin(phi2) + - np.sin(phi1) * np.cos(phi2) * np.cos(delta_lambda) + ) + ** 2 + ), + ( + np.sin(phi1) * np.sin(phi2) + + np.cos(phi1) * np.cos(phi2) * np.cos(delta_lambda) + ), + ) + + return EARTH_RADIUS_METERS * delta_sigma + + +def calculate_route_distance_great_circle_m(points: Iterable[Point]) -> float: + """Compute total distance from moving from starting point to final + The total distance will be from point 0 to 1, from 1 to 2, and so on in + the order provided. + + Parameters + ---------- + points + Iterable with `lat` and `lng` properties with the coordinates of a + delivery + + Returns + ------- + route_distance + Total distance from going to the first point to the next until the last + one + """ + + distance_matrix = calculate_distance_matrix_great_circle_m(points) + + point_indices = np.arange(len(points)) + + return distance_matrix[point_indices[:-1], point_indices[1:]].sum() diff --git a/loggibud/v1/eval/task1.py b/loggibud/v1/eval/task1.py index 71da512..c8a7238 100644 --- a/loggibud/v1/eval/task1.py +++ b/loggibud/v1/eval/task1.py @@ -1,75 +1,81 @@ -from pathlib import Path -from argparse import ArgumentParser -from typing import Optional - -from ..distances import calculate_route_distance_m, OSRMConfig -from ..types import CVRPInstance, CVRPSolution - - -def evaluate_solution( - instance: CVRPInstance, - solution: CVRPSolution, - config: Optional[OSRMConfig] = None, -) -> float: - - # Check if all demands are present. - solution_demands = set(d for v in solution.vehicles for d in v.deliveries) - assert solution_demands == set(instance.deliveries) - - # Check if max capacity is respected. - max_capacity = max( - sum(d.size for d in v.deliveries) for v in solution.vehicles - ) - assert max_capacity <= instance.vehicle_capacity - - # Check if maximum number of origins is consistent. - origins = set([v.origin for v in solution.vehicles]) - assert len(origins) <= 1 - - route_distances_m = [ - calculate_route_distance_m(v.circuit, config=config) - for v in solution.vehicles - ] - - # Convert to km. 
-    return round(sum(route_distances_m) / 1_000, 4)
-
-
-if __name__ == "__main__":
-    parser = ArgumentParser()
-
-    parser.add_argument("--instances", type=str, required=True)
-    parser.add_argument("--solutions", type=str, required=True)
-
-    args = parser.parse_args()
-
-    instances_path = Path(args.instances)
-    solutions_path = Path(args.solutions)
-
-    if instances_path.is_file() and solutions_path.is_file():
-        instances = {"": CVRPInstance.from_file(instances_path)}
-        solutions = {"": CVRPSolution.from_file(solutions_path)}
-
-    elif instances_path.is_dir() and solutions_path.is_dir():
-        instances = {
-            f.stem: CVRPInstance.from_file(f) for f in instances_path.iterdir()
-        }
-        solutions = {
-            f.stem: CVRPSolution.from_file(f) for f in solutions_path.iterdir()
-        }
-
-    else:
-        raise ValueError("input files do not match, use files or directories.")
-
-    if set(instances) != set(solutions):
-        raise ValueError(
-            "input files do not match, the solutions and instances should be the same."
-        )
-
-    stems = instances.keys()
-
-    results = [
-        evaluate_solution(instances[stem], solutions[stem]) for stem in stems
-    ]
-
-    print(sum(results))
+from pathlib import Path
+from argparse import ArgumentParser
+from typing import Optional
+
+from ..distances import calculate_route_distance_m, OSRMConfig
+from ..types import CVRPInstance, CVRPSolution
+
+
+def evaluate_solution(
+    instance: CVRPInstance,
+    solution: CVRPSolution,
+    config: Optional[OSRMConfig] = None,
+) -> float:
+
+    # Check if all demands are present.
+    solution_demands = set(d for v in solution.vehicles for d in v.deliveries)
+    assert solution_demands == set(instance.deliveries)
+
+    # Check if max capacity is respected.
+    max_capacity = max(
+        sum(d.size for d in v.deliveries) for v in solution.vehicles
+    )
+    assert max_capacity <= instance.vehicle_capacity
+
+    # Check if maximum number of origins is consistent.
+    origins = set([v.origin for v in solution.vehicles])
+    assert len(origins) <= 1
+
+    route_distances_m = [
+        calculate_route_distance_m(v.no_return, config=config)
+        for v in solution.vehicles
+    ]
+
+    # Convert to km.
+    return round(sum(route_distances_m) / 1_000, 4)
+
+
+if __name__ == "__main__":
+    parser = ArgumentParser()
+
+    parser.add_argument("--instances", type=str, required=True)
+    parser.add_argument("--solutions", type=str, required=True)
+
+    args = parser.parse_args()
+
+    instances_path = Path(args.instances)
+    solutions_path = Path(args.solutions)
+
+    if instances_path.is_file() and solutions_path.is_file():
+        instances = {"": CVRPInstance.from_file(instances_path)}
+        solutions = {"": CVRPSolution.from_file(solutions_path)}
+
+    elif instances_path.is_dir() and solutions_path.is_dir():
+        instances = {
+            f.stem: CVRPInstance.from_file(f) for f in instances_path.iterdir()
+        }
+        solutions = {
+            f.stem: CVRPSolution.from_file(f) for f in solutions_path.iterdir()
+        }
+
+    else:
+        raise ValueError("input files do not match, use files or directories.")
+
+    if set(instances) != set(solutions):
+        raise ValueError(
+            "input files do not match, the solutions and instances should be the same."
+ ) + + stems = instances.keys() + + results = [ + evaluate_solution(instances[stem], solutions[stem]) for stem in stems + ] + + print(sum(results)) diff --git a/loggibud/v1/instance_generation/README.md b/loggibud/v1/instance_generation/README.md index f02bf73..6743d87 100644 --- a/loggibud/v1/instance_generation/README.md +++ b/loggibud/v1/instance_generation/README.md @@ -1,118 +1,118 @@ -Instance generation -=================== - -This document describes how the data from our instances is synthesized from public data. -The sources of information are IBGE, IPEA, and OpenStreetMaps. - - -## Download public data - -First we need to download the raw data from IBGE, IPEA and (\~350Mb - compressed). If you -are running a UNIX-based system and have `wget` and `unzip`, you can do: - - -```bash -./download.sh - -``` - -If you don't, you can manually download the data through the following links and unzip -them into a `data_raw/` directory: - -* IBGE Census microdata: ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/ -* IBGE Census Geodata (by IPEA): -https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/33census_tract_2010.gpkg - -> OBS: To maintain compliance with the defined file structure, -> the IPEA geographic data files must be renamed for "33.gpkg" format -> (Removing the "census_tract_2010" substring). - -Make sure your final file structure looks like: - -``` -data_raw/ -├── 33.gpkg -├── RJ -│ └── Base informaçoes setores2010 universo RJ -│ ├── CSV -│ │ ├── Basico_RJ.csv -│ │ ├── Domicilio01_RJ.csv -│ │ ├── Domicilio02_RJ.csv -│ │ ├── DomicilioRenda_RJ.csv -│ │ ├── Entorno01_RJ.csv -│ │ ├── Entorno02_RJ.csv -│ │ ├── Entorno03_RJ.csv -│ │ ├── Entorno04_RJ.csv -│ │ ├── Entorno05_RJ.csv -│ │ ├── Pessoa01_RJ.csv -│ │ ├── Pessoa02_RJ.csv -│ │ ├── Pessoa03_RJ.csv -│ │ ├── Pessoa04_RJ.csv -│ │ ├── Pessoa05_RJ.csv -│ │ ├── Pessoa06_RJ.csv -│ │ ├── Pessoa07_RJ.csv -│ │ ├── Pessoa08_RJ.csv -│ │ ├── Pessoa09_RJ.csv -│ │ ├── Pessoa10_RJ.csv -│ │ ├── Pessoa11_RJ.csv -│ │ ├── Pessoa12_RJ.csv -│ │ ├── Pessoa13_RJ.csv -│ │ ├── PessoaRenda_RJ.csv -│ │ ├── Responsavel01_RJ.csv -│ │ ├── Responsavel02_RJ.csv -│ │ └── ResponsavelRenda_RJ.csv -│ └── EXCEL -│ ├── Basico_RJ.xls -│ ├── Domicilio01_RJ.xls -│ ├── Domicilio02_RJ.xls -│ ├── DomicilioRenda_RJ.XLS -│ ├── Entorno01_RJ.XLS -│ ├── Entorno02_RJ.XLS -│ ├── Entorno03_RJ.xls -│ ├── Entorno04_RJ.xls -│ ├── Entorno05_RJ.xls -│ ├── Pessoa01_RJ.xls -│ ├── Pessoa02_RJ.xls -│ ├── Pessoa03_RJ.xls -│ ├── Pessoa04_RJ.xls -│ ├── Pessoa05_RJ.xls -│ ├── Pessoa06_RJ.xls -│ ├── Pessoa07_RJ.xls -│ ├── Pessoa08_RJ.xls -│ ├── Pessoa09_RJ.xls -│ ├── Pessoa10_RJ.xls -│ ├── Pessoa11_RJ.xls -│ ├── Pessoa12_RJ.xls -│ ├── Pessoa13_RJ.xls -│ ├── PessoaRenda_RJ.xls -│ ├── Responsavel01_RJ.xls -│ ├── Responsavel02_RJ.xls -│ └── ResponsavelRenda_RJ.xls -├── RJ_20171016.zip -``` - -# Setup an OSRM distance server - -To be able to compute distances over streets, you should download and run an -OSRM Server based on OpenStreetMaps. This can be done with the following steps: - -1. Download and install docker according to your operational system. -2. Download the [precompiled distance files](https://loggibud.s3.amazonaws.com/osrm/osrm.zip) (5.3Gb compressed, 12.6Gb decompressed). -3. Extract the files into an `osrm` directory. -3. 
Run an OSRM backend container with the following command: - -``` -docker run --rm -t -id \ - --name osrm \ - -p 5000:5000 \ - -v "${PWD}/osrm:/data" \ - osrm/osrm-backend osrm-routed --algorithm ch /data/brazil-201110.osrm --max-table-size 10000 -``` - -# Running the generation pipeline - -Next, you can effectively generate the instances with a simple Python script: - -``` -python -m loggibud.v1.instance_generation.generate -``` +Instance generation +=================== + +This document describes how the data from our instances is synthesized from public data. +The sources of information are IBGE, IPEA, and OpenStreetMaps. + + +## Download public data + +First we need to download the raw data from IBGE, IPEA and (\~350Mb - compressed). If you +are running a UNIX-based system and have `wget` and `unzip`, you can do: + + +```bash +./download.sh + +``` + +If you don't, you can manually download the data through the following links and unzip +them into a `data_raw/` directory: + +* IBGE Census microdata: ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/ +* IBGE Census Geodata (by IPEA): +https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/33census_tract_2010.gpkg + +> OBS: To maintain compliance with the defined file structure, +> the IPEA geographic data files must be renamed for "33.gpkg" format +> (Removing the "census_tract_2010" substring). + +Make sure your final file structure looks like: + +``` +data_raw/ +├── 33.gpkg +├── RJ +│ └── Base informaçoes setores2010 universo RJ +│ ├── CSV +│ │ ├── Basico_RJ.csv +│ │ ├── Domicilio01_RJ.csv +│ │ ├── Domicilio02_RJ.csv +│ │ ├── DomicilioRenda_RJ.csv +│ │ ├── Entorno01_RJ.csv +│ │ ├── Entorno02_RJ.csv +│ │ ├── Entorno03_RJ.csv +│ │ ├── Entorno04_RJ.csv +│ │ ├── Entorno05_RJ.csv +│ │ ├── Pessoa01_RJ.csv +│ │ ├── Pessoa02_RJ.csv +│ │ ├── Pessoa03_RJ.csv +│ │ ├── Pessoa04_RJ.csv +│ │ ├── Pessoa05_RJ.csv +│ │ ├── Pessoa06_RJ.csv +│ │ ├── Pessoa07_RJ.csv +│ │ ├── Pessoa08_RJ.csv +│ │ ├── Pessoa09_RJ.csv +│ │ ├── Pessoa10_RJ.csv +│ │ ├── Pessoa11_RJ.csv +│ │ ├── Pessoa12_RJ.csv +│ │ ├── Pessoa13_RJ.csv +│ │ ├── PessoaRenda_RJ.csv +│ │ ├── Responsavel01_RJ.csv +│ │ ├── Responsavel02_RJ.csv +│ │ └── ResponsavelRenda_RJ.csv +│ └── EXCEL +│ ├── Basico_RJ.xls +│ ├── Domicilio01_RJ.xls +│ ├── Domicilio02_RJ.xls +│ ├── DomicilioRenda_RJ.XLS +│ ├── Entorno01_RJ.XLS +│ ├── Entorno02_RJ.XLS +│ ├── Entorno03_RJ.xls +│ ├── Entorno04_RJ.xls +│ ├── Entorno05_RJ.xls +│ ├── Pessoa01_RJ.xls +│ ├── Pessoa02_RJ.xls +│ ├── Pessoa03_RJ.xls +│ ├── Pessoa04_RJ.xls +│ ├── Pessoa05_RJ.xls +│ ├── Pessoa06_RJ.xls +│ ├── Pessoa07_RJ.xls +│ ├── Pessoa08_RJ.xls +│ ├── Pessoa09_RJ.xls +│ ├── Pessoa10_RJ.xls +│ ├── Pessoa11_RJ.xls +│ ├── Pessoa12_RJ.xls +│ ├── Pessoa13_RJ.xls +│ ├── PessoaRenda_RJ.xls +│ ├── Responsavel01_RJ.xls +│ ├── Responsavel02_RJ.xls +│ └── ResponsavelRenda_RJ.xls +├── RJ_20171016.zip +``` + +# Setup an OSRM distance server + +To be able to compute distances over streets, you should download and run an +OSRM Server based on OpenStreetMaps. This can be done with the following steps: + +1. Download and install docker according to your operational system. +2. Download the [precompiled distance files](https://loggibud.s3.amazonaws.com/osrm/osrm.zip) (5.3Gb compressed, 12.6Gb decompressed). +3. Extract the files into an `osrm` directory. +3. 
Run an OSRM backend container with the following command: + +``` +docker run --rm -t -id \ + --name osrm \ + -p 5000:5000 \ + -v "${PWD}/osrm:/data" \ + osrm/osrm-backend osrm-routed --algorithm ch /data/brazil-201110.osrm --max-table-size 10000 +``` + +# Running the generation pipeline + +Next, you can effectively generate the instances with a simple Python script: + +``` +python -m loggibud.v1.instance_generation.generate +``` diff --git a/loggibud/v1/instance_generation/download.sh b/loggibud/v1/instance_generation/download.sh index 5791994..2670805 100755 --- a/loggibud/v1/instance_generation/download.sh +++ b/loggibud/v1/instance_generation/download.sh @@ -1,24 +1,24 @@ -# Download IBGE census data. -wget -P ./data_raw -nc ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/RJ_20171016.zip -wget -P ./data_raw -nc ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/DF_20171016.zip -wget -P ./data_raw -nc ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/PA_20171016.zip - -# Download geographic data from IPEA -# These files are indexed in "http://www.ipea.gov.br/geobr/metadata/metadata_gpkg.csv" -wget -O ./data_raw/33.gpkg -nc 'https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/33census_tract_2010.gpkg' # RJ -wget -O ./data_raw/53.gpkg -nc 'https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/53census_tract_2010.gpkg' # DF -wget -O ./data_raw/15.gpkg -nc 'https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/15census_tract_2010.gpkg' # PA - -# Unzip. -unzip -d ./data_raw -o ./data_raw/RJ_20171016.zip -unzip -d ./data_raw -o ./data_raw/DF_20171016.zip -unzip -d ./data_raw -o ./data_raw/PA_20171016.zip - -# Ensure the standard of names of the directories generated -STATES=('DF' 'PA' 'RJ') -for state in ${STATES[@]}; do - if [[ -e "data_raw/${state}" ]]; then - mv -v "$(find data_raw/${state}/Base*${state} -maxdepth 0)" "data_raw/${state}/Base informaçoes setores2010 universo ${state}" - fi -done - +# Download IBGE census data. +wget -P ./data_raw -nc ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/RJ_20171016.zip +wget -P ./data_raw -nc ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/DF_20171016.zip +wget -P ./data_raw -nc ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/Resultados_do_Universo/Agregados_por_Setores_Censitarios/PA_20171016.zip + +# Download geographic data from IPEA +# These files are indexed in "http://www.ipea.gov.br/geobr/metadata/metadata_gpkg.csv" +wget -O ./data_raw/33.gpkg -nc 'https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/33census_tract_2010.gpkg' # RJ +wget -O ./data_raw/53.gpkg -nc 'https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/53census_tract_2010.gpkg' # DF +wget -O ./data_raw/15.gpkg -nc 'https://www.ipea.gov.br/geobr/data_gpkg/census_tract/2010/15census_tract_2010.gpkg' # PA + +# Unzip. 
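For readers without `wget`/`unzip` (the README above offers manual download as the fallback), a rough cross-platform sketch of the same download-and-extract step in Python, using one of the script's own URLs:

```python
import zipfile
from pathlib import Path
from urllib.request import urlretrieve

# One of the census archives from download.sh; the other states are analogous.
url = (
    "ftp://ftp.ibge.gov.br/Censos/Censo_Demografico_2010/"
    "Resultados_do_Universo/Agregados_por_Setores_Censitarios/RJ_20171016.zip"
)
zip_path = Path("data_raw/RJ_20171016.zip")
zip_path.parent.mkdir(parents=True, exist_ok=True)

if not zip_path.exists():  # mimic wget -nc (no clobber)
    urlretrieve(url, zip_path)

# Mirror the `unzip -d ./data_raw -o` call.
with zipfile.ZipFile(zip_path) as zf:
    zf.extractall("data_raw")
```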
+unzip -d ./data_raw -o ./data_raw/RJ_20171016.zip +unzip -d ./data_raw -o ./data_raw/DF_20171016.zip +unzip -d ./data_raw -o ./data_raw/PA_20171016.zip + +# Ensure the standard of names of the directories generated +STATES=('DF' 'PA' 'RJ') +for state in ${STATES[@]}; do + if [[ -e "data_raw/${state}" ]]; then + mv -v "$(find data_raw/${state}/Base*${state} -maxdepth 0)" "data_raw/${state}/Base informaçoes setores2010 universo ${state}" + fi +done + diff --git a/loggibud/v1/instance_generation/generate.py b/loggibud/v1/instance_generation/generate.py index cad9353..8767750 100644 --- a/loggibud/v1/instance_generation/generate.py +++ b/loggibud/v1/instance_generation/generate.py @@ -1,85 +1,85 @@ -import logging - -from .generators import ( - DeliveryGenerationConfig, - CVRPGenerationConfig, - generate_census_instances, - generate_cvrp_subinstances, -) - -DELIVERY_CONFIGS = { - "rj": DeliveryGenerationConfig( - name="rj", - num_train_instances=90, - num_dev_instances=30, - revenue_income_ratio=1e-4, - num_deliveries_average=28531, - num_deliveries_range=4430, - vehicle_capacity=180, - max_size=10, - max_hubs=7, - save_to="./data/delivery-instances-1.0", - ), - "df": DeliveryGenerationConfig( - name="df", - num_train_instances=90, - num_dev_instances=30, - revenue_income_ratio=1e-4, - num_deliveries_average=9865, - num_deliveries_range=2161, - vehicle_capacity=180, - max_size=10, - max_hubs=3, - save_to="./data/delivery-instances-1.0", - ), - "pa": DeliveryGenerationConfig( - name="pa", - num_train_instances=90, - num_dev_instances=30, - revenue_income_ratio=1e-4, - num_deliveries_average=4510, - num_deliveries_range=956, - vehicle_capacity=180, - max_size=10, - max_hubs=2, - save_to="./data/delivery-instances-1.0", - ), -} - - -CVRP_CONFIGS = { - "rj": CVRPGenerationConfig( - name="rj", - num_hubs=6, - num_clusters=256, - vehicle_capacity=180, - save_to="./data/cvrp-instances-1.0", - ), - "df": CVRPGenerationConfig( - name="df", - num_hubs=3, - num_clusters=256, - vehicle_capacity=180, - save_to="./data/cvrp-instances-1.0", - ), - "pa": CVRPGenerationConfig( - name="pa", - num_hubs=2, - num_clusters=256, - vehicle_capacity=180, - save_to="./data/cvrp-instances-1.0", - ), -} - - -if __name__ == "__main__": - logging.basicConfig(level=logging.INFO) - - for instance in DELIVERY_CONFIGS: - config = DELIVERY_CONFIGS[instance] - delivery_result = generate_census_instances(config) - - cvrp_config = CVRP_CONFIGS.get(instance) - - if cvrp_config: - generate_cvrp_subinstances(cvrp_config, delivery_result) +import logging + +from .generators import ( + DeliveryGenerationConfig, + CVRPGenerationConfig, + generate_census_instances, + generate_cvrp_subinstances, +) + +DELIVERY_CONFIGS = { + "rj": DeliveryGenerationConfig( + name="rj", + num_train_instances=90, + num_dev_instances=30, + revenue_income_ratio=1e-4, + num_deliveries_average=28531, + num_deliveries_range=4430, + vehicle_capacity=180, + max_size=10, + max_hubs=7, + save_to="./data/delivery-instances-1.0", + ), + "df": DeliveryGenerationConfig( + name="df", + num_train_instances=90, + num_dev_instances=30, + revenue_income_ratio=1e-4, + num_deliveries_average=9865, + num_deliveries_range=2161, + vehicle_capacity=180, + max_size=10, + max_hubs=3, + save_to="./data/delivery-instances-1.0", + ), + "pa": DeliveryGenerationConfig( + name="pa", + num_train_instances=90, + num_dev_instances=30, + revenue_income_ratio=1e-4, + num_deliveries_average=4510, + num_deliveries_range=956, + vehicle_capacity=180, + max_size=10, + max_hubs=2, + 
save_to="./data/delivery-instances-1.0", + ), +} + + +CVRP_CONFIGS = { + "rj": CVRPGenerationConfig( + name="rj", + num_hubs=6, + num_clusters=256, + vehicle_capacity=180, + save_to="./data/cvrp-instances-1.0", + ), + "df": CVRPGenerationConfig( + name="df", + num_hubs=3, + num_clusters=256, + vehicle_capacity=180, + save_to="./data/cvrp-instances-1.0", + ), + "pa": CVRPGenerationConfig( + name="pa", + num_hubs=2, + num_clusters=256, + vehicle_capacity=180, + save_to="./data/cvrp-instances-1.0", + ), +} + + +if __name__ == "__main__": + logging.basicConfig(level=logging.INFO) + + for instance in DELIVERY_CONFIGS: + config = DELIVERY_CONFIGS[instance] + delivery_result = generate_census_instances(config) + + cvrp_config = CVRP_CONFIGS.get(instance) + + if cvrp_config: + generate_cvrp_subinstances(cvrp_config, delivery_result) diff --git a/loggibud/v1/instance_generation/generators.py b/loggibud/v1/instance_generation/generators.py index 8f13e34..2df8c33 100644 --- a/loggibud/v1/instance_generation/generators.py +++ b/loggibud/v1/instance_generation/generators.py @@ -1,310 +1,310 @@ -# coding: utf-8 - -import random -import itertools -import json -import logging -from pathlib import Path -from collections import Counter -from dataclasses import dataclass, asdict -from typing import List, Optional - -import numpy as np -import pandas as pd - -from tqdm import tqdm -from shapely.geometry import Point as ShapelyPoint -from sklearn.cluster import MiniBatchKMeans - -from ..distances import calculate_distance_matrix_m -from ..types import Point, Delivery, DeliveryProblemInstance, CVRPInstance -from ..baselines.shared.p_hub import PHubProblem, solve_p_hub -from .preprocessing import prepare_census_data - - -# Create and register a new `tqdm` instance with `pandas` -# (can use tqdm_gui, optional kwargs, etc.) -tqdm.pandas() -logger = logging.getLogger(__name__) - - -def new_id(): - return format(random.getrandbits(128), "x") - - -@dataclass -class DeliveryGenerationConfig: - name: str - num_train_instances: int - num_dev_instances: int - revenue_income_ratio: float - num_deliveries_average: int - num_deliveries_range: int - vehicle_capacity: int - max_size: int - max_hubs: int - seed: int = 0 - save_to: Optional[str] = None - - -@dataclass -class CVRPGenerationConfig: - name: str - num_hubs: int - num_clusters: int - vehicle_capacity: int - seed: int = 0 - save_to: Optional[str] = None - - @classmethod - def get_default(cls): - return cls( - name="rj", - num_hubs=6, - vehicle_capacity=120, - ) - - -@dataclass -class CensusGenerationResult: - name: str - deliveries: List[Delivery] - train_instances: List[DeliveryProblemInstance] - dev_instances: List[DeliveryProblemInstance] - - -@dataclass -class CVRPGenerationResult: - name: str - train_instances: List[CVRPInstance] - dev_instances: List[CVRPInstance] - - -def generate_deliveries( - tract_df: pd.DataFrame, revenue_income_ratio: float, max_size: int -) -> List[Delivery]: - def new_point(polygon): - # Loop until the point matches the poligon. - while True: - # Generate using a uniform distribution inside the bounding box. - minx, miny, maxx, maxy = polygon.bounds - p = ShapelyPoint( - random.uniform(minx, maxx), random.uniform(miny, maxy) - ) - - # If is contained, return. 
- if polygon.contains(p): - return Delivery( - id=new_id(), - point=Point(p.x, p.y), - size=random.randint(1, max_size), - ) - - region_samples = tract_df.progress_apply( - lambda r: [ - new_point(r.geometry) - for i in range(max(1, int(r.total_income * revenue_income_ratio))) - ], - axis=1, - ) - return [p for r in region_samples for p in r] - - -def generate_census_instances( - config: DeliveryGenerationConfig, -) -> CensusGenerationResult: - np.random.seed(config.seed) - random.seed(config.seed) - - logger.info(f"Starting census instance generation for {config.name}.") - - num_instances = config.num_train_instances + config.num_dev_instances - - sizes = ( - np.random.randint( - -config.num_deliveries_range, - config.num_deliveries_range, - size=num_instances, - ) - + config.num_deliveries_average - ) - - # Compute deliveries from demand distribution. - - logger.info("Preprocessing census data.") - tract_df = prepare_census_data(config.name) - - logger.info("Generating census delivery instances.") - deliveries = generate_deliveries( - tract_df, config.revenue_income_ratio, config.max_size - ) - - # Sample deliveries into instances. - instances = [ - DeliveryProblemInstance( - name=f"{config.name}-{i}", - region=config.name, - deliveries=np.random.choice(deliveries, size=size).tolist(), - vehicle_capacity=config.vehicle_capacity, - max_hubs=config.max_hubs, - ) - for i, size in enumerate(sizes) - ] - - # Split train and dev instances. - train_instances = instances[: config.num_train_instances] - dev_instances = instances[config.num_train_instances :] - - if config.save_to is not None: - logger.info(f"Saving instances to {config.save_to}") - - for prefix, instances_subset in ( - ("train", train_instances), - ("dev", dev_instances), - ): - dir_path = Path(f"{config.save_to}/{prefix}/{config.name}") - dir_path.mkdir(parents=True, exist_ok=True) - - for instance in instances_subset: - path = Path(dir_path / f"{instance.name}.json") - with path.open("w") as file: - json.dump(asdict(instance), file) - - return CensusGenerationResult( - name=config.name, - deliveries=deliveries, - train_instances=train_instances, - dev_instances=dev_instances, - ) - - -def generate_cvrp_subinstances( - config: CVRPGenerationConfig, generation: CensusGenerationResult -): - logger.info(f"Starting CVRP subinstance generation for {config.name}.") - np.random.seed(config.seed) - random.seed(config.seed) - - # Merge all train instance deliveries. - logger.info("Starting region clustering.") - clustering_points = np.array( - [ - [d.point.lng, d.point.lat] - for instance in generation.train_instances - for d in instance.deliveries - ] - ) - - # Run k means clustering over the points. - clustering = MiniBatchKMeans(config.num_clusters, random_state=config.seed) - clusters = clustering.fit_predict(clustering_points) - - # Compute the number of deliveries in every cluster. - cluster_weights = Counter(clusters) - demands = np.array( - [cluster_weights[i] for i in range(config.num_clusters)] - ) - - # Compute the street distance between points. - logger.info("Computing distances between clusters.") - distances_matrix = calculate_distance_matrix_m( - [Point(x, y) for x, y in clustering.cluster_centers_] - ) - - # Solve the p-hub location problems between hubs. - logger.info("Solving allocation problem for clusters.") - locations, allocations = solve_p_hub( - PHubProblem( - p=config.num_hubs, - demands=demands, - transport_costs=distances_matrix, - ) - ) - - # Map every cluster into a hub. 
- hub_allocations = { - i: [j for j, a in enumerate(row) if a] - for i, row in enumerate(allocations) - } - - def aggregate_subinstances(instance): - - # Deterministic hub assignment. - cluster_index = clustering.predict( - [[d.point.lng, d.point.lat] for d in instance.deliveries] - ) - - # Group deliveries per cluster. - cluster_deliveries = { - key: [d for _, d in group] - for key, group in itertools.groupby( - sorted( - zip(cluster_index, instance.deliveries), key=lambda v: v[0] - ), - key=lambda v: v[0], - ) - } - - # Aggregate clusters into subinstances according to the hub assignment. - subinstance_deliveries = [ - [ - d - for cluster in clusters - for d in cluster_deliveries.get(cluster, []) - ] - for hub_cluster, clusters in hub_allocations.items() - if clusters - ] - - # Select the hub as one demand from the selected cluster. - subinstance_hubs = [ - Point(*clustering.cluster_centers_[hub_cluster]) - for hub_cluster, clusters in hub_allocations.items() - if clusters - ] - - return [ - CVRPInstance( - name=f"cvrp-{idx}-{instance.name}", - region=f"{config.name}-{idx}", - origin=hub, - deliveries=deliveries, - vehicle_capacity=config.vehicle_capacity, - ) - for idx, (deliveries, hub) in enumerate( - zip(subinstance_deliveries, subinstance_hubs) - ) - ] - - logger.info("Computing train subinstances.") - train_subinstances = [ - subinstance - for instance in tqdm(generation.train_instances) - for subinstance in aggregate_subinstances(instance) - ] - - logger.info("Computing dev subinstances.") - dev_subinstances = [ - subinstance - for instance in tqdm(generation.dev_instances) - for subinstance in aggregate_subinstances(instance) - ] - - if config.save_to is not None: - for prefix, instances_subset in ( - ("train", train_subinstances), - ("dev", dev_subinstances), - ): - - for instance in instances_subset: - dir_path = Path(f"{config.save_to}/{prefix}/{instance.region}") - dir_path.mkdir(parents=True, exist_ok=True) - path = Path(dir_path / f"{instance.name}.json") - with path.open("w") as file: - json.dump(asdict(instance), file) - - return CVRPGenerationResult( - name=config.name, - train_instances=train_subinstances, - dev_instances=dev_subinstances, - ) +# coding: utf-8 + +import random +import itertools +import json +import logging +from pathlib import Path +from collections import Counter +from dataclasses import dataclass, asdict +from typing import List, Optional + +import numpy as np +import pandas as pd + +from tqdm import tqdm +from shapely.geometry import Point as ShapelyPoint +from sklearn.cluster import MiniBatchKMeans + +from ..distances import calculate_distance_matrix_m +from ..types import Point, Delivery, DeliveryProblemInstance, CVRPInstance +from ..baselines.shared.p_hub import PHubProblem, solve_p_hub +from .preprocessing import prepare_census_data + + +# Create and register a new `tqdm` instance with `pandas` +# (can use tqdm_gui, optional kwargs, etc.) 
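For reference, `tqdm.pandas()` is what registers the `progress_apply` method that `generate_deliveries` below relies on; a minimal standalone illustration:

```python
import pandas as pd
from tqdm import tqdm

tqdm.pandas()  # adds DataFrame/Series .progress_apply

df = pd.DataFrame({"x": range(1000)})
squares = df.x.progress_apply(lambda v: v * v)  # same as .apply, plus a bar
```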
+tqdm.pandas() +logger = logging.getLogger(__name__) + + +def new_id(): + return format(random.getrandbits(128), "x") + + +@dataclass +class DeliveryGenerationConfig: + name: str + num_train_instances: int + num_dev_instances: int + revenue_income_ratio: float + num_deliveries_average: int + num_deliveries_range: int + vehicle_capacity: int + max_size: int + max_hubs: int + seed: int = 0 + save_to: Optional[str] = None + + +@dataclass +class CVRPGenerationConfig: + name: str + num_hubs: int + num_clusters: int + vehicle_capacity: int + seed: int = 0 + save_to: Optional[str] = None + + @classmethod + def get_default(cls): + return cls( + name="rj", + num_hubs=6, + vehicle_capacity=120, + ) + + +@dataclass +class CensusGenerationResult: + name: str + deliveries: List[Delivery] + train_instances: List[DeliveryProblemInstance] + dev_instances: List[DeliveryProblemInstance] + + +@dataclass +class CVRPGenerationResult: + name: str + train_instances: List[CVRPInstance] + dev_instances: List[CVRPInstance] + + +def generate_deliveries( + tract_df: pd.DataFrame, revenue_income_ratio: float, max_size: int +) -> List[Delivery]: + def new_point(polygon): + # Loop until the point matches the poligon. + while True: + # Generate using a uniform distribution inside the bounding box. + minx, miny, maxx, maxy = polygon.bounds + p = ShapelyPoint( + random.uniform(minx, maxx), random.uniform(miny, maxy) + ) + + # If is contained, return. + if polygon.contains(p): + return Delivery( + id=new_id(), + point=Point(p.x, p.y), + size=random.randint(1, max_size), + ) + + region_samples = tract_df.progress_apply( + lambda r: [ + new_point(r.geometry) + for i in range(max(1, int(r.total_income * revenue_income_ratio))) + ], + axis=1, + ) + return [p for r in region_samples for p in r] + + +def generate_census_instances( + config: DeliveryGenerationConfig, +) -> CensusGenerationResult: + np.random.seed(config.seed) + random.seed(config.seed) + + logger.info(f"Starting census instance generation for {config.name}.") + + num_instances = config.num_train_instances + config.num_dev_instances + + sizes = ( + np.random.randint( + -config.num_deliveries_range, + config.num_deliveries_range, + size=num_instances, + ) + + config.num_deliveries_average + ) + + # Compute deliveries from demand distribution. + + logger.info("Preprocessing census data.") + tract_df = prepare_census_data(config.name) + + logger.info("Generating census delivery instances.") + deliveries = generate_deliveries( + tract_df, config.revenue_income_ratio, config.max_size + ) + + # Sample deliveries into instances. + instances = [ + DeliveryProblemInstance( + name=f"{config.name}-{i}", + region=config.name, + deliveries=np.random.choice(deliveries, size=size).tolist(), + vehicle_capacity=config.vehicle_capacity, + max_hubs=config.max_hubs, + ) + for i, size in enumerate(sizes) + ] + + # Split train and dev instances. 
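On the size sampling a few lines above: each instance draws its delivery count uniformly from `average ± range`. A hedged sketch using the `rj` values from `generate.py`:

```python
import numpy as np

average, spread = 28531, 4430  # rj values from DELIVERY_CONFIGS
num_instances = 90 + 30  # train + dev

np.random.seed(0)
sizes = np.random.randint(-spread, spread, size=num_instances) + average
# every entry lies in [average - spread, average + spread)
```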
+ train_instances = instances[: config.num_train_instances] + dev_instances = instances[config.num_train_instances :] + + if config.save_to is not None: + logger.info(f"Saving instances to {config.save_to}") + + for prefix, instances_subset in ( + ("train", train_instances), + ("dev", dev_instances), + ): + dir_path = Path(f"{config.save_to}/{prefix}/{config.name}") + dir_path.mkdir(parents=True, exist_ok=True) + + for instance in instances_subset: + path = Path(dir_path / f"{instance.name}.json") + with path.open("w") as file: + json.dump(asdict(instance), file) + + return CensusGenerationResult( + name=config.name, + deliveries=deliveries, + train_instances=train_instances, + dev_instances=dev_instances, + ) + + +def generate_cvrp_subinstances( + config: CVRPGenerationConfig, generation: CensusGenerationResult +): + logger.info(f"Starting CVRP subinstance generation for {config.name}.") + np.random.seed(config.seed) + random.seed(config.seed) + + # Merge all train instance deliveries. + logger.info("Starting region clustering.") + clustering_points = np.array( + [ + [d.point.lng, d.point.lat] + for instance in generation.train_instances + for d in instance.deliveries + ] + ) + + # Run k means clustering over the points. + clustering = MiniBatchKMeans(config.num_clusters, random_state=config.seed) + clusters = clustering.fit_predict(clustering_points) + + # Compute the number of deliveries in every cluster. + cluster_weights = Counter(clusters) + demands = np.array( + [cluster_weights[i] for i in range(config.num_clusters)] + ) + + # Compute the street distance between points. + logger.info("Computing distances between clusters.") + distances_matrix = calculate_distance_matrix_m( + [Point(x, y) for x, y in clustering.cluster_centers_] + ) + + # Solve the p-hub location problems between hubs. + logger.info("Solving allocation problem for clusters.") + locations, allocations = solve_p_hub( + PHubProblem( + p=config.num_hubs, + demands=demands, + transport_costs=distances_matrix, + ) + ) + + # Map every cluster into a hub. + hub_allocations = { + i: [j for j, a in enumerate(row) if a] + for i, row in enumerate(allocations) + } + + def aggregate_subinstances(instance): + + # Deterministic hub assignment. + cluster_index = clustering.predict( + [[d.point.lng, d.point.lat] for d in instance.deliveries] + ) + + # Group deliveries per cluster. + cluster_deliveries = { + key: [d for _, d in group] + for key, group in itertools.groupby( + sorted( + zip(cluster_index, instance.deliveries), key=lambda v: v[0] + ), + key=lambda v: v[0], + ) + } + + # Aggregate clusters into subinstances according to the hub assignment. + subinstance_deliveries = [ + [ + d + for cluster in clusters + for d in cluster_deliveries.get(cluster, []) + ] + for hub_cluster, clusters in hub_allocations.items() + if clusters + ] + + # Select the hub as one demand from the selected cluster. 
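One subtlety in the per-cluster grouping above: `itertools.groupby` only merges *consecutive* equal keys, which is why the `(cluster, delivery)` pairs are sorted first. A minimal demonstration:

```python
import itertools

pairs = [(1, "a"), (0, "b"), (1, "c")]

grouped = {
    key: [d for _, d in group]
    for key, group in itertools.groupby(
        sorted(pairs, key=lambda v: v[0]), key=lambda v: v[0]
    )
}
# -> {0: ["b"], 1: ["a", "c"]}; without the sort, key 1 would split in two.
```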
+ subinstance_hubs = [ + Point(*clustering.cluster_centers_[hub_cluster]) + for hub_cluster, clusters in hub_allocations.items() + if clusters + ] + + return [ + CVRPInstance( + name=f"cvrp-{idx}-{instance.name}", + region=f"{config.name}-{idx}", + origin=hub, + deliveries=deliveries, + vehicle_capacity=config.vehicle_capacity, + ) + for idx, (deliveries, hub) in enumerate( + zip(subinstance_deliveries, subinstance_hubs) + ) + ] + + logger.info("Computing train subinstances.") + train_subinstances = [ + subinstance + for instance in tqdm(generation.train_instances) + for subinstance in aggregate_subinstances(instance) + ] + + logger.info("Computing dev subinstances.") + dev_subinstances = [ + subinstance + for instance in tqdm(generation.dev_instances) + for subinstance in aggregate_subinstances(instance) + ] + + if config.save_to is not None: + for prefix, instances_subset in ( + ("train", train_subinstances), + ("dev", dev_subinstances), + ): + + for instance in instances_subset: + dir_path = Path(f"{config.save_to}/{prefix}/{instance.region}") + dir_path.mkdir(parents=True, exist_ok=True) + path = Path(dir_path / f"{instance.name}.json") + with path.open("w") as file: + json.dump(asdict(instance), file) + + return CVRPGenerationResult( + name=config.name, + train_instances=train_subinstances, + dev_instances=dev_subinstances, + ) diff --git a/loggibud/v1/instance_generation/preprocessing.py b/loggibud/v1/instance_generation/preprocessing.py index 3fa8630..c703ab8 100644 --- a/loggibud/v1/instance_generation/preprocessing.py +++ b/loggibud/v1/instance_generation/preprocessing.py @@ -1,106 +1,106 @@ -import os - -import pandas as pd -import geopandas as gpd - - -BASE_PATH = os.path.dirname(os.path.realpath(__file__)) - - -CENSUS_INCOME_FILES = { - "rj": f"{BASE_PATH}/../../../data_raw/RJ/Base informaçoes setores2010 universo RJ/CSV/DomicilioRenda_RJ.csv", - "df": f"{BASE_PATH}/../../../data_raw/DF/Base informaçoes setores2010 universo DF/CSV/DomicilioRenda_DF.csv", - "pa": f"{BASE_PATH}/../../../data_raw/PA/Base informaçoes setores2010 universo PA/CSV/DomicilioRenda_PA.csv", -} - -CENSUS_POLYGON_FILES = { - "rj": f"{BASE_PATH}/../../../data_raw/33.gpkg", - "df": f"{BASE_PATH}/../../../data_raw/53.gpkg", - "pa": f"{BASE_PATH}/../../../data_raw/15.gpkg", -} - -MUNICIPALITIES = { - "rj": { - "rio de janeiro", - "niterói", - "duque de caxias", - "nova iguaçu", - "itaboraí", - "queimados", - "são gonçalo", - "belford roxo", - "nilópolis", - "são joão de meriti", - "mesquita", - }, - "df": { - "brasília", - }, - "pa": { - "belém", - "ananindeua", - "benevides", - "castanhal", - "marituba", - "santa bárbara do pará", - "santa isabel do pará", - }, -} - -INSTANCE_UF = { - "rj": "rj", - "df": "df", - "pa": "pa", -} - - -def load_income_per_sector(uf): - def int_or_zero(s): - # There are a few occurrences of "X" in some numerical columns. - try: - return int(s) - except ValueError: - return 0 - - census_income_df = pd.read_csv( - CENSUS_INCOME_FILES[uf], - sep=";", - encoding="iso-8859-1", - decimal=",", - ) - - # Sector code to string. - census_income_df["code_tract"] = census_income_df.Cod_setor.apply( - lambda s: str(s) - ) - - # Total income (V002) to int removing empty fields. - census_income_df["total_income"] = census_income_df.V002.apply(int_or_zero) - - # Drop all other fields. - return census_income_df[["code_tract", "total_income"]] - - -def load_geodata_per_sector(uf): - - # Read gpkg file using GeoPandas. 
- census_geo_df = gpd.read_file(CENSUS_POLYGON_FILES[uf]) - - return census_geo_df - - -def prepare_census_data(instance_name): - if instance_name not in INSTANCE_UF: - raise ValueError("Invalid instance identifier. Is it configured?") - - census_geo_df = load_geodata_per_sector(INSTANCE_UF[instance_name]) - census_income_df = load_income_per_sector(INSTANCE_UF[instance_name]) - - tract_df = pd.merge( - left=census_geo_df, right=census_income_df, on="code_tract" - ) - - municipalities = MUNICIPALITIES[instance_name] - tract_df = tract_df[tract_df.name_muni.str.lower().isin(municipalities)] - - return tract_df +import os + +import pandas as pd +import geopandas as gpd + + +BASE_PATH = os.path.dirname(os.path.realpath(__file__)) + + +CENSUS_INCOME_FILES = { + "rj": f"{BASE_PATH}/../../../data_raw/RJ/Base informaçoes setores2010 universo RJ/CSV/DomicilioRenda_RJ.csv", + "df": f"{BASE_PATH}/../../../data_raw/DF/Base informaçoes setores2010 universo DF/CSV/DomicilioRenda_DF.csv", + "pa": f"{BASE_PATH}/../../../data_raw/PA/Base informaçoes setores2010 universo PA/CSV/DomicilioRenda_PA.csv", +} + +CENSUS_POLYGON_FILES = { + "rj": f"{BASE_PATH}/../../../data_raw/33.gpkg", + "df": f"{BASE_PATH}/../../../data_raw/53.gpkg", + "pa": f"{BASE_PATH}/../../../data_raw/15.gpkg", +} + +MUNICIPALITIES = { + "rj": { + "rio de janeiro", + "niterói", + "duque de caxias", + "nova iguaçu", + "itaboraí", + "queimados", + "são gonçalo", + "belford roxo", + "nilópolis", + "são joão de meriti", + "mesquita", + }, + "df": { + "brasília", + }, + "pa": { + "belém", + "ananindeua", + "benevides", + "castanhal", + "marituba", + "santa bárbara do pará", + "santa isabel do pará", + }, +} + +INSTANCE_UF = { + "rj": "rj", + "df": "df", + "pa": "pa", +} + + +def load_income_per_sector(uf): + def int_or_zero(s): + # There are a few occurrences of "X" in some numerical columns. + try: + return int(s) + except ValueError: + return 0 + + census_income_df = pd.read_csv( + CENSUS_INCOME_FILES[uf], + sep=";", + encoding="iso-8859-1", + decimal=",", + ) + + # Sector code to string. + census_income_df["code_tract"] = census_income_df.Cod_setor.apply( + lambda s: str(s) + ) + + # Total income (V002) to int removing empty fields. + census_income_df["total_income"] = census_income_df.V002.apply(int_or_zero) + + # Drop all other fields. + return census_income_df[["code_tract", "total_income"]] + + +def load_geodata_per_sector(uf): + + # Read gpkg file using GeoPandas. + census_geo_df = gpd.read_file(CENSUS_POLYGON_FILES[uf]) + + return census_geo_df + + +def prepare_census_data(instance_name): + if instance_name not in INSTANCE_UF: + raise ValueError("Invalid instance identifier. 
Is it configured?") + + census_geo_df = load_geodata_per_sector(INSTANCE_UF[instance_name]) + census_income_df = load_income_per_sector(INSTANCE_UF[instance_name]) + + tract_df = pd.merge( + left=census_geo_df, right=census_income_df, on="code_tract" + ) + + municipalities = MUNICIPALITIES[instance_name] + tract_df = tract_df[tract_df.name_muni.str.lower().isin(municipalities)] + + return tract_df diff --git a/loggibud/v1/plotting/plot_instance.py b/loggibud/v1/plotting/plot_instance.py index 032d69c..5248f44 100644 --- a/loggibud/v1/plotting/plot_instance.py +++ b/loggibud/v1/plotting/plot_instance.py @@ -1,76 +1,76 @@ -import logging -from argparse import ArgumentParser - -import numpy as np -import folium - -from ..types import CVRPInstance, DeliveryProblemInstance - - -logger = logging.getLogger(__name__) - - -def plot_cvrp_instance(instance: CVRPInstance): - - origin = instance.origin - points = [delivery.point for delivery in instance.deliveries] - - # create a map - m = folium.Map( - location=(origin.lat, origin.lng), - zoom_start=12, - tiles="cartodbpositron", - ) - - for point in points: - folium.CircleMarker( - [point.lat, point.lng], color="blue", radius=1, weight=1 - ).add_to(m) - - folium.CircleMarker( - [origin.lat, origin.lng], color="red", radius=3, weight=5 - ).add_to(m) - - return m - - -def plot_delivery_instance(instance: DeliveryProblemInstance): - - points = [delivery.point for delivery in instance.deliveries] - center_lat = np.mean([p.lat for p in points]) - center_lng = np.mean([p.lng for p in points]) - - # create a map - m = folium.Map( - location=(center_lat, center_lng), - zoom_start=12, - tiles="cartodbpositron", - ) - - for point in points: - folium.CircleMarker( - [point.lat, point.lng], color="blue", radius=1, weight=1 - ).add_to(m) - - return m - - -if __name__ == "__main__": - parser = ArgumentParser() - - parser.add_argument("--cvrp", type=str) - parser.add_argument("--delivery", type=str) - - args = parser.parse_args() - - # Load instance and heuristic params. 
-
-    if args.cvrp:
-        instance = CVRPInstance.from_file(args.cvrp)
-        m = plot_cvrp_instance(instance)
-
-    elif args.delivery:
-        instance = DeliveryProblemInstance.from_file(args.delivery)
-        m = plot_delivery_instance(instance)
-
-    m.save("map.html")
+import logging
+from argparse import ArgumentParser
+
+import numpy as np
+import folium
+
+from ..types import CVRPInstance, DeliveryProblemInstance
+
+
+logger = logging.getLogger(__name__)
+
+
+def plot_cvrp_instance(instance: CVRPInstance) -> folium.Map:
+    origin = instance.origin
+    points = [delivery.point for delivery in instance.deliveries]
+
+    # Create a map centered at the origin hub.
+    m = folium.Map(
+        location=(origin.lat, origin.lng),
+        zoom_start=12,
+        tiles="cartodbpositron",
+    )
+
+    for point in points:
+        folium.CircleMarker(
+            [point.lat, point.lng], color="blue", radius=1, weight=1
+        ).add_to(m)
+
+    folium.CircleMarker(
+        [origin.lat, origin.lng], color="red", radius=3, weight=5
+    ).add_to(m)
+
+    return m
+
+
+def plot_delivery_instance(instance: DeliveryProblemInstance) -> folium.Map:
+    points = [delivery.point for delivery in instance.deliveries]
+    center_lat = np.mean([p.lat for p in points])
+    center_lng = np.mean([p.lng for p in points])
+
+    # Create a map centered at the mean of the delivery points.
+    m = folium.Map(
+        location=(center_lat, center_lng),
+        zoom_start=12,
+        tiles="cartodbpositron",
+    )
+
+    for point in points:
+        folium.CircleMarker(
+            [point.lat, point.lng], color="blue", radius=1, weight=1
+        ).add_to(m)
+
+    return m
+
+
+if __name__ == "__main__":
+    parser = ArgumentParser()
+
+    parser.add_argument("--cvrp", type=str)
+    parser.add_argument("--delivery", type=str)
+
+    args = parser.parse_args()
+
+    # Load the instance from the provided path.
+
+    if args.cvrp:
+        instance = CVRPInstance.from_file(args.cvrp)
+        m = plot_cvrp_instance(instance)
+
+    elif args.delivery:
+        instance = DeliveryProblemInstance.from_file(args.delivery)
+        m = plot_delivery_instance(instance)
+
+    else:
+        parser.error("One of --cvrp or --delivery is required.")
+
+    m.save("map.html")
diff --git a/loggibud/v1/plotting/plot_solution.py b/loggibud/v1/plotting/plot_solution.py
index 96cbed5..8fc8389 100644
--- a/loggibud/v1/plotting/plot_solution.py
+++ b/loggibud/v1/plotting/plot_solution.py
@@ -1,163 +1,163 @@
-"""Plots solution routes"""
-from typing import List, Iterable, Optional
-
-import folium
-import numpy as np
-import polyline
-import requests
-
-from loggibud.v1.types import CVRPSolution, Point
-from loggibud.v1.distances import OSRMConfig
-
-
-# All available map colors
-MAP_COLORS = (
-    "black",
-    "blue",
-    "darkred",
-    "purple",
-    "red",
-    "orange",
-    "green",
-    "pink",
-    "darkblue",
-    "beige",
-    "gray",
-    "lightgreen",
-    "lightblue",
-    "lightgray",
-    "cadetblue",
-)
-
-
-def plot_cvrp_solution_routes(
-    solution: CVRPSolution,
-    route_indices_to_plot: Optional[List[int]] = None,
-    config: Optional[OSRMConfig] = None,
-) -> None:
-    """Plot solution routes in a map along the streets
-
-    Parameters
-    ----------
-    solution
-        A solution to any solver with the vehicles routes to plot
-
-    route_indices_to_plot
-        If specified, selects a smaller subset of routes to plot by their
-        indices. 
This can be useful to reduce the clutter in case of a - solution with too many vehicles - - config - OSRM configuration - """ - config = config or OSRMConfig() - - # Initialize map centered at the mean of the origins - origins_mean = np.mean( - [ - (vehicle.origin.lat, vehicle.origin.lng) - for vehicle in solution.vehicles - ], - axis=0, - ) - m = folium.Map( - location=origins_mean, - zoom_start=12, - tiles="cartodbpositron", - ) - - num_vehicles = len(solution.vehicles) - route_indices_to_plot = route_indices_to_plot or range(num_vehicles) - vehicles_subset = [solution.vehicles[i] for i in route_indices_to_plot] - - for i, vehicle in enumerate(vehicles_subset): - vehicle_color = MAP_COLORS[i % len(MAP_COLORS)] - - # Plot origin - origin = (vehicle.origin.lat, vehicle.origin.lng) - folium.CircleMarker(origin, color="red", radius=3, weight=5).add_to(m) - - # Plot street outlines - wiring = _route_wiring(vehicle.circuit, config) - folium.PolyLine( - wiring, color=vehicle_color, weight=1.0, popup=f"Vehicle {i}" - ).add_to(m) - - # Plot the deliveries as regular points - for delivery in vehicle.deliveries: - folium.Circle( - location=(delivery.point.lat, delivery.point.lng), - radius=10, - fill=True, - color=vehicle_color, - popup=( - f"Vehicle {i} ({delivery.point.lat}, {delivery.point.lng})" - ), - ).add_to(m) - - return m - - -def _route_wiring(points: Iterable[Point], config): - coords_uri = ";".join(f"{point.lng},{point.lat}" for point in points) - - response = requests.get( - f"{config.host}/route/v1/driving/{coords_uri}?overview=simplified", - timeout=config.timeout_s, - ) - - data = response.json() - line = data["routes"][0]["geometry"] - - return [(lat, lng) for lat, lng in polyline.decode(line)] - - -def plot_cvrp_solution( - solution: CVRPSolution, route_indices_to_plot: Optional[List[int]] = None -) -> None: - """Plot solution deliveries in a map - This is a simplified version showing only the edges between each delivery. - It does not require an OSRM server configuration. - - Parameters - ---------- - solution - A solution to any solver with the vehicles routes to plot - - route_indices_to_plot - If specified, selects a smaller subset of routes to plot by their - indices. 
This can be useful to reduce the clutter in case of a
-        solution with too many vehicles
-    """
-    # Initialize map centered at the mean of the origins
-    origins_mean = np.mean(
-        [
-            (vehicle.origin.lat, vehicle.origin.lng)
-            for vehicle in solution.vehicles
-        ],
-        axis=0,
-    )
-    m = folium.Map(
-        location=origins_mean,
-        zoom_start=12,
-        tiles="cartodbpositron",
-    )
-
-    num_vehicles = len(solution.vehicles)
-    route_indices_to_plot = route_indices_to_plot or range(num_vehicles)
-    vehicles_subset = [solution.vehicles[i] for i in route_indices_to_plot]
-
-    for i, vehicle in enumerate(vehicles_subset):
-        origin = (vehicle.origin.lat, vehicle.origin.lng)
-        folium.CircleMarker(origin, color="red", radius=3, weight=5).add_to(m)
-
-        vehicle_color = MAP_COLORS[i % len(MAP_COLORS)]
-        vehicle_coords = [(point.lat, point.lng) for point in vehicle.circuit]
-        folium.Polygon(
-            vehicle_coords,
-            popup=f"Vehicle {i}",
-            color=vehicle_color,
-            weight=1,
-        ).add_to(m)
-
-    return m
+"""Plots solution routes"""
+from typing import List, Iterable, Optional
+
+import folium
+import numpy as np
+import polyline
+import requests
+
+from loggibud.v1.types import CVRPSolution, Point
+from loggibud.v1.distances import OSRMConfig
+
+
+# All available map colors
+MAP_COLORS = (
+    "black",
+    "blue",
+    "darkred",
+    "purple",
+    "red",
+    "orange",
+    "green",
+    "pink",
+    "darkblue",
+    "beige",
+    "gray",
+    "lightgreen",
+    "lightblue",
+    "lightgray",
+    "cadetblue",
+)
+
+
+def plot_cvrp_solution_routes(
+    solution: CVRPSolution,
+    route_indices_to_plot: Optional[List[int]] = None,
+    config: Optional[OSRMConfig] = None,
+) -> folium.Map:
+    """Plot the solution routes on a map along the streets.
+
+    Parameters
+    ----------
+    solution
+        A solution from any solver, with the vehicle routes to plot
+
+    route_indices_to_plot
+        If specified, selects a smaller subset of routes to plot by their
+        indices. This can be useful to reduce the clutter in case of a
+        solution with too many vehicles
+
+    config
+        OSRM configuration
+    """
+    config = config or OSRMConfig()
+
+    # Initialize map centered at the mean of the origins
+    origins_mean = np.mean(
+        [
+            (vehicle.origin.lat, vehicle.origin.lng)
+            for vehicle in solution.vehicles
+        ],
+        axis=0,
+    )
+    m = folium.Map(
+        location=origins_mean,
+        zoom_start=12,
+        tiles="cartodbpositron",
+    )
+
+    num_vehicles = len(solution.vehicles)
+    route_indices_to_plot = route_indices_to_plot or range(num_vehicles)
+    vehicles_subset = [solution.vehicles[i] for i in route_indices_to_plot]
+
+    for i, vehicle in enumerate(vehicles_subset):
+        vehicle_color = MAP_COLORS[i % len(MAP_COLORS)]
+
+        # Plot origin
+        origin = (vehicle.origin.lat, vehicle.origin.lng)
+        folium.CircleMarker(origin, color="red", radius=3, weight=5).add_to(m)
+
+        # Plot street outlines
+        wiring = _route_wiring(vehicle.circuit, config)
+        folium.PolyLine(
+            wiring, color=vehicle_color, weight=1.0, popup=f"Vehicle {i}"
+        ).add_to(m)
+
+        # Plot the deliveries as regular points
+        for delivery in vehicle.deliveries:
+            folium.Circle(
+                location=(delivery.point.lat, delivery.point.lng),
+                radius=10,
+                fill=True,
+                color=vehicle_color,
+                popup=(
+                    f"Vehicle {i} ({delivery.point.lat}, {delivery.point.lng})"
+                ),
+            ).add_to(m)
+
+    return m
+
+
+def _route_wiring(points: Iterable[Point], config):
+    coords_uri = ";".join(f"{point.lng},{point.lat}" for point in points)
+
+    response = requests.get(
+        f"{config.host}/route/v1/driving/{coords_uri}?overview=simplified",
+        timeout=config.timeout_s,
+    )
+
+    data = response.json()
+    line = data["routes"][0]["geometry"]
+
+    return [(lat, lng) for lat, lng in polyline.decode(line)]
+
+
+def plot_cvrp_solution(
+    solution: CVRPSolution, route_indices_to_plot: Optional[List[int]] = None
+) -> folium.Map:
+    """Plot the solution deliveries on a map.
+    This is a simplified version showing only the edges between each delivery.
+    It does not require an OSRM server configuration.
+
+    Parameters
+    ----------
+    solution
+        A solution from any solver, with the vehicle routes to plot
+
+    route_indices_to_plot
+        If specified, selects a smaller subset of routes to plot by their
+        indices. 
This can be useful to reduce the clutter in case of a + solution with too many vehicles + """ + # Initialize map centered at the mean of the origins + origins_mean = np.mean( + [ + (vehicle.origin.lat, vehicle.origin.lng) + for vehicle in solution.vehicles + ], + axis=0, + ) + m = folium.Map( + location=origins_mean, + zoom_start=12, + tiles="cartodbpositron", + ) + + num_vehicles = len(solution.vehicles) + route_indices_to_plot = route_indices_to_plot or range(num_vehicles) + vehicles_subset = [solution.vehicles[i] for i in route_indices_to_plot] + + for i, vehicle in enumerate(vehicles_subset): + origin = (vehicle.origin.lat, vehicle.origin.lng) + folium.CircleMarker(origin, color="red", radius=3, weight=5).add_to(m) + + vehicle_color = MAP_COLORS[i % len(MAP_COLORS)] + vehicle_coords = [(point.lat, point.lng) for point in vehicle.circuit] + folium.Polygon( + vehicle_coords, + popup=f"Vehicle {i}", + color=vehicle_color, + weight=1, + ).add_to(m) + + return m diff --git a/loggibud/v1/types.py b/loggibud/v1/types.py index e3113f2..4360cfb 100644 --- a/loggibud/v1/types.py +++ b/loggibud/v1/types.py @@ -1,118 +1,257 @@ -import json -from dataclasses import dataclass, asdict -from pathlib import Path -from typing import List, Union - -from dacite import from_dict - - -class JSONDataclassMixin: - """Mixin for adding JSON file capabilities to Python dataclasses.""" - - @classmethod - def from_file(cls, path: Union[Path, str]) -> "JSONDataclassMixin": - """Load dataclass instance from provided file path.""" - - with open(path) as f: - data = json.load(f) - - return from_dict(cls, data) - - def to_file(self, path: Union[Path, str]) -> None: - """Save dataclass instance to provided file path.""" - - with open(path, "w") as f: - json.dump(asdict(self), f) - - return - - -@dataclass(unsafe_hash=True) -class Point: - """Point in earth. 
Assumes a geodesical projection.""" - - lng: float - """Longitude (x axis).""" - - lat: float - """Latitude (y axis).""" - - -@dataclass(unsafe_hash=True) -class Delivery: - """A delivery request.""" - - id: str - """Unique id.""" - - point: Point - """Delivery location.""" - - size: int - """Size it occupies in the vehicle (considered 1-D for simplicity).""" - - -@dataclass -class DeliveryProblemInstance(JSONDataclassMixin): - name: str - """Unique name of this instance.""" - - region: str - """Region name.""" - - max_hubs: int - """Maximum number of hubs allowed in the solution.""" - - vehicle_capacity: int - """Maximum sum of sizes per vehicle allowed in the solution.""" - - deliveries: List[Delivery] - """List of deliveries to be solved.""" - - -@dataclass -class CVRPInstance(JSONDataclassMixin): - name: str - """Unique name of this instance.""" - - region: str - """Region name.""" - - origin: Point - """Location of the origin hub.""" - - vehicle_capacity: int - """Maximum sum of sizes per vehicle allowed in the solution.""" - - deliveries: List[Delivery] - """List of deliveries to be solved.""" - - -@dataclass -class CVRPSolutionVehicle: - - origin: Point - """Location of the origin hub.""" - - deliveries: List[Delivery] - """Ordered list of deliveries from the vehicle.""" - - @property - def circuit(self) -> List[Point]: - return ( - [self.origin] + [d.point for d in self.deliveries] + [self.origin] - ) - - @property - def occupation(self) -> int: - return sum([d.size for d in self.deliveries]) - - -@dataclass -class CVRPSolution(JSONDataclassMixin): - name: str - vehicles: List[CVRPSolutionVehicle] - - @property - def deliveries(self): - return [d for v in self.vehicles for d in v.deliveries] +import json +from dataclasses import dataclass, asdict +from pathlib import Path +from typing import List, Union + +from dacite import from_dict + + +class JSONDataclassMixin: + """Mixin for adding JSON file capabilities to Python dataclasses.""" + + @classmethod + def from_file(cls, path: Union[Path, str]) -> "JSONDataclassMixin": + """Load dataclass instance from provided file path.""" + + with open(path) as f: + data = json.load(f) + + return from_dict(cls, data) + + def to_file(self, path: Union[Path, str]) -> None: + """Save dataclass instance to provided file path.""" + + with open(path, "w") as f: + json.dump(asdict(self), f) + + return + + +@dataclass(unsafe_hash=True) +class Point: + """Point in earth. 
Assumes a geodesic projection."""
+
+    lng: float
+    """Longitude (x axis)."""
+
+    lat: float
+    """Latitude (y axis)."""
+
+
+@dataclass(unsafe_hash=True)
+class Vehicle:
+    """A vehicle available to serve deliveries."""
+
+    id: int
+    """Unique id."""
+
+    type_vehicle: str
+    """Vehicle type."""
+
+    capacity: int
+    """Maximum capacity of the vehicle."""
+
+    cust: int
+    """Transportation cost of the vehicle per km."""
+
+    origin: Point
+    """Initial location of the vehicle."""
+
+
+@dataclass(unsafe_hash=True)
+class DeliveryOPT:
+    """A delivery request with integer ids."""
+
+    id: int
+    """Unique id."""
+
+    point: Point
+    """Delivery location."""
+
+    size: int
+    """Size it occupies in the vehicle (considered 1-D for simplicity)."""
+
+    idu: int
+
+
+@dataclass(unsafe_hash=True)
+class Delivery:
+    """A delivery request."""
+
+    id: str
+    """Unique id."""
+
+    point: Point
+    """Delivery location."""
+
+    size: int
+    """Size it occupies in the vehicle (considered 1-D for simplicity)."""
+
+
+@dataclass
+class DeliveryProblemInstance(JSONDataclassMixin):
+    name: str
+    """Unique name of this instance."""
+
+    region: str
+    """Region name."""
+
+    max_hubs: int
+    """Maximum number of hubs allowed in the solution."""
+
+    vehicle_capacity: int
+    """Maximum sum of sizes per vehicle allowed in the solution."""
+
+    deliveries: List[Delivery]
+    """List of deliveries to be solved."""
+
+
+@dataclass
+class CVRPInstance(JSONDataclassMixin):
+    name: str
+    """Unique name of this instance."""
+
+    region: str
+    """Region name."""
+
+    origin: Point
+    """Location of the origin hub."""
+
+    vehicle_capacity: int
+    """Maximum sum of sizes per vehicle allowed in the solution."""
+
+    deliveries: List[Delivery]
+    """List of deliveries to be solved."""
+
+
+@dataclass
+class CVRPInstanceHeterogeneous(JSONDataclassMixin):
+    name: str
+    """Unique name of this instance."""
+
+    region: str
+    """Region name."""
+
+    origin: Point
+    """Location of the origin hub."""
+
+    vehicles: List[Vehicle]
+    """Heterogeneous fleet available to serve the deliveries."""
+
+    deliveries: List[Delivery]
+    """List of deliveries to be solved."""
+
+
+@dataclass
+class ParamsVehicles(JSONDataclassMixin):
+    types: List[str]
+    """Types of the available vehicles."""
+
+    num_types: List[float]
+    """How many vehicles of each type should exist in the problem."""
+
+    capacities: List[int]
+    """Capacities of the available vehicles."""
+
+    custs: List[int]
+    """Costs of the available vehicles."""
+
+
+@dataclass
+class CVRPInstanceOPT(JSONDataclassMixin):
+    name: str
+    """Unique name of this instance."""
+
+    region: str
+    """Region name."""
+
+    origin: Point
+    """Location of the origin hub."""
+
+    vehicle_capacity: int
+    """Maximum sum of sizes per vehicle allowed in the solution."""
+
+    deliveries: List[DeliveryOPT]
+    """List of deliveries to be solved."""
+
+
+@dataclass
+class CVRPSolutionVehicle:
+
+    origin: Point
+    """Location of the origin hub."""
+
+    deliveries: List[Delivery]
+    """Ordered list of deliveries from the vehicle."""
+
+    @property
+    def circuit(self) -> List[Point]:
+        return (
+            [self.origin] + [d.point for d in self.deliveries] + [self.origin]
+        )
+
+    @property
+    def no_return(self) -> List[Point]:
+        """Route without the return leg to the origin."""
+        return [self.origin] + [d.point for d in self.deliveries]
+
+    @property
+    def occupation(self) -> int:
+        return sum([d.size for d in self.deliveries])
+
+
+@dataclass
+class CVRPSolutionVehicleOPT:
+
+    origin: Point
+    """Location of the origin hub."""
+
+    deliveries: List[DeliveryOPT]
+    """Ordered list of deliveries from the vehicle."""
+
+    @property
+    def circuit(self) -> List[Point]:
+        return (
+            [self.origin] + [d.point for d in self.deliveries] + [self.origin]
+        )
+
+    @property
+    def no_return(self) -> List[Point]:
+        """Route without the return leg to the origin."""
+        return [self.origin] + [d.point for d in self.deliveries]
+
+    @property
+    def occupation(self) -> int:
+        return sum([d.size for d in self.deliveries])
+
+
+@dataclass
+class CVRPSolution(JSONDataclassMixin):
+    name: str
+    vehicles: List[CVRPSolutionVehicle]
+    time_exec: float = 0.0
+    """Solver execution time."""
+
+    @property
+    def deliveries(self):
+        return [d for v in self.vehicles for d in v.deliveries]
+
+
+@dataclass
+class CVRPSolutionOPT(JSONDataclassMixin):
+    name: str
+    vehicles: List[CVRPSolutionVehicleOPT]
+    time_exec: float = 0.0
+    """Solver execution time."""
+
+    @property
+    def deliveries(self):
+        return [d for v in self.vehicles for d in v.deliveries]
+
+
+@dataclass
+class CVRPSolutionKpprrf(JSONDataclassMixin):
+    k_regions: int
+    name: str
+    time_execution: float
+    total_vehicles: int
+    vehicles: List[CVRPSolutionVehicle]
+
+    @property
+    def deliveries(self):
+        return [d for v in self.vehicles for d in v.deliveries]
+
+
+@dataclass
+class KmeansSolution(JSONDataclassMixin):
+    num_vehicles: int
+    time_execution: float
+    n_clusters: int
+    total_points: int
diff --git a/loggibud/v1/workspace.code-workspace b/loggibud/v1/workspace.code-workspace
new file mode 100644
index 0000000..ec92e29
--- /dev/null
+++ b/loggibud/v1/workspace.code-workspace
@@ -0,0 +1,11 @@
+{
+    "folders": [
+        {
+            "path": "..\\.."
+        },
+        {
+            "path": "..\\..\\..\\..\\results"
+        }
+    ],
+    "settings": {}
+}
\ No newline at end of file
diff --git a/output.rar b/output.rar
new file mode 100644
index 0000000..08ae4d1
Binary files /dev/null and b/output.rar differ
diff --git a/poetry.lock b/poetry.lock
index 2ea44db..9625e83 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,1546 +1,1546 @@
-[[package]]
-category = "main"
-description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py."
-name = "absl-py"
-optional = false
-python-versions = "*"
-version = "0.13.0"
-
-[package.dependencies]
-six = "*"
-
-[[package]]
-category = "dev"
-description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
-name = "appdirs"
-optional = false
-python-versions = "*"
-version = "1.4.4"
-
-[[package]]
-category = "dev"
-description = "Disable App Nap on macOS >= 10.9"
-marker = "python_version > \"3.6\" and sys_platform == \"darwin\""
-name = "appnope"
-optional = false
-python-versions = "*"
-version = "0.1.2"
-
-[[package]]
-category = "dev"
-description = "Atomic file writes." 
-marker = "sys_platform == \"win32\"" -name = "atomicwrites" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" - -[[package]] -category = "main" -description = "Classes Without Boilerplate" -name = "attrs" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "21.2.0" - -[package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] -tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] - -[[package]] -category = "dev" -description = "Specifications for callback functions passed in to an API" -marker = "python_version > \"3.6\"" -name = "backcall" -optional = false -python-versions = "*" -version = "0.2.0" - -[[package]] -category = "dev" -description = "The uncompromising code formatter." -name = "black" -optional = false -python-versions = ">=3.6.2" -version = "21.6b0" - -[package.dependencies] -appdirs = "*" -click = ">=7.1.2" -mypy-extensions = ">=0.4.3" -pathspec = ">=0.8.1,<1" -regex = ">=2020.1.8" -toml = ">=0.10.1" - -[package.dependencies.typed-ast] -python = "<3.8" -version = ">=1.4.2" - -[package.dependencies.typing-extensions] -python = "<3.8" -version = ">=3.7.4" - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] -python2 = ["typed-ast (>=1.4.2)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -category = "main" -description = "Generate complex HTML+JS pages with Python" -name = "branca" -optional = false -python-versions = ">=3.5" -version = "0.4.2" - -[package.dependencies] -jinja2 = "*" - -[[package]] -category = "main" -description = "Python package for providing Mozilla's CA Bundle." -name = "certifi" -optional = false -python-versions = "*" -version = "2021.5.30" - -[[package]] -category = "main" -description = "Universal encoding detector for Python 2 and 3" -name = "chardet" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "4.0.0" - -[[package]] -category = "main" -description = "Composable command line interface toolkit" -name = "click" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "7.1.2" - -[[package]] -category = "main" -description = "An extension module for click to enable registering CLI commands via setuptools entry-points." -name = "click-plugins" -optional = false -python-versions = "*" -version = "1.1.1" - -[package.dependencies] -click = ">=4.0" - -[package.extras] -dev = ["pytest (>=3.6)", "pytest-cov", "wheel", "coveralls"] - -[[package]] -category = "main" -description = "Click params for commmand line interfaces to GeoJSON" -name = "cligj" -optional = false -python-versions = "*" -version = "0.6.0" - -[package.dependencies] -click = ">=4.0,<8" - -[package.extras] -test = ["pytest-cov"] - -[[package]] -category = "dev" -description = "Cross-platform colored terminal text." 
-marker = "python_version > \"3.6\" and sys_platform == \"win32\" or sys_platform == \"win32\"" -name = "colorama" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.4" - -[[package]] -category = "main" -description = "Simple creation of data classes from dictionaries." -name = "dacite" -optional = false -python-versions = ">=3.6" -version = "1.6.0" - -[package.extras] -dev = ["pytest (>=5)", "pytest-cov", "coveralls", "black", "mypy", "pylint"] - -[[package]] -category = "main" -description = "Decorators for Humans" -name = "decorator" -optional = false -python-versions = ">=3.5" -version = "5.0.9" - -[[package]] -category = "main" -description = "Python @deprecated decorator to deprecate old python classes, functions or methods." -name = "deprecated" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.2.12" - -[package.dependencies] -wrapt = ">=1.10,<2" - -[package.extras] -dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] - -[[package]] -category = "main" -description = "Fiona reads and writes spatial data files" -name = "fiona" -optional = false -python-versions = "*" -version = "1.8.20" - -[package.dependencies] -attrs = ">=17" -certifi = "*" -click = ">=4.0" -click-plugins = ">=1.0" -cligj = ">=0.5" -munch = "*" -setuptools = "*" -six = ">=1.7" - -[package.extras] -all = ["pytest (>=3)", "boto3 (>=1.2.4)", "pytest-cov", "shapely", "mock"] -calc = ["shapely"] -s3 = ["boto3 (>=1.2.4)"] -test = ["pytest (>=3)", "pytest-cov", "boto3 (>=1.2.4)", "mock"] - -[[package]] -category = "dev" -description = "the modular source code checker: pep8 pyflakes and co" -name = "flake8" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" -version = "3.9.2" - -[package.dependencies] -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" - -[package.dependencies.importlib-metadata] -python = "<3.8" -version = "*" - -[[package]] -category = "main" -description = "Make beautiful maps with Leaflet.js & Python" -name = "folium" -optional = false -python-versions = ">=3.5" -version = "0.12.1" - -[package.dependencies] -branca = ">=0.3.0" -jinja2 = ">=2.9" -numpy = "*" -requests = "*" - -[package.extras] -testing = ["pytest"] - -[[package]] -category = "main" -description = "Geographic pandas extensions" -name = "geopandas" -optional = false -python-versions = ">=3.6" -version = "0.9.0" - -[package.dependencies] -fiona = ">=1.8" -pandas = ">=0.24.0" -pyproj = ">=2.2.0" -shapely = ">=1.6" - -[[package]] -category = "main" -description = "Internationalized Domain Names in Applications (IDNA)" -name = "idna" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.10" - -[[package]] -category = "dev" -description = "Read metadata from Python packages" -marker = "python_version < \"3.8\"" -name = "importlib-metadata" -optional = false -python-versions = ">=3.6" -version = "4.5.0" - -[package.dependencies] -zipp = ">=0.5" - -[package.dependencies.typing-extensions] -python = "<3.8" -version = ">=3.6.4" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", 
"flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] - -[[package]] -category = "dev" -description = "iniconfig: brain-dead simple config-ini parsing" -name = "iniconfig" -optional = false -python-versions = "*" -version = "1.1.1" - -[[package]] -category = "dev" -description = "IPython-enabled pdb" -name = "ipdb" -optional = false -python-versions = ">=2.7" -version = "0.13.9" - -[package.dependencies] -setuptools = "*" - -[package.dependencies.decorator] -python = ">=3.7" -version = "*" - -[package.dependencies.ipython] -python = ">=3.7" -version = ">=7.17.0" - -[package.dependencies.toml] -python = ">=3.7" -version = ">=0.10.2" - -[[package]] -category = "dev" -description = "IPython: Productive Interactive Computing" -marker = "python_version > \"3.6\"" -name = "ipython" -optional = false -python-versions = ">=3.7" -version = "7.24.1" - -[package.dependencies] -appnope = "*" -backcall = "*" -colorama = "*" -decorator = "*" -jedi = ">=0.16" -matplotlib-inline = "*" -pexpect = ">4.3" -pickleshare = "*" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" -pygments = "*" -setuptools = ">=18.5" -traitlets = ">=4.2" - -[package.extras] -all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] -doc = ["Sphinx (>=1.3)"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["notebook", "ipywidgets"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] - -[[package]] -category = "dev" -description = "Vestigial utilities from IPython" -marker = "python_version > \"3.6\"" -name = "ipython-genutils" -optional = false -python-versions = "*" -version = "0.2.0" - -[[package]] -category = "dev" -description = "An autocompletion tool for Python that can be used for text editors." -marker = "python_version > \"3.6\"" -name = "jedi" -optional = false -python-versions = ">=3.6" -version = "0.18.0" - -[package.dependencies] -parso = ">=0.8.0,<0.9.0" - -[package.extras] -qa = ["flake8 (3.8.3)", "mypy (0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] - -[[package]] -category = "main" -description = "A very fast and expressive template engine." -name = "jinja2" -optional = false -python-versions = ">=3.6" -version = "3.0.1" - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -category = "main" -description = "Lightweight pipelining with Python functions" -name = "joblib" -optional = false -python-versions = ">=3.6" -version = "1.0.1" - -[[package]] -category = "main" -description = "Super simple Python wrapper for LKH-3" -name = "lkh" -optional = false -python-versions = ">=3.3" -version = "1.0.4" - -[package.dependencies] -tsplib95 = "*" - -[[package]] -category = "main" -description = "Safely add untrusted strings to HTML/XML markup." 
-name = "markupsafe" -optional = false -python-versions = ">=3.6" -version = "2.0.1" - -[[package]] -category = "dev" -description = "Inline Matplotlib backend for Jupyter" -marker = "python_version > \"3.6\"" -name = "matplotlib-inline" -optional = false -python-versions = ">=3.5" -version = "0.1.2" - -[package.dependencies] -traitlets = "*" - -[[package]] -category = "dev" -description = "McCabe checker, plugin for flake8" -name = "mccabe" -optional = false -python-versions = "*" -version = "0.6.1" - -[[package]] -category = "dev" -description = "Rolling backport of unittest.mock for all Pythons" -name = "mock" -optional = false -python-versions = ">=3.6" -version = "4.0.3" - -[package.extras] -build = ["twine", "wheel", "blurb"] -docs = ["sphinx"] -test = ["pytest (<5.4)", "pytest-cov"] - -[[package]] -category = "main" -description = "A dot-accessible dictionary (a la JavaScript objects)" -name = "munch" -optional = false -python-versions = "*" -version = "2.5.0" - -[package.dependencies] -six = "*" - -[package.extras] -testing = ["pytest", "coverage", "astroid (>=1.5.3,<1.6.0)", "pylint (>=1.7.2,<1.8.0)", "astroid (>=2.0)", "pylint (>=2.3.1,<2.4.0)"] -yaml = ["PyYAML (>=5.1.0)"] - -[[package]] -category = "dev" -description = "Experimental type system extensions for programs checked with the mypy typechecker." -name = "mypy-extensions" -optional = false -python-versions = "*" -version = "0.4.3" - -[[package]] -category = "main" -description = "Python package for creating and manipulating graphs and networks" -name = "networkx" -optional = false -python-versions = ">=3.6" -version = "2.5" - -[package.dependencies] -decorator = ">=4.3.0" - -[package.extras] -all = ["numpy", "scipy", "pandas", "matplotlib", "pygraphviz", "pydot", "pyyaml", "lxml", "pytest"] -gdal = ["gdal"] -lxml = ["lxml"] -matplotlib = ["matplotlib"] -numpy = ["numpy"] -pandas = ["pandas"] -pydot = ["pydot"] -pygraphviz = ["pygraphviz"] -pytest = ["pytest"] -pyyaml = ["pyyaml"] -scipy = ["scipy"] - -[[package]] -category = "main" -description = "NumPy is the fundamental package for array computing with Python." -name = "numpy" -optional = false -python-versions = ">=3.7" -version = "1.20.3" - -[[package]] -category = "main" -description = "Google OR-Tools python libraries and modules" -name = "ortools" -optional = false -python-versions = "*" -version = "8.2.8710" - -[package.dependencies] -absl-py = ">=0.11" -protobuf = ">=3.14.0" - -[[package]] -category = "dev" -description = "Core utilities for Python packages" -name = "packaging" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.9" - -[package.dependencies] -pyparsing = ">=2.0.2" - -[[package]] -category = "main" -description = "Powerful data structures for data analysis, time series, and statistics" -name = "pandas" -optional = false -python-versions = ">=3.7.1" -version = "1.2.4" - -[package.dependencies] -numpy = ">=1.16.5" -python-dateutil = ">=2.7.3" -pytz = ">=2017.3" - -[package.extras] -test = ["pytest (>=5.0.1)", "pytest-xdist", "hypothesis (>=3.58)"] - -[[package]] -category = "dev" -description = "A Python Parser" -marker = "python_version > \"3.6\"" -name = "parso" -optional = false -python-versions = ">=3.6" -version = "0.8.2" - -[package.extras] -qa = ["flake8 (3.8.3)", "mypy (0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] - -[[package]] -category = "dev" -description = "Utility library for gitignore style pattern matching of file paths." 
-name = "pathspec" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.8.1" - -[[package]] -category = "dev" -description = "Pexpect allows easy control of interactive console applications." -marker = "python_version > \"3.6\" and sys_platform != \"win32\"" -name = "pexpect" -optional = false -python-versions = "*" -version = "4.8.0" - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -category = "dev" -description = "Tiny 'shelve'-like database with concurrency support" -marker = "python_version > \"3.6\"" -name = "pickleshare" -optional = false -python-versions = "*" -version = "0.7.5" - -[[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" -name = "pluggy" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" - -[package.dependencies] -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" - -[package.extras] -dev = ["pre-commit", "tox"] - -[[package]] -category = "main" -description = "A Python implementation of Google's Encoded Polyline Algorithm Format." -name = "polyline" -optional = false -python-versions = "*" -version = "1.4.0" - -[package.dependencies] -six = ">=1.8.0" - -[[package]] -category = "dev" -description = "Library for building powerful interactive command lines in Python" -marker = "python_version > \"3.6\"" -name = "prompt-toolkit" -optional = false -python-versions = ">=3.6.1" -version = "3.0.18" - -[package.dependencies] -wcwidth = "*" - -[[package]] -category = "main" -description = "Protocol Buffers" -name = "protobuf" -optional = false -python-versions = "*" -version = "3.17.3" - -[package.dependencies] -six = ">=1.9" - -[[package]] -category = "dev" -description = "Run a subprocess in a pseudo terminal" -marker = "python_version > \"3.6\" and sys_platform != \"win32\"" -name = "ptyprocess" -optional = false -python-versions = "*" -version = "0.7.0" - -[[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" -name = "py" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.10.0" - -[[package]] -category = "dev" -description = "Python style guide checker" -name = "pycodestyle" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.7.0" - -[[package]] -category = "dev" -description = "passive checker of Python programs" -name = "pyflakes" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.3.1" - -[[package]] -category = "dev" -description = "Pygments is a syntax highlighting package written in Python." 
-marker = "python_version > \"3.6\"" -name = "pygments" -optional = false -python-versions = ">=3.5" -version = "2.9.0" - -[[package]] -category = "dev" -description = "Python parsing module" -name = "pyparsing" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" - -[[package]] -category = "main" -description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" -name = "pyproj" -optional = false -python-versions = ">=3.7" -version = "3.1.0" - -[package.dependencies] -certifi = "*" - -[[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" -name = "pytest" -optional = false -python-versions = ">=3.6" -version = "6.2.4" - -[package.dependencies] -atomicwrites = ">=1.0" -attrs = ">=19.2.0" -colorama = "*" -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<1.0.0a1" -py = ">=1.8.2" -toml = "*" - -[package.dependencies.importlib-metadata] -python = "<3.8" -version = ">=0.12" - -[package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] - -[[package]] -category = "main" -description = "Extensions to the standard Python datetime module" -name = "python-dateutil" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -version = "2.8.1" - -[package.dependencies] -six = ">=1.5" - -[[package]] -category = "main" -description = "World timezone definitions, modern and historical" -name = "pytz" -optional = false -python-versions = "*" -version = "2021.1" - -[[package]] -category = "dev" -description = "Alternative regular expression module, to replace re." -name = "regex" -optional = false -python-versions = "*" -version = "2021.4.4" - -[[package]] -category = "main" -description = "Python HTTP for Humans." 
-name = "requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.25.1" - -[package.dependencies] -certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.27" - -[package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] - -[[package]] -category = "main" -description = "A set of python modules for machine learning and data mining" -name = "scikit-learn" -optional = false -python-versions = ">=3.6" -version = "0.24.2" - -[package.dependencies] -joblib = ">=0.11" -numpy = ">=1.13.3" -scipy = ">=0.19.1" -threadpoolctl = ">=2.0.0" - -[package.extras] -benchmark = ["matplotlib (>=2.1.1)", "pandas (>=0.25.0)", "memory-profiler (>=0.57.0)"] -docs = ["matplotlib (>=2.1.1)", "scikit-image (>=0.13)", "pandas (>=0.25.0)", "seaborn (>=0.9.0)", "memory-profiler (>=0.57.0)", "sphinx (>=3.2.0)", "sphinx-gallery (>=0.7.0)", "numpydoc (>=1.0.0)", "Pillow (>=7.1.2)", "sphinx-prompt (>=1.3.0)"] -examples = ["matplotlib (>=2.1.1)", "scikit-image (>=0.13)", "pandas (>=0.25.0)", "seaborn (>=0.9.0)"] -tests = ["matplotlib (>=2.1.1)", "scikit-image (>=0.13)", "pandas (>=0.25.0)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "flake8 (>=3.8.2)", "mypy (>=0.770)", "pyamg (>=4.0.0)"] - -[[package]] -category = "main" -description = "SciPy: Scientific Library for Python" -name = "scipy" -optional = false -python-versions = ">=3.7" -version = "1.6.1" - -[package.dependencies] -numpy = ">=1.16.5" - -[[package]] -category = "main" -description = "Geometric objects, predicates, and operations" -name = "shapely" -optional = false -python-versions = "*" -version = "1.7.1" - -[package.extras] -all = ["numpy", "pytest", "pytest-cov"] -test = ["pytest", "pytest-cov"] -vectorized = ["numpy"] - -[[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" -name = "six" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.16.0" - -[[package]] -category = "main" -description = "A set of python modules for machine learning and data mining" -name = "sklearn" -optional = false -python-versions = "*" -version = "0.0" - -[package.dependencies] -scikit-learn = "*" - -[[package]] -category = "main" -description = "Pretty-print tabular data" -name = "tabulate" -optional = false -python-versions = "*" -version = "0.8.9" - -[package.extras] -widechars = ["wcwidth"] - -[[package]] -category = "main" -description = "threadpoolctl" -name = "threadpoolctl" -optional = false -python-versions = ">=3.5" -version = "2.1.0" - -[[package]] -category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" -name = "toml" -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "0.10.2" - -[[package]] -category = "main" -description = "Fast, Extensible Progress Meter" -name = "tqdm" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "4.61.1" - -[package.extras] -dev = ["py-make (>=0.1.0)", "twine", "wheel"] -notebook = ["ipywidgets (>=6)"] -telegram = ["requests"] - -[[package]] -category = "dev" -description = "Traitlets Python configuration system" -marker = "python_version > \"3.6\"" -name = "traitlets" -optional = false -python-versions = ">=3.7" -version = "5.0.5" - -[package.dependencies] -ipython-genutils = "*" - -[package.extras] -test = ["pytest"] - -[[package]] -category = "main" -description = "TSPLIB95 works with TSPLIB95 files." 
-name = "tsplib95" -optional = false -python-versions = "*" -version = "0.7.1" - -[package.dependencies] -Click = ">=6.0" -Deprecated = ">=1.2.9,<1.3.0" -networkx = ">=2.1,<3.0" -tabulate = ">=0.8.7,<0.9.0" - -[[package]] -category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" -marker = "python_version < \"3.8\"" -name = "typed-ast" -optional = false -python-versions = "*" -version = "1.4.3" - -[[package]] -category = "dev" -description = "Backported and Experimental Type Hints for Python 3.5+" -marker = "python_version < \"3.8\"" -name = "typing-extensions" -optional = false -python-versions = "*" -version = "3.10.0.0" - -[[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." -name = "urllib3" -optional = false -python-versions = "*" -version = "1.22" - -[package.extras] -secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] - -[[package]] -category = "dev" -description = "Measures the displayed width of unicode strings in a terminal" -marker = "python_version > \"3.6\"" -name = "wcwidth" -optional = false -python-versions = "*" -version = "0.2.5" - -[[package]] -category = "main" -description = "Module for decorators, wrappers and monkey patching." -name = "wrapt" -optional = false -python-versions = "*" -version = "1.12.1" - -[[package]] -category = "dev" -description = "Backport of pathlib-compatible object wrapper for zip files" -marker = "python_version < \"3.8\"" -name = "zipp" -optional = false -python-versions = ">=3.6" -version = "3.4.1" - -[package.extras] -docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] - -[metadata] -content-hash = "baa15acc0d8c6166dfc223d9e8c8eea58b87282fbf4334c74d9f2e717664a3eb" -lock-version = "1.0" -python-versions = ">=3.7.1" - -[metadata.files] -absl-py = [ - {file = "absl-py-0.13.0.tar.gz", hash = "sha256:6953272383486044699fd0e9f00aad167a27e08ce19aae66c6c4b10e7e767793"}, - {file = "absl_py-0.13.0-py3-none-any.whl", hash = "sha256:62bd4e248ddb19d81aec8f9446b407ff37c8175c2ba88266a7afa9b4ce4a333b"}, -] -appdirs = [ - {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, - {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, -] -appnope = [ - {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, - {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, -] -atomicwrites = [ - {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, - {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, -] -attrs = [ - {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, - {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = 
"sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, -] -black = [ - {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, - {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, -] -branca = [ - {file = "branca-0.4.2-py3-none-any.whl", hash = "sha256:62c2e777f074fc1830cd40ba9e650beb941861075980babafead8d97856b1a4b"}, - {file = "branca-0.4.2.tar.gz", hash = "sha256:c111453617b17ab2bda60a4cd71787d6f2b59c85cdf71ab160a737606ac66c31"}, -] -certifi = [ - {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, - {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, -] -chardet = [ - {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, - {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, -] -click = [ - {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, - {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, -] -click-plugins = [ - {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, - {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, -] -cligj = [ - {file = "cligj-0.6.0-py2-none-any.whl", hash = "sha256:b258362e31ffbd8dbaeeb81c14ffe803675767e96b4807bfc39da69e467f7c0d"}, - {file = "cligj-0.6.0-py3-none-any.whl", hash = "sha256:639242b1df173fdaef11c6214b2bc7404c7c6909730a1cfa1e69b5255acf2d60"}, - {file = "cligj-0.6.0.tar.gz", hash = "sha256:a5f080858fd584d73fcc2b75f80ed05054130944e2283019d1828a6deb9e4110"}, -] -colorama = [ - {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, - {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, -] -dacite = [ - {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, - {file = "dacite-1.6.0.tar.gz", hash = "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df"}, -] -decorator = [ - {file = "decorator-5.0.9-py3-none-any.whl", hash = "sha256:6e5c199c16f7a9f0e3a61a4a54b3d27e7dad0dbdde92b944426cb20914376323"}, - {file = "decorator-5.0.9.tar.gz", hash = "sha256:72ecfba4320a893c53f9706bebb2d55c270c1e51a28789361aa93e4a21319ed5"}, -] -deprecated = [ - {file = "Deprecated-1.2.12-py2.py3-none-any.whl", hash = "sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771"}, - {file = "Deprecated-1.2.12.tar.gz", hash = "sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1"}, -] -fiona = [ - {file = "Fiona-1.8.20-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:02880556540e36ad6aac97687799d9b3093c354787a47bc0e73026c7fc15f1b3"}, - {file = "Fiona-1.8.20-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3f668c471fa2f8c9c0a9ca83639cb2c8dcc93edc3d93d43dba2f9e8da38ad53e"}, - {file 
= "Fiona-1.8.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:54f81039e913d0f88728ef23edf5a69038dec94dea54f4c799f972ba8e2a7d40"}, - {file = "Fiona-1.8.20-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:328340a448bed5c43d119f61f760368a04d13a302c59d2fccb051a3ff021f4b8"}, - {file = "Fiona-1.8.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03f910380dbe684730b59b817aa030e6e9a3ee79211b66c6db2d1c8fe6ea12de"}, - {file = "Fiona-1.8.20-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bef100ebd82afb9a4d67096216e82611b82ca9341330e4805832d7ff8c9bc1f7"}, - {file = "Fiona-1.8.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5e1cef608c6de9039eaa65b395024096e3189ab0559a5a328c68c4690c3302ce"}, - {file = "Fiona-1.8.20-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e72e4a5b84ec410be531d4fe4c1a5c87c6c0e92d01116c145c0f1b33f81c8080"}, - {file = "Fiona-1.8.20.tar.gz", hash = "sha256:a70502d2857b82f749c09cb0dea3726787747933a2a1599b5ab787d74e3c143b"}, -] -flake8 = [ - {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, - {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, -] -folium = [ - {file = "folium-0.12.1-py2.py3-none-any.whl", hash = "sha256:3d2c48dd6ffe5327975bbfd718468c4e81db9f2844c26e574f878adf4c08b644"}, -] -geopandas = [ - {file = "geopandas-0.9.0-py2.py3-none-any.whl", hash = "sha256:79f6e557ba0dba76eec44f8351b1c6b42a17c38f5f08fef347e98fe4dae563c7"}, - {file = "geopandas-0.9.0.tar.gz", hash = "sha256:63972ab4dc44c4029f340600dcb83264eb8132dd22b104da0b654bef7f42630a"}, -] -idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, -] -importlib-metadata = [ - {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, - {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, -] -iniconfig = [ - {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, - {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, -] -ipdb = [ - {file = "ipdb-0.13.9.tar.gz", hash = "sha256:951bd9a64731c444fd907a5ce268543020086a697f6be08f7cc2c9a752a278c5"}, -] -ipython = [ - {file = "ipython-7.24.1-py3-none-any.whl", hash = "sha256:d513e93327cf8657d6467c81f1f894adc125334ffe0e4ddd1abbb1c78d828703"}, - {file = "ipython-7.24.1.tar.gz", hash = "sha256:9bc24a99f5d19721fb8a2d1408908e9c0520a17fff2233ffe82620847f17f1b6"}, -] -ipython-genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -jedi = [ - {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, - {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, -] -jinja2 = [ - {file = "Jinja2-3.0.1-py3-none-any.whl", hash = 
"sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, - {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, -] -joblib = [ - {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, - {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, -] -lkh = [ - {file = "lkh-1.0.4.tar.gz", hash = "sha256:d12ede84483d2473f0795c530abdfb6edbc5fb8b8b112dee6ef9152fff467029"}, -] -markupsafe = [ - {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, - {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, - {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, - {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, - {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, - {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, -] -matplotlib-inline = [ - {file = "matplotlib-inline-0.1.2.tar.gz", hash = "sha256:f41d5ff73c9f5385775d5c0bc13b424535c8402fe70ea8210f93e11f3683993e"}, - {file = "matplotlib_inline-0.1.2-py3-none-any.whl", hash = "sha256:5cf1176f554abb4fa98cb362aa2b55c500147e4bdbb07e3fda359143e1da0811"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, -] -mock = [ - {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, - {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, -] -munch = [ - {file = "munch-2.5.0-py2.py3-none-any.whl", hash = "sha256:6f44af89a2ce4ed04ff8de41f70b226b984db10a91dcc7b9ac2efc1c77022fdd"}, - {file = "munch-2.5.0.tar.gz", hash = "sha256:2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2"}, -] -mypy-extensions = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, -] -networkx = [ - {file = "networkx-2.5-py3-none-any.whl", hash = "sha256:8c5812e9f798d37c50570d15c4a69d5710a18d77bafc903ee9c5fba7454c616c"}, - {file = "networkx-2.5.tar.gz", hash = "sha256:7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602"}, -] -numpy = [ - {file = 
"numpy-1.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70eb5808127284c4e5c9e836208e09d685a7978b6a216db85960b1a112eeace8"}, - {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6ca2b85a5997dabc38301a22ee43c82adcb53ff660b89ee88dded6b33687e1d8"}, - {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5bf0e132acf7557fc9bb8ded8b53bbbbea8892f3c9a1738205878ca9434206a"}, - {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db250fd3e90117e0312b611574cd1b3f78bec046783195075cbd7ba9c3d73f16"}, - {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:637d827248f447e63585ca3f4a7d2dfaa882e094df6cfa177cc9cf9cd6cdf6d2"}, - {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8b7bb4b9280da3b2856cb1fc425932f46fba609819ee1c62256f61799e6a51d2"}, - {file = "numpy-1.20.3-cp37-cp37m-win32.whl", hash = "sha256:67d44acb72c31a97a3d5d33d103ab06d8ac20770e1c5ad81bdb3f0c086a56cf6"}, - {file = "numpy-1.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:43909c8bb289c382170e0282158a38cf306a8ad2ff6dfadc447e90f9961bef43"}, - {file = "numpy-1.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f1452578d0516283c87608a5a5548b0cdde15b99650efdfd85182102ef7a7c17"}, - {file = "numpy-1.20.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6e51534e78d14b4a009a062641f465cfaba4fdcb046c3ac0b1f61dd97c861b1b"}, - {file = "numpy-1.20.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e515c9a93aebe27166ec9593411c58494fa98e5fcc219e47260d9ab8a1cc7f9f"}, - {file = "numpy-1.20.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1c09247ccea742525bdb5f4b5ceeacb34f95731647fe55774aa36557dbb5fa4"}, - {file = "numpy-1.20.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66fbc6fed94a13b9801fb70b96ff30605ab0a123e775a5e7a26938b717c5d71a"}, - {file = "numpy-1.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ea9cff01e75a956dbee133fa8e5b68f2f92175233de2f88de3a682dd94deda65"}, - {file = "numpy-1.20.3-cp38-cp38-win32.whl", hash = "sha256:f39a995e47cb8649673cfa0579fbdd1cdd33ea497d1728a6cb194d6252268e48"}, - {file = "numpy-1.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:1676b0a292dd3c99e49305a16d7a9f42a4ab60ec522eac0d3dd20cdf362ac010"}, - {file = "numpy-1.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:830b044f4e64a76ba71448fce6e604c0fc47a0e54d8f6467be23749ac2cbd2fb"}, - {file = "numpy-1.20.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55b745fca0a5ab738647d0e4db099bd0a23279c32b31a783ad2ccea729e632df"}, - {file = "numpy-1.20.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5d050e1e4bc9ddb8656d7b4f414557720ddcca23a5b88dd7cff65e847864c400"}, - {file = "numpy-1.20.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9c65473ebc342715cb2d7926ff1e202c26376c0dcaaee85a1fd4b8d8c1d3b2f"}, - {file = "numpy-1.20.3-cp39-cp39-win32.whl", hash = "sha256:16f221035e8bd19b9dc9a57159e38d2dd060b48e93e1d843c49cb370b0f415fd"}, - {file = "numpy-1.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:6690080810f77485667bfbff4f69d717c3be25e5b11bb2073e76bb3f578d99b4"}, - {file = "numpy-1.20.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e465afc3b96dbc80cf4a5273e5e2b1e3451286361b4af70ce1adb2984d392f9"}, - {file = "numpy-1.20.3.zip", hash = 
"sha256:e55185e51b18d788e49fe8305fd73ef4470596b33fc2c1ceb304566b99c71a69"}, -] -ortools = [ - {file = "ortools-8.2.8710-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b79db31d9d306e5a731f6f34461c895f8f1dafcd9203b9b18d429c389c3e28c7"}, - {file = "ortools-8.2.8710-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8869b260078bad8fad9e51b4471ccf14e985c637b3497b8711fd5bf6e01863fe"}, - {file = "ortools-8.2.8710-cp36-cp36m-win_amd64.whl", hash = "sha256:0470b28db960ccdca59f738520209daadce3e18e94d219033d4a50d4cfa1484b"}, - {file = "ortools-8.2.8710-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c380e542f111b225311c077699caa7cd4f9084e066d586d4a0dadd674063a089"}, - {file = "ortools-8.2.8710-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b81d383ad8dec2fba41fa0129996177ec19ac9414bf132a998280065daa11b0c"}, - {file = "ortools-8.2.8710-cp37-cp37m-win_amd64.whl", hash = "sha256:1312cdd7bbdbe51871f5fc206c95fdea6b2a2315ff6d917df36f0ec1305b13cd"}, - {file = "ortools-8.2.8710-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2e269c43eeb553bf3781953f47a38c4b3b86acec64d04ebc23406f2d1e812782"}, - {file = "ortools-8.2.8710-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:4531ca6285f4f2d813dac5e4bb63ce0d467cea49fe12b7162595aa8ef197b983"}, - {file = "ortools-8.2.8710-cp38-cp38-win_amd64.whl", hash = "sha256:8b6f022ac028875303f85b48821c728db8f4028558b5c71102a1ec897acdbfec"}, - {file = "ortools-8.2.8710-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:63f25d9746ccd91b6a8c423906a6c2547bd92be2e212d677d2f89724cf847b69"}, - {file = "ortools-8.2.8710-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:436860b7bf8ae389b7f34f8548c57f6f602acdf6df552b54cdca9559fb561fe5"}, - {file = "ortools-8.2.8710-cp39-cp39-win_amd64.whl", hash = "sha256:61292bdd01dbe254203580b5d3f9f45c1e192e3f743e205cf29d690c11c342b8"}, -] -packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, -] -pandas = [ - {file = "pandas-1.2.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c601c6fdebc729df4438ec1f62275d6136a0dd14d332fc0e8ce3f7d2aadb4dd6"}, - {file = "pandas-1.2.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8d4c74177c26aadcfb4fd1de6c1c43c2bf822b3e0fc7a9b409eeaf84b3e92aaa"}, - {file = "pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b730add5267f873b3383c18cac4df2527ac4f0f0eed1c6cf37fcb437e25cf558"}, - {file = "pandas-1.2.4-cp37-cp37m-win32.whl", hash = "sha256:2cb7e8f4f152f27dc93f30b5c7a98f6c748601ea65da359af734dd0cf3fa733f"}, - {file = "pandas-1.2.4-cp37-cp37m-win_amd64.whl", hash = "sha256:2111c25e69fa9365ba80bbf4f959400054b2771ac5d041ed19415a8b488dc70a"}, - {file = "pandas-1.2.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:167693a80abc8eb28051fbd184c1b7afd13ce2c727a5af47b048f1ea3afefff4"}, - {file = "pandas-1.2.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:612add929bf3ba9d27b436cc8853f5acc337242d6b584203f207e364bb46cb12"}, - {file = "pandas-1.2.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:971e2a414fce20cc5331fe791153513d076814d30a60cd7348466943e6e909e4"}, - {file = "pandas-1.2.4-cp38-cp38-win32.whl", hash = "sha256:68d7baa80c74aaacbed597265ca2308f017859123231542ff8a5266d489e1858"}, - {file = "pandas-1.2.4-cp38-cp38-win_amd64.whl", hash = "sha256:bd659c11a4578af740782288cac141a322057a2e36920016e0fc7b25c5a4b686"}, - {file = "pandas-1.2.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9db70ffa8b280bb4de83f9739d514cd0735825e79eef3a61d312420b9f16b758"}, - {file = "pandas-1.2.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:298f0553fd3ba8e002c4070a723a59cdb28eda579f3e243bc2ee397773f5398b"}, - {file = "pandas-1.2.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52d2472acbb8a56819a87aafdb8b5b6d2b3386e15c95bde56b281882529a7ded"}, - {file = "pandas-1.2.4-cp39-cp39-win32.whl", hash = "sha256:d0877407359811f7b853b548a614aacd7dea83b0c0c84620a9a643f180060950"}, - {file = "pandas-1.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:2b063d41803b6a19703b845609c0b700913593de067b552a8b24dd8eeb8c9895"}, - {file = "pandas-1.2.4.tar.gz", hash = "sha256:649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279"}, -] -parso = [ - {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, - {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, -] -pathspec = [ - {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, - {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, -] -pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, -] -polyline = [ - {file = "polyline-1.4.0-py2.py3-none-any.whl", hash = "sha256:6559a0d5d37f4d14255744b3c6a648d5ff480d3d5c5f30186effc72a4142fd6c"}, - {file = "polyline-1.4.0.tar.gz", hash = "sha256:7c7f89d09a09c7b6161bdbfb4fd304b186fc7a2060fa4f31cb3f61c646a5c074"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.18-py3-none-any.whl", hash = "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04"}, - {file = "prompt_toolkit-3.0.18.tar.gz", hash = "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc"}, -] -protobuf = [ - {file = "protobuf-3.17.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ab6bb0e270c6c58e7ff4345b3a803cc59dbee19ddf77a4719c5b635f1d547aa8"}, - {file = "protobuf-3.17.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:13ee7be3c2d9a5d2b42a1030976f760f28755fcf5863c55b1460fd205e6cd637"}, - {file = "protobuf-3.17.3-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:1556a1049ccec58c7855a78d27e5c6e70e95103b32de9142bae0576e9200a1b0"}, - {file = "protobuf-3.17.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f0e59430ee953184a703a324b8ec52f571c6c4259d496a19d1cabcdc19dabc62"}, - {file = "protobuf-3.17.3-cp35-cp35m-win32.whl", hash = "sha256:a981222367fb4210a10a929ad5983ae93bd5a050a0824fc35d6371c07b78caf6"}, - {file = "protobuf-3.17.3-cp35-cp35m-win_amd64.whl", hash = "sha256:6d847c59963c03fd7a0cd7c488cadfa10cda4fff34d8bc8cba92935a91b7a037"}, - {file = 
"protobuf-3.17.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:145ce0af55c4259ca74993ddab3479c78af064002ec8227beb3d944405123c71"}, - {file = "protobuf-3.17.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ce4d8bf0321e7b2d4395e253f8002a1a5ffbcfd7bcc0a6ba46712c07d47d0b4"}, - {file = "protobuf-3.17.3-cp36-cp36m-win32.whl", hash = "sha256:7a4c97961e9e5b03a56f9a6c82742ed55375c4a25f2692b625d4087d02ed31b9"}, - {file = "protobuf-3.17.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a22b3a0dbac6544dacbafd4c5f6a29e389a50e3b193e2c70dae6bbf7930f651d"}, - {file = "protobuf-3.17.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ffea251f5cd3c0b9b43c7a7a912777e0bc86263436a87c2555242a348817221b"}, - {file = "protobuf-3.17.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:9b7a5c1022e0fa0dbde7fd03682d07d14624ad870ae52054849d8960f04bc764"}, - {file = "protobuf-3.17.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8727ee027157516e2c311f218ebf2260a18088ffb2d29473e82add217d196b1c"}, - {file = "protobuf-3.17.3-cp37-cp37m-win32.whl", hash = "sha256:14c1c9377a7ffbeaccd4722ab0aa900091f52b516ad89c4b0c3bb0a4af903ba5"}, - {file = "protobuf-3.17.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c56c050a947186ba51de4f94ab441d7f04fcd44c56df6e922369cc2e1a92d683"}, - {file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"}, - {file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"}, - {file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"}, - {file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"}, - {file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"}, - {file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"}, - {file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"}, - {file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, -] -py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, -] -pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, -] -pyflakes = [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, -] -pygments = [ 
- {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, - {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, -] -pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, -] -pyproj = [ - {file = "pyproj-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8eda240225971b5cd0bac2d399ed6222068f0598ee92d5f6e847bd2019d2c8b0"}, - {file = "pyproj-3.1.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ae237492767e0225f99b53a0fd7110fde2b7e7cabc105bbc243c151a7497de88"}, - {file = "pyproj-3.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b635e7e21fea5af74e90fc9e54d1a4c27078efdce6f214101c98dd93afae599a"}, - {file = "pyproj-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa87df0982aa0f4477478899d9c930cc0f97cd6d8a4ce84c43ac88ccf86d1da7"}, - {file = "pyproj-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:10dad599b9f7ce2194996dc25f1000e0aa15754ecef9db46b624713959c67957"}, - {file = "pyproj-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a162ed199cd2ec392cffe20b2fa3381b68e7a166d55f3f060eceb8d517e4f46d"}, - {file = "pyproj-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e88ebc4e08e661e9011b5c1ebfb32f0d311963a9824a6effb4168c7e07918b1"}, - {file = "pyproj-3.1.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:da88abc5e2f6a8fb07533855a57ca2a31845f58901a87f821b68b0db6b023978"}, - {file = "pyproj-3.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:911d773da9fa4d4f3f7580173858c391e3ee0b61acaf0be303baab323d2eae78"}, - {file = "pyproj-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f8a8d982bde211e65dc2de1f8f36cf162f9cc7fcd8a7625046ea265284e5e65"}, - {file = "pyproj-3.1.0-cp38-cp38-win32.whl", hash = "sha256:c4193e1069d165476b2d0f7d882b7712b3eab6e2e6fe2a0a78ef40de825a1f28"}, - {file = "pyproj-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b6c74bbec679199746a3e02c0e0fad093c3652df96dd63e086a2fbf2afe9dc0e"}, - {file = "pyproj-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04c185102e659439c5bd428ac5473d36ef795fca8e225bbbe78e20643d804ec0"}, - {file = "pyproj-3.1.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ebbba7707fe83a01e54bce8e3e7342feb0b3e0d74ff8c28df12f8bc59b76827c"}, - {file = "pyproj-3.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cc464a1c51baad28ffb7a233116e8d4ce4c560b32039fa986d0f992ac3c431f"}, - {file = "pyproj-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f3ad09cf3352bf5664794042b28d98781362ec8d9774ad73f28a1a0101a27f1"}, - {file = "pyproj-3.1.0-cp39-cp39-win32.whl", hash = "sha256:ae5534fa7a3b74f20534694d297fce6f7483890ff6ca404394ecf372f3c589d4"}, - {file = "pyproj-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:808f5992320e9631b2e45444028a65cd6ba3ee40229292934178ef07020a5ffd"}, - {file = "pyproj-3.1.0.tar.gz", hash = "sha256:67b94f4e694ae33fc90dfb7da0e6b5ed5f671dd0acc2f6cf46e9c39d56e16e1a"}, -] -pytest = [ - {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, - {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, -] 
-python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, -] -pytz = [ - {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, - {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, -] -regex = [ - {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, - {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, - {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, - {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, - {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, - {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, - {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, - {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, - {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, - {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, - {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, - {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, - {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, - {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, - {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, - {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, - {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, -] -requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, -] -scikit-learn = [ - {file = "scikit-learn-0.24.2.tar.gz", hash = "sha256:d14701a12417930392cd3898e9646cf5670c190b933625ebe7511b1f7d7b8736"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:d5bf9c863ba4717b3917b5227463ee06860fc43931dc9026747de416c0a10fee"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:5beaeb091071625e83f5905192d8aecde65ba2f26f8b6719845bbf586f7a04a1"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:06ffdcaaf81e2a3b1b50c3ac6842cfb13df2d8b737d61f64643ed61da7389cde"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:fec42690a2eb646b384eafb021c425fab48991587edb412d4db77acc358b27ce"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:5ff3e4e4cf7592d36541edec434e09fb8ab9ba6b47608c4ffe30c9038d301897"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:3cbd734e1aefc7c5080e6b6973fe062f97c26a1cdf1a991037ca196ce1c8f427"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-win32.whl", hash = "sha256:f74429a07fedb36a03c159332b914e6de757176064f9fed94b5f79ebac07d913"}, - {file = "scikit_learn-0.24.2-cp36-cp36m-win_amd64.whl", hash = "sha256:dd968a174aa82f3341a615a033fa6a8169e9320cbb46130686562db132d7f1f0"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:49ec0b1361da328da9bb7f1a162836028e72556356adeb53342f8fae6b450d47"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f18c3ed484eeeaa43a0d45dc2efb4d00fc6542ccdcfa2c45d7b635096a2ae534"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cdf24c1b9bbeb4936456b42ac5bd32c60bb194a344951acb6bfb0cddee5439a4"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d177fe1ff47cc235942d628d41ee5b1c6930d8f009f1a451c39b5411e8d0d4cf"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f3ec00f023d84526381ad0c0f2cff982852d035c921bbf8ceb994f4886c00c64"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ae19ac105cf7ce8c205a46166992fdec88081d6e783ab6e38ecfbe45729f3c39"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-win32.whl", hash = "sha256:f0ed4483c258fb23150e31b91ea7d25ff8495dba108aea0b0d4206a777705350"}, - {file = "scikit_learn-0.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:39b7e3b71bcb1fe46397185d6c1a5db1c441e71c23c91a31e7ad8cc3f7305f9a"}, - {file = "scikit_learn-0.24.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:90a297330f608adeb4d2e9786c6fda395d3150739deb3d42a86d9a4c2d15bc1d"}, - {file = "scikit_learn-0.24.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f1d2108e770907540b5248977e4cff9ffaf0f73d0d13445ee938df06ca7579c6"}, - {file = "scikit_learn-0.24.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:1eec963fe9ffc827442c2e9333227c4d49749a44e592f305398c1db5c1563393"}, - {file = "scikit_learn-0.24.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:2db429090b98045d71218a9ba913cc9b3fe78e0ba0b6b647d8748bc6d5a44080"}, - {file = "scikit_learn-0.24.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:62214d2954377fcf3f31ec867dd4e436df80121e7a32947a0b3244f58f45e455"}, - {file = "scikit_learn-0.24.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8fac72b9688176922f9f54fda1ba5f7ffd28cbeb9aad282760186e8ceba9139a"}, - {file = "scikit_learn-0.24.2-cp38-cp38-win32.whl", hash = "sha256:ae426e3a52842c6b6d77d00f906b6031c8c2cfdfabd6af7511bb4bc9a68d720e"}, - {file = "scikit_learn-0.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:038f4e9d6ef10e1f3fe82addc3a14735c299866eb10f2c77c090410904828312"}, - {file = "scikit_learn-0.24.2-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:48f273836e19901ba2beecd919f7b352f09310ce67c762f6e53bc6b81cacf1f0"}, - {file = "scikit_learn-0.24.2-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:a2a47449093dcf70babc930beba2ca0423cb7df2fa5fd76be5260703d67fa574"}, - {file = "scikit_learn-0.24.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0e71ce9c7cbc20f6f8b860107ce15114da26e8675238b4b82b7e7cd37ca0c087"}, - {file = "scikit_learn-0.24.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2754c85b2287333f9719db7f23fb7e357f436deed512db3417a02bf6f2830aa5"}, - {file = "scikit_learn-0.24.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7be1b88c23cfac46e06404582215a917017cd2edaa2e4d40abe6aaff5458f24b"}, - {file = "scikit_learn-0.24.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4e6198675a6f9d333774671bd536668680eea78e2e81c0b19e57224f58d17f37"}, - {file = "scikit_learn-0.24.2-cp39-cp39-win32.whl", hash = "sha256:cbdb0b3db99dd1d5f69d31b4234367d55475add31df4d84a3bd690ef017b55e2"}, - {file = "scikit_learn-0.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:40556bea1ef26ef54bc678d00cf138a63069144a0b5f3a436eecd8f3468b903e"}, -] -scipy = [ - {file = "scipy-1.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a15a1f3fc0abff33e792d6049161b7795909b40b97c6cc2934ed54384017ab76"}, - {file = "scipy-1.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e79570979ccdc3d165456dd62041d9556fb9733b86b4b6d818af7a0afc15f092"}, - {file = "scipy-1.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a423533c55fec61456dedee7b6ee7dce0bb6bfa395424ea374d25afa262be261"}, - {file = "scipy-1.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:33d6b7df40d197bdd3049d64e8e680227151673465e5d85723b3b8f6b15a6ced"}, - {file = "scipy-1.6.1-cp37-cp37m-win32.whl", hash = "sha256:6725e3fbb47da428794f243864f2297462e9ee448297c93ed1dcbc44335feb78"}, - {file = "scipy-1.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5fa9c6530b1661f1370bcd332a1e62ca7881785cc0f80c0d559b636567fab63c"}, - {file = "scipy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd50daf727f7c195e26f27467c85ce653d41df4358a25b32434a50d8870fc519"}, - {file = "scipy-1.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f46dd15335e8a320b0fb4685f58b7471702234cba8bb3442b69a3e1dc329c345"}, - {file = "scipy-1.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e5b0ccf63155d90da576edd2768b66fb276446c371b73841e3503be1d63fb5d"}, - {file = "scipy-1.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2481efbb3740977e3c831edfd0bd9867be26387cacf24eb5e366a6a374d3d00d"}, - {file = "scipy-1.6.1-cp38-cp38-win32.whl", hash = "sha256:68cb4c424112cd4be886b4d979c5497fba190714085f46b8ae67a5e4416c32b4"}, - {file = "scipy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:5f331eeed0297232d2e6eea51b54e8278ed8bb10b099f69c44e2558c090d06bf"}, - {file = "scipy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8a51d33556bf70367452d4d601d1742c0e806cd0194785914daf19775f0e67"}, - {file = "scipy-1.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:83bf7c16245c15bc58ee76c5418e46ea1811edcc2e2b03041b804e46084ab627"}, - {file = "scipy-1.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:794e768cc5f779736593046c9714e0f3a5940bc6dcc1dba885ad64cbfb28e9f0"}, - {file = "scipy-1.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5da5471aed911fe7e52b86bf9ea32fb55ae93e2f0fac66c32e58897cfb02fa07"}, - {file = "scipy-1.6.1-cp39-cp39-win32.whl", hash = "sha256:8e403a337749ed40af60e537cc4d4c03febddcc56cd26e774c9b1b600a70d3e4"}, - {file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"}, - {file = "scipy-1.6.1.tar.gz", hash = 
"sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"}, -] -shapely = [ - {file = "Shapely-1.7.1-1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:46da0ea527da9cf9503e66c18bab6981c5556859e518fe71578b47126e54ca93"}, - {file = "Shapely-1.7.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798"}, - {file = "Shapely-1.7.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b"}, - {file = "Shapely-1.7.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:da38ed3d65b8091447dc3717e5218cc336d20303b77b0634b261bc5c1aa2bae8"}, - {file = "Shapely-1.7.1-cp35-cp35m-win32.whl", hash = "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19"}, - {file = "Shapely-1.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:791477edb422692e7dc351c5ed6530eb0e949a31b45569946619a0d9cd5f53cb"}, - {file = "Shapely-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b"}, - {file = "Shapely-1.7.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8f15b6ce67dcc05b61f19c689b60f3fe58550ba994290ff8332f711f5aaa9840"}, - {file = "Shapely-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:60e5b2282619249dbe8dc5266d781cc7d7fb1b27fa49f8241f2167672ad26719"}, - {file = "Shapely-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1"}, - {file = "Shapely-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a"}, - {file = "Shapely-1.7.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1"}, - {file = "Shapely-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b"}, - {file = "Shapely-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891"}, - {file = "Shapely-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688"}, - {file = "Shapely-1.7.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d"}, - {file = "Shapely-1.7.1-cp38-cp38-win32.whl", hash = "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba"}, - {file = "Shapely-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1"}, - {file = "Shapely-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:617bf046a6861d7c6b44d2d9cb9e2311548638e684c2cd071d8945f24a926263"}, - {file = "Shapely-1.7.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f"}, - {file = "Shapely-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1"}, - {file = "Shapely-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea"}, - {file = "Shapely-1.7.1.tar.gz", hash = "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129"}, -] -six = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] -sklearn = [ - {file = "sklearn-0.0.tar.gz", 
hash = "sha256:e23001573aa194b834122d2b9562459bf5ae494a2d59ca6b8aa22c85a44c0e31"}, -] -tabulate = [ - {file = "tabulate-0.8.9-py3-none-any.whl", hash = "sha256:d7c013fe7abbc5e491394e10fa845f8f32fe54f8dc60c6622c6cf482d25d47e4"}, - {file = "tabulate-0.8.9.tar.gz", hash = "sha256:eb1d13f25760052e8931f2ef80aaf6045a6cceb47514db8beab24cded16f13a7"}, -] -threadpoolctl = [ - {file = "threadpoolctl-2.1.0-py3-none-any.whl", hash = "sha256:38b74ca20ff3bb42caca8b00055111d74159ee95c4370882bbff2b93d24da725"}, - {file = "threadpoolctl-2.1.0.tar.gz", hash = "sha256:ddc57c96a38beb63db45d6c159b5ab07b6bced12c45a1f07b2b92f272aebfa6b"}, -] -toml = [ - {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, - {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, -] -tqdm = [ - {file = "tqdm-4.61.1-py2.py3-none-any.whl", hash = "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2"}, - {file = "tqdm-4.61.1.tar.gz", hash = "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd"}, -] -traitlets = [ - {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, - {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, -] -tsplib95 = [ - {file = "tsplib95-0.7.1-py2.py3-none-any.whl", hash = "sha256:c481638e293baaa62134b491477aa5b2681e552e4dc28a6106ca1e157ae59184"}, - {file = "tsplib95-0.7.1.tar.gz", hash = "sha256:3da80175dfb0478b967b87c508f75def47371188b6401b719441f2cedc817e00"}, -] -typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = 
"typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, -] -typing-extensions = [ - {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, - {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, - {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, -] -urllib3 = [ - {file = "urllib3-1.22-py2.py3-none-any.whl", hash = "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b"}, - {file = "urllib3-1.22.tar.gz", hash = "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -wrapt = [ - {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, -] -zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = 
"sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, -] +[[package]] +category = "main" +description = "Abseil Python Common Libraries, see https://github.com/abseil/abseil-py." +name = "absl-py" +optional = false +python-versions = "*" +version = "0.13.0" + +[package.dependencies] +six = "*" + +[[package]] +category = "dev" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +name = "appdirs" +optional = false +python-versions = "*" +version = "1.4.4" + +[[package]] +category = "dev" +description = "Disable App Nap on macOS >= 10.9" +marker = "python_version > \"3.6\" and sys_platform == \"darwin\"" +name = "appnope" +optional = false +python-versions = "*" +version = "0.1.2" + +[[package]] +category = "dev" +description = "Atomic file writes." +marker = "sys_platform == \"win32\"" +name = "atomicwrites" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.4.0" + +[[package]] +category = "main" +description = "Classes Without Boilerplate" +name = "attrs" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "21.2.0" + +[package.extras] +dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] +tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] + +[[package]] +category = "dev" +description = "Specifications for callback functions passed in to an API" +marker = "python_version > \"3.6\"" +name = "backcall" +optional = false +python-versions = "*" +version = "0.2.0" + +[[package]] +category = "dev" +description = "The uncompromising code formatter." +name = "black" +optional = false +python-versions = ">=3.6.2" +version = "21.6b0" + +[package.dependencies] +appdirs = "*" +click = ">=7.1.2" +mypy-extensions = ">=0.4.3" +pathspec = ">=0.8.1,<1" +regex = ">=2020.1.8" +toml = ">=0.10.1" + +[package.dependencies.typed-ast] +python = "<3.8" +version = ">=1.4.2" + +[package.dependencies.typing-extensions] +python = "<3.8" +version = ">=3.7.4" + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.6.0)", "aiohttp-cors (>=0.4.0)"] +python2 = ["typed-ast (>=1.4.2)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +category = "main" +description = "Generate complex HTML+JS pages with Python" +name = "branca" +optional = false +python-versions = ">=3.5" +version = "0.4.2" + +[package.dependencies] +jinja2 = "*" + +[[package]] +category = "main" +description = "Python package for providing Mozilla's CA Bundle." 
+name = "certifi" +optional = false +python-versions = "*" +version = "2021.5.30" + +[[package]] +category = "main" +description = "Universal encoding detector for Python 2 and 3" +name = "chardet" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "4.0.0" + +[[package]] +category = "main" +description = "Composable command line interface toolkit" +name = "click" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "7.1.2" + +[[package]] +category = "main" +description = "An extension module for click to enable registering CLI commands via setuptools entry-points." +name = "click-plugins" +optional = false +python-versions = "*" +version = "1.1.1" + +[package.dependencies] +click = ">=4.0" + +[package.extras] +dev = ["pytest (>=3.6)", "pytest-cov", "wheel", "coveralls"] + +[[package]] +category = "main" +description = "Click params for commmand line interfaces to GeoJSON" +name = "cligj" +optional = false +python-versions = "*" +version = "0.6.0" + +[package.dependencies] +click = ">=4.0,<8" + +[package.extras] +test = ["pytest-cov"] + +[[package]] +category = "dev" +description = "Cross-platform colored terminal text." +marker = "python_version > \"3.6\" and sys_platform == \"win32\" or sys_platform == \"win32\"" +name = "colorama" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.4.4" + +[[package]] +category = "main" +description = "Simple creation of data classes from dictionaries." +name = "dacite" +optional = false +python-versions = ">=3.6" +version = "1.6.0" + +[package.extras] +dev = ["pytest (>=5)", "pytest-cov", "coveralls", "black", "mypy", "pylint"] + +[[package]] +category = "main" +description = "Decorators for Humans" +name = "decorator" +optional = false +python-versions = ">=3.5" +version = "5.0.9" + +[[package]] +category = "main" +description = "Python @deprecated decorator to deprecate old python classes, functions or methods." 
+name = "deprecated" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.2.12" + +[package.dependencies] +wrapt = ">=1.10,<2" + +[package.extras] +dev = ["tox", "bump2version (<1)", "sphinx (<2)", "importlib-metadata (<3)", "importlib-resources (<4)", "configparser (<5)", "sphinxcontrib-websupport (<2)", "zipp (<2)", "PyTest (<5)", "PyTest-Cov (<2.6)", "pytest", "pytest-cov"] + +[[package]] +category = "main" +description = "Fiona reads and writes spatial data files" +name = "fiona" +optional = false +python-versions = "*" +version = "1.8.20" + +[package.dependencies] +attrs = ">=17" +certifi = "*" +click = ">=4.0" +click-plugins = ">=1.0" +cligj = ">=0.5" +munch = "*" +setuptools = "*" +six = ">=1.7" + +[package.extras] +all = ["pytest (>=3)", "boto3 (>=1.2.4)", "pytest-cov", "shapely", "mock"] +calc = ["shapely"] +s3 = ["boto3 (>=1.2.4)"] +test = ["pytest (>=3)", "pytest-cov", "boto3 (>=1.2.4)", "mock"] + +[[package]] +category = "dev" +description = "the modular source code checker: pep8 pyflakes and co" +name = "flake8" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +version = "3.9.2" + +[package.dependencies] +mccabe = ">=0.6.0,<0.7.0" +pycodestyle = ">=2.7.0,<2.8.0" +pyflakes = ">=2.3.0,<2.4.0" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = "*" + +[[package]] +category = "main" +description = "Make beautiful maps with Leaflet.js & Python" +name = "folium" +optional = false +python-versions = ">=3.5" +version = "0.12.1" + +[package.dependencies] +branca = ">=0.3.0" +jinja2 = ">=2.9" +numpy = "*" +requests = "*" + +[package.extras] +testing = ["pytest"] + +[[package]] +category = "main" +description = "Geographic pandas extensions" +name = "geopandas" +optional = false +python-versions = ">=3.6" +version = "0.9.0" + +[package.dependencies] +fiona = ">=1.8" +pandas = ">=0.24.0" +pyproj = ">=2.2.0" +shapely = ">=1.6" + +[[package]] +category = "main" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.10" + +[[package]] +category = "dev" +description = "Read metadata from Python packages" +marker = "python_version < \"3.8\"" +name = "importlib-metadata" +optional = false +python-versions = ">=3.6" +version = "4.5.0" + +[package.dependencies] +zipp = ">=0.5" + +[package.dependencies.typing-extensions] +python = "<3.8" +version = ">=3.6.4" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] + +[[package]] +category = "dev" +description = "iniconfig: brain-dead simple config-ini parsing" +name = "iniconfig" +optional = false +python-versions = "*" +version = "1.1.1" + +[[package]] +category = "dev" +description = "IPython-enabled pdb" +name = "ipdb" +optional = false +python-versions = ">=2.7" +version = "0.13.9" + +[package.dependencies] +setuptools = "*" + +[package.dependencies.decorator] +python = ">=3.7" +version = "*" + +[package.dependencies.ipython] +python = ">=3.7" +version = ">=7.17.0" + +[package.dependencies.toml] +python = ">=3.7" +version = ">=0.10.2" + +[[package]] +category = "dev" +description = "IPython: Productive Interactive Computing" +marker = "python_version > 
\"3.6\"" +name = "ipython" +optional = false +python-versions = ">=3.7" +version = "7.24.1" + +[package.dependencies] +appnope = "*" +backcall = "*" +colorama = "*" +decorator = "*" +jedi = ">=0.16" +matplotlib-inline = "*" +pexpect = ">4.3" +pickleshare = "*" +prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" +pygments = "*" +setuptools = ">=18.5" +traitlets = ">=4.2" + +[package.extras] +all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.17)", "pygments", "qtconsole", "requests", "testpath"] +doc = ["Sphinx (>=1.3)"] +kernel = ["ipykernel"] +nbconvert = ["nbconvert"] +nbformat = ["nbformat"] +notebook = ["notebook", "ipywidgets"] +parallel = ["ipyparallel"] +qtconsole = ["qtconsole"] +test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.17)"] + +[[package]] +category = "dev" +description = "Vestigial utilities from IPython" +marker = "python_version > \"3.6\"" +name = "ipython-genutils" +optional = false +python-versions = "*" +version = "0.2.0" + +[[package]] +category = "dev" +description = "An autocompletion tool for Python that can be used for text editors." +marker = "python_version > \"3.6\"" +name = "jedi" +optional = false +python-versions = ">=3.6" +version = "0.18.0" + +[package.dependencies] +parso = ">=0.8.0,<0.9.0" + +[package.extras] +qa = ["flake8 (3.8.3)", "mypy (0.782)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] + +[[package]] +category = "main" +description = "A very fast and expressive template engine." +name = "jinja2" +optional = false +python-versions = ">=3.6" +version = "3.0.1" + +[package.dependencies] +MarkupSafe = ">=2.0" + +[package.extras] +i18n = ["Babel (>=2.7)"] + +[[package]] +category = "main" +description = "Lightweight pipelining with Python functions" +name = "joblib" +optional = false +python-versions = ">=3.6" +version = "1.0.1" + +[[package]] +category = "main" +description = "Super simple Python wrapper for LKH-3" +name = "lkh" +optional = false +python-versions = ">=3.3" +version = "1.0.4" + +[package.dependencies] +tsplib95 = "*" + +[[package]] +category = "main" +description = "Safely add untrusted strings to HTML/XML markup." 
+name = "markupsafe" +optional = false +python-versions = ">=3.6" +version = "2.0.1" + +[[package]] +category = "dev" +description = "Inline Matplotlib backend for Jupyter" +marker = "python_version > \"3.6\"" +name = "matplotlib-inline" +optional = false +python-versions = ">=3.5" +version = "0.1.2" + +[package.dependencies] +traitlets = "*" + +[[package]] +category = "dev" +description = "McCabe checker, plugin for flake8" +name = "mccabe" +optional = false +python-versions = "*" +version = "0.6.1" + +[[package]] +category = "dev" +description = "Rolling backport of unittest.mock for all Pythons" +name = "mock" +optional = false +python-versions = ">=3.6" +version = "4.0.3" + +[package.extras] +build = ["twine", "wheel", "blurb"] +docs = ["sphinx"] +test = ["pytest (<5.4)", "pytest-cov"] + +[[package]] +category = "main" +description = "A dot-accessible dictionary (a la JavaScript objects)" +name = "munch" +optional = false +python-versions = "*" +version = "2.5.0" + +[package.dependencies] +six = "*" + +[package.extras] +testing = ["pytest", "coverage", "astroid (>=1.5.3,<1.6.0)", "pylint (>=1.7.2,<1.8.0)", "astroid (>=2.0)", "pylint (>=2.3.1,<2.4.0)"] +yaml = ["PyYAML (>=5.1.0)"] + +[[package]] +category = "dev" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +name = "mypy-extensions" +optional = false +python-versions = "*" +version = "0.4.3" + +[[package]] +category = "main" +description = "Python package for creating and manipulating graphs and networks" +name = "networkx" +optional = false +python-versions = ">=3.6" +version = "2.5" + +[package.dependencies] +decorator = ">=4.3.0" + +[package.extras] +all = ["numpy", "scipy", "pandas", "matplotlib", "pygraphviz", "pydot", "pyyaml", "lxml", "pytest"] +gdal = ["gdal"] +lxml = ["lxml"] +matplotlib = ["matplotlib"] +numpy = ["numpy"] +pandas = ["pandas"] +pydot = ["pydot"] +pygraphviz = ["pygraphviz"] +pytest = ["pytest"] +pyyaml = ["pyyaml"] +scipy = ["scipy"] + +[[package]] +category = "main" +description = "NumPy is the fundamental package for array computing with Python." +name = "numpy" +optional = false +python-versions = ">=3.7" +version = "1.20.3" + +[[package]] +category = "main" +description = "Google OR-Tools python libraries and modules" +name = "ortools" +optional = false +python-versions = "*" +version = "8.2.8710" + +[package.dependencies] +absl-py = ">=0.11" +protobuf = ">=3.14.0" + +[[package]] +category = "dev" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "20.9" + +[package.dependencies] +pyparsing = ">=2.0.2" + +[[package]] +category = "main" +description = "Powerful data structures for data analysis, time series, and statistics" +name = "pandas" +optional = false +python-versions = ">=3.7.1" +version = "1.2.4" + +[package.dependencies] +numpy = ">=1.16.5" +python-dateutil = ">=2.7.3" +pytz = ">=2017.3" + +[package.extras] +test = ["pytest (>=5.0.1)", "pytest-xdist", "hypothesis (>=3.58)"] + +[[package]] +category = "dev" +description = "A Python Parser" +marker = "python_version > \"3.6\"" +name = "parso" +optional = false +python-versions = ">=3.6" +version = "0.8.2" + +[package.extras] +qa = ["flake8 (3.8.3)", "mypy (0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +category = "dev" +description = "Utility library for gitignore style pattern matching of file paths." 
+name = "pathspec" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "0.8.1" + +[[package]] +category = "dev" +description = "Pexpect allows easy control of interactive console applications." +marker = "python_version > \"3.6\" and sys_platform != \"win32\"" +name = "pexpect" +optional = false +python-versions = "*" +version = "4.8.0" + +[package.dependencies] +ptyprocess = ">=0.5" + +[[package]] +category = "dev" +description = "Tiny 'shelve'-like database with concurrency support" +marker = "python_version > \"3.6\"" +name = "pickleshare" +optional = false +python-versions = "*" +version = "0.7.5" + +[[package]] +category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.13.1" + +[package.dependencies] +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[package.extras] +dev = ["pre-commit", "tox"] + +[[package]] +category = "main" +description = "A Python implementation of Google's Encoded Polyline Algorithm Format." +name = "polyline" +optional = false +python-versions = "*" +version = "1.4.0" + +[package.dependencies] +six = ">=1.8.0" + +[[package]] +category = "dev" +description = "Library for building powerful interactive command lines in Python" +marker = "python_version > \"3.6\"" +name = "prompt-toolkit" +optional = false +python-versions = ">=3.6.1" +version = "3.0.18" + +[package.dependencies] +wcwidth = "*" + +[[package]] +category = "main" +description = "Protocol Buffers" +name = "protobuf" +optional = false +python-versions = "*" +version = "3.17.3" + +[package.dependencies] +six = ">=1.9" + +[[package]] +category = "dev" +description = "Run a subprocess in a pseudo terminal" +marker = "python_version > \"3.6\" and sys_platform != \"win32\"" +name = "ptyprocess" +optional = false +python-versions = "*" +version = "0.7.0" + +[[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.10.0" + +[[package]] +category = "dev" +description = "Python style guide checker" +name = "pycodestyle" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.7.0" + +[[package]] +category = "dev" +description = "passive checker of Python programs" +name = "pyflakes" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.3.1" + +[[package]] +category = "dev" +description = "Pygments is a syntax highlighting package written in Python." 
+marker = "python_version > \"3.6\"" +name = "pygments" +optional = false +python-versions = ">=3.5" +version = "2.9.0" + +[[package]] +category = "dev" +description = "Python parsing module" +name = "pyparsing" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.4.7" + +[[package]] +category = "main" +description = "Python interface to PROJ (cartographic projections and coordinate transformations library)" +name = "pyproj" +optional = false +python-versions = ">=3.7" +version = "3.1.0" + +[package.dependencies] +certifi = "*" + +[[package]] +category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=3.6" +version = "6.2.4" + +[package.dependencies] +atomicwrites = ">=1.0" +attrs = ">=19.2.0" +colorama = "*" +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<1.0.0a1" +py = ">=1.8.2" +toml = "*" + +[package.dependencies.importlib-metadata] +python = "<3.8" +version = ">=0.12" + +[package.extras] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] + +[[package]] +category = "main" +description = "Extensions to the standard Python datetime module" +name = "python-dateutil" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +version = "2.8.1" + +[package.dependencies] +six = ">=1.5" + +[[package]] +category = "main" +description = "World timezone definitions, modern and historical" +name = "pytz" +optional = false +python-versions = "*" +version = "2021.1" + +[[package]] +category = "dev" +description = "Alternative regular expression module, to replace re." +name = "regex" +optional = false +python-versions = "*" +version = "2021.4.4" + +[[package]] +category = "main" +description = "Python HTTP for Humans." 
+name = "requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +version = "2.25.1" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<5" +idna = ">=2.5,<3" +urllib3 = ">=1.21.1,<1.27" + +[package.extras] +security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] + +[[package]] +category = "main" +description = "A set of python modules for machine learning and data mining" +name = "scikit-learn" +optional = false +python-versions = ">=3.6" +version = "0.24.2" + +[package.dependencies] +joblib = ">=0.11" +numpy = ">=1.13.3" +scipy = ">=0.19.1" +threadpoolctl = ">=2.0.0" + +[package.extras] +benchmark = ["matplotlib (>=2.1.1)", "pandas (>=0.25.0)", "memory-profiler (>=0.57.0)"] +docs = ["matplotlib (>=2.1.1)", "scikit-image (>=0.13)", "pandas (>=0.25.0)", "seaborn (>=0.9.0)", "memory-profiler (>=0.57.0)", "sphinx (>=3.2.0)", "sphinx-gallery (>=0.7.0)", "numpydoc (>=1.0.0)", "Pillow (>=7.1.2)", "sphinx-prompt (>=1.3.0)"] +examples = ["matplotlib (>=2.1.1)", "scikit-image (>=0.13)", "pandas (>=0.25.0)", "seaborn (>=0.9.0)"] +tests = ["matplotlib (>=2.1.1)", "scikit-image (>=0.13)", "pandas (>=0.25.0)", "pytest (>=5.0.1)", "pytest-cov (>=2.9.0)", "flake8 (>=3.8.2)", "mypy (>=0.770)", "pyamg (>=4.0.0)"] + +[[package]] +category = "main" +description = "SciPy: Scientific Library for Python" +name = "scipy" +optional = false +python-versions = ">=3.7" +version = "1.6.1" + +[package.dependencies] +numpy = ">=1.16.5" + +[[package]] +category = "main" +description = "Geometric objects, predicates, and operations" +name = "shapely" +optional = false +python-versions = "*" +version = "1.7.1" + +[package.extras] +all = ["numpy", "pytest", "pytest-cov"] +test = ["pytest", "pytest-cov"] +vectorized = ["numpy"] + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +name = "six" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "1.16.0" + +[[package]] +category = "main" +description = "A set of python modules for machine learning and data mining" +name = "sklearn" +optional = false +python-versions = "*" +version = "0.0" + +[package.dependencies] +scikit-learn = "*" + +[[package]] +category = "main" +description = "Pretty-print tabular data" +name = "tabulate" +optional = false +python-versions = "*" +version = "0.8.9" + +[package.extras] +widechars = ["wcwidth"] + +[[package]] +category = "main" +description = "threadpoolctl" +name = "threadpoolctl" +optional = false +python-versions = ">=3.5" +version = "2.1.0" + +[[package]] +category = "dev" +description = "Python Library for Tom's Obvious, Minimal Language" +name = "toml" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.10.2" + +[[package]] +category = "main" +description = "Fast, Extensible Progress Meter" +name = "tqdm" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +version = "4.61.1" + +[package.extras] +dev = ["py-make (>=0.1.0)", "twine", "wheel"] +notebook = ["ipywidgets (>=6)"] +telegram = ["requests"] + +[[package]] +category = "dev" +description = "Traitlets Python configuration system" +marker = "python_version > \"3.6\"" +name = "traitlets" +optional = false +python-versions = ">=3.7" +version = "5.0.5" + +[package.dependencies] +ipython-genutils = "*" + +[package.extras] +test = ["pytest"] + +[[package]] +category = "main" +description = "TSPLIB95 works with TSPLIB95 files." 
+name = "tsplib95" +optional = false +python-versions = "*" +version = "0.7.1" + +[package.dependencies] +Click = ">=6.0" +Deprecated = ">=1.2.9,<1.3.0" +networkx = ">=2.1,<3.0" +tabulate = ">=0.8.7,<0.9.0" + +[[package]] +category = "dev" +description = "a fork of Python 2 and 3 ast modules with type comment support" +marker = "python_version < \"3.8\"" +name = "typed-ast" +optional = false +python-versions = "*" +version = "1.4.3" + +[[package]] +category = "dev" +description = "Backported and Experimental Type Hints for Python 3.5+" +marker = "python_version < \"3.8\"" +name = "typing-extensions" +optional = false +python-versions = "*" +version = "3.10.0.0" + +[[package]] +category = "main" +description = "HTTP library with thread-safe connection pooling, file post, and more." +name = "urllib3" +optional = false +python-versions = "*" +version = "1.22" + +[package.extras] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] + +[[package]] +category = "dev" +description = "Measures the displayed width of unicode strings in a terminal" +marker = "python_version > \"3.6\"" +name = "wcwidth" +optional = false +python-versions = "*" +version = "0.2.5" + +[[package]] +category = "main" +description = "Module for decorators, wrappers and monkey patching." +name = "wrapt" +optional = false +python-versions = "*" +version = "1.12.1" + +[[package]] +category = "dev" +description = "Backport of pathlib-compatible object wrapper for zip files" +marker = "python_version < \"3.8\"" +name = "zipp" +optional = false +python-versions = ">=3.6" +version = "3.4.1" + +[package.extras] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] + +[metadata] +content-hash = "baa15acc0d8c6166dfc223d9e8c8eea58b87282fbf4334c74d9f2e717664a3eb" +lock-version = "1.0" +python-versions = ">=3.7.1" + +[metadata.files] +absl-py = [ + {file = "absl-py-0.13.0.tar.gz", hash = "sha256:6953272383486044699fd0e9f00aad167a27e08ce19aae66c6c4b10e7e767793"}, + {file = "absl_py-0.13.0-py3-none-any.whl", hash = "sha256:62bd4e248ddb19d81aec8f9446b407ff37c8175c2ba88266a7afa9b4ce4a333b"}, +] +appdirs = [ + {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, + {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, +] +appnope = [ + {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, + {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, +] +atomicwrites = [ + {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, + {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, +] +attrs = [ + {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, + {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, +] +backcall = [ + {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = 
"sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, + {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, +] +black = [ + {file = "black-21.6b0-py3-none-any.whl", hash = "sha256:dfb8c5a069012b2ab1e972e7b908f5fb42b6bbabcba0a788b86dc05067c7d9c7"}, + {file = "black-21.6b0.tar.gz", hash = "sha256:dc132348a88d103016726fe360cb9ede02cecf99b76e3660ce6c596be132ce04"}, +] +branca = [ + {file = "branca-0.4.2-py3-none-any.whl", hash = "sha256:62c2e777f074fc1830cd40ba9e650beb941861075980babafead8d97856b1a4b"}, + {file = "branca-0.4.2.tar.gz", hash = "sha256:c111453617b17ab2bda60a4cd71787d6f2b59c85cdf71ab160a737606ac66c31"}, +] +certifi = [ + {file = "certifi-2021.5.30-py2.py3-none-any.whl", hash = "sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8"}, + {file = "certifi-2021.5.30.tar.gz", hash = "sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"}, +] +chardet = [ + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, +] +click = [ + {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, + {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, +] +click-plugins = [ + {file = "click-plugins-1.1.1.tar.gz", hash = "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b"}, + {file = "click_plugins-1.1.1-py2.py3-none-any.whl", hash = "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8"}, +] +cligj = [ + {file = "cligj-0.6.0-py2-none-any.whl", hash = "sha256:b258362e31ffbd8dbaeeb81c14ffe803675767e96b4807bfc39da69e467f7c0d"}, + {file = "cligj-0.6.0-py3-none-any.whl", hash = "sha256:639242b1df173fdaef11c6214b2bc7404c7c6909730a1cfa1e69b5255acf2d60"}, + {file = "cligj-0.6.0.tar.gz", hash = "sha256:a5f080858fd584d73fcc2b75f80ed05054130944e2283019d1828a6deb9e4110"}, +] +colorama = [ + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +dacite = [ + {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, + {file = "dacite-1.6.0.tar.gz", hash = "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df"}, +] +decorator = [ + {file = "decorator-5.0.9-py3-none-any.whl", hash = "sha256:6e5c199c16f7a9f0e3a61a4a54b3d27e7dad0dbdde92b944426cb20914376323"}, + {file = "decorator-5.0.9.tar.gz", hash = "sha256:72ecfba4320a893c53f9706bebb2d55c270c1e51a28789361aa93e4a21319ed5"}, +] +deprecated = [ + {file = "Deprecated-1.2.12-py2.py3-none-any.whl", hash = "sha256:08452d69b6b5bc66e8330adde0a4f8642e969b9e1702904d137eeb29c8ffc771"}, + {file = "Deprecated-1.2.12.tar.gz", hash = "sha256:6d2de2de7931a968874481ef30208fd4e08da39177d61d3d4ebdf4366e7dbca1"}, +] +fiona = [ + {file = "Fiona-1.8.20-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:02880556540e36ad6aac97687799d9b3093c354787a47bc0e73026c7fc15f1b3"}, + {file = "Fiona-1.8.20-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:3f668c471fa2f8c9c0a9ca83639cb2c8dcc93edc3d93d43dba2f9e8da38ad53e"}, + {file 
= "Fiona-1.8.20-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:54f81039e913d0f88728ef23edf5a69038dec94dea54f4c799f972ba8e2a7d40"}, + {file = "Fiona-1.8.20-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:328340a448bed5c43d119f61f760368a04d13a302c59d2fccb051a3ff021f4b8"}, + {file = "Fiona-1.8.20-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:03f910380dbe684730b59b817aa030e6e9a3ee79211b66c6db2d1c8fe6ea12de"}, + {file = "Fiona-1.8.20-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:bef100ebd82afb9a4d67096216e82611b82ca9341330e4805832d7ff8c9bc1f7"}, + {file = "Fiona-1.8.20-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5e1cef608c6de9039eaa65b395024096e3189ab0559a5a328c68c4690c3302ce"}, + {file = "Fiona-1.8.20-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e72e4a5b84ec410be531d4fe4c1a5c87c6c0e92d01116c145c0f1b33f81c8080"}, + {file = "Fiona-1.8.20.tar.gz", hash = "sha256:a70502d2857b82f749c09cb0dea3726787747933a2a1599b5ab787d74e3c143b"}, +] +flake8 = [ + {file = "flake8-3.9.2-py2.py3-none-any.whl", hash = "sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907"}, + {file = "flake8-3.9.2.tar.gz", hash = "sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b"}, +] +folium = [ + {file = "folium-0.12.1-py2.py3-none-any.whl", hash = "sha256:3d2c48dd6ffe5327975bbfd718468c4e81db9f2844c26e574f878adf4c08b644"}, +] +geopandas = [ + {file = "geopandas-0.9.0-py2.py3-none-any.whl", hash = "sha256:79f6e557ba0dba76eec44f8351b1c6b42a17c38f5f08fef347e98fe4dae563c7"}, + {file = "geopandas-0.9.0.tar.gz", hash = "sha256:63972ab4dc44c4029f340600dcb83264eb8132dd22b104da0b654bef7f42630a"}, +] +idna = [ + {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, + {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +] +importlib-metadata = [ + {file = "importlib_metadata-4.5.0-py3-none-any.whl", hash = "sha256:833b26fb89d5de469b24a390e9df088d4e52e4ba33b01dc5e0e4f41b81a16c00"}, + {file = "importlib_metadata-4.5.0.tar.gz", hash = "sha256:b142cc1dd1342f31ff04bb7d022492b09920cb64fed867cd3ea6f80fe3ebd139"}, +] +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, +] +ipdb = [ + {file = "ipdb-0.13.9.tar.gz", hash = "sha256:951bd9a64731c444fd907a5ce268543020086a697f6be08f7cc2c9a752a278c5"}, +] +ipython = [ + {file = "ipython-7.24.1-py3-none-any.whl", hash = "sha256:d513e93327cf8657d6467c81f1f894adc125334ffe0e4ddd1abbb1c78d828703"}, + {file = "ipython-7.24.1.tar.gz", hash = "sha256:9bc24a99f5d19721fb8a2d1408908e9c0520a17fff2233ffe82620847f17f1b6"}, +] +ipython-genutils = [ + {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, + {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, +] +jedi = [ + {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, + {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, +] +jinja2 = [ + {file = "Jinja2-3.0.1-py3-none-any.whl", hash = 
"sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4"}, + {file = "Jinja2-3.0.1.tar.gz", hash = "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4"}, +] +joblib = [ + {file = "joblib-1.0.1-py3-none-any.whl", hash = "sha256:feeb1ec69c4d45129954f1b7034954241eedfd6ba39b5e9e4b6883be3332d5e5"}, + {file = "joblib-1.0.1.tar.gz", hash = "sha256:9c17567692206d2f3fb9ecf5e991084254fe631665c450b443761c4186a613f7"}, +] +lkh = [ + {file = "lkh-1.0.4.tar.gz", hash = "sha256:d12ede84483d2473f0795c530abdfb6edbc5fb8b8b112dee6ef9152fff467029"}, +] +markupsafe = [ + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, +] +matplotlib-inline = [ + {file = "matplotlib-inline-0.1.2.tar.gz", hash = "sha256:f41d5ff73c9f5385775d5c0bc13b424535c8402fe70ea8210f93e11f3683993e"}, + {file = "matplotlib_inline-0.1.2-py3-none-any.whl", hash = "sha256:5cf1176f554abb4fa98cb362aa2b55c500147e4bdbb07e3fda359143e1da0811"}, +] +mccabe = [ + {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, + {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, +] +mock = [ + {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, + {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, +] +munch = [ + {file = "munch-2.5.0-py2.py3-none-any.whl", hash = "sha256:6f44af89a2ce4ed04ff8de41f70b226b984db10a91dcc7b9ac2efc1c77022fdd"}, + {file = "munch-2.5.0.tar.gz", hash = "sha256:2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2"}, +] +mypy-extensions = [ + {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, + {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, +] +networkx = [ + {file = "networkx-2.5-py3-none-any.whl", hash = "sha256:8c5812e9f798d37c50570d15c4a69d5710a18d77bafc903ee9c5fba7454c616c"}, + {file = "networkx-2.5.tar.gz", hash = "sha256:7978955423fbc9639c10498878be59caf99b44dc304c2286162fd24b458c1602"}, +] +numpy = [ + {file = 
"numpy-1.20.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:70eb5808127284c4e5c9e836208e09d685a7978b6a216db85960b1a112eeace8"}, + {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6ca2b85a5997dabc38301a22ee43c82adcb53ff660b89ee88dded6b33687e1d8"}, + {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:c5bf0e132acf7557fc9bb8ded8b53bbbbea8892f3c9a1738205878ca9434206a"}, + {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:db250fd3e90117e0312b611574cd1b3f78bec046783195075cbd7ba9c3d73f16"}, + {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:637d827248f447e63585ca3f4a7d2dfaa882e094df6cfa177cc9cf9cd6cdf6d2"}, + {file = "numpy-1.20.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8b7bb4b9280da3b2856cb1fc425932f46fba609819ee1c62256f61799e6a51d2"}, + {file = "numpy-1.20.3-cp37-cp37m-win32.whl", hash = "sha256:67d44acb72c31a97a3d5d33d103ab06d8ac20770e1c5ad81bdb3f0c086a56cf6"}, + {file = "numpy-1.20.3-cp37-cp37m-win_amd64.whl", hash = "sha256:43909c8bb289c382170e0282158a38cf306a8ad2ff6dfadc447e90f9961bef43"}, + {file = "numpy-1.20.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f1452578d0516283c87608a5a5548b0cdde15b99650efdfd85182102ef7a7c17"}, + {file = "numpy-1.20.3-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6e51534e78d14b4a009a062641f465cfaba4fdcb046c3ac0b1f61dd97c861b1b"}, + {file = "numpy-1.20.3-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e515c9a93aebe27166ec9593411c58494fa98e5fcc219e47260d9ab8a1cc7f9f"}, + {file = "numpy-1.20.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c1c09247ccea742525bdb5f4b5ceeacb34f95731647fe55774aa36557dbb5fa4"}, + {file = "numpy-1.20.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:66fbc6fed94a13b9801fb70b96ff30605ab0a123e775a5e7a26938b717c5d71a"}, + {file = "numpy-1.20.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:ea9cff01e75a956dbee133fa8e5b68f2f92175233de2f88de3a682dd94deda65"}, + {file = "numpy-1.20.3-cp38-cp38-win32.whl", hash = "sha256:f39a995e47cb8649673cfa0579fbdd1cdd33ea497d1728a6cb194d6252268e48"}, + {file = "numpy-1.20.3-cp38-cp38-win_amd64.whl", hash = "sha256:1676b0a292dd3c99e49305a16d7a9f42a4ab60ec522eac0d3dd20cdf362ac010"}, + {file = "numpy-1.20.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:830b044f4e64a76ba71448fce6e604c0fc47a0e54d8f6467be23749ac2cbd2fb"}, + {file = "numpy-1.20.3-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:55b745fca0a5ab738647d0e4db099bd0a23279c32b31a783ad2ccea729e632df"}, + {file = "numpy-1.20.3-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5d050e1e4bc9ddb8656d7b4f414557720ddcca23a5b88dd7cff65e847864c400"}, + {file = "numpy-1.20.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a9c65473ebc342715cb2d7926ff1e202c26376c0dcaaee85a1fd4b8d8c1d3b2f"}, + {file = "numpy-1.20.3-cp39-cp39-win32.whl", hash = "sha256:16f221035e8bd19b9dc9a57159e38d2dd060b48e93e1d843c49cb370b0f415fd"}, + {file = "numpy-1.20.3-cp39-cp39-win_amd64.whl", hash = "sha256:6690080810f77485667bfbff4f69d717c3be25e5b11bb2073e76bb3f578d99b4"}, + {file = "numpy-1.20.3-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4e465afc3b96dbc80cf4a5273e5e2b1e3451286361b4af70ce1adb2984d392f9"}, + {file = "numpy-1.20.3.zip", hash = 
"sha256:e55185e51b18d788e49fe8305fd73ef4470596b33fc2c1ceb304566b99c71a69"}, +] +ortools = [ + {file = "ortools-8.2.8710-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b79db31d9d306e5a731f6f34461c895f8f1dafcd9203b9b18d429c389c3e28c7"}, + {file = "ortools-8.2.8710-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8869b260078bad8fad9e51b4471ccf14e985c637b3497b8711fd5bf6e01863fe"}, + {file = "ortools-8.2.8710-cp36-cp36m-win_amd64.whl", hash = "sha256:0470b28db960ccdca59f738520209daadce3e18e94d219033d4a50d4cfa1484b"}, + {file = "ortools-8.2.8710-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c380e542f111b225311c077699caa7cd4f9084e066d586d4a0dadd674063a089"}, + {file = "ortools-8.2.8710-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b81d383ad8dec2fba41fa0129996177ec19ac9414bf132a998280065daa11b0c"}, + {file = "ortools-8.2.8710-cp37-cp37m-win_amd64.whl", hash = "sha256:1312cdd7bbdbe51871f5fc206c95fdea6b2a2315ff6d917df36f0ec1305b13cd"}, + {file = "ortools-8.2.8710-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2e269c43eeb553bf3781953f47a38c4b3b86acec64d04ebc23406f2d1e812782"}, + {file = "ortools-8.2.8710-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:4531ca6285f4f2d813dac5e4bb63ce0d467cea49fe12b7162595aa8ef197b983"}, + {file = "ortools-8.2.8710-cp38-cp38-win_amd64.whl", hash = "sha256:8b6f022ac028875303f85b48821c728db8f4028558b5c71102a1ec897acdbfec"}, + {file = "ortools-8.2.8710-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:63f25d9746ccd91b6a8c423906a6c2547bd92be2e212d677d2f89724cf847b69"}, + {file = "ortools-8.2.8710-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:436860b7bf8ae389b7f34f8548c57f6f602acdf6df552b54cdca9559fb561fe5"}, + {file = "ortools-8.2.8710-cp39-cp39-win_amd64.whl", hash = "sha256:61292bdd01dbe254203580b5d3f9f45c1e192e3f743e205cf29d690c11c342b8"}, +] +packaging = [ + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, +] +pandas = [ + {file = "pandas-1.2.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c601c6fdebc729df4438ec1f62275d6136a0dd14d332fc0e8ce3f7d2aadb4dd6"}, + {file = "pandas-1.2.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8d4c74177c26aadcfb4fd1de6c1c43c2bf822b3e0fc7a9b409eeaf84b3e92aaa"}, + {file = "pandas-1.2.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:b730add5267f873b3383c18cac4df2527ac4f0f0eed1c6cf37fcb437e25cf558"}, + {file = "pandas-1.2.4-cp37-cp37m-win32.whl", hash = "sha256:2cb7e8f4f152f27dc93f30b5c7a98f6c748601ea65da359af734dd0cf3fa733f"}, + {file = "pandas-1.2.4-cp37-cp37m-win_amd64.whl", hash = "sha256:2111c25e69fa9365ba80bbf4f959400054b2771ac5d041ed19415a8b488dc70a"}, + {file = "pandas-1.2.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:167693a80abc8eb28051fbd184c1b7afd13ce2c727a5af47b048f1ea3afefff4"}, + {file = "pandas-1.2.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:612add929bf3ba9d27b436cc8853f5acc337242d6b584203f207e364bb46cb12"}, + {file = "pandas-1.2.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:971e2a414fce20cc5331fe791153513d076814d30a60cd7348466943e6e909e4"}, + {file = "pandas-1.2.4-cp38-cp38-win32.whl", hash = "sha256:68d7baa80c74aaacbed597265ca2308f017859123231542ff8a5266d489e1858"}, + {file = "pandas-1.2.4-cp38-cp38-win_amd64.whl", hash = "sha256:bd659c11a4578af740782288cac141a322057a2e36920016e0fc7b25c5a4b686"}, + {file = "pandas-1.2.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:9db70ffa8b280bb4de83f9739d514cd0735825e79eef3a61d312420b9f16b758"}, + {file = "pandas-1.2.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:298f0553fd3ba8e002c4070a723a59cdb28eda579f3e243bc2ee397773f5398b"}, + {file = "pandas-1.2.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:52d2472acbb8a56819a87aafdb8b5b6d2b3386e15c95bde56b281882529a7ded"}, + {file = "pandas-1.2.4-cp39-cp39-win32.whl", hash = "sha256:d0877407359811f7b853b548a614aacd7dea83b0c0c84620a9a643f180060950"}, + {file = "pandas-1.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:2b063d41803b6a19703b845609c0b700913593de067b552a8b24dd8eeb8c9895"}, + {file = "pandas-1.2.4.tar.gz", hash = "sha256:649ecab692fade3cbfcf967ff936496b0cfba0af00a55dfaacd82bdda5cb2279"}, +] +parso = [ + {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, + {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, +] +pathspec = [ + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] +pexpect = [ + {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, + {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, +] +pickleshare = [ + {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, + {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, +] +pluggy = [ + {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, + {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, +] +polyline = [ + {file = "polyline-1.4.0-py2.py3-none-any.whl", hash = "sha256:6559a0d5d37f4d14255744b3c6a648d5ff480d3d5c5f30186effc72a4142fd6c"}, + {file = "polyline-1.4.0.tar.gz", hash = "sha256:7c7f89d09a09c7b6161bdbfb4fd304b186fc7a2060fa4f31cb3f61c646a5c074"}, +] +prompt-toolkit = [ + {file = "prompt_toolkit-3.0.18-py3-none-any.whl", hash = "sha256:bf00f22079f5fadc949f42ae8ff7f05702826a97059ffcc6281036ad40ac6f04"}, + {file = "prompt_toolkit-3.0.18.tar.gz", hash = "sha256:e1b4f11b9336a28fa11810bc623c357420f69dfdb6d2dac41ca2c21a55c033bc"}, +] +protobuf = [ + {file = "protobuf-3.17.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ab6bb0e270c6c58e7ff4345b3a803cc59dbee19ddf77a4719c5b635f1d547aa8"}, + {file = "protobuf-3.17.3-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:13ee7be3c2d9a5d2b42a1030976f760f28755fcf5863c55b1460fd205e6cd637"}, + {file = "protobuf-3.17.3-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:1556a1049ccec58c7855a78d27e5c6e70e95103b32de9142bae0576e9200a1b0"}, + {file = "protobuf-3.17.3-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f0e59430ee953184a703a324b8ec52f571c6c4259d496a19d1cabcdc19dabc62"}, + {file = "protobuf-3.17.3-cp35-cp35m-win32.whl", hash = "sha256:a981222367fb4210a10a929ad5983ae93bd5a050a0824fc35d6371c07b78caf6"}, + {file = "protobuf-3.17.3-cp35-cp35m-win_amd64.whl", hash = "sha256:6d847c59963c03fd7a0cd7c488cadfa10cda4fff34d8bc8cba92935a91b7a037"}, + {file = 
"protobuf-3.17.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:145ce0af55c4259ca74993ddab3479c78af064002ec8227beb3d944405123c71"}, + {file = "protobuf-3.17.3-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6ce4d8bf0321e7b2d4395e253f8002a1a5ffbcfd7bcc0a6ba46712c07d47d0b4"}, + {file = "protobuf-3.17.3-cp36-cp36m-win32.whl", hash = "sha256:7a4c97961e9e5b03a56f9a6c82742ed55375c4a25f2692b625d4087d02ed31b9"}, + {file = "protobuf-3.17.3-cp36-cp36m-win_amd64.whl", hash = "sha256:a22b3a0dbac6544dacbafd4c5f6a29e389a50e3b193e2c70dae6bbf7930f651d"}, + {file = "protobuf-3.17.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:ffea251f5cd3c0b9b43c7a7a912777e0bc86263436a87c2555242a348817221b"}, + {file = "protobuf-3.17.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:9b7a5c1022e0fa0dbde7fd03682d07d14624ad870ae52054849d8960f04bc764"}, + {file = "protobuf-3.17.3-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:8727ee027157516e2c311f218ebf2260a18088ffb2d29473e82add217d196b1c"}, + {file = "protobuf-3.17.3-cp37-cp37m-win32.whl", hash = "sha256:14c1c9377a7ffbeaccd4722ab0aa900091f52b516ad89c4b0c3bb0a4af903ba5"}, + {file = "protobuf-3.17.3-cp37-cp37m-win_amd64.whl", hash = "sha256:c56c050a947186ba51de4f94ab441d7f04fcd44c56df6e922369cc2e1a92d683"}, + {file = "protobuf-3.17.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2ae692bb6d1992afb6b74348e7bb648a75bb0d3565a3f5eea5bec8f62bd06d87"}, + {file = "protobuf-3.17.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:99938f2a2d7ca6563c0ade0c5ca8982264c484fdecf418bd68e880a7ab5730b1"}, + {file = "protobuf-3.17.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:6902a1e4b7a319ec611a7345ff81b6b004b36b0d2196ce7a748b3493da3d226d"}, + {file = "protobuf-3.17.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:4ffbd23640bb7403574f7aff8368e2aeb2ec9a5c6306580be48ac59a6bac8bde"}, + {file = "protobuf-3.17.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:26010f693b675ff5a1d0e1bdb17689b8b716a18709113288fead438703d45539"}, + {file = "protobuf-3.17.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:e76d9686e088fece2450dbc7ee905f9be904e427341d289acbe9ad00b78ebd47"}, + {file = "protobuf-3.17.3-py2.py3-none-any.whl", hash = "sha256:2bfb815216a9cd9faec52b16fd2bfa68437a44b67c56bee59bc3926522ecb04e"}, + {file = "protobuf-3.17.3.tar.gz", hash = "sha256:72804ea5eaa9c22a090d2803813e280fb273b62d5ae497aaf3553d141c4fdd7b"}, +] +ptyprocess = [ + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, +] +py = [ + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, +] +pycodestyle = [ + {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, + {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, +] +pyflakes = [ + {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, + {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, +] +pygments = [ 
+ {file = "Pygments-2.9.0-py3-none-any.whl", hash = "sha256:d66e804411278594d764fc69ec36ec13d9ae9147193a1740cd34d272ca383b8e"}, + {file = "Pygments-2.9.0.tar.gz", hash = "sha256:a18f47b506a429f6f4b9df81bb02beab9ca21d0a5fee38ed15aef65f0545519f"}, +] +pyparsing = [ + {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, + {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, +] +pyproj = [ + {file = "pyproj-3.1.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8eda240225971b5cd0bac2d399ed6222068f0598ee92d5f6e847bd2019d2c8b0"}, + {file = "pyproj-3.1.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:ae237492767e0225f99b53a0fd7110fde2b7e7cabc105bbc243c151a7497de88"}, + {file = "pyproj-3.1.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b635e7e21fea5af74e90fc9e54d1a4c27078efdce6f214101c98dd93afae599a"}, + {file = "pyproj-3.1.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa87df0982aa0f4477478899d9c930cc0f97cd6d8a4ce84c43ac88ccf86d1da7"}, + {file = "pyproj-3.1.0-cp37-cp37m-win32.whl", hash = "sha256:10dad599b9f7ce2194996dc25f1000e0aa15754ecef9db46b624713959c67957"}, + {file = "pyproj-3.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a162ed199cd2ec392cffe20b2fa3381b68e7a166d55f3f060eceb8d517e4f46d"}, + {file = "pyproj-3.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1e88ebc4e08e661e9011b5c1ebfb32f0d311963a9824a6effb4168c7e07918b1"}, + {file = "pyproj-3.1.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:da88abc5e2f6a8fb07533855a57ca2a31845f58901a87f821b68b0db6b023978"}, + {file = "pyproj-3.1.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:911d773da9fa4d4f3f7580173858c391e3ee0b61acaf0be303baab323d2eae78"}, + {file = "pyproj-3.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f8a8d982bde211e65dc2de1f8f36cf162f9cc7fcd8a7625046ea265284e5e65"}, + {file = "pyproj-3.1.0-cp38-cp38-win32.whl", hash = "sha256:c4193e1069d165476b2d0f7d882b7712b3eab6e2e6fe2a0a78ef40de825a1f28"}, + {file = "pyproj-3.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:b6c74bbec679199746a3e02c0e0fad093c3652df96dd63e086a2fbf2afe9dc0e"}, + {file = "pyproj-3.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:04c185102e659439c5bd428ac5473d36ef795fca8e225bbbe78e20643d804ec0"}, + {file = "pyproj-3.1.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ebbba7707fe83a01e54bce8e3e7342feb0b3e0d74ff8c28df12f8bc59b76827c"}, + {file = "pyproj-3.1.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9cc464a1c51baad28ffb7a233116e8d4ce4c560b32039fa986d0f992ac3c431f"}, + {file = "pyproj-3.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4f3ad09cf3352bf5664794042b28d98781362ec8d9774ad73f28a1a0101a27f1"}, + {file = "pyproj-3.1.0-cp39-cp39-win32.whl", hash = "sha256:ae5534fa7a3b74f20534694d297fce6f7483890ff6ca404394ecf372f3c589d4"}, + {file = "pyproj-3.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:808f5992320e9631b2e45444028a65cd6ba3ee40229292934178ef07020a5ffd"}, + {file = "pyproj-3.1.0.tar.gz", hash = "sha256:67b94f4e694ae33fc90dfb7da0e6b5ed5f671dd0acc2f6cf46e9c39d56e16e1a"}, +] +pytest = [ + {file = "pytest-6.2.4-py3-none-any.whl", hash = "sha256:91ef2131a9bd6be8f76f1f08eac5c5317221d6ad1e143ae03894b862e8976890"}, + {file = "pytest-6.2.4.tar.gz", hash = "sha256:50bcad0a0b9c5a72c8e4e7c9855a3ad496ca6a881a3641b4260605450772c54b"}, +] 
+python-dateutil = [ + {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, + {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, +] +pytz = [ + {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, + {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, +] +regex = [ + {file = "regex-2021.4.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:619d71c59a78b84d7f18891fe914446d07edd48dc8328c8e149cbe0929b4e000"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:47bf5bf60cf04d72bf6055ae5927a0bd9016096bf3d742fa50d9bf9f45aa0711"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:281d2fd05555079448537fe108d79eb031b403dac622621c78944c235f3fcf11"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bd28bc2e3a772acbb07787c6308e00d9626ff89e3bfcdebe87fa5afbfdedf968"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c2a1af393fcc09e898beba5dd59196edaa3116191cc7257f9224beaed3e1aa0"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c38c71df845e2aabb7fb0b920d11a1b5ac8526005e533a8920aea97efb8ec6a4"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:96fcd1888ab4d03adfc9303a7b3c0bd78c5412b2bfbe76db5b56d9eae004907a"}, + {file = "regex-2021.4.4-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:ade17eb5d643b7fead300a1641e9f45401c98eee23763e9ed66a43f92f20b4a7"}, + {file = "regex-2021.4.4-cp36-cp36m-win32.whl", hash = "sha256:e8e5b509d5c2ff12f8418006d5a90e9436766133b564db0abaec92fd27fcee29"}, + {file = "regex-2021.4.4-cp36-cp36m-win_amd64.whl", hash = "sha256:11d773d75fa650cd36f68d7ca936e3c7afaae41b863b8c387a22aaa78d3c5c79"}, + {file = "regex-2021.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d3029c340cfbb3ac0a71798100ccc13b97dddf373a4ae56b6a72cf70dfd53bc8"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:18c071c3eb09c30a264879f0d310d37fe5d3a3111662438889ae2eb6fc570c31"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4c557a7b470908b1712fe27fb1ef20772b78079808c87d20a90d051660b1d69a"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:01afaf2ec48e196ba91b37451aa353cb7eda77efe518e481707e0515025f0cd5"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:3a9cd17e6e5c7eb328517969e0cb0c3d31fd329298dd0c04af99ebf42e904f82"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:90f11ff637fe8798933fb29f5ae1148c978cccb0452005bf4c69e13db951e765"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:919859aa909429fb5aa9cf8807f6045592c85ef56fdd30a9a3747e513db2536e"}, + {file = "regex-2021.4.4-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:339456e7d8c06dd36a22e451d58ef72cef293112b559010db3d054d5560ef439"}, + {file = "regex-2021.4.4-cp37-cp37m-win32.whl", hash = "sha256:67bdb9702427ceddc6ef3dc382455e90f785af4c13d495f9626861763ee13f9d"}, + {file = "regex-2021.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:32e65442138b7b76dd8173ffa2cf67356b7bc1768851dded39a7a13bf9223da3"}, + {file = "regex-2021.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:1e1c20e29358165242928c2de1482fb2cf4ea54a6a6dea2bd7a0e0d8ee321500"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:314d66636c494ed9c148a42731b3834496cc9a2c4251b1661e40936814542b14"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6d1b01031dedf2503631d0903cb563743f397ccaf6607a5e3b19a3d76fc10480"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:741a9647fcf2e45f3a1cf0e24f5e17febf3efe8d4ba1281dcc3aa0459ef424dc"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:4c46e22a0933dd783467cf32b3516299fb98cfebd895817d685130cc50cd1093"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:e512d8ef5ad7b898cdb2d8ee1cb09a8339e4f8be706d27eaa180c2f177248a10"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:980d7be47c84979d9136328d882f67ec5e50008681d94ecc8afa8a65ed1f4a6f"}, + {file = "regex-2021.4.4-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:ce15b6d103daff8e9fee13cf7f0add05245a05d866e73926c358e871221eae87"}, + {file = "regex-2021.4.4-cp38-cp38-win32.whl", hash = "sha256:a91aa8619b23b79bcbeb37abe286f2f408d2f2d6f29a17237afda55bb54e7aac"}, + {file = "regex-2021.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:c0502c0fadef0d23b128605d69b58edb2c681c25d44574fc673b0e52dce71ee2"}, + {file = "regex-2021.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:598585c9f0af8374c28edd609eb291b5726d7cbce16be6a8b95aa074d252ee17"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:ee54ff27bf0afaf4c3b3a62bcd016c12c3fdb4ec4f413391a90bd38bc3624605"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7d9884d86dd4dd489e981d94a65cd30d6f07203d90e98f6f657f05170f6324c9"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:bf5824bfac591ddb2c1f0a5f4ab72da28994548c708d2191e3b87dd207eb3ad7"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:563085e55b0d4fb8f746f6a335893bda5c2cef43b2f0258fe1020ab1dd874df8"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9c3db21af35e3b3c05764461b262d6f05bbca08a71a7849fd79d47ba7bc33ed"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3916d08be28a1149fb97f7728fca1f7c15d309a9f9682d89d79db75d5e52091c"}, + {file = "regex-2021.4.4-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:fd45ff9293d9274c5008a2054ecef86a9bfe819a67c7be1afb65e69b405b3042"}, + {file = "regex-2021.4.4-cp39-cp39-win32.whl", hash = "sha256:fa4537fb4a98fe8fde99626e4681cc644bdcf2a795038533f9f711513a862ae6"}, + {file = "regex-2021.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:97f29f57d5b84e73fbaf99ab3e26134e6687348e95ef6b48cfd2c06807005a07"}, + {file = "regex-2021.4.4.tar.gz", hash = "sha256:52ba3d3f9b942c49d7e4bc105bb28551c44065f139a65062ab7912bef10c9afb"}, +] +requests = [ + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, +] +scikit-learn = [ + {file = "scikit-learn-0.24.2.tar.gz", hash = "sha256:d14701a12417930392cd3898e9646cf5670c190b933625ebe7511b1f7d7b8736"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:d5bf9c863ba4717b3917b5227463ee06860fc43931dc9026747de416c0a10fee"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:5beaeb091071625e83f5905192d8aecde65ba2f26f8b6719845bbf586f7a04a1"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:06ffdcaaf81e2a3b1b50c3ac6842cfb13df2d8b737d61f64643ed61da7389cde"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:fec42690a2eb646b384eafb021c425fab48991587edb412d4db77acc358b27ce"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:5ff3e4e4cf7592d36541edec434e09fb8ab9ba6b47608c4ffe30c9038d301897"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:3cbd734e1aefc7c5080e6b6973fe062f97c26a1cdf1a991037ca196ce1c8f427"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-win32.whl", hash = "sha256:f74429a07fedb36a03c159332b914e6de757176064f9fed94b5f79ebac07d913"}, + {file = "scikit_learn-0.24.2-cp36-cp36m-win_amd64.whl", hash = "sha256:dd968a174aa82f3341a615a033fa6a8169e9320cbb46130686562db132d7f1f0"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:49ec0b1361da328da9bb7f1a162836028e72556356adeb53342f8fae6b450d47"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f18c3ed484eeeaa43a0d45dc2efb4d00fc6542ccdcfa2c45d7b635096a2ae534"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cdf24c1b9bbeb4936456b42ac5bd32c60bb194a344951acb6bfb0cddee5439a4"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d177fe1ff47cc235942d628d41ee5b1c6930d8f009f1a451c39b5411e8d0d4cf"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:f3ec00f023d84526381ad0c0f2cff982852d035c921bbf8ceb994f4886c00c64"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:ae19ac105cf7ce8c205a46166992fdec88081d6e783ab6e38ecfbe45729f3c39"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-win32.whl", hash = "sha256:f0ed4483c258fb23150e31b91ea7d25ff8495dba108aea0b0d4206a777705350"}, + {file = "scikit_learn-0.24.2-cp37-cp37m-win_amd64.whl", hash = "sha256:39b7e3b71bcb1fe46397185d6c1a5db1c441e71c23c91a31e7ad8cc3f7305f9a"}, + {file = "scikit_learn-0.24.2-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:90a297330f608adeb4d2e9786c6fda395d3150739deb3d42a86d9a4c2d15bc1d"}, + {file = "scikit_learn-0.24.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f1d2108e770907540b5248977e4cff9ffaf0f73d0d13445ee938df06ca7579c6"}, + {file = "scikit_learn-0.24.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:1eec963fe9ffc827442c2e9333227c4d49749a44e592f305398c1db5c1563393"}, + {file = "scikit_learn-0.24.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:2db429090b98045d71218a9ba913cc9b3fe78e0ba0b6b647d8748bc6d5a44080"}, + {file = "scikit_learn-0.24.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:62214d2954377fcf3f31ec867dd4e436df80121e7a32947a0b3244f58f45e455"}, + {file = "scikit_learn-0.24.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8fac72b9688176922f9f54fda1ba5f7ffd28cbeb9aad282760186e8ceba9139a"}, + {file = "scikit_learn-0.24.2-cp38-cp38-win32.whl", hash = "sha256:ae426e3a52842c6b6d77d00f906b6031c8c2cfdfabd6af7511bb4bc9a68d720e"}, + {file = "scikit_learn-0.24.2-cp38-cp38-win_amd64.whl", hash = "sha256:038f4e9d6ef10e1f3fe82addc3a14735c299866eb10f2c77c090410904828312"}, + {file = "scikit_learn-0.24.2-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:48f273836e19901ba2beecd919f7b352f09310ce67c762f6e53bc6b81cacf1f0"}, + {file = "scikit_learn-0.24.2-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:a2a47449093dcf70babc930beba2ca0423cb7df2fa5fd76be5260703d67fa574"}, + {file = "scikit_learn-0.24.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0e71ce9c7cbc20f6f8b860107ce15114da26e8675238b4b82b7e7cd37ca0c087"}, + {file = "scikit_learn-0.24.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2754c85b2287333f9719db7f23fb7e357f436deed512db3417a02bf6f2830aa5"}, + {file = "scikit_learn-0.24.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7be1b88c23cfac46e06404582215a917017cd2edaa2e4d40abe6aaff5458f24b"}, + {file = "scikit_learn-0.24.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:4e6198675a6f9d333774671bd536668680eea78e2e81c0b19e57224f58d17f37"}, + {file = "scikit_learn-0.24.2-cp39-cp39-win32.whl", hash = "sha256:cbdb0b3db99dd1d5f69d31b4234367d55475add31df4d84a3bd690ef017b55e2"}, + {file = "scikit_learn-0.24.2-cp39-cp39-win_amd64.whl", hash = "sha256:40556bea1ef26ef54bc678d00cf138a63069144a0b5f3a436eecd8f3468b903e"}, +] +scipy = [ + {file = "scipy-1.6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a15a1f3fc0abff33e792d6049161b7795909b40b97c6cc2934ed54384017ab76"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:e79570979ccdc3d165456dd62041d9556fb9733b86b4b6d818af7a0afc15f092"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:a423533c55fec61456dedee7b6ee7dce0bb6bfa395424ea374d25afa262be261"}, + {file = "scipy-1.6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:33d6b7df40d197bdd3049d64e8e680227151673465e5d85723b3b8f6b15a6ced"}, + {file = "scipy-1.6.1-cp37-cp37m-win32.whl", hash = "sha256:6725e3fbb47da428794f243864f2297462e9ee448297c93ed1dcbc44335feb78"}, + {file = "scipy-1.6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:5fa9c6530b1661f1370bcd332a1e62ca7881785cc0f80c0d559b636567fab63c"}, + {file = "scipy-1.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bd50daf727f7c195e26f27467c85ce653d41df4358a25b32434a50d8870fc519"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f46dd15335e8a320b0fb4685f58b7471702234cba8bb3442b69a3e1dc329c345"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0e5b0ccf63155d90da576edd2768b66fb276446c371b73841e3503be1d63fb5d"}, + {file = "scipy-1.6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2481efbb3740977e3c831edfd0bd9867be26387cacf24eb5e366a6a374d3d00d"}, + {file = "scipy-1.6.1-cp38-cp38-win32.whl", hash = "sha256:68cb4c424112cd4be886b4d979c5497fba190714085f46b8ae67a5e4416c32b4"}, + {file = "scipy-1.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:5f331eeed0297232d2e6eea51b54e8278ed8bb10b099f69c44e2558c090d06bf"}, + {file = "scipy-1.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0c8a51d33556bf70367452d4d601d1742c0e806cd0194785914daf19775f0e67"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:83bf7c16245c15bc58ee76c5418e46ea1811edcc2e2b03041b804e46084ab627"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:794e768cc5f779736593046c9714e0f3a5940bc6dcc1dba885ad64cbfb28e9f0"}, + {file = "scipy-1.6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5da5471aed911fe7e52b86bf9ea32fb55ae93e2f0fac66c32e58897cfb02fa07"}, + {file = "scipy-1.6.1-cp39-cp39-win32.whl", hash = "sha256:8e403a337749ed40af60e537cc4d4c03febddcc56cd26e774c9b1b600a70d3e4"}, + {file = "scipy-1.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5193a098ae9f29af283dcf0041f762601faf2e595c0db1da929875b7570353f"}, + {file = "scipy-1.6.1.tar.gz", hash = 
"sha256:c4fceb864890b6168e79b0e714c585dbe2fd4222768ee90bc1aa0f8218691b11"}, +] +shapely = [ + {file = "Shapely-1.7.1-1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:46da0ea527da9cf9503e66c18bab6981c5556859e518fe71578b47126e54ca93"}, + {file = "Shapely-1.7.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798"}, + {file = "Shapely-1.7.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b"}, + {file = "Shapely-1.7.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:da38ed3d65b8091447dc3717e5218cc336d20303b77b0634b261bc5c1aa2bae8"}, + {file = "Shapely-1.7.1-cp35-cp35m-win32.whl", hash = "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19"}, + {file = "Shapely-1.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:791477edb422692e7dc351c5ed6530eb0e949a31b45569946619a0d9cd5f53cb"}, + {file = "Shapely-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b"}, + {file = "Shapely-1.7.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8f15b6ce67dcc05b61f19c689b60f3fe58550ba994290ff8332f711f5aaa9840"}, + {file = "Shapely-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:60e5b2282619249dbe8dc5266d781cc7d7fb1b27fa49f8241f2167672ad26719"}, + {file = "Shapely-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1"}, + {file = "Shapely-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a"}, + {file = "Shapely-1.7.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1"}, + {file = "Shapely-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b"}, + {file = "Shapely-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891"}, + {file = "Shapely-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688"}, + {file = "Shapely-1.7.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d"}, + {file = "Shapely-1.7.1-cp38-cp38-win32.whl", hash = "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba"}, + {file = "Shapely-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1"}, + {file = "Shapely-1.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:617bf046a6861d7c6b44d2d9cb9e2311548638e684c2cd071d8945f24a926263"}, + {file = "Shapely-1.7.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f"}, + {file = "Shapely-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1"}, + {file = "Shapely-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea"}, + {file = "Shapely-1.7.1.tar.gz", hash = "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129"}, +] +six = [ + {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, + {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, +] +sklearn = [ + {file = "sklearn-0.0.tar.gz", 
hash = "sha256:e23001573aa194b834122d2b9562459bf5ae494a2d59ca6b8aa22c85a44c0e31"}, +] +tabulate = [ + {file = "tabulate-0.8.9-py3-none-any.whl", hash = "sha256:d7c013fe7abbc5e491394e10fa845f8f32fe54f8dc60c6622c6cf482d25d47e4"}, + {file = "tabulate-0.8.9.tar.gz", hash = "sha256:eb1d13f25760052e8931f2ef80aaf6045a6cceb47514db8beab24cded16f13a7"}, +] +threadpoolctl = [ + {file = "threadpoolctl-2.1.0-py3-none-any.whl", hash = "sha256:38b74ca20ff3bb42caca8b00055111d74159ee95c4370882bbff2b93d24da725"}, + {file = "threadpoolctl-2.1.0.tar.gz", hash = "sha256:ddc57c96a38beb63db45d6c159b5ab07b6bced12c45a1f07b2b92f272aebfa6b"}, +] +toml = [ + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, +] +tqdm = [ + {file = "tqdm-4.61.1-py2.py3-none-any.whl", hash = "sha256:aa0c29f03f298951ac6318f7c8ce584e48fa22ec26396e6411e43d038243bdb2"}, + {file = "tqdm-4.61.1.tar.gz", hash = "sha256:24be966933e942be5f074c29755a95b315c69a91f839a29139bf26ffffe2d3fd"}, +] +traitlets = [ + {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, + {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, +] +tsplib95 = [ + {file = "tsplib95-0.7.1-py2.py3-none-any.whl", hash = "sha256:c481638e293baaa62134b491477aa5b2681e552e4dc28a6106ca1e157ae59184"}, + {file = "tsplib95-0.7.1.tar.gz", hash = "sha256:3da80175dfb0478b967b87c508f75def47371188b6401b719441f2cedc817e00"}, +] +typed-ast = [ + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, + {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, + {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, + {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, + {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, + {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, + {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, + {file = 
"typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, + {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, + {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, + {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, + {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, + {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, + {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, + {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, + {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, + {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, + {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, + {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, +] +typing-extensions = [ + {file = "typing_extensions-3.10.0.0-py2-none-any.whl", hash = "sha256:0ac0f89795dd19de6b97debb0c6af1c70987fd80a2d62d1958f7e56fcc31b497"}, + {file = "typing_extensions-3.10.0.0-py3-none-any.whl", hash = "sha256:779383f6086d90c99ae41cf0ff39aac8a7937a9283ce0a414e5dd782f4c94a84"}, + {file = "typing_extensions-3.10.0.0.tar.gz", hash = "sha256:50b6f157849174217d0656f99dc82fe932884fb250826c18350e159ec6cdf342"}, +] +urllib3 = [ + {file = "urllib3-1.22-py2.py3-none-any.whl", hash = "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b"}, + {file = "urllib3-1.22.tar.gz", hash = "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f"}, +] +wcwidth = [ + {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, + {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, +] +wrapt = [ + {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, +] +zipp = [ + {file = "zipp-3.4.1-py3-none-any.whl", hash = 
"sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, + {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, +] diff --git a/pyproject.toml b/pyproject.toml index 6af4f9f..c90bf87 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,36 +1,36 @@ -[tool.poetry] -name = "loggibud" -version = "0.1.0" -description = "Real-world benchmarks for urban delivery problems, including vehicle routing and facility location problems." -authors = ["Gabriela Surita