diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0724c09..bdf77e9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,7 +1,8 @@ name: CI on: push jobs: - tests: + fast-tests: + name: fast (without R) runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -10,5 +11,22 @@ jobs: python-version: 3.8 architecture: x64 - run: pip install nox==2020.5.24 - - run: pip install poetry==1.0.10 - - run: nox + - run: pip install poetry==1.1.4 + - run: nox -s format lint ci-tests-fast safety docs + slow-tests: + name: slow (with R) + runs-on: ubuntu-latest + env: + R_LIBS: .r_libs + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v1 + with: + python-version: 3.8 + architecture: x64 + - run: mkdir .r_libs + - run: sudo apt-get install r-base r-base-dev libcurl4-openssl-dev libxml2-dev patchelf + - run: R -e "install.packages('forecast')" + - run: pip install nox==2020.5.24 + - run: pip install poetry==1.1.4 + - run: nox -s ci-tests-slow diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..8300932 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "research/papers/demand-forecasting"] + path = research/papers/demand-forecasting + url = git@github.com:webartifex/urban-meal-delivery-demand-forecasting.git diff --git a/README.md b/README.md index 77cb3db..537aa98 100644 --- a/README.md +++ b/README.md @@ -16,16 +16,16 @@ that iteratively build on each other. ### Data Cleaning The UDP provided its raw data as a PostgreSQL dump. -This [notebook](https://nbviewer.jupyter.org/github/webartifex/urban-meal-delivery/blob/main/notebooks/00_clean_data.ipynb) +This [notebook](https://nbviewer.jupyter.org/github/webartifex/urban-meal-delivery/blob/develop/research/clean_data.ipynb) cleans the data extensively -and maps them onto the [ORM models](https://github.com/webartifex/urban-meal-delivery/tree/main/src/urban_meal_delivery/db) +and maps them onto the [ORM models](https://github.com/webartifex/urban-meal-delivery/tree/develop/src/urban_meal_delivery/db) defined in the `urban-meal-delivery` package -that is developed in the [src/](https://github.com/webartifex/urban-meal-delivery/tree/main/src) folder +that is developed in the [src/](https://github.com/webartifex/urban-meal-delivery/tree/develop/src) folder and contains all source code to drive the analyses. Due to a non-disclosure agreement with the UDP, neither the raw nor the cleaned data are published as of now. -However, previews of the data can be seen throughout the [notebooks/](https://github.com/webartifex/urban-meal-delivery/tree/main/notebooks) folders. +However, previews of the data can be seen throughout the [research/](https://github.com/webartifex/urban-meal-delivery/tree/develop/research) folder. ### Real-time Demand Forecasting @@ -51,11 +51,11 @@ and `poetry install --extras research` The `--extras` option is necessary as the non-develop dependencies -are structured in the [pyproject.toml](https://github.com/webartifex/urban-meal-delivery/blob/main/pyproject.toml) file +are structured in the [pyproject.toml](https://github.com/webartifex/urban-meal-delivery/blob/develop/pyproject.toml) file into dependencies related to only the `urban-meal-delivery` source code package and dependencies used to run the [Jupyter](https://jupyter.org/) environment with the analyses. Contributions are welcome. Use the [issues](https://github.com/webartifex/urban-meal-delivery/issues) tab. 
-The project is licensed under the [MIT license](https://github.com/webartifex/urban-meal-delivery/blob/main/LICENSE.txt). +The project is licensed under the [MIT license](https://github.com/webartifex/urban-meal-delivery/blob/develop/LICENSE.txt). diff --git a/docs/conf.py b/docs/conf.py index 2ab87a7..40ef34b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,7 +5,7 @@ import urban_meal_delivery as umd project = umd.__pkg_name__ author = umd.__author__ -copyright = f'2020, {author}' # pylint:disable=redefined-builtin +copyright = f'2020, {author}' version = release = umd.__version__ extensions = [ diff --git a/migrations/env.py b/migrations/env.py index 15c79e3..1669e2d 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -21,7 +21,11 @@ log_config.fileConfig(context.config.config_file_name) def include_object(obj, _name, type_, _reflected, _compare_to): """Only include the clean schema into --autogenerate migrations.""" - if type_ in {'table', 'column'} and obj.schema != umd_config.DATABASE_SCHEMA: + if ( # noqa:WPS337 + type_ in {'table', 'column'} + and hasattr(obj, 'schema') # noqa:WPS421 => fix for rare edge case + and obj.schema != umd_config.CLEAN_SCHEMA + ): return False return True diff --git a/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py b/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py index a03e1dc..5f02843 100644 --- a/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py +++ b/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py @@ -107,13 +107,13 @@ def upgrade(): sa.Column('id', sa.Integer(), autoincrement=False, nullable=False), sa.Column('primary_id', sa.Integer(), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('place_id', sa.Unicode(length=120), nullable=False), # noqa:WPS432 + sa.Column('place_id', sa.Unicode(length=120), nullable=False), sa.Column('latitude', postgresql.DOUBLE_PRECISION(), nullable=False), sa.Column('longitude', postgresql.DOUBLE_PRECISION(), nullable=False), sa.Column('city_id', sa.SmallInteger(), nullable=False), - sa.Column('city', sa.Unicode(length=25), nullable=False), # noqa:WPS432 + sa.Column('city', sa.Unicode(length=25), nullable=False), sa.Column('zip_code', sa.Integer(), nullable=False), - sa.Column('street', sa.Unicode(length=80), nullable=False), # noqa:WPS432 + sa.Column('street', sa.Unicode(length=80), nullable=False), sa.Column('floor', sa.SmallInteger(), nullable=True), sa.CheckConstraint( '-180 <= longitude AND longitude <= 180', @@ -192,7 +192,7 @@ def upgrade(): 'restaurants', sa.Column('id', sa.SmallInteger(), autoincrement=False, nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('name', sa.Unicode(length=45), nullable=False), # noqa:WPS432 + sa.Column('name', sa.Unicode(length=45), nullable=False), sa.Column('address_id', sa.Integer(), nullable=False), sa.Column('estimated_prep_duration', sa.SmallInteger(), nullable=False), sa.CheckConstraint( diff --git a/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py new file mode 100644 index 0000000..dc5e8d7 --- /dev/null +++ b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py @@ -0,0 +1,167 @@ +"""Add pixel grid. 
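+
+A grid covers a city with pixels of a fixed side length.
+The new `addresses_pixels` table assigns each address to at most
+one pixel per grid.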
+ +Revision: #888e352d7526 at 2021-01-02 18:11:02 +Revises: #f11cd76d2f45 +""" + +import os + +import sqlalchemy as sa +from alembic import op + +from urban_meal_delivery import configuration + + +revision = '888e352d7526' +down_revision = 'f11cd76d2f45' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision 888e352d7526.""" + op.create_table( + 'grids', + sa.Column('id', sa.SmallInteger(), autoincrement=True, nullable=False), + sa.Column('city_id', sa.SmallInteger(), nullable=False), + sa.Column('side_length', sa.SmallInteger(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_grids')), + sa.ForeignKeyConstraint( + ['city_id'], + [f'{config.CLEAN_SCHEMA}.cities.id'], + name=op.f('fk_grids_to_cities_via_city_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.UniqueConstraint( + 'city_id', 'side_length', name=op.f('uq_grids_on_city_id_side_length'), + ), + # This `UniqueConstraint` is needed by the `addresses_pixels` table below. + sa.UniqueConstraint('id', 'city_id', name=op.f('uq_grids_on_id_city_id')), + schema=config.CLEAN_SCHEMA, + ) + + op.create_table( + 'pixels', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('grid_id', sa.SmallInteger(), nullable=False), + sa.Column('n_x', sa.SmallInteger(), nullable=False), + sa.Column('n_y', sa.SmallInteger(), nullable=False), + sa.CheckConstraint('0 <= n_x', name=op.f('ck_pixels_on_n_x_is_positive')), + sa.CheckConstraint('0 <= n_y', name=op.f('ck_pixels_on_n_y_is_positive')), + sa.ForeignKeyConstraint( + ['grid_id'], + [f'{config.CLEAN_SCHEMA}.grids.id'], + name=op.f('fk_pixels_to_grids_via_grid_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.PrimaryKeyConstraint('id', name=op.f('pk_pixels')), + sa.UniqueConstraint( + 'grid_id', 'n_x', 'n_y', name=op.f('uq_pixels_on_grid_id_n_x_n_y'), + ), + sa.UniqueConstraint('id', 'grid_id', name=op.f('uq_pixels_on_id_grid_id')), + schema=config.CLEAN_SCHEMA, + ) + + op.create_index( + op.f('ix_pixels_on_grid_id'), + 'pixels', + ['grid_id'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + op.create_index( + op.f('ix_pixels_on_n_x'), + 'pixels', + ['n_x'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + op.create_index( + op.f('ix_pixels_on_n_y'), + 'pixels', + ['n_y'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + + # This `UniqueConstraint` is needed by the `addresses_pixels` table below. 
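+    # (In PostgreSQL, the columns referenced by a composite
+    # `ForeignKeyConstraint` must be covered by a unique constraint
+    # or primary key on the referenced table.)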
+ op.create_unique_constraint( + 'uq_addresses_on_id_city_id', + 'addresses', + ['id', 'city_id'], + schema=config.CLEAN_SCHEMA, + ) + + op.create_table( + 'addresses_pixels', + sa.Column('address_id', sa.Integer(), nullable=False), + sa.Column('city_id', sa.SmallInteger(), nullable=False), + sa.Column('grid_id', sa.SmallInteger(), nullable=False), + sa.Column('pixel_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['address_id', 'city_id'], + [ + f'{config.CLEAN_SCHEMA}.addresses.id', + f'{config.CLEAN_SCHEMA}.addresses.city_id', + ], + name=op.f('fk_addresses_pixels_to_addresses_via_address_id_city_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.ForeignKeyConstraint( + ['grid_id', 'city_id'], + [ + f'{config.CLEAN_SCHEMA}.grids.id', + f'{config.CLEAN_SCHEMA}.grids.city_id', + ], + name=op.f('fk_addresses_pixels_to_grids_via_grid_id_city_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.ForeignKeyConstraint( + ['pixel_id', 'grid_id'], + [ + f'{config.CLEAN_SCHEMA}.pixels.id', + f'{config.CLEAN_SCHEMA}.pixels.grid_id', + ], + name=op.f('fk_addresses_pixels_to_pixels_via_pixel_id_grid_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.PrimaryKeyConstraint( + 'address_id', 'pixel_id', name=op.f('pk_addresses_pixels'), + ), + sa.UniqueConstraint( + 'address_id', + 'grid_id', + name=op.f('uq_addresses_pixels_on_address_id_grid_id'), + ), + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision f11cd76d2f45.""" + op.drop_table('addresses_pixels', schema=config.CLEAN_SCHEMA) + op.drop_constraint( + 'uq_addresses_on_id_city_id', + 'addresses', + type_=None, + schema=config.CLEAN_SCHEMA, + ) + op.drop_index( + op.f('ix_pixels_on_n_y'), table_name='pixels', schema=config.CLEAN_SCHEMA, + ) + op.drop_index( + op.f('ix_pixels_on_n_x'), table_name='pixels', schema=config.CLEAN_SCHEMA, + ) + op.drop_index( + op.f('ix_pixels_on_grid_id'), table_name='pixels', schema=config.CLEAN_SCHEMA, + ) + op.drop_table('pixels', schema=config.CLEAN_SCHEMA) + op.drop_table('grids', schema=config.CLEAN_SCHEMA) diff --git a/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py b/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py new file mode 100644 index 0000000..e624259 --- /dev/null +++ b/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py @@ -0,0 +1,96 @@ +"""Add demand forecasting. 
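+
+The new `forecasts` table stores one demand prediction per pixel,
+start time, time step, training horizon, and forecasting method.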
+ +Revision: #e40623e10405 at 2021-01-06 19:55:56 +Revises: #888e352d7526 +""" + +import os + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +from urban_meal_delivery import configuration + + +revision = 'e40623e10405' +down_revision = '888e352d7526' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision e40623e10405.""" + op.create_table( + 'forecasts', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('pixel_id', sa.Integer(), nullable=False), + sa.Column('start_at', sa.DateTime(), nullable=False), + sa.Column('time_step', sa.SmallInteger(), nullable=False), + sa.Column('training_horizon', sa.SmallInteger(), nullable=False), + sa.Column('method', sa.Unicode(length=20), nullable=False), + sa.Column('prediction', postgresql.DOUBLE_PRECISION(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_forecasts')), + sa.ForeignKeyConstraint( + ['pixel_id'], + [f'{config.CLEAN_SCHEMA}.pixels.id'], + name=op.f('fk_forecasts_to_pixels_via_pixel_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.CheckConstraint( + """ + NOT ( + EXTRACT(HOUR FROM start_at) < 11 + OR + EXTRACT(HOUR FROM start_at) > 22 + ) + """, + name=op.f('ck_forecasts_on_start_at_must_be_within_operating_hours'), + ), + sa.CheckConstraint( + 'CAST(EXTRACT(MINUTES FROM start_at) AS INTEGER) % 15 = 0', + name=op.f('ck_forecasts_on_start_at_minutes_must_be_quarters_of_the_hour'), + ), + sa.CheckConstraint( + 'CAST(EXTRACT(MICROSECONDS FROM start_at) AS INTEGER) % 1000000 = 0', + name=op.f('ck_forecasts_on_start_at_allows_no_microseconds'), + ), + sa.CheckConstraint( + 'EXTRACT(SECONDS FROM start_at) = 0', + name=op.f('ck_forecasts_on_start_at_allows_no_seconds'), + ), + sa.CheckConstraint( + 'time_step > 0', name=op.f('ck_forecasts_on_time_step_must_be_positive'), + ), + sa.CheckConstraint( + 'training_horizon > 0', + name=op.f('ck_forecasts_on_training_horizon_must_be_positive'), + ), + sa.UniqueConstraint( + 'pixel_id', + 'start_at', + 'time_step', + 'training_horizon', + 'method', + name=op.f( + 'uq_forecasts_on_pixel_id_start_at_time_step_training_horizon_method', + ), + ), + schema=config.CLEAN_SCHEMA, + ) + op.create_index( + op.f('ix_forecasts_on_pixel_id'), + 'forecasts', + ['pixel_id'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision 888e352d7526.""" + op.drop_table('forecasts', schema=config.CLEAN_SCHEMA) diff --git a/migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py b/migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py new file mode 100644 index 0000000..ab352c1 --- /dev/null +++ b/migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py @@ -0,0 +1,124 @@ +"""Add confidence intervals to forecasts. 
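+
+`Forecast.method` is renamed to `.model`, and nullable lower and
+upper bounds for the 80% and 95% confidence intervals are added.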
+
+Revision: #26711cd3f9b9 at 2021-01-20 16:08:21
+Revises: #e40623e10405
+"""
+
+import os
+
+import sqlalchemy as sa
+from alembic import op
+from sqlalchemy.dialects import postgresql
+
+from urban_meal_delivery import configuration
+
+
+revision = '26711cd3f9b9'
+down_revision = 'e40623e10405'
+branch_labels = None
+depends_on = None
+
+
+config = configuration.make_config('testing' if os.getenv('TESTING') else 'production')
+
+
+def upgrade():
+    """Upgrade to revision 26711cd3f9b9."""
+    op.alter_column(
+        'forecasts', 'method', new_column_name='model', schema=config.CLEAN_SCHEMA,
+    )
+    op.add_column(
+        'forecasts',
+        sa.Column('low80', postgresql.DOUBLE_PRECISION(), nullable=True),
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.add_column(
+        'forecasts',
+        sa.Column('high80', postgresql.DOUBLE_PRECISION(), nullable=True),
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.add_column(
+        'forecasts',
+        sa.Column('low95', postgresql.DOUBLE_PRECISION(), nullable=True),
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.add_column(
+        'forecasts',
+        sa.Column('high95', postgresql.DOUBLE_PRECISION(), nullable=True),
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.create_check_constraint(
+        op.f('ck_forecasts_on_ci_upper_and_lower_bounds'),
+        'forecasts',
+        """
+        NOT (
+            low80 IS NULL AND high80 IS NOT NULL
+            OR
+            low80 IS NOT NULL AND high80 IS NULL
+            OR
+            low95 IS NULL AND high95 IS NOT NULL
+            OR
+            low95 IS NOT NULL AND high95 IS NULL
+        )
+        """,
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.create_check_constraint(
+        op.f('prediction_must_be_within_ci'),
+        'forecasts',
+        """
+        NOT (
+            prediction < low80
+            OR
+            prediction < low95
+            OR
+            prediction > high80
+            OR
+            prediction > high95
+        )
+        """,
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.create_check_constraint(
+        op.f('ci_upper_bound_greater_than_lower_bound'),
+        'forecasts',
+        """
+        NOT (
+            low80 > high80
+            OR
+            low95 > high95
+        )
+        """,
+        schema=config.CLEAN_SCHEMA,
+    )
+    op.create_check_constraint(
+        op.f('ci95_must_be_wider_than_ci80'),
+        'forecasts',
+        """
+        NOT (
+            low80 < low95
+            OR
+            high80 > high95
+        )
+        """,
+        schema=config.CLEAN_SCHEMA,
+    )
+
+
+def downgrade():
+    """Downgrade to revision e40623e10405."""
+    op.alter_column(
+        'forecasts', 'model', new_column_name='method', schema=config.CLEAN_SCHEMA,
+    )
+    op.drop_column(
+        'forecasts', 'low80', schema=config.CLEAN_SCHEMA,
+    )
+    op.drop_column(
+        'forecasts', 'high80', schema=config.CLEAN_SCHEMA,
+    )
+    op.drop_column(
+        'forecasts', 'low95', schema=config.CLEAN_SCHEMA,
+    )
+    op.drop_column(
+        'forecasts', 'high95', schema=config.CLEAN_SCHEMA,
+    )
diff --git a/migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py b/migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py
new file mode 100644
index 0000000..19c9223
--- /dev/null
+++ b/migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py
@@ -0,0 +1,398 @@
+"""Remove orders from restaurants with invalid location ...
+
+... and also de-duplicate a couple of redundant addresses.
+
+Revision: #e86290e7305e at 2021-01-23 15:56:59
+Revises: #26711cd3f9b9
+
+1) Remove orders
+
+Some restaurants have orders to be picked up at an address that is
+not their primary address. That is ok if that address is the location
+of a second franchise. However, for a small number of restaurants
+there is exactly one order at that other address, which often is
+located far away from the restaurant's primary location.
+It looks like a restaurant signed up with some invalid location
+that was then corrected to the primary one.
+
+Use the following SQL statement to obtain a list of these locations
+before this migration is run:
+
+SELECT
+    orders.pickup_address_id,
+    COUNT(*) AS n_orders,
+    MIN(placed_at) AS first_order_at,
+    MAX(placed_at) AS last_order_at
+FROM
+    {config.CLEAN_SCHEMA}.orders
+LEFT OUTER JOIN
+    {config.CLEAN_SCHEMA}.restaurants
+    ON orders.restaurant_id = restaurants.id
+WHERE
+    orders.pickup_address_id <> restaurants.address_id
+GROUP BY
+    pickup_address_id;
+
+50 orders with such weird pickup addresses are removed with this migration.
+
+
+2) De-duplicate addresses
+
+Five restaurants have two pickup addresses that are actually the same location.
+
+The following SQL statement shows them before this migration is run:
+
+SELECT
+    orders.restaurant_id,
+    restaurants.name,
+    restaurants.address_id AS primary_address_id,
+    addresses.id AS address_id,
+    addresses.street,
+    COUNT(*) AS n_orders
+FROM
+    {config.CLEAN_SCHEMA}.orders
+LEFT OUTER JOIN
+    {config.CLEAN_SCHEMA}.addresses ON orders.pickup_address_id = addresses.id
+LEFT OUTER JOIN
+    {config.CLEAN_SCHEMA}.restaurants ON orders.restaurant_id = restaurants.id
+WHERE
+    orders.restaurant_id IN (
+        SELECT
+            restaurant_id
+        FROM (
+            SELECT DISTINCT
+                restaurant_id,
+                pickup_address_id
+            FROM
+                {config.CLEAN_SCHEMA}.orders
+        ) AS restaurant_locations
+        GROUP BY
+            restaurant_id
+        HAVING
+            COUNT(pickup_address_id) > 1
+)
+GROUP BY
+    orders.restaurant_id,
+    restaurants.name,
+    restaurants.address_id,
+    addresses.id,
+    addresses.street
+ORDER BY
+    orders.restaurant_id,
+    restaurants.name,
+    restaurants.address_id,
+    addresses.id,
+    addresses.street;
+
+
+3) Remove addresses without any association
+
+After steps 1) and 2), some addresses are no longer associated with
+any order or restaurant.
+
+The following SQL statement lists them before this migration is run:
+
+SELECT
+    id,
+    street,
+    zip_code,
+    city
+FROM
+    {config.CLEAN_SCHEMA}.addresses
+WHERE
+    id NOT IN (
+        SELECT DISTINCT
+            pickup_address_id AS id
+        FROM
+            {config.CLEAN_SCHEMA}.orders
+        UNION
+        SELECT DISTINCT
+            delivery_address_id AS id
+        FROM
+            {config.CLEAN_SCHEMA}.orders
+        UNION
+        SELECT DISTINCT
+            address_id AS id
+        FROM
+            {config.CLEAN_SCHEMA}.restaurants
+);
+
+
+4) Ensure every `Restaurant` has exactly one `Address`.
+
+Replace the current `ForeignKeyConstraint` from `Order` to `Restaurant`
+with one that also includes the `Order.pickup_address_id`.
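+
+
+As a sanity check: after this migration has run, the following SQL
+statement should return no rows, as the new `ForeignKeyConstraint`
+guarantees that every order is picked up at its restaurant's address:
+
+SELECT
+    orders.id
+FROM
+    {config.CLEAN_SCHEMA}.orders
+LEFT OUTER JOIN
+    {config.CLEAN_SCHEMA}.restaurants
+    ON orders.restaurant_id = restaurants.id
+WHERE
+    orders.pickup_address_id <> restaurants.address_id;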
+""" + +import os + +from alembic import op + +from urban_meal_delivery import configuration + + +revision = 'e86290e7305e' +down_revision = '26711cd3f9b9' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision e86290e7305e.""" + # 1) Remove orders + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.orders + WHERE pickup_address_id IN ( + SELECT + orders.pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + LEFT OUTER JOIN + {config.CLEAN_SCHEMA}.restaurants + ON orders.restaurant_id = restaurants.id + WHERE + orders.pickup_address_id <> restaurants.address_id + GROUP BY + orders.pickup_address_id + HAVING + COUNT(*) = 1 + ); + """, + ) + + # 2) De-duplicate addresses + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 353 + WHERE + pickup_address_id = 548916; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 4850 + WHERE + pickup_address_id = 6415; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 16227 + WHERE + pickup_address_id = 44627; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 44458 + WHERE + pickup_address_id = 534543; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 289997 + WHERE + pickup_address_id = 309525; + """, + ) + + # 3) Remove addresses + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.addresses_pixels + WHERE + address_id NOT IN ( + SELECT DISTINCT + pickup_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + delivery_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + address_id AS id + FROM + {config.CLEAN_SCHEMA}.restaurants + ); + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 302883 + WHERE + primary_id = 43526; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 47597 + WHERE + primary_id = 43728; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 159631 + WHERE + primary_id = 43942; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 275651 + WHERE + primary_id = 44759; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 156685 + WHERE + primary_id = 50599; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 480206 + WHERE + primary_id = 51774; + """, + ) + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + id NOT IN ( + SELECT DISTINCT + pickup_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + delivery_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + address_id AS id + FROM + {config.CLEAN_SCHEMA}.restaurants + ); + """, + ) + + # 4) Ensure every `Restaurant` has only one `Order.pickup_address`. 
+ op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 53733 + WHERE + pickup_address_id = 54892; + """, + ) + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + id = 54892; + """, + ) + op.create_unique_constraint( + 'uq_restaurants_on_id_address_id', + 'restaurants', + ['id', 'address_id'], + schema=config.CLEAN_SCHEMA, + ) + op.create_foreign_key( + op.f('fk_orders_to_restaurants_via_restaurant_id_pickup_address_id'), + 'orders', + 'restaurants', + ['restaurant_id', 'pickup_address_id'], + ['id', 'address_id'], + source_schema=config.CLEAN_SCHEMA, + referent_schema=config.CLEAN_SCHEMA, + onupdate='RESTRICT', + ondelete='RESTRICT', + ) + op.drop_constraint( + 'fk_orders_to_restaurants_via_restaurant_id', + 'orders', + type_='foreignkey', + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision 26711cd3f9b9.""" + op.create_foreign_key( + op.f('fk_orders_to_restaurants_via_restaurant_id'), + 'orders', + 'restaurants', + ['restaurant_id'], + ['id'], + source_schema=config.CLEAN_SCHEMA, + referent_schema=config.CLEAN_SCHEMA, + onupdate='RESTRICT', + ondelete='RESTRICT', + ) + op.drop_constraint( + 'fk_orders_to_restaurants_via_restaurant_id_pickup_address_id', + 'orders', + type_='foreignkey', + schema=config.CLEAN_SCHEMA, + ) + op.drop_constraint( + 'uq_restaurants_on_id_address_id', + 'restaurants', + type_='unique', + schema=config.CLEAN_SCHEMA, + ) diff --git a/migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py b/migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py new file mode 100644 index 0000000..810fbb5 --- /dev/null +++ b/migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py @@ -0,0 +1,41 @@ +"""Store actuals with forecast. + +Revision: #c2af85bada01 at 2021-01-29 11:13:15 +Revises: #e86290e7305e +""" + +import os + +import sqlalchemy as sa +from alembic import op + +from urban_meal_delivery import configuration + + +revision = 'c2af85bada01' +down_revision = 'e86290e7305e' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision c2af85bada01.""" + op.add_column( + 'forecasts', + sa.Column('actual', sa.SmallInteger(), nullable=False), + schema=config.CLEAN_SCHEMA, + ) + op.create_check_constraint( + op.f('ck_forecasts_on_actuals_must_be_non_negative'), + 'forecasts', + 'actual >= 0', + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision e86290e7305e.""" + op.drop_column('forecasts', 'actual', schema=config.CLEAN_SCHEMA) diff --git a/migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py b/migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py new file mode 100644 index 0000000..fb5fc93 --- /dev/null +++ b/migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py @@ -0,0 +1,48 @@ +"""Rename `Forecast.training_horizon` into `.train_horizon`. 
+ +Revision: #8bfb928a31f8 at 2021-02-02 12:55:09 +Revises: #c2af85bada01 +""" + +import os + +from alembic import op + +from urban_meal_delivery import configuration + + +revision = '8bfb928a31f8' +down_revision = 'c2af85bada01' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision 8bfb928a31f8.""" + op.execute( + f""" + ALTER TABLE + {config.CLEAN_SCHEMA}.forecasts + RENAME COLUMN + training_horizon + TO + train_horizon; + """, + ) # noqa:WPS355 + + +def downgrade(): + """Downgrade to revision c2af85bada01.""" + op.execute( + f""" + ALTER TABLE + {config.CLEAN_SCHEMA}.forecasts + RENAME COLUMN + train_horizon + TO + training_horizon; + """, + ) # noqa:WPS355 diff --git a/noxfile.py b/noxfile.py index 6a9620d..eac123e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,7 +17,7 @@ as unified tasks to assure the quality of the source code: that are then interpreted as the paths the formatters and linters work on recursively -- "lint" (flake8, mypy, pylint): same as "format" +- "lint" (flake8, mypy): same as "format" - "test" (pytest, xdoctest): @@ -25,26 +25,6 @@ as unified tasks to assure the quality of the source code: + accepts extra arguments, e.g., `poetry run nox -s test -- --no-cov`, that are passed on to `pytest` and `xdoctest` with no changes => may be paths or options - - -GitHub Actions implements the following CI workflow: - -- "format", "lint", and "test" as above - -- "safety": check if dependencies contain known security vulnerabilites - -- "docs": build the documentation with sphinx - - -The pre-commit framework invokes the following tasks: - -- before any commit: - - + "format" and "lint" as above - + "fix-branch-references": replace branch references with the current one - -- before merges: run the entire "test-suite" independent of the file changes - """ import contextlib @@ -92,7 +72,7 @@ nox.options.envdir = '.cache/nox' # Avoid accidental successes if the environment is not set up properly. nox.options.error_on_external_run = True -# Run only CI related checks by default. +# Run only local checks by default. nox.options.sessions = ( 'format', 'lint', @@ -141,7 +121,7 @@ def format_(session): @nox.session(python=PYTHON) def lint(session): - """Lint source files with flake8, mypy, and pylint. + """Lint source files with flake8 and mypy. If no extra arguments are provided, all source files are linted. Otherwise, they are interpreted as paths the linters work on recursively. @@ -158,7 +138,6 @@ def lint(session): 'flake8-expression-complexity', 'flake8-pytest-style', 'mypy', - 'pylint', 'wemake-python-styleguide', ) @@ -182,18 +161,6 @@ def lint(session): else: session.log('No paths to be checked with mypy') - # Ignore errors where pylint cannot import a third-party package due its - # being run in an isolated environment. For the same reason, pylint is - # also not able to determine the correct order of imports. - # One way to fix this is to install all develop dependencies in this nox - # session, which we do not do. The whole point of static linting tools is - # to not rely on any package be importable at runtime. Instead, these - # imports are validated implicitly when the test suite is run. 
- session.run('pylint', '--version') - session.run( - 'pylint', '--disable=import-error', '--disable=wrong-import-order', *locations, - ) - @nox.session(python=PYTHON) def test(session): @@ -222,33 +189,71 @@ def test(session): session.run('poetry', 'install', '--no-dev', external=True) _install_packages( session, + 'Faker', + 'factory-boy', + 'geopy', 'packaging', 'pytest', 'pytest-cov', 'pytest-env', + 'pytest-mock', + 'pytest-randomly', 'xdoctest[optional]', ) + session.run('pytest', '--version') + + # When the CI server runs the slow tests, we only execute the R related + # test cases that require the slow installation of R and some packages. + if session.env.get('_slow_ci_tests'): + session.run( + 'pytest', '--randomly-seed=4287', '-m', 'r and not db', PYTEST_LOCATION, + ) + + # In the "ci-tests-slow" session, we do not run any test tool + # other than pytest. So, xdoctest, for example, is only run + # locally or in the "ci-tests-fast" session. + return + + # When the CI server executes pytest, no database is available. + # Therefore, the CI server does not measure coverage. + elif session.env.get('_fast_ci_tests'): + pytest_args = ( + '--randomly-seed=4287', + '-m', + 'not (db or r)', + PYTEST_LOCATION, + ) + + # When pytest is executed in the local develop environment, + # both R and a database are available. + # Therefore, we require 100% coverage. + else: + pytest_args = ( + '--cov', + '--no-cov-on-fail', + '--cov-branch', + '--cov-fail-under=100', + '--cov-report=term-missing:skip-covered', + '--randomly-seed=4287', + PYTEST_LOCATION, + ) + # Interpret extra arguments as options for pytest. - # They are "dropped" by the hack in the pre_merge() function - # if this function is run within the "pre-merge" session. + # They are "dropped" by the hack in the test_suite() function + # if this function is run within the "test-suite" session. posargs = () if session.env.get('_drop_posargs') else session.posargs - args = posargs or ( - '--cov', - '--no-cov-on-fail', - '--cov-branch', - '--cov-fail-under=100', - '--cov-report=term-missing:skip-covered', - '-k', - 'not e2e', - PYTEST_LOCATION, - ) - session.run('pytest', '--version') - session.run('pytest', *args) + session.run('pytest', *(posargs or pytest_args)) # For xdoctest, the default arguments are different from pytest. args = posargs or [PACKAGE_IMPORT_NAME] + + # The "TESTING" environment variable forces the global `engine`, `connection`, + # and `session` objects to be set to `None` and avoid any database connection. + # For pytest above this is not necessary as pytest sets this variable itself. + session.env['TESTING'] = 'true' + session.run('xdoctest', '--version') session.run('xdoctest', '--quiet', *args) # --quiet => less verbose output @@ -292,6 +297,10 @@ def docs(session): session.run('poetry', 'install', '--no-dev', external=True) _install_packages(session, 'sphinx', 'sphinx-autodoc-typehints') + # The "TESTING" environment variable forces the global `engine`, `connection`, + # and `session` objects to be set to `None` and avoid any database connection. + session.env['TESTING'] = 'true' + session.run('sphinx-build', DOCS_SRC, DOCS_BUILD) # Verify all external links return 200 OK. session.run('sphinx-build', '-b', 'linkcheck', DOCS_SRC, DOCS_BUILD) @@ -299,11 +308,63 @@ def docs(session): print(f'Docs are available at {os.getcwd()}/{DOCS_BUILD}index.html') # noqa:WPS421 +@nox.session(name='ci-tests-fast', python=PYTHON) +def fast_ci_tests(session): + """Fast tests run by the GitHub Actions CI server. 
+
+    These cover all test cases NOT involving R via `rpy2`.
+
+    Also, coverage is not measured as full coverage can only be
+    achieved by running the tests in the local develop environment
+    that has access to a database.
+    """
+    # Re-using an old environment is not so easy here as the "test" session
+    # runs `poetry install --no-dev`, which removes previously installed packages.
+    if session.virtualenv.reuse_existing:
+        raise RuntimeError(
+            'The "ci-tests-fast" session must be run without the "-r" option',
+        )
+
+    # Little hack to pass arguments to the "test" session.
+    session.env['_fast_ci_tests'] = 'true'
+
+    # Cannot use session.notify() to trigger the "test" session
+    # as that would create a new Session object without the flag
+    # in the env(ironment).
+    test(session)
+
+
+@nox.session(name='ci-tests-slow', python=PYTHON)
+def slow_ci_tests(session):
+    """Slow tests run by the GitHub Actions CI server.
+
+    These cover all test cases involving R via `rpy2`.
+    They are slow as the CI server needs to install R and some packages
+    first, which takes a couple of minutes.
+
+    Also, coverage is not measured as full coverage can only be
+    achieved by running the tests in the local develop environment
+    that has access to a database.
+    """
+    # Re-using an old environment is not so easy here as the "test" session
+    # runs `poetry install --no-dev`, which removes previously installed packages.
+    if session.virtualenv.reuse_existing:
+        raise RuntimeError(
+            'The "ci-tests-slow" session must be run without the "-r" option',
+        )
+
+    # Little hack to pass arguments to the "test" session.
+    session.env['_slow_ci_tests'] = 'true'
+
+    # Cannot use session.notify() to trigger the "test" session
+    # as that would create a new Session object without the flag
+    # in the env(ironment).
+    test(session)
+
+
 @nox.session(name='test-suite', python=PYTHON)
 def test_suite(session):
-    """Run the entire test suite.
-
-    Intended to be run as a pre-commit hook.
+    """Run the entire test suite as a pre-commit hook.
 
     Ignores the paths passed in by the pre-commit framework
     and runs the entire test suite.
@@ -322,13 +383,12 @@
 
     # Cannot use session.notify() to trigger the "test" session
     # as that would create a new Session object without the flag
-    # in the env(ironment). Instead, run the test() function within
-    # the "pre-merge" session.
+    # in the env(ironment).
    test(session)
 
 
 @nox.session(name='fix-branch-references', python=PYTHON, venv_backend='none')
-def fix_branch_references(session):  # noqa:WPS210
+def fix_branch_references(session):  # noqa:WPS210,WPS231
     """Replace branch references with the current branch.
 
     Intended to be run as a pre-commit hook.
@@ -336,9 +396,15 @@
     Many files in the project (e.g., README.md) contain links to resources
     on github.com or nbviewer.jupyter.org that contain branch labels.
 
-    This task rewrites these links such that they contain the branch reference
-    of the current branch. If the branch is only a temporary one that is to be
-    merged into the 'main' branch, all references are adjusted to 'main' as well.
+    This task rewrites these links such that they contain branch references
+    that make sense given the context:
+
+    - If the branch is only a temporary one that is to be merged into
+      the 'main' branch, all references are adjusted to 'main' as well.
+ + - If the branch is not named after a default branch in the GitFlow + model, it is interpreted as a feature branch and the references + are adjusted into 'develop'. This task may be called with one positional argument that is interpreted as the branch to which all references are changed into. @@ -362,6 +428,10 @@ def fix_branch_references(session): # noqa:WPS210 # into 'main', we adjust all branch references to 'main' as well. if branch.startswith('release') or branch.startswith('research'): branch = 'main' + # If the current branch appears to be a feature branch, we adjust + # all branch references to 'develop'. + elif branch != 'main': + branch = 'develop' # If a "--branch=BRANCH_NAME" argument is passed in # as the only positional argument, we use BRANCH_NAME. # Note: The --branch is required as session.posargs contains @@ -445,7 +515,7 @@ def init_project(session): @nox.session(name='clean-pwd', python=PYTHON, venv_backend='none') -def clean_pwd(session): # noqa:WPS210,WPS231 +def clean_pwd(session): # noqa:WPS231 """Remove (almost) all glob patterns listed in .gitignore. The difference compared to `git clean -X` is that this task @@ -519,6 +589,7 @@ def _install_packages(session: Session, *packages_or_pip_args: str, **kwargs) -> '--dev', '--format=requirements.txt', f'--output={requirements_txt.name}', + '--without-hashes', external=True, ) session.install( diff --git a/poetry.lock b/poetry.lock index 9fa86ac..5b0958b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,185 +1,170 @@ [[package]] -category = "dev" -description = "A configurable sidebar-enabled Sphinx theme" name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" optional = false python-versions = "*" -version = "0.7.12" [[package]] -category = "main" -description = "A database migration tool for SQLAlchemy." name = "alembic" +version = "1.5.4" +description = "A database migration tool for SQLAlchemy." +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.3" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.1.0" python-dateutil = "*" python-editor = ">=0.3" +SQLAlchemy = ">=1.3.0" [[package]] -category = "dev" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = "*" -version = "1.4.4" [[package]] -category = "main" -description = "Disable App Nap on OS X 10.9" -marker = "sys_platform == \"darwin\" or platform_system == \"Darwin\"" name = "appnope" +version = "0.1.2" +description = "Disable App Nap on macOS >= 10.9" +category = "main" optional = true python-versions = "*" -version = "0.1.0" [[package]] -category = "dev" -description = "Bash tab completion for argparse" name = "argcomplete" +version = "1.12.2" +description = "Bash tab completion for argparse" +category = "dev" optional = false python-versions = "*" -version = "1.12.1" [package.extras] test = ["coverage", "flake8", "pexpect", "wheel"] [[package]] -category = "main" -description = "The secure Argon2 password hashing algorithm." name = "argon2-cffi" +version = "20.1.0" +description = "The secure Argon2 password hashing algorithm." 
+category = "main" optional = true python-versions = "*" -version = "20.1.0" [package.dependencies] cffi = ">=1.0.0" six = "*" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] docs = ["sphinx"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pytest"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] -category = "dev" -description = "Read/rewrite/write Python ASTs" name = "astor" +version = "0.8.1" +description = "Read/rewrite/write Python ASTs" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "0.8.1" [[package]] -category = "dev" -description = "Pretty print the output of python stdlib `ast.parse`." name = "astpretty" +version = "2.1.0" +description = "Pretty print the output of python stdlib `ast.parse`." +category = "dev" optional = false python-versions = ">=3.6.1" -version = "2.0.0" [package.extras] typed = ["typed-ast"] [[package]] -category = "dev" -description = "An abstract syntax tree for Python with inference support." -name = "astroid" -optional = false -python-versions = ">=3.5" -version = "2.4.2" - -[package.dependencies] -lazy-object-proxy = ">=1.4.0,<1.5.0" -six = ">=1.12,<2.0" -wrapt = ">=1.11,<2.0" - -[[package]] -category = "main" -description = "Async generators and context managers for Python 3.5+" name = "async-generator" +version = "1.10" +description = "Async generators and context managers for Python 3.5+" +category = "main" optional = true python-versions = ">=3.5" -version = "1.10" [[package]] -category = "dev" -description = "Atomic file writes." -marker = "sys_platform == \"win32\"" name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "main" -description = "Classes Without Boilerplate" name = "attrs" +version = "20.3.0" +description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.2.0" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -category = "dev" -description = "Removes unused imports and unused variables" name = "autoflake" +version = "1.4" +description = "Removes unused imports and unused variables" +category = "dev" optional = false python-versions = "*" -version = "1.4" [package.dependencies] pyflakes = ">=1.1.0" [[package]] -category = "dev" -description = "Internationalization utilities" name = "babel" +version = "2.9.0" +description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.8.0" [package.dependencies] pytz = ">=2015.7" [[package]] -category = "main" -description = "Specifications for callback functions passed in to an API" name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "main" optional = true python-versions = "*" -version = "0.2.0" [[package]] -category = "dev" -description = "Security oriented static analyser for python code." name = "bandit" +version = "1.7.0" +description = "Security oriented static analyser for python code." +category = "dev" optional = false -python-versions = "*" -version = "1.6.2" +python-versions = ">=3.5" [package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" -PyYAML = ">=3.13" -colorama = ">=0.3.9" +PyYAML = ">=5.3.1" six = ">=1.10.0" stevedore = ">=1.20.0" [[package]] -category = "dev" -description = "The uncompromising code formatter." name = "black" +version = "19.10b0" +description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.6" -version = "19.10b0" [package.dependencies] appdirs = "*" @@ -194,12 +179,12 @@ typed-ast = ">=1.4.0" d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -category = "main" -description = "An easy safelist-based HTML-sanitizing tool." name = "bleach" +version = "3.3.0" +description = "An easy safelist-based HTML-sanitizing tool." 
+category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "3.2.1" [package.dependencies] packaging = "*" @@ -207,150 +192,198 @@ six = ">=1.9.0" webencodings = "*" [[package]] +name = "branca" +version = "0.4.2" +description = "Generate complex HTML+JS pages with Python" category = "main" -description = "Python package for providing Mozilla's CA Bundle." -name = "certifi" optional = false -python-versions = "*" -version = "2020.6.20" +python-versions = ">=3.5" + +[package.dependencies] +jinja2 = "*" [[package]] +name = "certifi" +version = "2020.12.5" +description = "Python package for providing Mozilla's CA Bundle." category = "main" -description = "Foreign Function Interface for Python calling C code." -name = "cffi" -optional = true +optional = false +python-versions = "*" + +[[package]] +name = "cffi" +version = "1.14.4" +description = "Foreign Function Interface for Python calling C code." +category = "main" +optional = false python-versions = "*" -version = "1.14.3" [package.dependencies] pycparser = "*" [[package]] -category = "dev" -description = "Validate configuration and produce human readable error messages." name = "cfgv" +version = "3.2.0" +description = "Validate configuration and produce human readable error messages." +category = "dev" optional = false python-versions = ">=3.6.1" -version = "3.2.0" [[package]] -category = "main" -description = "Universal encoding detector for Python 2 and 3" name = "chardet" +version = "4.0.0" +description = "Universal encoding detector for Python 2 and 3" +category = "main" optional = false -python-versions = "*" -version = "3.0.4" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -category = "main" -description = "Composable command line interface toolkit" name = "click" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" version = "7.1.2" - -[[package]] +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorama" +version = "0.4.4" +description = "Cross-platform colored terminal text." category = "main" -description = "Cross-platform colored terminal text." -marker = "sys_platform == \"win32\" or platform_system == \"Windows\" or platform_system == \"Windows\"" -name = "colorama" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.3" [[package]] -category = "dev" -description = "Log formatting with colors!" name = "colorlog" +version = "4.7.2" +description = "Log formatting with colors!" +category = "dev" optional = false python-versions = "*" -version = "4.2.1" [package.dependencies] -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} [[package]] -category = "dev" -description = "Code coverage measurement for Python" name = "coverage" +version = "5.4" +description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.3" [package.extras] toml = ["toml"] [[package]] -category = "dev" -description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
-name = "darglint" +name = "cycler" +version = "0.10.0" +description = "Composable style cycles" +category = "main" optional = false -python-versions = ">=3.5,<4.0" -version = "1.5.4" +python-versions = "*" + +[package.dependencies] +six = "*" + +[[package]] +name = "darglint" +version = "1.6.0" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +category = "dev" +optional = false +python-versions = ">=3.6,<4.0" [[package]] -category = "main" -description = "Decorators for Humans" name = "decorator" +version = "4.4.2" +description = "Decorators for Humans" +category = "main" optional = true python-versions = ">=2.6, !=3.0.*, !=3.1.*" -version = "4.4.2" [[package]] -category = "main" -description = "XML bomb protection for Python stdlib modules" name = "defusedxml" +version = "0.6.0" +description = "XML bomb protection for Python stdlib modules" +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.6.0" [[package]] -category = "dev" -description = "Distribution utilities" name = "distlib" +version = "0.3.1" +description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" -version = "0.3.1" [[package]] -category = "dev" -description = "Docutils -- Python Documentation Utilities" name = "docutils" +version = "0.16" +description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.16" [[package]] -category = "main" -description = "Discover and load entry points from installed packages." name = "entrypoints" +version = "0.3" +description = "Discover and load entry points from installed packages." +category = "main" optional = true python-versions = ">=2.7" -version = "0.3" [[package]] -category = "dev" -description = "Removes commented-out code." name = "eradicate" -optional = false -python-versions = "*" version = "1.0" - -[[package]] +description = "Removes commented-out code." category = "dev" -description = "A platform independent file lock." -name = "filelock" optional = false python-versions = "*" -version = "3.0.12" [[package]] +name = "factory-boy" +version = "3.2.0" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." category = "dev" -description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["coverage", "django", "flake8", "isort", "pillow", "sqlalchemy", "mongoengine", "wheel (>=0.32.0)", "tox", "zest.releaser"] +doc = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] + +[[package]] +name = "faker" +version = "5.8.0" +description = "Faker is a Python package that generates fake data for you." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.4" +text-unidecode = "1.3" + +[[package]] +name = "filelock" +version = "3.0.12" +description = "A platform independent file lock." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] name = "flake8" +version = "3.8.4" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "3.8.3" [package.dependencies] mccabe = ">=0.6.0,<0.7.0" @@ -358,23 +391,23 @@ pycodestyle = ">=2.6.0a1,<2.7.0" pyflakes = ">=2.2.0,<2.3.0" [[package]] -category = "dev" -description = "Flake8 Type Annotation Checks" name = "flake8-annotations" +version = "2.5.0" +description = "Flake8 Type Annotation Checks" +category = "dev" optional = false python-versions = ">=3.6.1,<4.0.0" -version = "2.4.1" [package.dependencies] flake8 = ">=3.7,<3.9" [[package]] -category = "dev" -description = "Automated security testing with bandit and flake8." name = "flake8-bandit" +version = "2.1.2" +description = "Automated security testing with bandit and flake8." +category = "dev" optional = false python-versions = "*" -version = "2.1.2" [package.dependencies] bandit = "*" @@ -383,93 +416,93 @@ flake8-polyfill = "*" pycodestyle = "*" [[package]] -category = "dev" -description = "flake8 plugin to call black as a code style validator" name = "flake8-black" +version = "0.2.1" +description = "flake8 plugin to call black as a code style validator" +category = "dev" optional = false python-versions = "*" -version = "0.2.1" [package.dependencies] black = "*" flake8 = ">=3.0.0" [[package]] -category = "dev" -description = "Flake8 plugin to forbid backslashes for line breaks" name = "flake8-broken-line" +version = "0.2.1" +description = "Flake8 plugin to forbid backslashes for line breaks" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.2.1" [package.dependencies] flake8 = ">=3.5,<4.0" [[package]] -category = "dev" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." name = "flake8-bugbear" +version = "19.8.0" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" optional = false python-versions = ">=3.5" -version = "19.8.0" [package.dependencies] attrs = "*" flake8 = ">=3.0.0" [[package]] -category = "dev" -description = "Flake8 lint for trailing commas." name = "flake8-commas" +version = "2.0.0" +description = "Flake8 lint for trailing commas." +category = "dev" optional = false python-versions = "*" -version = "2.0.0" [package.dependencies] flake8 = ">=2,<4.0.0" [[package]] -category = "dev" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." name = "flake8-comprehensions" +version = "3.3.1" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+category = "dev" optional = false -python-versions = ">=3.5" -version = "3.2.3" +python-versions = ">=3.6" [package.dependencies] flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" [[package]] -category = "dev" -description = "ipdb/pdb statement checker plugin for flake8" name = "flake8-debugger" +version = "3.2.1" +description = "ipdb/pdb statement checker plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "3.2.1" [package.dependencies] flake8 = ">=1.5" pycodestyle = "*" [[package]] -category = "dev" -description = "Extension for flake8 which uses pydocstyle to check docstrings" name = "flake8-docstrings" +version = "1.5.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" optional = false python-versions = "*" -version = "1.5.0" [package.dependencies] flake8 = ">=3" pydocstyle = ">=2.1" [[package]] -category = "dev" -description = "Flake8 plugin to find commented out code" name = "flake8-eradicate" +version = "0.3.0" +description = "Flake8 plugin to find commented out code" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.3.0" [package.dependencies] attrs = "*" @@ -477,195 +510,235 @@ eradicate = ">=1.0,<2.0" flake8 = ">=3.5,<4.0" [[package]] -category = "dev" -description = "A flake8 extension that checks expressions complexity" name = "flake8-expression-complexity" +version = "0.0.8" +description = "A flake8 extension that checks expressions complexity" +category = "dev" optional = false python-versions = ">=3.6" -version = "0.0.8" [package.dependencies] astpretty = "*" flake8 = "*" -setuptools = "*" [[package]] -category = "dev" -description = "flake8 plugin that integrates isort ." name = "flake8-isort" +version = "3.0.1" +description = "flake8 plugin that integrates isort ." +category = "dev" optional = false python-versions = "*" -version = "3.0.1" [package.dependencies] flake8 = ">=3.2.1,<4" +isort = {version = ">=4.3.5,<5", extras = ["pyproject"]} testfixtures = ">=6.8.0,<7" -[package.dependencies.isort] -extras = ["pyproject"] -version = ">=4.3.5,<5" - [package.extras] test = ["pytest (>=4.0.2,<6)"] [[package]] -category = "dev" -description = "The package provides base classes and utils for flake8 plugin writing" name = "flake8-plugin-utils" +version = "1.3.1" +description = "The package provides base classes and utils for flake8 plugin writing" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "1.3.1" [[package]] -category = "dev" -description = "Polyfill package for Flake8 plugins" name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" optional = false python-versions = "*" -version = "1.0.2" [package.dependencies] flake8 = "*" [[package]] -category = "dev" -description = "A flake8 plugin checking common style issues or inconsistencies with pytest-based tests." name = "flake8-pytest-style" +version = "1.3.0" +description = "A flake8 plugin checking common style issues or inconsistencies with pytest-based tests." +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "1.3.0" [package.dependencies] flake8-plugin-utils = ">=1.3.1,<2.0.0" [[package]] -category = "dev" -description = "Flake8 lint for quotes." name = "flake8-quotes" +version = "2.1.2" +description = "Flake8 lint for quotes." 
+category = "dev" optional = false python-versions = "*" -version = "2.1.2" [package.dependencies] flake8 = "*" [[package]] -category = "dev" -description = "Python docstring reStructuredText (RST) validator" name = "flake8-rst-docstrings" +version = "0.0.12" +description = "Python docstring reStructuredText (RST) validator" +category = "dev" optional = false python-versions = "*" -version = "0.0.12" [package.dependencies] flake8 = ">=3.0.0" restructuredtext_lint = "*" [[package]] -category = "dev" -description = "string format checker, plugin for flake8" name = "flake8-string-format" +version = "0.2.3" +description = "string format checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.2.3" [package.dependencies] flake8 = "*" [[package]] +name = "folium" +version = "0.12.1" +description = "Make beautiful maps with Leaflet.js & Python" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +branca = ">=0.3.0" +jinja2 = ">=2.9" +numpy = "*" +requests = "*" + +[package.extras] +testing = ["pytest"] + +[[package]] +name = "geographiclib" +version = "1.50" +description = "The geodesic routines from GeographicLib" category = "dev" -description = "Git Object Database" +optional = false +python-versions = "*" + +[[package]] +name = "geopy" +version = "2.1.0" +description = "Python Geocoding Toolbox" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +geographiclib = ">=1.49,<2" + +[package.extras] +aiohttp = ["aiohttp"] +dev = ["async-generator", "flake8 (>=3.8.0,<3.9.0)", "isort (>=5.6.0,<5.7.0)", "coverage", "pytest-aiohttp", "pytest (>=3.10)", "readme-renderer", "sphinx", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-docs = ["readme-renderer", "sphinx", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-lint = ["async-generator", "flake8 (>=3.8.0,<3.9.0)", "isort (>=5.6.0,<5.7.0)"] +dev-test = ["async-generator", "coverage", "pytest-aiohttp", "pytest (>=3.10)"] +requests = ["urllib3 (>=1.24.2)", "requests (>=2.16.2)"] +timezone = ["pytz"] + +[[package]] name = "gitdb" +version = "4.0.5" +description = "Git Object Database" +category = "dev" optional = false python-versions = ">=3.4" -version = "4.0.5" [package.dependencies] smmap = ">=3.0.1,<4" [[package]] -category = "dev" -description = "Python Git Library" name = "gitpython" +version = "3.1.12" +description = "Python Git Library" +category = "dev" optional = false python-versions = ">=3.4" -version = "3.1.8" [package.dependencies] gitdb = ">=4.0.1,<5" [[package]] -category = "dev" -description = "File identification library for Python" name = "identify" +version = "1.5.13" +description = "File identification library for Python" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "1.5.5" [package.extras] license = ["editdistance"] [[package]] -category = "main" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "2.10" - -[[package]] -category = "dev" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -name = "imagesize" +description = "Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.2.0" [[package]] +name = "imagesize" +version = "1.2.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" category 
= "dev" -description = "iniconfig: brain-dead simple config-ini parsing" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = "*" -version = "1.0.1" [[package]] -category = "main" -description = "IPython Kernel for Jupyter" name = "ipykernel" +version = "5.4.3" +description = "IPython Kernel for Jupyter" +category = "main" optional = true python-versions = ">=3.5" -version = "5.3.4" [package.dependencies] -appnope = "*" +appnope = {version = "*", markers = "platform_system == \"Darwin\""} ipython = ">=5.0.0" jupyter-client = "*" tornado = ">=4.2" traitlets = ">=4.1.0" [package.extras] -test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose"] +test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose", "jedi (<=0.17.2)"] [[package]] -category = "main" -description = "IPython: Productive Interactive Computing" name = "ipython" +version = "7.20.0" +description = "IPython: Productive Interactive Computing" +category = "main" optional = true python-versions = ">=3.7" -version = "7.18.1" [package.dependencies] -appnope = "*" +appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" -jedi = ">=0.10" -pexpect = ">4.3" +jedi = ">=0.16" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" -setuptools = ">=18.5" traitlets = ">=4.2" [package.extras] @@ -680,20 +753,23 @@ qtconsole = ["qtconsole"] test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.14)"] [[package]] -category = "main" -description = "Vestigial utilities from IPython" name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "main" optional = true python-versions = "*" -version = "0.2.0" [[package]] -category = "dev" -description = "A Python utility / library to sort Python imports." name = "isort" +version = "4.3.21" +description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "4.3.21" + +[package.dependencies] +toml = {version = "*", optional = true, markers = "extra == \"pyproject\""} [package.extras] pipfile = ["pipreqs", "requirementslib"] @@ -702,27 +778,27 @@ requirements = ["pipreqs", "pip-api"] xdg_home = ["appdirs (>=1.4.0)"] [[package]] -category = "main" -description = "An autocompletion tool for Python that can be used for text editors." name = "jedi" +version = "0.18.0" +description = "An autocompletion tool for Python that can be used for text editors." +category = "main" optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.17.2" +python-versions = ">=3.6" [package.dependencies] -parso = ">=0.7.0,<0.8.0" +parso = ">=0.8.0,<0.9.0" [package.extras] -qa = ["flake8 (3.7.9)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] [[package]] -category = "main" -description = "A very fast and expressive template engine." 
name = "jinja2" +version = "2.11.3" +description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.11.2" [package.dependencies] MarkupSafe = ">=0.23" @@ -731,28 +807,27 @@ MarkupSafe = ">=0.23" i18n = ["Babel (>=0.8)"] [[package]] -category = "main" -description = "A Python implementation of the JSON5 data format." name = "json5" +version = "0.9.5" +description = "A Python implementation of the JSON5 data format." +category = "main" optional = true python-versions = "*" -version = "0.9.5" [package.extras] dev = ["hypothesis"] [[package]] -category = "main" -description = "An implementation of JSON Schema validation for Python" name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" optional = true python-versions = "*" -version = "3.2.0" [package.dependencies] attrs = ">=17.4.0" pyrsistent = ">=0.14.0" -setuptools = "*" six = ">=1.11.0" [package.extras] @@ -760,12 +835,12 @@ format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] [[package]] -category = "main" -description = "Jupyter protocol implementation and client libraries" name = "jupyter-client" +version = "6.1.11" +description = "Jupyter protocol implementation and client libraries" +category = "main" optional = true python-versions = ">=3.5" -version = "6.1.7" [package.dependencies] jupyter-core = ">=4.6.0" @@ -775,27 +850,28 @@ tornado = ">=4.1" traitlets = "*" [package.extras] -test = ["ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] +doc = ["sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["jedi (<=0.17.2)", "ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] [[package]] -category = "main" -description = "Jupyter core package. A base package on which Jupyter projects rely." name = "jupyter-core" +version = "4.7.1" +description = "Jupyter core package. A base package on which Jupyter projects rely." +category = "main" optional = true -python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,!=3.4,>=2.7" -version = "4.6.3" +python-versions = ">=3.6" [package.dependencies] -pywin32 = ">=1.0" +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} traitlets = "*" [[package]] -category = "main" -description = "The JupyterLab notebook server extension." name = "jupyterlab" +version = "2.2.9" +description = "The JupyterLab notebook server extension." 
+category = "main" optional = true python-versions = ">=3.5" -version = "2.2.8" [package.dependencies] jinja2 = ">=2.10" @@ -808,23 +884,23 @@ docs = ["jsx-lexer", "recommonmark", "sphinx", "sphinx-rtd-theme", "sphinx-copyb test = ["pytest", "pytest-check-links", "requests", "wheel", "virtualenv"] [[package]] -category = "main" -description = "Pygments theme using JupyterLab CSS variables" name = "jupyterlab-pygments" +version = "0.1.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "main" optional = true python-versions = "*" -version = "0.1.2" [package.dependencies] pygments = ">=2.4.1,<3" [[package]] -category = "main" -description = "JupyterLab Server" name = "jupyterlab-server" +version = "1.2.0" +description = "JupyterLab Server" +category = "main" optional = true python-versions = ">=3.5" -version = "1.2.0" [package.dependencies] jinja2 = ">=2.10" @@ -837,20 +913,20 @@ requests = "*" test = ["pytest", "requests"] [[package]] -category = "dev" -description = "A fast and thorough lazy object proxy." -name = "lazy-object-proxy" +name = "kiwisolver" +version = "1.3.1" +description = "A fast implementation of the Cassowary constraint solver" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.3" +python-versions = ">=3.6" [[package]] -category = "main" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." name = "mako" +version = "1.1.4" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.1.3" [package.dependencies] MarkupSafe = ">=0.9.2" @@ -860,36 +936,52 @@ babel = ["babel"] lingua = ["lingua"] [[package]] -category = "main" -description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.1" [[package]] -category = "dev" -description = "McCabe checker, plugin for flake8" +name = "matplotlib" +version = "3.3.4" +description = "Python plotting package" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cycler = ">=0.10" +kiwisolver = ">=1.0.1" +numpy = ">=1.15" +pillow = ">=6.2.0" +pyparsing = ">=2.0.3,<2.0.4 || >2.0.4,<2.1.2 || >2.1.2,<2.1.6 || >2.1.6" +python-dateutil = ">=2.1" + +[[package]] name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.6.1" [[package]] -category = "main" -description = "The fastest markdown parser in pure Python" name = "mistune" +version = "0.8.4" +description = "The fastest markdown parser in pure Python" +category = "main" optional = true python-versions = "*" -version = "0.8.4" [[package]] -category = "dev" -description = "Optional static typing for Python" name = "mypy" +version = "0.782" +description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "0.782" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" @@ -900,31 +992,31 @@ typing-extensions = ">=3.7.4" dmypy = ["psutil (>=4.0)"] [[package]] -category = "dev" -description = "Experimental type system extensions for programs checked with the mypy typechecker." name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" optional = false python-versions = "*" -version = "0.4.3" [[package]] -category = "main" -description = "A simple extension for Jupyter Notebook and Jupyter Lab to beautify Python code automatically using Black." name = "nb-black" +version = "1.0.7" +description = "A simple extension for Jupyter Notebook and Jupyter Lab to beautify Python code automatically using Black." +category = "main" optional = true python-versions = "*" -version = "1.0.7" [package.dependencies] ipython = "*" [[package]] -category = "main" -description = "A client library for executing notebooks. Formally nbconvert's ExecutePreprocessor." name = "nbclient" +version = "0.5.1" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." 
+category = "main" optional = true python-versions = ">=3.6" -version = "0.5.0" [package.dependencies] async-generator = "*" @@ -939,12 +1031,12 @@ sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] [[package]] -category = "main" -description = "Converting Jupyter Notebooks" name = "nbconvert" +version = "6.0.7" +description = "Converting Jupyter Notebooks" +category = "main" optional = true python-versions = ">=3.6" -version = "6.0.6" [package.dependencies] bleach = "*" @@ -962,19 +1054,19 @@ testpath = "*" traitlets = ">=4.2" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (0.2.2)"] -webpdf = ["pyppeteer (0.2.2)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)"] +webpdf = ["pyppeteer (==0.2.2)"] [[package]] -category = "main" -description = "The Jupyter Notebook format" name = "nbformat" +version = "5.1.2" +description = "The Jupyter Notebook format" +category = "main" optional = true python-versions = ">=3.5" -version = "5.0.7" [package.dependencies] ipython-genutils = "*" @@ -983,34 +1075,34 @@ jupyter-core = "*" traitlets = ">=4.1" [package.extras] -test = ["pytest", "pytest-cov", "testpath"] +fast = ["fastjsonschema"] +test = ["check-manifest", "fastjsonschema", "testpath", "pytest", "pytest-cov"] [[package]] -category = "main" -description = "Patch asyncio to allow nested event loops" name = "nest-asyncio" +version = "1.5.1" +description = "Patch asyncio to allow nested event loops" +category = "main" optional = true python-versions = ">=3.5" -version = "1.4.1" [[package]] -category = "dev" -description = "Node.js virtual environment builder" name = "nodeenv" +version = "1.5.0" +description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = "*" -version = "1.5.0" [[package]] -category = "main" -description = "A web-based notebook environment for interactive computing" name = "notebook" +version = "6.2.0" +description = "A web-based notebook environment for interactive computing" +category = "main" optional = true python-versions = ">=3.5" -version = "6.1.4" [package.dependencies] -Send2Trash = "*" argon2-cffi = "*" ipykernel = "*" ipython-genutils = "*" @@ -1021,21 +1113,23 @@ nbconvert = "*" nbformat = "*" prometheus-client = "*" pyzmq = ">=17" +Send2Trash = ">=1.5.0" terminado = ">=0.8.3" -tornado = ">=5.0" +tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt"] -test = ["nose", "coverage", "requests", "nose-warnings-filters", "nbval", "nose-exclude", "selenium", "pytest", "pytest-cov", "requests-unixsocket"] +docs = ["sphinx", "nbsphinx", 
"sphinxcontrib-github-alt", "sphinx-rtd-theme"] +json-logging = ["json-logging"] +test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] [[package]] -category = "dev" -description = "Flexible test automation." name = "nox" +version = "2020.12.31" +description = "Flexible test automation." +category = "dev" optional = false -python-versions = ">=3.5" -version = "2020.8.22" +python-versions = ">=3.6" [package.dependencies] argcomplete = ">=1.9.4,<2.0" @@ -1047,125 +1141,144 @@ virtualenv = ">=14.0.0" tox_to_nox = ["jinja2", "tox"] [[package]] -category = "main" -description = "NumPy is the fundamental package for array computing with Python." name = "numpy" -optional = true -python-versions = ">=3.6" -version = "1.19.2" +version = "1.20.0" +description = "NumPy is the fundamental package for array computing with Python." +category = "main" +optional = false +python-versions = ">=3.7" [[package]] -category = "main" -description = "Core utilities for Python packages" name = "packaging" +version = "20.9" +description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" + +[[package]] +name = "pandas" +version = "1.2.1" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" +optional = false +python-versions = ">=3.7.1" + +[package.dependencies] +numpy = ">=1.16.5" +python-dateutil = ">=2.7.3" +pytz = ">=2017.3" + +[package.extras] +test = ["pytest (>=5.0.1)", "pytest-xdist", "hypothesis (>=3.58)"] + +[[package]] +name = "pandocfilters" +version = "1.4.3" +description = "Utilities for writing pandoc filters in python" +category = "main" +optional = true +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "parso" +version = "0.8.1" +description = "A Python Parser" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.extras] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] + +[[package]] +name = "pathspec" +version = "0.8.1" +description = "Utility library for gitignore style pattern matching of file paths." +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "patsy" +version = "0.5.1" +description = "A Python package for describing statistical models and for building design matrices." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +numpy = ">=1.4" six = "*" [[package]] -category = "main" -description = "Powerful data structures for data analysis, time series, and statistics" -name = "pandas" -optional = true -python-versions = ">=3.6.1" -version = "1.1.2" - -[package.dependencies] -numpy = ">=1.15.4" -python-dateutil = ">=2.7.3" -pytz = ">=2017.2" - -[package.extras] -test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] - -[[package]] -category = "main" -description = "Utilities for writing pandoc filters in python" -name = "pandocfilters" -optional = true -python-versions = "*" -version = "1.4.2" - -[[package]] -category = "main" -description = "A Python Parser" -name = "parso" -optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.7.1" - -[package.extras] -testing = ["docopt", "pytest (>=3.0.7)"] - -[[package]] -category = "dev" -description = "Utility library for gitignore style pattern matching of file paths." -name = "pathspec" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.8.0" - -[[package]] -category = "dev" -description = "Python Build Reasonableness" name = "pbr" +version = "5.5.1" +description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" -version = "5.5.0" [[package]] -category = "dev" -description = "Check PEP-8 naming conventions, plugin for flake8" name = "pep8-naming" +version = "0.9.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.9.1" [package.dependencies] flake8-polyfill = ">=1.0.2,<2" [[package]] -category = "main" -description = "Pexpect allows easy control of interactive console applications." -marker = "sys_platform != \"win32\"" name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "main" optional = true python-versions = "*" -version = "4.8.0" [package.dependencies] ptyprocess = ">=0.5" [[package]] -category = "main" -description = "Tiny 'shelve'-like database with concurrency support" name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "main" optional = true python-versions = "*" -version = "0.7.5" [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" +name = "pillow" +version = "8.1.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" [package.extras] dev = ["pre-commit", "tox"] [[package]] -category = "dev" -description = "A framework for managing and maintaining multi-language pre-commit hooks." name = "pre-commit" +version = "2.10.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." +category = "dev" optional = false python-versions = ">=3.6.1" -version = "2.7.1" [package.dependencies] cfgv = ">=2.0.0" @@ -1176,334 +1289,376 @@ toml = "*" virtualenv = ">=20.0.8" [[package]] -category = "main" -description = "Python client for the Prometheus monitoring system." 
name = "prometheus-client" +version = "0.9.0" +description = "Python client for the Prometheus monitoring system." +category = "main" optional = true python-versions = "*" -version = "0.8.0" [package.extras] twisted = ["twisted"] [[package]] -category = "main" -description = "Library for building powerful interactive command lines in Python" name = "prompt-toolkit" +version = "3.0.14" +description = "Library for building powerful interactive command lines in Python" +category = "main" optional = true python-versions = ">=3.6.1" -version = "3.0.7" [package.dependencies] wcwidth = "*" [[package]] -category = "main" -description = "psycopg2 - Python-PostgreSQL Database Adapter" name = "psycopg2" +version = "2.8.6" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "2.8.6" [[package]] -category = "main" -description = "Run a subprocess in a pseudo terminal" -marker = "sys_platform != \"win32\" or os_name != \"nt\"" name = "ptyprocess" +version = "0.7.0" +description = "Run a subprocess in a pseudo terminal" +category = "main" optional = true python-versions = "*" -version = "0.6.0" [[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.9.0" - -[[package]] -category = "dev" -description = "Python style guide checker" -name = "pycodestyle" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.6.0" - -[[package]] +version = "1.10.0" +description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "main" -description = "C parser in Python" -name = "pycparser" -optional = true +optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.20" [[package]] +name = "pycodestyle" +version = "2.6.0" +description = "Python style guide checker" category = "dev" -description = "Python docstring style checker" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycparser" +version = "2.20" +description = "C parser in Python" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] name = "pydocstyle" +version = "5.1.1" +description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.5" -version = "5.1.1" [package.dependencies] snowballstemmer = "*" [[package]] -category = "dev" -description = "passive checker of Python programs" name = "pyflakes" +version = "2.2.0" +description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.2.0" [[package]] -category = "main" -description = "Pygments is a syntax highlighting package written in Python." name = "pygments" +version = "2.7.4" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.5" -version = "2.7.1" [[package]] -category = "dev" -description = "python code static checker" -name = "pylint" -optional = false -python-versions = ">=3.5.*" -version = "2.6.0" - -[package.dependencies] -astroid = ">=2.4.0,<=2.5" -colorama = "*" -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.7" -toml = ">=0.7.1" - -[[package]] -category = "main" -description = "Python parsing module" name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" [[package]] -category = "main" -description = "Persistent/Functional/Immutable data structures" name = "pyrsistent" +version = "0.17.3" +description = "Persistent/Functional/Immutable data structures" +category = "main" optional = true python-versions = ">=3.5" -version = "0.17.3" [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "6.2.2" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false -python-versions = ">=3.5" -version = "6.1.0" +python-versions = ">=3.6" [package.dependencies] -atomicwrites = ">=1.0" -attrs = ">=17.4.0" -colorama = "*" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0" +pluggy = ">=0.12,<1.0.0a1" py = ">=1.8.2" toml = "*" [package.extras] -checkqa_mypy = ["mypy (0.780)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -category = "dev" -description = "Pytest plugin for measuring coverage." name = "pytest-cov" +version = "2.11.1" +description = "Pytest plugin for measuring coverage." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.10.1" [package.dependencies] -coverage = ">=4.4" +coverage = ">=5.2.1" pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] [[package]] -category = "dev" -description = "py.test plugin that allows you to add environment variables." name = "pytest-env" +version = "0.6.2" +description = "py.test plugin that allows you to add environment variables." +category = "dev" optional = false python-versions = "*" -version = "0.6.2" [package.dependencies] pytest = ">=2.6.0" [[package]] -category = "main" -description = "Extensions to the standard Python datetime module" +name = "pytest-mock" +version = "3.5.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "tox", "pytest-asyncio"] + +[[package]] +name = "pytest-randomly" +version = "3.5.0" +description = "Pytest plugin to randomly order tests and control random.seed." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +pytest = "*" + +[[package]] name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -version = "2.8.1" [package.dependencies] six = ">=1.5" [[package]] -category = "main" -description = "Add .env support to your django/flask apps in development and deployments" -name = "python-dotenv" -optional = false -python-versions = "*" -version = "0.14.0" - -[package.extras] -cli = ["click (>=5.0)"] - -[[package]] -category = "main" -description = "Programmatically open an editor, capture the result." name = "python-editor" -optional = false -python-versions = "*" version = "1.0.4" +description = "Programmatically open an editor, capture the result." +category = "main" +optional = false +python-versions = "*" [[package]] -category = "main" -description = "World timezone definitions, modern and historical" name = "pytz" +version = "2020.5" +description = "World timezone definitions, modern and historical" +category = "main" optional = false python-versions = "*" -version = "2020.1" [[package]] -category = "main" -description = "Python for Window Extensions" -marker = "sys_platform == \"win32\"" name = "pywin32" +version = "300" +description = "Python for Window Extensions" +category = "main" optional = true python-versions = "*" -version = "228" [[package]] -category = "main" -description = "Python bindings for the winpty library" -marker = "os_name == \"nt\"" name = "pywinpty" -optional = true -python-versions = "*" version = "0.5.7" - -[[package]] -category = "dev" -description = "YAML parser and emitter for Python" -name = "pyyaml" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "5.3.1" - -[[package]] +description = "Python bindings for the winpty library" category = "main" -description = "Python bindings for 0MQ" -name = "pyzmq" optional = true -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" -version = "19.0.2" +python-versions = "*" [[package]] +name = "pyyaml" +version = "5.4.1" +description = "YAML parser and emitter for Python" category = "dev" -description = "Alternative regular expression module, to replace re." +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" + +[[package]] +name = "pyzmq" +version = "22.0.2" +description = "Python bindings for 0MQ" +category = "main" +optional = true +python-versions = ">=3.6" + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name == \"pypy\""} +py = {version = "*", markers = "implementation_name == \"pypy\""} + +[[package]] name = "regex" +version = "2020.11.13" +description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = "*" -version = "2020.9.27" [[package]] -category = "main" -description = "Python HTTP for Humans." name = "requests" +version = "2.25.1" +description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<4" +chardet = ">=3.0.2,<5" idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" +urllib3 = ">=1.21.1,<1.27" [package.extras] security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] -category = "dev" -description = "reStructuredText linter" name = "restructuredtext-lint" +version = "1.3.2" +description = "reStructuredText linter" +category = "dev" optional = false python-versions = "*" -version = "1.3.1" [package.dependencies] docutils = ">=0.11,<1.0" [[package]] +name = "rpy2" +version = "3.4.2" +description = "Python interface to the R language (embedded R)" category = "main" -description = "Send file to trash natively under Mac OS X, Windows and Linux." -name = "send2trash" -optional = true -python-versions = "*" -version = "1.5.0" - -[[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" -name = "six" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.15.0" - -[[package]] -category = "dev" -description = "A pure Python implementation of a sliding window memory map manager" -name = "smmap" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "3.0.4" - -[[package]] -category = "dev" -description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." -name = "snowballstemmer" optional = false python-versions = "*" -version = "2.0.0" - -[[package]] -category = "dev" -description = "Python documentation generator" -name = "sphinx" -optional = false -python-versions = ">=3.5" -version = "3.2.1" [package.dependencies] -Jinja2 = ">=2.3" -Pygments = ">=2.0" +cffi = ">=1.10.0" +jinja2 = "*" +pytz = "*" +tzlocal = "*" + +[package.extras] +all = ["pandas", "numpy", "pytest"] +numpy = ["pandas"] +pandas = ["numpy", "pandas"] +test = ["pytest"] + +[[package]] +name = "scipy" +version = "1.6.0" +description = "SciPy: Scientific Library for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +numpy = ">=1.16.5" + +[[package]] +name = "send2trash" +version = "1.5.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "shapely" +version = "1.7.1" +description = "Geometric objects, predicates, and operations" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +all = ["numpy", "pytest", "pytest-cov"] +test = ["pytest", "pytest-cov"] +vectorized = ["numpy"] + +[[package]] +name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] +name = "smmap" +version = "3.0.5" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "snowballstemmer" +version = "2.1.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
+category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "sphinx" +version = "3.4.3" +description = "Python documentation generator" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=1.3" -colorama = ">=0.3.5" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.12" imagesize = "*" +Jinja2 = ">=2.3" packaging = "*" +Pygments = ">=2.0" requests = ">=2.5.0" -setuptools = "*" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -1514,102 +1669,102 @@ sphinxcontrib-serializinghtml = "*" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-stubs"] -test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.790)", "docutils-stubs"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] -category = "dev" -description = "Type hints (PEP 484) support for the Sphinx autodoc extension" name = "sphinx-autodoc-typehints" +version = "1.11.1" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +category = "dev" optional = false python-versions = ">=3.5.2" -version = "1.11.0" [package.dependencies] Sphinx = ">=3.0" [package.extras] -test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "dataclasses"] +test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "Sphinx (>=3.2.0)", "dataclasses"] type_comments = ["typed-ast (>=1.4.0)"] [[package]] -category = "dev" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" name = "sphinxcontrib-htmlhelp" +version = "1.0.3" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.3" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest", "html5lib"] [[package]] -category = "dev" -description = "A sphinx extension which renders display math in HTML via JavaScript" name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.1" [package.extras] test = ["pytest", "flake8", "mypy"] [[package]] -category = "dev" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
name = "sphinxcontrib-qthelp" -optional = false -python-versions = ">=3.5" version = "1.0.3" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] - -[[package]] +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.4" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." category = "dev" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -name = "sphinxcontrib-serializinghtml" optional = false python-versions = ">=3.5" -version = "1.1.4" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "main" -description = "Database Abstraction Library" name = "sqlalchemy" +version = "1.3.23" +description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.3.19" [package.extras] mssql = ["pyodbc"] @@ -1618,42 +1773,61 @@ mssql_pyodbc = ["pyodbc"] mysql = ["mysqlclient"] oracle = ["cx-oracle"] postgresql = ["psycopg2"] -postgresql_pg8000 = ["pg8000"] +postgresql_pg8000 = ["pg8000 (<1.16.6)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql"] +pymysql = ["pymysql (<1)", "pymysql"] [[package]] -category = "dev" -description = "Manage dynamic plugins for Python applications" -name = "stevedore" +name = "statsmodels" +version = "0.12.2" +description = "Statistical computations and models for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +numpy = ">=1.15" +pandas = ">=0.21" +patsy = ">=0.5" +scipy = ">=1.1" + +[package.extras] +build = ["cython (>=0.29)"] +develop = ["cython (>=0.29)"] +docs = ["sphinx", "nbconvert", "jupyter-client", "ipykernel", "matplotlib", "nbformat", "numpydoc", "pandas-datareader"] + +[[package]] +name = "stevedore" +version = "3.3.0" +description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.2.2" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] -category = "main" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." name = "terminado" +version = "0.9.2" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "main" optional = true python-versions = ">=3.6" -version = "0.9.1" [package.dependencies] -ptyprocess = "*" -pywinpty = ">=0.5" +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=0.5", markers = "os_name == \"nt\""} tornado = ">=4" [[package]] -category = "dev" -description = "A collection of helpers and mock objects for unit tests and doc tests." name = "testfixtures" +version = "6.17.1" +description = "A collection of helpers and mock objects for unit tests and doc tests." 
+category = "dev" optional = false python-versions = "*" -version = "6.14.2" [package.extras] build = ["setuptools-git", "wheel", "twine"] @@ -1661,39 +1835,47 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", " test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] [[package]] -category = "main" -description = "Test utilities for code working with files and commands" name = "testpath" +version = "0.4.4" +description = "Test utilities for code working with files and commands" +category = "main" optional = true python-versions = "*" -version = "0.4.4" [package.extras] test = ["pathlib2"] [[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" -name = "toml" optional = false python-versions = "*" -version = "0.10.1" [[package]] -category = "main" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +name = "toml" +version = "0.10.2" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" + +[[package]] name = "tornado" +version = "6.1" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." +category = "main" optional = true python-versions = ">= 3.5" -version = "6.0.4" [[package]] -category = "main" -description = "Traitlets Python configuration system" name = "traitlets" +version = "5.0.5" +description = "Traitlets Python configuration system" +category = "main" optional = true python-versions = ">=3.7" -version = "5.0.4" [package.dependencies] ipython-genutils = "*" @@ -1702,41 +1884,60 @@ ipython-genutils = "*" test = ["pytest"] [[package]] -category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" name = "typed-ast" -optional = false -python-versions = "*" -version = "1.4.1" - -[[package]] +version = "1.4.2" +description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" -description = "Backported and Experimental Type Hints for Python 3.5+" -name = "typing-extensions" optional = false python-versions = "*" -version = "3.7.4.3" [[package]] +name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "tzlocal" +version = "2.1" +description = "tzinfo object for the local timezone" category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = "*" + +[package.dependencies] +pytz = "*" + +[[package]] name = "urllib3" +version = "1.26.3" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] + +[[package]] +name = "utm" +version = "0.7.0" +description = "Bidirectional UTM-WGS84 converter for python" +category = "main" +optional = false +python-versions = "*" [[package]] -category = "dev" -description = "Virtual Python Environment builder" name = "virtualenv" +version = "20.4.2" +description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "20.0.31" [package.dependencies] appdirs = ">=1.4.3,<2" @@ -1746,31 +1947,31 @@ six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=5)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] [[package]] -category = "main" -description = "Measures the displayed width of unicode strings in a terminal" name = "wcwidth" -optional = true -python-versions = "*" version = "0.2.5" - -[[package]] +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" category = "main" -description = "Character encoding aliases for legacy web content" -name = "webencodings" optional = true python-versions = "*" -version = "0.5.1" [[package]] -category = "dev" -description = "The strictest and most opinionated python linter ever" name = "wemake-python-styleguide" +version = "0.14.1" +description = "The strictest and most opinionated python linter ever" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.14.1" [package.dependencies] astor = ">=0.8,<0.9" @@ -1794,44 +1995,30 @@ pygments = ">=2.4,<3.0" typing_extensions = ">=3.6,<4.0" [[package]] -category = "dev" -description = "Module for decorators, wrappers and monkey patching." 
-name = "wrapt" -optional = false -python-versions = "*" -version = "1.12.1" - -[[package]] -category = "dev" -description = "A rewrite of the builtin doctest module" name = "xdoctest" +version = "0.13.0" +description = "A rewrite of the builtin doctest module" +category = "dev" optional = false python-versions = "*" -version = "0.13.0" [package.dependencies] +colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"optional\""} +Pygments = {version = "*", optional = true, markers = "extra == \"optional\""} six = "*" -[package.dependencies.Pygments] -optional = true -version = "*" - -[package.dependencies.colorama] -optional = true -version = "*" - [package.extras] all = ["six", "pytest", "pytest-cov", "codecov", "scikit-build", "cmake", "ninja", "pybind11", "pygments", "colorama"] optional = ["pygments", "colorama"] tests = ["pytest", "pytest-cov", "codecov", "scikit-build", "cmake", "ninja", "pybind11"] [extras] -research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] +research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] -content-hash = "eba980d4335eef2012a1e7ce27941731149eb224cdfad856aa0bcd7701e9e557" -lock-version = "1.0" +lock-version = "1.1" python-versions = "^3.8" +content-hash = "9a2253e699e28998cb3ef8d8dadd8bf15a891c5e0cec4709671afe22159d5d86" [metadata.files] alabaster = [ @@ -1839,20 +2026,19 @@ alabaster = [ {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] alembic = [ - {file = "alembic-1.4.3-py2.py3-none-any.whl", hash = "sha256:4e02ed2aa796bd179965041afa092c55b51fb077de19d61835673cc80672c01c"}, - {file = "alembic-1.4.3.tar.gz", hash = "sha256:5334f32314fb2a56d86b4c4dd1ae34b08c03cae4cb888bc699942104d66bc245"}, + {file = "alembic-1.5.4.tar.gz", hash = "sha256:e871118b6174681f7e9a9ea67cfcae954c6d18e05b49c6b17f662d2530c76bf5"}, ] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] appnope = [ - {file = "appnope-0.1.0-py2.py3-none-any.whl", hash = "sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0"}, - {file = "appnope-0.1.0.tar.gz", hash = "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71"}, + {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, + {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, ] argcomplete = [ - {file = "argcomplete-1.12.1-py2.py3-none-any.whl", hash = "sha256:5cd1ac4fc49c29d6016fc2cc4b19a3c08c3624544503495bf25989834c443898"}, - {file = "argcomplete-1.12.1.tar.gz", hash = "sha256:849c2444c35bb2175aea74100ca5f644c29bf716429399c0f2203bb5d9a8e4e6"}, + {file = "argcomplete-1.12.2-py2.py3-none-any.whl", hash = "sha256:17f01a9b9b9ece3e6b07058eae737ad6e10de8b4e149105f84614783913aba71"}, + {file = "argcomplete-1.12.2.tar.gz", hash = "sha256:de0e1282330940d52ea92a80fea2e4b9e0da1932aaa570f84d268939d1897b04"}, ] argon2-cffi = [ {file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"}, @@ -1871,18 +2057,16 @@ argon2-cffi = [ {file = "argon2_cffi-20.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203"}, {file = 
"argon2_cffi-20.1.0-cp38-cp38-win32.whl", hash = "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78"}, {file = "argon2_cffi-20.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2"}, + {file = "argon2_cffi-20.1.0-cp39-cp39-win32.whl", hash = "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be"}, + {file = "argon2_cffi-20.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32"}, ] astor = [ {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, ] astpretty = [ - {file = "astpretty-2.0.0-py2.py3-none-any.whl", hash = "sha256:7f27633ed885033da8b58666e7079ffff7e8e01869ec1aa66484cb5185ea3aa4"}, - {file = "astpretty-2.0.0.tar.gz", hash = "sha256:e4724bfd753636ba4a84384702e9796e5356969f40af2596d846ce64addde086"}, -] -astroid = [ - {file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"}, - {file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"}, + {file = "astpretty-2.1.0-py2.py3-none-any.whl", hash = "sha256:f81f14b5636f7af81fadb1e3c09ca7702ce4615500d9cc6d6829befb2dec2e3c"}, + {file = "astpretty-2.1.0.tar.gz", hash = "sha256:8a801fcda604ec741f010bb36d7cbadc3ec8a182ea6fb83e20ab663463e75ff6"}, ] async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, @@ -1893,133 +2077,156 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.2.0-py2.py3-none-any.whl", hash = "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"}, - {file = "attrs-20.2.0.tar.gz", hash = "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594"}, + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] autoflake = [ {file = "autoflake-1.4.tar.gz", hash = "sha256:61a353012cff6ab94ca062823d1fb2f692c4acda51c76ff83a8d77915fba51ea"}, ] babel = [ - {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"}, - {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"}, + {file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"}, + {file = "Babel-2.9.0.tar.gz", hash = "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"}, ] backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] bandit = [ - {file = "bandit-1.6.2-py2.py3-none-any.whl", hash = "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952"}, - {file = "bandit-1.6.2.tar.gz", hash = "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"}, + {file = 
"bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"}, + {file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"}, ] black = [ {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, ] bleach = [ - {file = "bleach-3.2.1-py2.py3-none-any.whl", hash = "sha256:9f8ccbeb6183c6e6cddea37592dfb0167485c1e3b13b3363bc325aa8bda3adbd"}, - {file = "bleach-3.2.1.tar.gz", hash = "sha256:52b5919b81842b1854196eaae5ca29679a2f2e378905c346d3ca8227c2c66080"}, + {file = "bleach-3.3.0-py2.py3-none-any.whl", hash = "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125"}, + {file = "bleach-3.3.0.tar.gz", hash = "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"}, +] +branca = [ + {file = "branca-0.4.2-py3-none-any.whl", hash = "sha256:62c2e777f074fc1830cd40ba9e650beb941861075980babafead8d97856b1a4b"}, + {file = "branca-0.4.2.tar.gz", hash = "sha256:c111453617b17ab2bda60a4cd71787d6f2b59c85cdf71ab160a737606ac66c31"}, ] certifi = [ - {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"}, - {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"}, + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, ] cffi = [ - {file = "cffi-1.14.3-2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc"}, - {file = "cffi-1.14.3-2-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:cb763ceceae04803adcc4e2d80d611ef201c73da32d8f2722e9d0ab0c7f10768"}, - {file = "cffi-1.14.3-2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f60519595eaca110f248e5017363d751b12782a6f2bd6a7041cba275215f5d"}, - {file = "cffi-1.14.3-2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c53af463f4a40de78c58b8b2710ade243c81cbca641e34debf3396a9640d6ec1"}, - {file = "cffi-1.14.3-2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:33c6cdc071ba5cd6d96769c8969a0531be2d08c2628a0143a10a7dcffa9719ca"}, - {file = "cffi-1.14.3-2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c11579638288e53fc94ad60022ff1b67865363e730ee41ad5e6f0a17188b327a"}, - {file = "cffi-1.14.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3cb3e1b9ec43256c4e0f8d2837267a70b0e1ca8c4f456685508ae6106b1f504c"}, - {file = "cffi-1.14.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f0620511387790860b249b9241c2f13c3a80e21a73e0b861a2df24e9d6f56730"}, - {file = "cffi-1.14.3-cp27-cp27m-win32.whl", hash = "sha256:005f2bfe11b6745d726dbb07ace4d53f057de66e336ff92d61b8c7e9c8f4777d"}, - {file = "cffi-1.14.3-cp27-cp27m-win_amd64.whl", hash = "sha256:2f9674623ca39c9ebe38afa3da402e9326c245f0f5ceff0623dccdac15023e05"}, - {file = "cffi-1.14.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:09e96138280241bd355cd585148dec04dbbedb4f46128f340d696eaafc82dd7b"}, - {file = "cffi-1.14.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3363e77a6176afb8823b6e06db78c46dbc4c7813b00a41300a4873b6ba63b171"}, - {file = "cffi-1.14.3-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:0ef488305fdce2580c8b2708f22d7785ae222d9825d3094ab073e22e93dfe51f"}, - {file = "cffi-1.14.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:0b1ad452cc824665ddc682400b62c9e4f5b64736a2ba99110712fdee5f2505c4"}, - {file = "cffi-1.14.3-cp35-cp35m-win32.whl", hash = "sha256:85ba797e1de5b48aa5a8427b6ba62cf69607c18c5d4eb747604b7302f1ec382d"}, - {file = "cffi-1.14.3-cp35-cp35m-win_amd64.whl", hash = "sha256:e66399cf0fc07de4dce4f588fc25bfe84a6d1285cc544e67987d22663393926d"}, - {file = "cffi-1.14.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:15f351bed09897fbda218e4db5a3d5c06328862f6198d4fb385f3e14e19decb3"}, - {file = "cffi-1.14.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4d7c26bfc1ea9f92084a1d75e11999e97b62d63128bcc90c3624d07813c52808"}, - {file = "cffi-1.14.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:23e5d2040367322824605bc29ae8ee9175200b92cb5483ac7d466927a9b3d537"}, - {file = "cffi-1.14.3-cp36-cp36m-win32.whl", hash = "sha256:a624fae282e81ad2e4871bdb767e2c914d0539708c0f078b5b355258293c98b0"}, - {file = "cffi-1.14.3-cp36-cp36m-win_amd64.whl", hash = "sha256:de31b5164d44ef4943db155b3e8e17929707cac1e5bd2f363e67a56e3af4af6e"}, - {file = "cffi-1.14.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f92cdecb618e5fa4658aeb97d5eb3d2f47aa94ac6477c6daf0f306c5a3b9e6b1"}, - {file = "cffi-1.14.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579"}, - {file = "cffi-1.14.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f4eae045e6ab2bb54ca279733fe4eb85f1effda392666308250714e01907f394"}, - {file = "cffi-1.14.3-cp37-cp37m-win32.whl", hash = "sha256:b0358e6fefc74a16f745afa366acc89f979040e0cbc4eec55ab26ad1f6a9bfbc"}, - {file = "cffi-1.14.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6642f15ad963b5092d65aed022d033c77763515fdc07095208f15d3563003869"}, - {file = "cffi-1.14.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2791f68edc5749024b4722500e86303a10d342527e1e3bcac47f35fbd25b764e"}, - {file = "cffi-1.14.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:529c4ed2e10437c205f38f3691a68be66c39197d01062618c55f74294a4a4828"}, - {file = "cffi-1.14.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f0f1e499e4000c4c347a124fa6a27d37608ced4fe9f7d45070563b7c4c370c9"}, - {file = "cffi-1.14.3-cp38-cp38-win32.whl", hash = "sha256:3b8eaf915ddc0709779889c472e553f0d3e8b7bdf62dab764c8921b09bf94522"}, - {file = "cffi-1.14.3-cp38-cp38-win_amd64.whl", hash = "sha256:bbd2f4dfee1079f76943767fce837ade3087b578aeb9f69aec7857d5bf25db15"}, - {file = "cffi-1.14.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cc75f58cdaf043fe6a7a6c04b3b5a0e694c6a9e24050967747251fb80d7bce0d"}, - {file = "cffi-1.14.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:bf39a9e19ce7298f1bd6a9758fa99707e9e5b1ebe5e90f2c3913a47bc548747c"}, - {file = "cffi-1.14.3-cp39-cp39-win32.whl", hash = "sha256:d80998ed59176e8cba74028762fbd9b9153b9afc71ea118e63bbf5d4d0f9552b"}, - {file = "cffi-1.14.3-cp39-cp39-win_amd64.whl", hash = "sha256:c150eaa3dadbb2b5339675b88d4573c1be3cb6f2c33a6c83387e10cc0bf05bd3"}, - {file = "cffi-1.14.3.tar.gz", hash = "sha256:f92f789e4f9241cd262ad7a555ca2c648a98178a953af117ef7fad46aa1d5591"}, + {file = "cffi-1.14.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775"}, + {file = "cffi-1.14.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06"}, + {file = "cffi-1.14.4-cp27-cp27m-manylinux1_x86_64.whl", hash = 
"sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26"}, + {file = "cffi-1.14.4-cp27-cp27m-win32.whl", hash = "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c"}, + {file = "cffi-1.14.4-cp27-cp27m-win_amd64.whl", hash = "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b"}, + {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d"}, + {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca"}, + {file = "cffi-1.14.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698"}, + {file = "cffi-1.14.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b"}, + {file = "cffi-1.14.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293"}, + {file = "cffi-1.14.4-cp35-cp35m-win32.whl", hash = "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2"}, + {file = "cffi-1.14.4-cp35-cp35m-win_amd64.whl", hash = "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7"}, + {file = "cffi-1.14.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b"}, + {file = "cffi-1.14.4-cp36-cp36m-win32.whl", hash = "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668"}, + {file = "cffi-1.14.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009"}, + {file = "cffi-1.14.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01"}, + {file = "cffi-1.14.4-cp37-cp37m-win32.whl", hash = "sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e"}, + {file = "cffi-1.14.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35"}, + {file = "cffi-1.14.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e"}, + {file = 
"cffi-1.14.4-cp38-cp38-win32.whl", hash = "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d"}, + {file = "cffi-1.14.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375"}, + {file = "cffi-1.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a"}, + {file = "cffi-1.14.4-cp39-cp39-win32.whl", hash = "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3"}, + {file = "cffi-1.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b"}, + {file = "cffi-1.14.4.tar.gz", hash = "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"}, ] cfgv = [ {file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"}, {file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"}, ] chardet = [ - {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, - {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, ] colorama = [ - {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, - {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] colorlog = [ - {file = "colorlog-4.2.1-py2.py3-none-any.whl", hash = "sha256:43597fd822ce705190fc997519342fdaaf44b9b47f896ece7aa153ed4b909c74"}, - {file = "colorlog-4.2.1.tar.gz", hash = "sha256:75e55822c3a3387d721579241e776de2cf089c9ef9528b1f09e8b04d403ad118"}, + {file = "colorlog-4.7.2-py2.py3-none-any.whl", hash = "sha256:0a9dcdba6cab68e8a768448b418a858d73c52b37b6e8dea2568296faece393bd"}, + {file = "colorlog-4.7.2.tar.gz", hash = "sha256:18d05b616438a75762d7d214b9ec3b05d274466c9f3ddd92807e755840c88251"}, ] coverage = [ - {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, - {file = "coverage-5.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4"}, - {file = "coverage-5.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9"}, - {file = 
"coverage-5.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729"}, - {file = "coverage-5.3-cp27-cp27m-win32.whl", hash = "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d"}, - {file = "coverage-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5"}, - {file = "coverage-5.3-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9"}, - {file = "coverage-5.3-cp35-cp35m-win32.whl", hash = "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636"}, - {file = "coverage-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f"}, - {file = "coverage-5.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7"}, - {file = "coverage-5.3-cp36-cp36m-win32.whl", hash = "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a"}, - {file = "coverage-5.3-cp36-cp36m-win_amd64.whl", hash = "sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d"}, - {file = "coverage-5.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c"}, - {file = "coverage-5.3-cp37-cp37m-win32.whl", hash = "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751"}, - {file = "coverage-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709"}, - {file = "coverage-5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259"}, - {file = "coverage-5.3-cp38-cp38-win32.whl", hash = "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82"}, - {file = "coverage-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221"}, - {file = "coverage-5.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = 
"sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24"}, - {file = "coverage-5.3-cp39-cp39-win32.whl", hash = "sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7"}, - {file = "coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, - {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, + {file = "coverage-5.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6d9c88b787638a451f41f97446a1c9fd416e669b4d9717ae4615bd29de1ac135"}, + {file = "coverage-5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:66a5aae8233d766a877c5ef293ec5ab9520929c2578fd2069308a98b7374ea8c"}, + {file = "coverage-5.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9754a5c265f991317de2bac0c70a746efc2b695cf4d49f5d2cddeac36544fb44"}, + {file = "coverage-5.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fbb17c0d0822684b7d6c09915677a32319f16ff1115df5ec05bdcaaee40b35f3"}, + {file = "coverage-5.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b7f7421841f8db443855d2854e25914a79a1ff48ae92f70d0a5c2f8907ab98c9"}, + {file = "coverage-5.4-cp27-cp27m-win32.whl", hash = "sha256:4a780807e80479f281d47ee4af2eb2df3e4ccf4723484f77da0bb49d027e40a1"}, + {file = "coverage-5.4-cp27-cp27m-win_amd64.whl", hash = "sha256:87c4b38288f71acd2106f5d94f575bc2136ea2887fdb5dfe18003c881fa6b370"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6809ebcbf6c1049002b9ac09c127ae43929042ec1f1dbd8bb1615f7cd9f70a0"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ba7ca81b6d60a9f7a0b4b4e175dcc38e8fef4992673d9d6e6879fd6de00dd9b8"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:89fc12c6371bf963809abc46cced4a01ca4f99cba17be5e7d416ed7ef1245d19"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8eb7785bd23565b542b01fb39115a975fefb4a82f23d407503eee2c0106247"}, + {file = "coverage-5.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:7e40d3f8eb472c1509b12ac2a7e24158ec352fc8567b77ab02c0db053927e339"}, + {file = "coverage-5.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1ccae21a076d3d5f471700f6d30eb486da1626c380b23c70ae32ab823e453337"}, + {file = "coverage-5.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:755c56beeacac6a24c8e1074f89f34f4373abce8b662470d3aa719ae304931f3"}, + {file = "coverage-5.4-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:322549b880b2d746a7672bf6ff9ed3f895e9c9f108b714e7360292aa5c5d7cf4"}, + {file = "coverage-5.4-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:60a3307a84ec60578accd35d7f0c71a3a971430ed7eca6567399d2b50ef37b8c"}, + {file = "coverage-5.4-cp35-cp35m-win32.whl", hash = "sha256:1375bb8b88cb050a2d4e0da901001347a44302aeadb8ceb4b6e5aa373b8ea68f"}, + {file = "coverage-5.4-cp35-cp35m-win_amd64.whl", hash = "sha256:16baa799ec09cc0dcb43a10680573269d407c159325972dd7114ee7649e56c66"}, + {file = "coverage-5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2f2cf7a42d4b7654c9a67b9d091ec24374f7c58794858bff632a2039cb15984d"}, + {file = "coverage-5.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b62046592b44263fa7570f1117d372ae3f310222af1fc1407416f037fb3af21b"}, + {file = 
"coverage-5.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:812eaf4939ef2284d29653bcfee9665f11f013724f07258928f849a2306ea9f9"}, + {file = "coverage-5.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:859f0add98707b182b4867359e12bde806b82483fb12a9ae868a77880fc3b7af"}, + {file = "coverage-5.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:04b14e45d6a8e159c9767ae57ecb34563ad93440fc1b26516a89ceb5b33c1ad5"}, + {file = "coverage-5.4-cp36-cp36m-win32.whl", hash = "sha256:ebfa374067af240d079ef97b8064478f3bf71038b78b017eb6ec93ede1b6bcec"}, + {file = "coverage-5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:84df004223fd0550d0ea7a37882e5c889f3c6d45535c639ce9802293b39cd5c9"}, + {file = "coverage-5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1b811662ecf72eb2d08872731636aee6559cae21862c36f74703be727b45df90"}, + {file = "coverage-5.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6b588b5cf51dc0fd1c9e19f622457cc74b7d26fe295432e434525f1c0fae02bc"}, + {file = "coverage-5.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3fe50f1cac369b02d34ad904dfe0771acc483f82a1b54c5e93632916ba847b37"}, + {file = "coverage-5.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:32ab83016c24c5cf3db2943286b85b0a172dae08c58d0f53875235219b676409"}, + {file = "coverage-5.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:68fb816a5dd901c6aff352ce49e2a0ffadacdf9b6fae282a69e7a16a02dad5fb"}, + {file = "coverage-5.4-cp37-cp37m-win32.whl", hash = "sha256:a636160680c6e526b84f85d304e2f0bb4e94f8284dd765a1911de9a40450b10a"}, + {file = "coverage-5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:bb32ca14b4d04e172c541c69eec5f385f9a075b38fb22d765d8b0ce3af3a0c22"}, + {file = "coverage-5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4d7165a4e8f41eca6b990c12ee7f44fef3932fac48ca32cecb3a1b2223c21f"}, + {file = "coverage-5.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a565f48c4aae72d1d3d3f8e8fb7218f5609c964e9c6f68604608e5958b9c60c3"}, + {file = "coverage-5.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fff1f3a586246110f34dc762098b5afd2de88de507559e63553d7da643053786"}, + {file = "coverage-5.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a839e25f07e428a87d17d857d9935dd743130e77ff46524abb992b962eb2076c"}, + {file = "coverage-5.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6625e52b6f346a283c3d563d1fd8bae8956daafc64bb5bbd2b8f8a07608e3994"}, + {file = "coverage-5.4-cp38-cp38-win32.whl", hash = "sha256:5bee3970617b3d74759b2d2df2f6a327d372f9732f9ccbf03fa591b5f7581e39"}, + {file = "coverage-5.4-cp38-cp38-win_amd64.whl", hash = "sha256:03ed2a641e412e42cc35c244508cf186015c217f0e4d496bf6d7078ebe837ae7"}, + {file = "coverage-5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:14a9f1887591684fb59fdba8feef7123a0da2424b0652e1b58dd5b9a7bb1188c"}, + {file = "coverage-5.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9564ac7eb1652c3701ac691ca72934dd3009997c81266807aef924012df2f4b3"}, + {file = "coverage-5.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0f48fc7dc82ee14aeaedb986e175a429d24129b7eada1b7e94a864e4f0644dde"}, + {file = "coverage-5.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:107d327071061fd4f4a2587d14c389a27e4e5c93c7cba5f1f59987181903902f"}, + {file = "coverage-5.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0cdde51bfcf6b6bd862ee9be324521ec619b20590787d1655d005c3fb175005f"}, + {file = "coverage-5.4-cp39-cp39-win32.whl", hash = "sha256:c67734cff78383a1f23ceba3b3239c7deefc62ac2b05fa6a47bcd565771e5880"}, + {file = "coverage-5.4-cp39-cp39-win_amd64.whl", hash = 
"sha256:c669b440ce46ae3abe9b2d44a913b5fd86bb19eb14a8701e88e3918902ecd345"}, + {file = "coverage-5.4-pp36-none-any.whl", hash = "sha256:c0ff1c1b4d13e2240821ef23c1efb1f009207cb3f56e16986f713c2b0e7cd37f"}, + {file = "coverage-5.4-pp37-none-any.whl", hash = "sha256:cd601187476c6bed26a0398353212684c427e10a903aeafa6da40c63309d438b"}, + {file = "coverage-5.4.tar.gz", hash = "sha256:6d2e262e5e8da6fa56e774fb8e2643417351427604c2b177f8e8c5f75fc928ca"}, +] +cycler = [ + {file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"}, + {file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"}, ] darglint = [ - {file = "darglint-1.5.4-py3-none-any.whl", hash = "sha256:e58ff63f0f29a4dc8f9c1e102c7d00539290567d72feb74b7b9d5f8302992b8d"}, - {file = "darglint-1.5.4.tar.gz", hash = "sha256:7ebaafc8559d0db7735b6e15904ee5cca4be56fa85eac21c025c328278c6317a"}, + {file = "darglint-1.6.0-py3-none-any.whl", hash = "sha256:c80849fd83a06d0bec3c93240360214cf56979691c6d18e2abb293aa404bf443"}, + {file = "darglint-1.6.0.tar.gz", hash = "sha256:9c91a1dd93f6cdbdd626ecea8ae2849fafe6588bb6d6dbbf7066f9ae69fca771"}, ] decorator = [ {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, @@ -2044,17 +2251,25 @@ entrypoints = [ eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, ] +factory-boy = [ + {file = "factory_boy-3.2.0-py2.py3-none-any.whl", hash = "sha256:1d3db4b44b8c8c54cdd8b83ae4bdb9aeb121e464400035f1f03ae0e1eade56a4"}, + {file = "factory_boy-3.2.0.tar.gz", hash = "sha256:401cc00ff339a022f84d64a4339503d1689e8263a4478d876e58a3295b155c5b"}, +] +faker = [ + {file = "Faker-5.8.0-py3-none-any.whl", hash = "sha256:0783729c61501d52efea2967aff6e6fcb8370f0f6b5a558f2a81233642ae529a"}, + {file = "Faker-5.8.0.tar.gz", hash = "sha256:6b2995ffff6c2b02bc5daad96f8c24c021e5bd491d9d53d31bcbd66f348181d4"}, +] filelock = [ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, ] flake8 = [ - {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, - {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"}, + {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, + {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, ] flake8-annotations = [ - {file = "flake8-annotations-2.4.1.tar.gz", hash = "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1"}, - {file = "flake8_annotations-2.4.1-py3-none-any.whl", hash = "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df"}, + {file = "flake8-annotations-2.5.0.tar.gz", hash = "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e"}, + {file = "flake8_annotations-2.5.0-py3-none-any.whl", hash = "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055"}, ] flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, @@ -2075,8 +2290,8 @@ 
flake8-commas = [ {file = "flake8_commas-2.0.0-py2.py3-none-any.whl", hash = "sha256:ee2141a3495ef9789a3894ed8802d03eff1eaaf98ce6d8653a7c573ef101935e"}, ] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.2.3.tar.gz", hash = "sha256:d5751acc0f7364794c71d06f113f4686d6e2e26146a50fa93130b9f200fe160d"}, - {file = "flake8_comprehensions-3.2.3-py3-none-any.whl", hash = "sha256:44eaae9894aa15f86e0c86df1e218e7917494fab6f96d28f96a029c460f17d92"}, + {file = "flake8-comprehensions-3.3.1.tar.gz", hash = "sha256:e734bf03806bb562886d9bf635d23a65a1a995c251b67d7e007a7b608af9bd22"}, + {file = "flake8_comprehensions-3.3.1-py3-none-any.whl", hash = "sha256:6d80dfafda0d85633f88ea5bc7de949485f71f1e28db7af7719563fe5f62dcb1"}, ] flake8-debugger = [ {file = "flake8-debugger-3.2.1.tar.gz", hash = "sha256:712d7c1ff69ddf3f0130e94cc88c2519e720760bce45e8c330bfdcb61ab4090d"}, @@ -2118,17 +2333,28 @@ flake8-string-format = [ {file = "flake8-string-format-0.2.3.tar.gz", hash = "sha256:774d56103d9242ed968897455ef49b7d6de272000cfa83de5814273a868832f1"}, {file = "flake8_string_format-0.2.3-py2.py3-none-any.whl", hash = "sha256:68ea72a1a5b75e7018cae44d14f32473c798cf73d75cbaed86c6a9a907b770b2"}, ] +folium = [ + {file = "folium-0.12.1-py2.py3-none-any.whl", hash = "sha256:3d2c48dd6ffe5327975bbfd718468c4e81db9f2844c26e574f878adf4c08b644"}, +] +geographiclib = [ + {file = "geographiclib-1.50-py3-none-any.whl", hash = "sha256:51cfa698e7183792bce27d8fb63ac8e83689cd8170a730bf35e1a5c5bf8849b9"}, + {file = "geographiclib-1.50.tar.gz", hash = "sha256:12bd46ee7ec25b291ea139b17aa991e7ef373e21abd053949b75c0e9ca55c632"}, +] +geopy = [ + {file = "geopy-2.1.0-py3-none-any.whl", hash = "sha256:4db8a2b79a2b3358a7d020ea195be639251a831a1b429c0d1b20c9f00c67c788"}, + {file = "geopy-2.1.0.tar.gz", hash = "sha256:892b219413e7955587b029949af3a1949c6fbac9d5ad17b79d850718f6a9550f"}, +] gitdb = [ {file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"}, {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.8-py3-none-any.whl", hash = "sha256:1858f4fd089abe92ae465f01d5aaaf55e937eca565fb2c1fce35a51b5f85c910"}, - {file = "GitPython-3.1.8.tar.gz", hash = "sha256:080bf8e2cf1a2b907634761c2eaefbe83b69930c94c66ad11b65a8252959f912"}, + {file = "GitPython-3.1.12-py3-none-any.whl", hash = "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5"}, + {file = "GitPython-3.1.12.tar.gz", hash = "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac"}, ] identify = [ - {file = "identify-1.5.5-py2.py3-none-any.whl", hash = "sha256:da683bfb7669fa749fc7731f378229e2dbf29a1d1337cbde04106f02236eb29d"}, - {file = "identify-1.5.5.tar.gz", hash = "sha256:7c22c384a2c9b32c5cc891d13f923f6b2653aa83e2d75d8f79be240d6c86c4f4"}, + {file = "identify-1.5.13-py2.py3-none-any.whl", hash = "sha256:9dfb63a2e871b807e3ba62f029813552a24b5289504f5b071dea9b041aee9fe4"}, + {file = "identify-1.5.13.tar.gz", hash = "sha256:70b638cf4743f33042bebb3b51e25261a0a10e80f978739f17e7fd4837664a66"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -2139,16 +2365,16 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] iniconfig = [ - {file = "iniconfig-1.0.1-py3-none-any.whl", hash = 
"sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437"}, - {file = "iniconfig-1.0.1.tar.gz", hash = "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"}, + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipykernel = [ - {file = "ipykernel-5.3.4-py3-none-any.whl", hash = "sha256:d6fbba26dba3cebd411382bc484f7bc2caa98427ae0ddb4ab37fe8bfeb5c7dd3"}, - {file = "ipykernel-5.3.4.tar.gz", hash = "sha256:9b2652af1607986a1b231c62302d070bc0534f564c393a5d9d130db9abbbe89d"}, + {file = "ipykernel-5.4.3-py3-none-any.whl", hash = "sha256:4ed205700001a83b5832d4821c46a5733f1bf4b1c55744314ae3c756be6b6095"}, + {file = "ipykernel-5.4.3.tar.gz", hash = "sha256:697103d218e9a8828025af7986e033c89e0b36e2b6eb84a5bda4739b9a27f3cb"}, ] ipython = [ - {file = "ipython-7.18.1-py3-none-any.whl", hash = "sha256:2e22c1f74477b5106a6fb301c342ab8c64bb75d702e350f05a649e8cb40a0fb8"}, - {file = "ipython-7.18.1.tar.gz", hash = "sha256:a331e78086001931de9424940699691ad49dfb457cea31f5471eae7b78222d5e"}, + {file = "ipython-7.20.0-py3-none-any.whl", hash = "sha256:1918dea4bfdc5d1a830fcfce9a710d1d809cbed123e85eab0539259cb0f56640"}, + {file = "ipython-7.20.0.tar.gz", hash = "sha256:1923af00820a8cf58e91d56b89efc59780a6e81363b94464a0f17c039dffff9e"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, @@ -2159,12 +2385,12 @@ isort = [ {file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"}, ] jedi = [ - {file = "jedi-0.17.2-py2.py3-none-any.whl", hash = "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"}, - {file = "jedi-0.17.2.tar.gz", hash = "sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20"}, + {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, + {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, ] jinja2 = [ - {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, - {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, + {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, + {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] json5 = [ {file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"}, @@ -2175,16 +2401,16 @@ jsonschema = [ {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] jupyter-client = [ - {file = "jupyter_client-6.1.7-py3-none-any.whl", hash = "sha256:c958d24d6eacb975c1acebb68ac9077da61b5f5c040f22f6849928ad7393b950"}, - {file = "jupyter_client-6.1.7.tar.gz", hash = "sha256:49e390b36fe4b4226724704ea28d9fb903f1a3601b6882ce3105221cd09377a1"}, + {file = "jupyter_client-6.1.11-py3-none-any.whl", hash = "sha256:5eaaa41df449167ebba5e1cf6ca9b31f7fd4f71625069836e2e4fee07fe3cb13"}, + {file = "jupyter_client-6.1.11.tar.gz", hash = 
"sha256:649ca3aca1e28f27d73ef15868a7c7f10d6e70f761514582accec3ca6bb13085"}, ] jupyter-core = [ - {file = "jupyter_core-4.6.3-py2.py3-none-any.whl", hash = "sha256:a4ee613c060fe5697d913416fc9d553599c05e4492d58fac1192c9a6844abb21"}, - {file = "jupyter_core-4.6.3.tar.gz", hash = "sha256:394fd5dd787e7c8861741880bdf8a00ce39f95de5d18e579c74b882522219e7e"}, + {file = "jupyter_core-4.7.1-py3-none-any.whl", hash = "sha256:8c6c0cac5c1b563622ad49321d5ec47017bd18b94facb381c6973a0486395f8e"}, + {file = "jupyter_core-4.7.1.tar.gz", hash = "sha256:79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4"}, ] jupyterlab = [ - {file = "jupyterlab-2.2.8-py3-none-any.whl", hash = "sha256:95d0509557881cfa8a5fcdf225f2fca46faf1bc52fc56a28e0b72fcc594c90ab"}, - {file = "jupyterlab-2.2.8.tar.gz", hash = "sha256:c8377bee30504919c1e79949f9fe35443ab7f5c4be622c95307e8108410c8b8c"}, + {file = "jupyterlab-2.2.9-py3-none-any.whl", hash = "sha256:59af02c26a15ec2d2862a15bc72e41ae304b406a0b0d3f4f705eeb7caf91902b"}, + {file = "jupyterlab-2.2.9.tar.gz", hash = "sha256:3be8f8edea173753dd838c1b6d3bbcb6f5c801121f824a477025c1b6a1d33dc6"}, ] jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, @@ -2194,32 +2420,42 @@ jupyterlab-server = [ {file = "jupyterlab_server-1.2.0-py3-none-any.whl", hash = "sha256:55d256077bf13e5bc9e8fbd5aac51bef82f6315111cec6b712b9a5ededbba924"}, {file = "jupyterlab_server-1.2.0.tar.gz", hash = "sha256:5431d9dde96659364b7cc877693d5d21e7b80cea7ae3959ecc2b87518e5f5d8c"}, ] -lazy-object-proxy = [ - {file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"}, - {file = 
"lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"}, +kiwisolver = [ + {file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-win32.whl", hash = "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454"}, + {file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl", hash 
= "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18"}, + {file = "kiwisolver-1.3.1-cp38-cp38-win32.whl", hash = "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81"}, + {file = "kiwisolver-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e"}, + {file = "kiwisolver-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54"}, + {file = "kiwisolver-1.3.1-cp39-cp39-win32.whl", hash = "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030"}, + {file = "kiwisolver-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6"}, + {file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"}, ] mako = [ - {file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"}, - {file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"}, + {file = "Mako-1.1.4.tar.gz", hash = "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab"}, ] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, @@ -2256,6 +2492,33 @@ markupsafe = [ {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] +matplotlib = [ + {file = "matplotlib-3.3.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:672960dd114e342b7c610bf32fb99d14227f29919894388b41553217457ba7ef"}, + {file = "matplotlib-3.3.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:7c155437ae4fd366e2700e2716564d1787700687443de46bcb895fe0f84b761d"}, + {file = "matplotlib-3.3.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a17f0a10604fac7627ec82820439e7db611722e80c408a726cd00d8c974c2fb3"}, + {file = "matplotlib-3.3.4-cp36-cp36m-win32.whl", hash = 
"sha256:215e2a30a2090221a9481db58b770ce56b8ef46f13224ae33afe221b14b24dc1"}, + {file = "matplotlib-3.3.4-cp36-cp36m-win_amd64.whl", hash = "sha256:348e6032f666ffd151b323342f9278b16b95d4a75dfacae84a11d2829a7816ae"}, + {file = "matplotlib-3.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:94bdd1d55c20e764d8aea9d471d2ae7a7b2c84445e0fa463f02e20f9730783e1"}, + {file = "matplotlib-3.3.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a1acb72f095f1d58ecc2538ed1b8bca0b57df313b13db36ed34b8cdf1868e674"}, + {file = "matplotlib-3.3.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:46b1a60a04e6d884f0250d5cc8dc7bd21a9a96c584a7acdaab44698a44710bab"}, + {file = "matplotlib-3.3.4-cp37-cp37m-win32.whl", hash = "sha256:ed4a9e6dcacba56b17a0a9ac22ae2c72a35b7f0ef0693aa68574f0b2df607a89"}, + {file = "matplotlib-3.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:c24c05f645aef776e8b8931cb81e0f1632d229b42b6d216e30836e2e145a2b40"}, + {file = "matplotlib-3.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7310e353a4a35477c7f032409966920197d7df3e757c7624fd842f3eeb307d3d"}, + {file = "matplotlib-3.3.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:451cc89cb33d6652c509fc6b588dc51c41d7246afdcc29b8624e256b7663ed1f"}, + {file = "matplotlib-3.3.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3d2eb9c1cc254d0ffa90bc96fde4b6005d09c2228f99dfd493a4219c1af99644"}, + {file = "matplotlib-3.3.4-cp38-cp38-win32.whl", hash = "sha256:e15fa23d844d54e7b3b7243afd53b7567ee71c721f592deb0727ee85e668f96a"}, + {file = "matplotlib-3.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:1de0bb6cbfe460725f0e97b88daa8643bcf9571c18ba90bb8e41432aaeca91d6"}, + {file = "matplotlib-3.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f44149a0ef5b4991aaef12a93b8e8d66d6412e762745fea1faa61d98524e0ba9"}, + {file = "matplotlib-3.3.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:746a1df55749629e26af7f977ea426817ca9370ad1569436608dc48d1069b87c"}, + {file = "matplotlib-3.3.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:5f571b92a536206f7958f7cb2d367ff6c9a1fa8229dc35020006e4cdd1ca0acd"}, + {file = "matplotlib-3.3.4-cp39-cp39-win32.whl", hash = "sha256:9265ae0fb35e29f9b8cc86c2ab0a2e3dcddc4dd9de4b85bf26c0f63fe5c1c2ca"}, + {file = "matplotlib-3.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:9a79e5dd7bb797aa611048f5b70588b23c5be05b63eefd8a0d152ac77c4243db"}, + {file = "matplotlib-3.3.4-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1e850163579a8936eede29fad41e202b25923a0a8d5ffd08ce50fc0a97dcdc93"}, + {file = "matplotlib-3.3.4-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:d738acfdfb65da34c91acbdb56abed46803db39af259b7f194dc96920360dbe4"}, + {file = "matplotlib-3.3.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa49571d8030ad0b9ac39708ee77bd2a22f87815e12bdee52ecaffece9313ed8"}, + {file = "matplotlib-3.3.4-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cf3a7e54eff792f0815dbbe9b85df2f13d739289c93d346925554f71d484be78"}, + {file = "matplotlib-3.3.4.tar.gz", hash = "sha256:3e477db76c22929e4c6876c44f88d790aacdf3c3f8f3a90cb1975c0bf37825b0"}, +] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -2288,97 +2551,100 @@ nb-black = [ {file = "nb_black-1.0.7.tar.gz", hash = "sha256:1ca52e3a46675f6a0a6d79ac73a1f8f951bef60f919eced56173e76ab1b6d62b"}, ] nbclient = [ - {file = "nbclient-0.5.0-py3-none-any.whl", hash = 
"sha256:8a6e27ff581cee50895f44c41936ce02369674e85e2ad58643d8d4a6c36771b0"}, - {file = "nbclient-0.5.0.tar.gz", hash = "sha256:8ad52d27ba144fca1402db014857e53c5a864a2f407be66ca9d74c3a56d6591d"}, + {file = "nbclient-0.5.1-py3-none-any.whl", hash = "sha256:4d6b116187c795c99b9dba13d46e764d596574b14c296d60670c8dfe454db364"}, + {file = "nbclient-0.5.1.tar.gz", hash = "sha256:01e2d726d16eaf2cde6db74a87e2451453547e8832d142f73f72fddcd4fe0250"}, ] nbconvert = [ - {file = "nbconvert-6.0.6-py3-none-any.whl", hash = "sha256:d8549f62e739a4d51f275c2932b1783ee5039dde07a2b71de70c0296a42c8394"}, - {file = "nbconvert-6.0.6.tar.gz", hash = "sha256:68335477288aab8a9b9ec03002dce59b4eb1ca967116741ec218a4e78c129efd"}, + {file = "nbconvert-6.0.7-py3-none-any.whl", hash = "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d"}, + {file = "nbconvert-6.0.7.tar.gz", hash = "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"}, ] nbformat = [ - {file = "nbformat-5.0.7-py3-none-any.whl", hash = "sha256:ea55c9b817855e2dfcd3f66d74857342612a60b1f09653440f4a5845e6e3523f"}, - {file = "nbformat-5.0.7.tar.gz", hash = "sha256:54d4d6354835a936bad7e8182dcd003ca3dc0cedfee5a306090e04854343b340"}, + {file = "nbformat-5.1.2-py3-none-any.whl", hash = "sha256:3949fdc8f5fa0b1afca16fb307546e78494fa7a7bceff880df8168eafda0e7ac"}, + {file = "nbformat-5.1.2.tar.gz", hash = "sha256:1d223e64a18bfa7cdf2db2e9ba8a818312fc2a0701d2e910b58df66809385a56"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.4.1-py3-none-any.whl", hash = "sha256:a4487c4f49f2d11a7bb89a512a6886b6a5045f47097f49815b2851aaa8599cf0"}, - {file = "nest_asyncio-1.4.1.tar.gz", hash = "sha256:b86c3193abda5b2eeccf8c79894bc71c680369a178f4b068514ac00720b14e01"}, + {file = "nest_asyncio-1.5.1-py3-none-any.whl", hash = "sha256:76d6e972265063fe92a90b9cc4fb82616e07d586b346ed9d2c89a4187acea39c"}, + {file = "nest_asyncio-1.5.1.tar.gz", hash = "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa"}, ] nodeenv = [ {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, {file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"}, ] notebook = [ - {file = "notebook-6.1.4-py3-none-any.whl", hash = "sha256:07b6e8b8a61aa2f780fe9a97430470485bc71262bc5cae8521f1441b910d2c88"}, - {file = "notebook-6.1.4.tar.gz", hash = "sha256:687d01f963ea20360c0b904ee7a37c3d8cda553858c8d6e33fd0afd13e89de32"}, + {file = "notebook-6.2.0-py3-none-any.whl", hash = "sha256:25ad93c982b623441b491e693ef400598d1a46cdf11b8c9c0b3be6c61ebbb6cd"}, + {file = "notebook-6.2.0.tar.gz", hash = "sha256:0464b28e18e7a06cec37e6177546c2322739be07962dd13bf712bcb88361f013"}, ] nox = [ - {file = "nox-2020.8.22-py3-none-any.whl", hash = "sha256:55f8cab16bcfaaea08b141c83bf2b7c779e943518d0de6cd9c38cd8da95d11ea"}, - {file = "nox-2020.8.22.tar.gz", hash = "sha256:efa5adcf1134012f96bcd0a496ccebd4c9e9da53a831888a2a779462440eebcf"}, + {file = "nox-2020.12.31-py3-none-any.whl", hash = "sha256:f179d6990f7a0a9cebad01b9ecea34556518b8d3340dfcafdc1d85f2c1a37ea0"}, + {file = "nox-2020.12.31.tar.gz", hash = "sha256:58a662070767ed4786beb46ce3a789fca6f1e689ed3ac15c73c4d0094e4f9dc4"}, ] numpy = [ - {file = "numpy-1.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b594f76771bc7fc8a044c5ba303427ee67c17a09b36e1fa32bde82f5c419d17a"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e6ddbdc5113628f15de7e4911c02aed74a4ccff531842c583e5032f6e5a179bd"}, - {file 
= "numpy-1.19.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3733640466733441295b0d6d3dcbf8e1ffa7e897d4d82903169529fd3386919a"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:4339741994c775396e1a274dba3609c69ab0f16056c1077f18979bec2a2c2e6e"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c6646314291d8f5ea900a7ea9c4261f834b5b62159ba2abe3836f4fa6705526"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:7118f0a9f2f617f921ec7d278d981244ba83c85eea197be7c5a4f84af80a9c3c"}, - {file = "numpy-1.19.2-cp36-cp36m-win32.whl", hash = "sha256:9a3001248b9231ed73894c773142658bab914645261275f675d86c290c37f66d"}, - {file = "numpy-1.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:967c92435f0b3ba37a4257c48b8715b76741410467e2bdb1097e8391fccfae15"}, - {file = "numpy-1.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d526fa58ae4aead839161535d59ea9565863bb0b0bdb3cc63214613fb16aced4"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:eb25c381d168daf351147713f49c626030dcff7a393d5caa62515d415a6071d8"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:62139af94728d22350a571b7c82795b9d59be77fc162414ada6c8b6a10ef5d02"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0c66da1d202c52051625e55a249da35b31f65a81cb56e4c69af0dfb8fb0125bf"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:2117536e968abb7357d34d754e3733b0d7113d4c9f1d921f21a3d96dec5ff716"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54045b198aebf41bf6bf4088012777c1d11703bf74461d70cd350c0af2182e45"}, - {file = "numpy-1.19.2-cp37-cp37m-win32.whl", hash = "sha256:aba1d5daf1144b956bc87ffb87966791f5e9f3e1f6fab3d7f581db1f5b598f7a"}, - {file = "numpy-1.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:addaa551b298052c16885fc70408d3848d4e2e7352de4e7a1e13e691abc734c1"}, - {file = "numpy-1.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:58d66a6b3b55178a1f8a5fe98df26ace76260a70de694d99577ddeab7eaa9a9d"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:59f3d687faea7a4f7f93bd9665e5b102f32f3fa28514f15b126f099b7997203d"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cebd4f4e64cfe87f2039e4725781f6326a61f095bc77b3716502bed812b385a9"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c35a01777f81e7333bcf276b605f39c872e28295441c265cd0c860f4b40148c1"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d7ac33585e1f09e7345aa902c281bd777fdb792432d27fca857f39b70e5dd31c"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:04c7d4ebc5ff93d9822075ddb1751ff392a4375e5885299445fcebf877f179d5"}, - {file = "numpy-1.19.2-cp38-cp38-win32.whl", hash = "sha256:51ee93e1fac3fe08ef54ff1c7f329db64d8a9c5557e6c8e908be9497ac76374b"}, - {file = "numpy-1.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:1669ec8e42f169ff715a904c9b2105b6640f3f2a4c4c2cb4920ae8b2785dac65"}, - {file = "numpy-1.19.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:0bfd85053d1e9f60234f28f63d4a5147ada7f432943c113a11afcf3e65d9d4c8"}, - {file = "numpy-1.19.2.zip", hash = "sha256:0d310730e1e793527065ad7dde736197b705d0e4c9999775f212b03c44a8484c"}, + {file = "numpy-1.20.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:89bd70c9ad540febe6c28451ba225eb4e49d27f64728357f512c808002325dfa"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:1264c66129f5ef63187649dd43f1ca59532e8c098723643336a85131c0dcce3f"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e9c5fd330d2fedf06051bafb996252de9b032fcb2ec03eefc9a543e56efa66d4"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:db5e69d08756a2fa75a42b4e433880b6187768fe1bc73d21819def893e5128c6"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:1abc02e30e3efd81a4571e00f8e62bf42e343c76698e0a3e11d9c2b3ee0d77a7"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5ae765dd29c71a555f8102281f6fb15a3f4dbd35f6e7daf36af9df6d9dd716a5"}, + {file = "numpy-1.20.0-cp37-cp37m-win32.whl", hash = "sha256:b51b9ef0624f4b01b846c981034c10d2e30db33f9f8be71e992f3900741f6f77"}, + {file = "numpy-1.20.0-cp37-cp37m-win_amd64.whl", hash = "sha256:afeee581b50df20ef07b736e62ca612858f1fcdba96651d26ab44e3d567a4e6e"}, + {file = "numpy-1.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2bf0e68c92ef077fe766e53f8937d8ac341bdbca68ec128ae049b7d5c34e3206"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2445a96fbae23a4109c61be0f0af0f3bc273905dc5687a710850c1dfde0fc994"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:33edfc0eb229f86f539493917b34035054313a11afbed48404aaf9f86bf4b0f6"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:894aaee60043a98b03f0ad992c810f62e3a15f98a701e1c0f58a4f4a0df13429"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b66a6c15d793eda7cdad986e737775aa31b9306d588c14dd0277d2dda5546150"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:eee454d3aa3955d0c0069a0f265fea47f1e1384c35a110a95efed358eb6e1562"}, + {file = "numpy-1.20.0-cp38-cp38-win32.whl", hash = "sha256:abdfa075e293d73638ece434708aa60b510dc6e70d805f57f481a0f550b25a9e"}, + {file = "numpy-1.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:f1e9424e9aa3834ea27cc12f9c6ea8ace5da18ee60a720bb3a85b2f733f41782"}, + {file = "numpy-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cb257bb0c0a3176c32782a63cfab2eace7eabfa2a3b2dfd85a13700617ccaf28"}, + {file = "numpy-1.20.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:cf5d9dcbdbe523fa665c5309cce5f144648d94a7fddbf5a40f8e0d5c9f5b596d"}, + {file = "numpy-1.20.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:93c2abea7bb69f47029b84ceac30ab46dfcfdb99b671ad850a333ff794a765e4"}, + {file = "numpy-1.20.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d28a54afcf46f1f9ebd163e49ad6b49087f22986fefd01a23ca0c1cdda25ca6"}, + {file = "numpy-1.20.0-cp39-cp39-win32.whl", hash = "sha256:d1bc331e1706fd1809a1bc8a31205329e5b30cf5ba50461c624da267e99f6ae6"}, + {file = "numpy-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3db646af9f6a145f0c57202f4b55d4a33f975e395e78fb7b394644c17c1a3a6"}, + {file = "numpy-1.20.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:4d592264d2a4f368afbb4288b5ceb646d4cbaf559c0249c096fbb0a149806b90"}, + {file = "numpy-1.20.0.zip", hash = "sha256:3d8233c03f116d068d5365fed4477f2947c7229582dad81e5953088989294cec"}, ] packaging = [ - {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, - {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = 
"packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] pandas = [ - {file = "pandas-1.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eb0ac2fd04428f18b547716f70c699a7cc9c65a6947ed8c7e688d96eb91e3db8"}, - {file = "pandas-1.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:02ec9f5f0b7df7227931a884569ef0b6d32d76789c84bcac1a719dafd1f912e8"}, - {file = "pandas-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1edf6c254d2d138188e9987159978ee70e23362fe9197f3f100844a197f7e1e4"}, - {file = "pandas-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:b821f239514a9ce46dd1cd6c9298a03ed58d0235d414ea264aacc1b14916bbe4"}, - {file = "pandas-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ab6ea0f3116f408a8a59cd50158bfd19d2a024f4e221f14ab1bcd2da4f0c6fdf"}, - {file = "pandas-1.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:474fa53e3b2f3a543cbca81f7457bd1f44e7eb1be7171067636307e21b624e9c"}, - {file = "pandas-1.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9e135ce9929cd0f0ba24f0545936af17ba935f844d4c3a2b979354a73c9440e0"}, - {file = "pandas-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:188cdfbf8399bc144fa95040536b5ce3429d2eda6c9c8b238c987af7df9f128c"}, - {file = "pandas-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:08783a33989a6747317766b75be30a594a9764b9f145bb4bcc06e337930d9807"}, - {file = "pandas-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:f7008ec22b92d771b145150978d930a28fab8da3a10131b01bbf39574acdad0b"}, - {file = "pandas-1.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59df9f0276aa4854d8bff28c5e5aeb74d9c6bb4d9f55d272b7124a7df40e47d0"}, - {file = "pandas-1.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:eeb64c5b3d4f2ea072ca8afdeb2b946cd681a863382ca79734f1b520b8d2fa26"}, - {file = "pandas-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c9235b37489168ed6b173551c816b50aa89f03c24a8549a8b4d47d8dc79bfb1e"}, - {file = "pandas-1.1.2-cp38-cp38-win32.whl", hash = "sha256:0936991228241db937e87f82ec552a33888dd04a2e0d5a2fa3c689f92fab09e0"}, - {file = "pandas-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:026d764d0b86ee53183aa4c0b90774b6146123eeada4e24946d7d24290777be1"}, - {file = "pandas-1.1.2.tar.gz", hash = "sha256:b64ffd87a2cfd31b40acd4b92cb72ea9a52a48165aec4c140e78fd69c45d1444"}, + {file = "pandas-1.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:50e6c0a17ef7f831b5565fd0394dbf9bfd5d615ee4dd4bb60a3d8c9d2e872323"}, + {file = "pandas-1.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:324e60bea729cf3b55c1bf9e88fe8b9932c26f8669d13b928e3c96b3a1453dff"}, + {file = "pandas-1.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:37443199f451f8badfe0add666e43cdb817c59fa36bceedafd9c543a42f236ca"}, + {file = "pandas-1.2.1-cp37-cp37m-win32.whl", hash = "sha256:23ac77a3a222d9304cb2a7934bb7b4805ff43d513add7a42d1a22dc7df14edd2"}, + {file = "pandas-1.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:496fcc29321e9a804d56d5aa5d7ec1320edfd1898eee2f451aa70171cf1d5a29"}, + {file = "pandas-1.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:30e9e8bc8c5c17c03d943e8d6f778313efff59e413b8dbdd8214c2ed9aa165f6"}, + {file = "pandas-1.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:055647e7f4c5e66ba92c2a7dcae6c2c57898b605a3fb007745df61cc4015937f"}, + {file = "pandas-1.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9d45f58b03af1fea4b48e44aa38a819a33dccb9821ef9e1d68f529995f8a632f"}, + {file = "pandas-1.2.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b26e2dabda73d347c7af3e6fed58483161c7b87a886a4e06d76ccfe55a044aa9"}, 
+ {file = "pandas-1.2.1-cp38-cp38-win32.whl", hash = "sha256:47ec0808a8357ab3890ce0eca39a63f79dcf941e2e7f494470fe1c9ec43f6091"}, + {file = "pandas-1.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:57d5c7ac62925a8d2ab43ea442b297a56cc8452015e71e24f4aa7e4ed6be3d77"}, + {file = "pandas-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d7cca42dba13bfee369e2944ae31f6549a55831cba3117e17636955176004088"}, + {file = "pandas-1.2.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cfd237865d878da9b65cfee883da5e0067f5e2ff839e459466fb90565a77bda3"}, + {file = "pandas-1.2.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:050ed2c9d825ef36738e018454e6d055c63d947c1d52010fbadd7584f09df5db"}, + {file = "pandas-1.2.1-cp39-cp39-win32.whl", hash = "sha256:fe7de6fed43e7d086e3d947651ec89e55ddf00102f9dd5758763d56d182f0564"}, + {file = "pandas-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:2de012a36cc507debd9c3351b4d757f828d5a784a5fc4e6766eafc2b56e4b0f5"}, + {file = "pandas-1.2.1.tar.gz", hash = "sha256:5527c5475d955c0bc9689c56865aaa2a7b13c504d6c44f0aadbf57b565af5ebd"}, ] pandocfilters = [ - {file = "pandocfilters-1.4.2.tar.gz", hash = "sha256:b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9"}, + {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, ] parso = [ - {file = "parso-0.7.1-py2.py3-none-any.whl", hash = "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea"}, - {file = "parso-0.7.1.tar.gz", hash = "sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"}, + {file = "parso-0.8.1-py2.py3-none-any.whl", hash = "sha256:15b00182f472319383252c18d5913b69269590616c947747bc50bf4ac768f410"}, + {file = "parso-0.8.1.tar.gz", hash = "sha256:8519430ad07087d4c997fda3a7918f7cfa27cb58972a8c89c2a0295a1c940e9e"}, ] pathspec = [ - {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, - {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, +] +patsy = [ + {file = "patsy-0.5.1-py2.py3-none-any.whl", hash = "sha256:5465be1c0e670c3a965355ec09e9a502bf2c4cbe4875e8528b0221190a8a5d40"}, + {file = "patsy-0.5.1.tar.gz", hash = "sha256:f115cec4201e1465cd58b9866b0b0e7b941caafec129869057405bfe5b5e3991"}, ] pbr = [ - {file = "pbr-5.5.0-py2.py3-none-any.whl", hash = "sha256:5adc0f9fc64319d8df5ca1e4e06eea674c26b80e6f00c530b18ce6a6592ead15"}, - {file = "pbr-5.5.0.tar.gz", hash = "sha256:14bfd98f51c78a3dd22a1ef45cf194ad79eee4a19e8e1a0d5c7f8e81ffe182ea"}, + {file = "pbr-5.5.1-py2.py3-none-any.whl", hash = "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"}, + {file = "pbr-5.5.1.tar.gz", hash = "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9"}, ] pep8-naming = [ {file = "pep8-naming-0.9.1.tar.gz", hash = "sha256:a33d38177056321a167decd6ba70b890856ba5025f0a8eca6a3eda607da93caf"}, @@ -2392,21 +2658,55 @@ pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +pillow = [ + {file = 
"Pillow-8.1.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:d355502dce85ade85a2511b40b4c61a128902f246504f7de29bbeec1ae27933a"}, + {file = "Pillow-8.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:93a473b53cc6e0b3ce6bf51b1b95b7b1e7e6084be3a07e40f79b42e83503fbf2"}, + {file = "Pillow-8.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2353834b2c49b95e1313fb34edf18fca4d57446675d05298bb694bca4b194174"}, + {file = "Pillow-8.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:1d208e670abfeb41b6143537a681299ef86e92d2a3dac299d3cd6830d5c7bded"}, + {file = "Pillow-8.1.0-cp36-cp36m-win32.whl", hash = "sha256:dd9eef866c70d2cbbea1ae58134eaffda0d4bfea403025f4db6859724b18ab3d"}, + {file = "Pillow-8.1.0-cp36-cp36m-win_amd64.whl", hash = "sha256:b09e10ec453de97f9a23a5aa5e30b334195e8d2ddd1ce76cc32e52ba63c8b31d"}, + {file = "Pillow-8.1.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:b02a0b9f332086657852b1f7cb380f6a42403a6d9c42a4c34a561aa4530d5234"}, + {file = "Pillow-8.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ca20739e303254287138234485579b28cb0d524401f83d5129b5ff9d606cb0a8"}, + {file = "Pillow-8.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:604815c55fd92e735f9738f65dabf4edc3e79f88541c221d292faec1904a4b17"}, + {file = "Pillow-8.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cf6e33d92b1526190a1de904df21663c46a456758c0424e4f947ae9aa6088bf7"}, + {file = "Pillow-8.1.0-cp37-cp37m-win32.whl", hash = "sha256:47c0d93ee9c8b181f353dbead6530b26980fe4f5485aa18be8f1fd3c3cbc685e"}, + {file = "Pillow-8.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:96d4dc103d1a0fa6d47c6c55a47de5f5dafd5ef0114fa10c85a1fd8e0216284b"}, + {file = "Pillow-8.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:7916cbc94f1c6b1301ac04510d0881b9e9feb20ae34094d3615a8a7c3db0dcc0"}, + {file = "Pillow-8.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3de6b2ee4f78c6b3d89d184ade5d8fa68af0848f9b6b6da2b9ab7943ec46971a"}, + {file = "Pillow-8.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cdbbe7dff4a677fb555a54f9bc0450f2a21a93c5ba2b44e09e54fcb72d2bd13d"}, + {file = "Pillow-8.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f50e7a98b0453f39000619d845be8b06e611e56ee6e8186f7f60c3b1e2f0feae"}, + {file = "Pillow-8.1.0-cp38-cp38-win32.whl", hash = "sha256:cb192176b477d49b0a327b2a5a4979552b7a58cd42037034316b8018ac3ebb59"}, + {file = "Pillow-8.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:6c5275bd82711cd3dcd0af8ce0bb99113ae8911fc2952805f1d012de7d600a4c"}, + {file = "Pillow-8.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:165c88bc9d8dba670110c689e3cc5c71dbe4bfb984ffa7cbebf1fac9554071d6"}, + {file = "Pillow-8.1.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:5e2fe3bb2363b862671eba632537cd3a823847db4d98be95690b7e382f3d6378"}, + {file = "Pillow-8.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7612520e5e1a371d77e1d1ca3a3ee6227eef00d0a9cddb4ef7ecb0b7396eddf7"}, + {file = "Pillow-8.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d673c4990acd016229a5c1c4ee8a9e6d8f481b27ade5fc3d95938697fa443ce0"}, + {file = "Pillow-8.1.0-cp39-cp39-win32.whl", hash = "sha256:dc577f4cfdda354db3ae37a572428a90ffdbe4e51eda7849bf442fb803f09c9b"}, + {file = "Pillow-8.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:22d070ca2e60c99929ef274cfced04294d2368193e935c5d6febfd8b601bf865"}, + {file = "Pillow-8.1.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:a3d3e086474ef12ef13d42e5f9b7bbf09d39cf6bd4940f982263d6954b13f6a9"}, + {file = "Pillow-8.1.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = 
"sha256:731ca5aabe9085160cf68b2dbef95fc1991015bc0a3a6ea46a371ab88f3d0913"}, + {file = "Pillow-8.1.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:bba80df38cfc17f490ec651c73bb37cd896bc2400cfba27d078c2135223c1206"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c3d911614b008e8a576b8e5303e3db29224b455d3d66d1b2848ba6ca83f9ece9"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:39725acf2d2e9c17356e6835dccebe7a697db55f25a09207e38b835d5e1bc032"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:81c3fa9a75d9f1afafdb916d5995633f319db09bd773cb56b8e39f1e98d90820"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:b6f00ad5ebe846cc91763b1d0c6d30a8042e02b2316e27b05de04fa6ec831ec5"}, + {file = "Pillow-8.1.0.tar.gz", hash = "sha256:887668e792b7edbfb1d3c9d8b5d8c859269a0f0eba4dda562adb95500f60dbba"}, +] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] pre-commit = [ - {file = "pre_commit-2.7.1-py2.py3-none-any.whl", hash = "sha256:810aef2a2ba4f31eed1941fc270e72696a1ad5590b9751839c90807d0fff6b9a"}, - {file = "pre_commit-2.7.1.tar.gz", hash = "sha256:c54fd3e574565fe128ecc5e7d2f91279772ddb03f8729645fa812fe809084a70"}, + {file = "pre_commit-2.10.0-py2.py3-none-any.whl", hash = "sha256:391ed331fdd0a21d0be48c1b9919921e9d372dfd60f6dc77b8f01dd6b13161c1"}, + {file = "pre_commit-2.10.0.tar.gz", hash = "sha256:f413348d3a8464b77987e36ef6e02c3372dadb823edf0dfe6fb0c3dc2f378ef9"}, ] prometheus-client = [ - {file = "prometheus_client-0.8.0-py2.py3-none-any.whl", hash = "sha256:983c7ac4b47478720db338f1491ef67a100b474e3bc7dafcbaefb7d0b8f9b01c"}, - {file = "prometheus_client-0.8.0.tar.gz", hash = "sha256:c6e6b706833a6bd1fd51711299edee907857be10ece535126a158f911ee80915"}, + {file = "prometheus_client-0.9.0-py2.py3-none-any.whl", hash = "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35"}, + {file = "prometheus_client-0.9.0.tar.gz", hash = "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.7-py3-none-any.whl", hash = "sha256:83074ee28ad4ba6af190593d4d4c607ff525272a504eb159199b6dd9f950c950"}, - {file = "prompt_toolkit-3.0.7.tar.gz", hash = "sha256:822f4605f28f7d2ba6b0b09a31e25e140871e96364d1d377667b547bb3bf4489"}, + {file = "prompt_toolkit-3.0.14-py3-none-any.whl", hash = "sha256:c96b30925025a7635471dc083ffb6af0cc67482a00611bd81aeaeeeb7e5a5e12"}, + {file = "prompt_toolkit-3.0.14.tar.gz", hash = "sha256:7e966747c18ececaec785699626b771c1ba8344c8d31759a1915d6b12fad6525"}, ] psycopg2 = [ {file = "psycopg2-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:068115e13c70dc5982dfc00c5d70437fe37c014c808acce119b5448361c03725"}, @@ -2421,15 +2721,17 @@ psycopg2 = [ {file = "psycopg2-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:56fee7f818d032f802b8eed81ef0c1232b8b42390df189cab9cfa87573fe52c5"}, {file = "psycopg2-2.8.6-cp38-cp38-win32.whl", hash = "sha256:ad2fe8a37be669082e61fb001c185ffb58867fdbb3e7a6b0b0d2ffe232353a3e"}, {file = "psycopg2-2.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:56007a226b8e95aa980ada7abdea6b40b75ce62a433bd27cec7a8178d57f4051"}, + {file = "psycopg2-2.8.6-cp39-cp39-win32.whl", hash = "sha256:2c93d4d16933fea5bbacbe1aaf8fa8c1348740b2e50b3735d1b0bf8154cbf0f3"}, + {file = 
"psycopg2-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:d5062ae50b222da28253059880a871dc87e099c25cb68acf613d9d227413d6f7"}, {file = "psycopg2-2.8.6.tar.gz", hash = "sha256:fb23f6c71107c37fd667cb4ea363ddeb936b348bbd6449278eb92c189699f543"}, ] ptyprocess = [ - {file = "ptyprocess-0.6.0-py2.py3-none-any.whl", hash = "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"}, - {file = "ptyprocess-0.6.0.tar.gz", hash = "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"}, + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] py = [ - {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, - {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycodestyle = [ {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, @@ -2448,12 +2750,8 @@ pyflakes = [ {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, ] pygments = [ - {file = "Pygments-2.7.1-py3-none-any.whl", hash = "sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998"}, - {file = "Pygments-2.7.1.tar.gz", hash = "sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7"}, -] -pylint = [ - {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, - {file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"}, + {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, + {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -2463,24 +2761,28 @@ pyrsistent = [ {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-6.1.0-py3-none-any.whl", hash = "sha256:1cd09785c0a50f9af72220dd12aa78cfa49cbffc356c61eab009ca189e018a33"}, - {file = "pytest-6.1.0.tar.gz", hash = "sha256:d010e24666435b39a4cf48740b039885642b6c273a3f77be3e7e03554d2806b7"}, + {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, + {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, ] pytest-cov = [ - {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, - {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, + {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, + {file = 
"pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, ] pytest-env = [ {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, ] +pytest-mock = [ + {file = "pytest-mock-3.5.1.tar.gz", hash = "sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc"}, + {file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"}, +] +pytest-randomly = [ + {file = "pytest-randomly-3.5.0.tar.gz", hash = "sha256:440cec143fd9b0adeb072006c71e0294402a2bc2ccd08079c2341087ba4cf2d1"}, + {file = "pytest_randomly-3.5.0-py3-none-any.whl", hash = "sha256:9db10d160237f3f8ee60cef72e4cb9ea88d2893c9dd5c8aa334b060cdeb67c3a"}, +] python-dateutil = [ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, ] -python-dotenv = [ - {file = "python-dotenv-0.14.0.tar.gz", hash = "sha256:8c10c99a1b25d9a68058a1ad6f90381a62ba68230ca93966882a4dbc3bc9c33d"}, - {file = "python_dotenv-0.14.0-py2.py3-none-any.whl", hash = "sha256:c10863aee750ad720f4f43436565e4c1698798d763b63234fb5021b6c616e423"}, -] python-editor = [ {file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"}, {file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"}, @@ -2489,22 +2791,20 @@ python-editor = [ {file = "python_editor-1.0.4-py3.5.egg", hash = "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77"}, ] pytz = [ - {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"}, - {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, + {file = "pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4"}, + {file = "pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"}, ] pywin32 = [ - {file = "pywin32-228-cp27-cp27m-win32.whl", hash = "sha256:37dc9935f6a383cc744315ae0c2882ba1768d9b06700a70f35dc1ce73cd4ba9c"}, - {file = "pywin32-228-cp27-cp27m-win_amd64.whl", hash = "sha256:11cb6610efc2f078c9e6d8f5d0f957620c333f4b23466931a247fb945ed35e89"}, - {file = "pywin32-228-cp35-cp35m-win32.whl", hash = "sha256:1f45db18af5d36195447b2cffacd182fe2d296849ba0aecdab24d3852fbf3f80"}, - {file = "pywin32-228-cp35-cp35m-win_amd64.whl", hash = "sha256:6e38c44097a834a4707c1b63efa9c2435f5a42afabff634a17f563bc478dfcc8"}, - {file = "pywin32-228-cp36-cp36m-win32.whl", hash = "sha256:ec16d44b49b5f34e99eb97cf270806fdc560dff6f84d281eb2fcb89a014a56a9"}, - {file = "pywin32-228-cp36-cp36m-win_amd64.whl", hash = "sha256:a60d795c6590a5b6baeacd16c583d91cce8038f959bd80c53bd9a68f40130f2d"}, - {file = "pywin32-228-cp37-cp37m-win32.whl", hash = "sha256:af40887b6fc200eafe4d7742c48417529a8702dcc1a60bf89eee152d1d11209f"}, - {file = "pywin32-228-cp37-cp37m-win_amd64.whl", hash = "sha256:00eaf43dbd05ba6a9b0080c77e161e0b7a601f9a3f660727a952e40140537de7"}, - {file = "pywin32-228-cp38-cp38-win32.whl", hash = "sha256:fa6ba028909cfc64ce9e24bcf22f588b14871980d9787f1e2002c99af8f1850c"}, - {file = 
"pywin32-228-cp38-cp38-win_amd64.whl", hash = "sha256:9b3466083f8271e1a5eb0329f4e0d61925d46b40b195a33413e0905dccb285e8"}, - {file = "pywin32-228-cp39-cp39-win32.whl", hash = "sha256:ed74b72d8059a6606f64842e7917aeee99159ebd6b8d6261c518d002837be298"}, - {file = "pywin32-228-cp39-cp39-win_amd64.whl", hash = "sha256:8319bafdcd90b7202c50d6014efdfe4fde9311b3ff15fd6f893a45c0868de203"}, + {file = "pywin32-300-cp35-cp35m-win32.whl", hash = "sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"}, + {file = "pywin32-300-cp35-cp35m-win_amd64.whl", hash = "sha256:350c5644775736351b77ba68da09a39c760d75d2467ecec37bd3c36a94fbed64"}, + {file = "pywin32-300-cp36-cp36m-win32.whl", hash = "sha256:a3b4c48c852d4107e8a8ec980b76c94ce596ea66d60f7a697582ea9dce7e0db7"}, + {file = "pywin32-300-cp36-cp36m-win_amd64.whl", hash = "sha256:27a30b887afbf05a9cbb05e3ffd43104a9b71ce292f64a635389dbad0ed1cd85"}, + {file = "pywin32-300-cp37-cp37m-win32.whl", hash = "sha256:d7e8c7efc221f10d6400c19c32a031add1c4a58733298c09216f57b4fde110dc"}, + {file = "pywin32-300-cp37-cp37m-win_amd64.whl", hash = "sha256:8151e4d7a19262d6694162d6da85d99a16f8b908949797fd99c83a0bfaf5807d"}, + {file = "pywin32-300-cp38-cp38-win32.whl", hash = "sha256:fbb3b1b0fbd0b4fc2a3d1d81fe0783e30062c1abed1d17c32b7879d55858cfae"}, + {file = "pywin32-300-cp38-cp38-win_amd64.whl", hash = "sha256:60a8fa361091b2eea27f15718f8eb7f9297e8d51b54dbc4f55f3d238093d5190"}, + {file = "pywin32-300-cp39-cp39-win32.whl", hash = "sha256:638b68eea5cfc8def537e43e9554747f8dee786b090e47ead94bfdafdb0f2f50"}, + {file = "pywin32-300-cp39-cp39-win_amd64.whl", hash = "sha256:b1609ce9bd5c411b81f941b246d683d6508992093203d4eb7f278f4ed1085c3f"}, ] pywinpty = [ {file = "pywinpty-0.5.7-cp27-cp27m-win32.whl", hash = "sha256:b358cb552c0f6baf790de375fab96524a0498c9df83489b8c23f7f08795e966b"}, @@ -2519,101 +2819,186 @@ pywinpty = [ {file = "pywinpty-0.5.7.tar.gz", hash = "sha256:2d7e9c881638a72ffdca3f5417dd1563b60f603e1b43e5895674c2a1b01f95a0"}, ] pyyaml = [ - {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, - {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, - {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, - {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, - {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = 
"sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + {file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] pyzmq = [ - {file = "pyzmq-19.0.2-cp27-cp27m-macosx_10_9_intel.whl", hash = "sha256:59f1e54627483dcf61c663941d94c4af9bf4163aec334171686cdaee67974fe5"}, - {file = "pyzmq-19.0.2-cp27-cp27m-win32.whl", hash = "sha256:c36ffe1e5aa35a1af6a96640d723d0d211c5f48841735c2aa8d034204e87eb87"}, - {file = "pyzmq-19.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:0a422fc290d03958899743db091f8154958410fc76ce7ee0ceb66150f72c2c97"}, - {file = "pyzmq-19.0.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c20dd60b9428f532bc59f2ef6d3b1029a28fc790d408af82f871a7db03e722ff"}, - {file = "pyzmq-19.0.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d46fb17f5693244de83e434648b3dbb4f4b0fec88415d6cbab1c1452b6f2ae17"}, - {file = "pyzmq-19.0.2-cp35-cp35m-macosx_10_9_intel.whl", hash = 
"sha256:f1a25a61495b6f7bb986accc5b597a3541d9bd3ef0016f50be16dbb32025b302"}, - {file = "pyzmq-19.0.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ab0d01148d13854de716786ca73701012e07dff4dfbbd68c4e06d8888743526e"}, - {file = "pyzmq-19.0.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:720d2b6083498a9281eaee3f2927486e9fe02cd16d13a844f2e95217f243efea"}, - {file = "pyzmq-19.0.2-cp35-cp35m-win32.whl", hash = "sha256:29d51279060d0a70f551663bc592418bcad7f4be4eea7b324f6dd81de05cb4c1"}, - {file = "pyzmq-19.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:5120c64646e75f6db20cc16b9a94203926ead5d633de9feba4f137004241221d"}, - {file = "pyzmq-19.0.2-cp36-cp36m-macosx_10_9_intel.whl", hash = "sha256:8a6ada5a3f719bf46a04ba38595073df8d6b067316c011180102ba2a1925f5b5"}, - {file = "pyzmq-19.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fa411b1d8f371d3a49d31b0789eb6da2537dadbb2aef74a43aa99a78195c3f76"}, - {file = "pyzmq-19.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:00dca814469436455399660247d74045172955459c0bd49b54a540ce4d652185"}, - {file = "pyzmq-19.0.2-cp36-cp36m-win32.whl", hash = "sha256:046b92e860914e39612e84fa760fc3f16054d268c11e0e25dcb011fb1bc6a075"}, - {file = "pyzmq-19.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99cc0e339a731c6a34109e5c4072aaa06d8e32c0b93dc2c2d90345dd45fa196c"}, - {file = "pyzmq-19.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e36f12f503511d72d9bdfae11cadbadca22ff632ff67c1b5459f69756a029c19"}, - {file = "pyzmq-19.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c40fbb2b9933369e994b837ee72193d6a4c35dfb9a7c573257ef7ff28961272c"}, - {file = "pyzmq-19.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5d9fc809aa8d636e757e4ced2302569d6e60e9b9c26114a83f0d9d6519c40493"}, - {file = "pyzmq-19.0.2-cp37-cp37m-win32.whl", hash = "sha256:3fa6debf4bf9412e59353defad1f8035a1e68b66095a94ead8f7a61ae90b2675"}, - {file = "pyzmq-19.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:73483a2caaa0264ac717af33d6fb3f143d8379e60a422730ee8d010526ce1913"}, - {file = "pyzmq-19.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36ab114021c0cab1a423fe6689355e8f813979f2c750968833b318c1fa10a0fd"}, - {file = "pyzmq-19.0.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8b66b94fe6243d2d1d89bca336b2424399aac57932858b9a30309803ffc28112"}, - {file = "pyzmq-19.0.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:654d3e06a4edc566b416c10293064732516cf8871a4522e0a2ba00cc2a2e600c"}, - {file = "pyzmq-19.0.2-cp38-cp38-win32.whl", hash = "sha256:276ad604bffd70992a386a84bea34883e696a6b22e7378053e5d3227321d9702"}, - {file = "pyzmq-19.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:09d24a80ccb8cbda1af6ed8eb26b005b6743e58e9290566d2a6841f4e31fa8e0"}, - {file = "pyzmq-19.0.2-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:c1a31cd42905b405530e92bdb70a8a56f048c8a371728b8acf9d746ecd4482c0"}, - {file = "pyzmq-19.0.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a7e7f930039ee0c4c26e4dfee015f20bd6919cd8b97c9cd7afbde2923a5167b6"}, - {file = "pyzmq-19.0.2.tar.gz", hash = "sha256:296540a065c8c21b26d63e3cea2d1d57902373b16e4256afe46422691903a438"}, + {file = "pyzmq-22.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c2a8d70fe2a321a83d274970481eb244bff027b58511e943ef564721530ba786"}, + {file = "pyzmq-22.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b68033181dc2e622bb5baa9b16d5933303779a03dc89860f4c44f629426d802c"}, + {file = "pyzmq-22.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9bae89912cac9f03d41adb66981f6e753cfd4e451937b2cd435d732fd4ccb1a3"}, + {file = 
"pyzmq-22.0.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:75b68890219231bd60556a1c6e0d2dc05fa1b179a26c876442c83a0d77958bc9"}, + {file = "pyzmq-22.0.2-cp36-cp36m-win32.whl", hash = "sha256:c6b1d235a08f2c42480cb9a0a5cd2a29c391052d8bc8f43db86aa15387734a33"}, + {file = "pyzmq-22.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f3ad3f77ed6a3cf31f61170fc1733afd83a4cf8e02edde0762d4e630bce2a97e"}, + {file = "pyzmq-22.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:490a9fe5509b09369722b18b85ef494abdf7c51cb1c9484cf83c3921961c2038"}, + {file = "pyzmq-22.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:303b8ebafce9906fc1e8eb35734b9dba4786ca3da7cdc88e04a8997dde2372d3"}, + {file = "pyzmq-22.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1ffb81b08bcaaac30ba913adef686ff41b257252e96fca32497029fdc3962ff0"}, + {file = "pyzmq-22.0.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:75fa832c79ce30a23cd44a4e89224c651ef6bf5144b842ad066246e914b92233"}, + {file = "pyzmq-22.0.2-cp37-cp37m-win32.whl", hash = "sha256:d77f6eb839097e4bce96fcac7e05e33b677efe0385bd0ab6c2a9ea818ed7e8f9"}, + {file = "pyzmq-22.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5a565af3729b2bf7c2ce1d563084d0cd90a312290ba5e571a0c3ec770ea8a287"}, + {file = "pyzmq-22.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ff236d8653f8bb74198223c7af77b9378714f411d6d95255d97c2d69bf991b20"}, + {file = "pyzmq-22.0.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:37beae88d6cf102419bb0ec79acb19c062dcea6765b57cf2b265dac5542bcdad"}, + {file = "pyzmq-22.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:bc9f2c26485dc76520084ee8d76f18171cc89f24f801bed8402302ee99dbbcd9"}, + {file = "pyzmq-22.0.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0b32bd5e7346e534fddb57eab309933ff6b3b177c0106b908b6193dfa75fdabe"}, + {file = "pyzmq-22.0.2-cp38-cp38-win32.whl", hash = "sha256:58a074afa254a53872202e92594b59c0ba8cda62effc6437e34ae7048559dd38"}, + {file = "pyzmq-22.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:66d1190eec0a78bd07d39d1615b7923190ed1ba8aa04742d963b09bc66628681"}, + {file = "pyzmq-22.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:013e1343b41aaeb482f40605f3fadcfeb841706039625d7b30d12ae8fa0d3cd0"}, + {file = "pyzmq-22.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d66724bf0d423aa18c9ea43a1bf24ed5c1d143f00bdace7c1b7fc3034f188cc9"}, + {file = "pyzmq-22.0.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:86cb0982b02b4fc2fbd4a65155289e0e4e5015982dbe2db14f8856c303cffa08"}, + {file = "pyzmq-22.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7b6c855c562d1c1bf7a1ba72c2617c8298e0fa1b1c08dc8d60e225031567ad9e"}, + {file = "pyzmq-22.0.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:034f5b9e4ff0bcc67e49fe8f55a1b209ea5761c8fd00c246195c8d0cb6ce096d"}, + {file = "pyzmq-22.0.2-cp39-cp39-win32.whl", hash = "sha256:849444c1699c244d5770d3a684c51f024e95c538f71dd3d1ff423a91745bab7f"}, + {file = "pyzmq-22.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:506d4716ca6e5798345038e75adcb05b4118112a36700941967925285637198b"}, + {file = "pyzmq-22.0.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:888d850d4b7e1426d210e901bd93075991b36fe0e2ae2547ce5c18b96df95250"}, + {file = "pyzmq-22.0.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:03c001be8c3817d5721137660ed21d90f6175002f0e583306079c791b1d9a855"}, + {file = "pyzmq-22.0.2-pp36-pypy36_pp73-win32.whl", hash = "sha256:3f4e6574d2589e3e22514a3669e86a7bf18a95d3c3ae65733fa6a0a769ec4c9d"}, + {file = "pyzmq-22.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:35c8c5c8160f0f0fc6d4588037243b668c3f20d981c1b8e7b5d9c33f8eeb7eb6"}, + {file = "pyzmq-22.0.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:841e9563ce9bd33fe9f227ec680ac033e9f1060977d613568c1dcbff09e74cc9"}, + {file = "pyzmq-22.0.2-pp37-pypy37_pp73-win32.whl", hash = "sha256:cc814880ba27f2ea8cea48ff3b480076266d4dd9c3fe29ef6e5a0a807639abe7"}, + {file = "pyzmq-22.0.2.tar.gz", hash = "sha256:d7b82a959e5e22d492f4f5a1e650e909a6c8c76ede178f538313ddb9d1e92963"}, ] regex = [ - {file = "regex-2020.9.27-cp27-cp27m-win32.whl", hash = "sha256:d23a18037313714fb3bb5a94434d3151ee4300bae631894b1ac08111abeaa4a3"}, - {file = "regex-2020.9.27-cp27-cp27m-win_amd64.whl", hash = "sha256:84e9407db1b2eb368b7ecc283121b5e592c9aaedbe8c78b1a2f1102eb2e21d19"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5f18875ac23d9aa2f060838e8b79093e8bb2313dbaaa9f54c6d8e52a5df097be"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ae91972f8ac958039920ef6e8769277c084971a142ce2b660691793ae44aae6b"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9a02d0ae31d35e1ec12a4ea4d4cca990800f66a917d0fb997b20fbc13f5321fc"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ebbe29186a3d9b0c591e71b7393f1ae08c83cb2d8e517d2a822b8f7ec99dfd8b"}, - {file = "regex-2020.9.27-cp36-cp36m-win32.whl", hash = "sha256:4707f3695b34335afdfb09be3802c87fa0bc27030471dbc082f815f23688bc63"}, - {file = "regex-2020.9.27-cp36-cp36m-win_amd64.whl", hash = "sha256:9bc13e0d20b97ffb07821aa3e113f9998e84994fe4d159ffa3d3a9d1b805043b"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f1b3afc574a3db3b25c89161059d857bd4909a1269b0b3cb3c904677c8c4a3f7"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5533a959a1748a5c042a6da71fe9267a908e21eded7a4f373efd23a2cbdb0ecc"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:1fe0a41437bbd06063aa184c34804efa886bcc128222e9916310c92cd54c3b4c"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:c570f6fa14b9c4c8a4924aaad354652366577b4f98213cf76305067144f7b100"}, - {file = "regex-2020.9.27-cp37-cp37m-win32.whl", hash = "sha256:eda4771e0ace7f67f58bc5b560e27fb20f32a148cbc993b0c3835970935c2707"}, - {file = "regex-2020.9.27-cp37-cp37m-win_amd64.whl", hash = "sha256:60b0e9e6dc45683e569ec37c55ac20c582973841927a85f2d8a7d20ee80216ab"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux1_i686.whl", hash = "sha256:088afc8c63e7bd187a3c70a94b9e50ab3f17e1d3f52a32750b5b77dbe99ef5ef"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eaf548d117b6737df379fdd53bdde4f08870e66d7ea653e230477f071f861121"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:41bb65f54bba392643557e617316d0d899ed5b4946dccee1cb6696152b29844b"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637"}, - {file = "regex-2020.9.27-cp38-cp38-win32.whl", hash = "sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f"}, - {file = "regex-2020.9.27-cp38-cp38-win_amd64.whl", hash = "sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c"}, - {file = "regex-2020.9.27.tar.gz", hash = "sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d"}, + {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = 
"sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, + {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, + {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, + {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, + {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, + {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, + {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = 
"sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, + {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, + {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, + {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, + {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, + {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, + {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, ] requests = [ - {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, - {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"}, + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] restructuredtext-lint = [ - {file = "restructuredtext_lint-1.3.1.tar.gz", hash = "sha256:470e53b64817211a42805c3a104d2216f6f5834b22fe7adb637d1de4d6501fb8"}, + {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, +] +rpy2 = [ + {file = "rpy2-3.4.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:dd2d1e9b4733a449f5c48e2f1da165bf77bc33c43ffcf9dacf051c6eb9a417d7"}, + {file = "rpy2-3.4.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1eb4f7fd414a60dbe2555751b6789f64353db770b2d40917dac1bd20b7bec333"}, + {file = "rpy2-3.4.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = 
"sha256:10944c3e38fb2c89fbe89c42308189936b055a9ad011d5c72e96f7ba8720dec1"}, + {file = "rpy2-3.4.2.tar.gz", hash = "sha256:8f7d1348b77bc45425b846a0d625f24a51a1c4f32ef2cd1c07a24222aa64e2e0"}, +] +scipy = [ + {file = "scipy-1.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d4303e3e21d07d9557b26a1707bb9fc065510ee8501c9bf22a0157249a82fd0"}, + {file = "scipy-1.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1bc5b446600c4ff7ab36bade47180673141322f0febaa555f1c433fe04f2a0e3"}, + {file = "scipy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8840a9adb4ede3751f49761653d3ebf664f25195fdd42ada394ffea8903dd51d"}, + {file = "scipy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8629135ee00cc2182ac8be8e75643b9f02235942443732c2ed69ab48edcb6614"}, + {file = "scipy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:58731bbe0103e96b89b2f41516699db9b63066e4317e31b8402891571f6d358f"}, + {file = "scipy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:876badc33eec20709d4e042a09834f5953ebdac4088d45a4f3a1f18b56885718"}, + {file = "scipy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c0911f3180de343643f369dc5cfedad6ba9f939c2d516bddea4a6871eb000722"}, + {file = "scipy-1.6.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b8af26839ae343655f3ca377a5d5e5466f1d3b3ac7432a43449154fe958ae0e0"}, + {file = "scipy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:4f1d9cc977ac6a4a63c124045c1e8bf67ec37098f67c699887a93736961a00ae"}, + {file = "scipy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:eb7928275f3560d47e5538e15e9f32b3d64cd30ea8f85f3e82987425476f53f6"}, + {file = "scipy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:31ab217b5c27ab429d07428a76002b33662f98986095bbce5d55e0788f7e8b15"}, + {file = "scipy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:2f1c2ebca6fd867160e70102200b1bd07b3b2d31a3e6af3c58d688c15d0d07b7"}, + {file = "scipy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:155225621df90fcd151e25d51c50217e412de717475999ebb76e17e310176981"}, + {file = "scipy-1.6.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f68d5761a2d2376e2b194c8e9192bbf7c51306ca176f1a0889990a52ef0d551f"}, + {file = "scipy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d902d3a5ad7f28874c0a82db95246d24ca07ad932741df668595fe00a4819870"}, + {file = "scipy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:aef3a2dbc436bbe8f6e0b635f0b5fe5ed024b522eee4637dbbe0b974129ca734"}, + {file = "scipy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:cdbc47628184a0ebeb5c08f1892614e1bd4a51f6e0d609c6eed253823a960f5b"}, + {file = "scipy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:313785c4dab65060f9648112d025f6d2fec69a8a889c714328882d678a95f053"}, + {file = "scipy-1.6.0.tar.gz", hash = "sha256:cb6dc9f82dfd95f6b9032a8d7ea70efeeb15d5b5fd6ed4e8537bb3c673580566"}, ] send2trash = [ {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, ] +shapely = [ + {file = "Shapely-1.7.1-1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:46da0ea527da9cf9503e66c18bab6981c5556859e518fe71578b47126e54ca93"}, + {file = "Shapely-1.7.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798"}, + {file = "Shapely-1.7.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b"}, + {file = 
"Shapely-1.7.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:da38ed3d65b8091447dc3717e5218cc336d20303b77b0634b261bc5c1aa2bae8"}, + {file = "Shapely-1.7.1-cp35-cp35m-win32.whl", hash = "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19"}, + {file = "Shapely-1.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:791477edb422692e7dc351c5ed6530eb0e949a31b45569946619a0d9cd5f53cb"}, + {file = "Shapely-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b"}, + {file = "Shapely-1.7.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8f15b6ce67dcc05b61f19c689b60f3fe58550ba994290ff8332f711f5aaa9840"}, + {file = "Shapely-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:60e5b2282619249dbe8dc5266d781cc7d7fb1b27fa49f8241f2167672ad26719"}, + {file = "Shapely-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1"}, + {file = "Shapely-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a"}, + {file = "Shapely-1.7.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1"}, + {file = "Shapely-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b"}, + {file = "Shapely-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891"}, + {file = "Shapely-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688"}, + {file = "Shapely-1.7.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d"}, + {file = "Shapely-1.7.1-cp38-cp38-win32.whl", hash = "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba"}, + {file = "Shapely-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1"}, + {file = "Shapely-1.7.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f"}, + {file = "Shapely-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1"}, + {file = "Shapely-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea"}, + {file = "Shapely-1.7.1.tar.gz", hash = "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129"}, +] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"}, - {file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"}, + {file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"}, + {file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, ] snowballstemmer = [ - {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"}, - {file = "snowballstemmer-2.0.0.tar.gz", hash = 
"sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, + {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, + {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, ] sphinx = [ - {file = "Sphinx-3.2.1-py3-none-any.whl", hash = "sha256:ce6fd7ff5b215af39e2fcd44d4a321f6694b4530b6f2b2109b64d120773faea0"}, - {file = "Sphinx-3.2.1.tar.gz", hash = "sha256:321d6d9b16fa381a5306e5a0b76cd48ffbc588e6340059a729c6fdd66087e0e8"}, + {file = "Sphinx-3.4.3-py3-none-any.whl", hash = "sha256:c314c857e7cd47c856d2c5adff514ac2e6495f8b8e0f886a8a37e9305dfea0d8"}, + {file = "Sphinx-3.4.3.tar.gz", hash = "sha256:41cad293f954f7d37f803d97eb184158cfd90f51195131e94875bc07cd08b93c"}, ] sphinx-autodoc-typehints = [ - {file = "sphinx-autodoc-typehints-1.11.0.tar.gz", hash = "sha256:bbf0b203f1019b0f9843ee8eef0cff856dc04b341f6dbe1113e37f2ebf243e11"}, - {file = "sphinx_autodoc_typehints-1.11.0-py3-none-any.whl", hash = "sha256:89e19370a55db4aef1be2094d8fb1fb500ca455c55b3fcc8d2600ff805227e04"}, + {file = "sphinx-autodoc-typehints-1.11.1.tar.gz", hash = "sha256:244ba6d3e2fdb854622f643c7763d6f95b6886eba24bec28e86edf205e4ddb20"}, + {file = "sphinx_autodoc_typehints-1.11.1-py3-none-any.whl", hash = "sha256:da049791d719f4c9813642496ee4764203e317f0697eb75446183fa2a68e3f77"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, @@ -2640,109 +3025,190 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.3.19-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f2e8a9c0c8813a468aa659a01af6592f71cd30237ec27c4cc0683f089f90dcfc"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:33d29ae8f1dc7c75b191bb6833f55a19c932514b9b5ce8c3ab9bc3047da5db36"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3292a28344922415f939ee7f4fc0c186f3d5a0bf02192ceabd4f1129d71b08de"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-win32.whl", hash = "sha256:883c9fb62cebd1e7126dd683222b3b919657590c3e2db33bdc50ebbad53e0338"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-win_amd64.whl", hash = "sha256:860d0fe234922fd5552b7f807fbb039e3e7ca58c18c8d38aa0d0a95ddf4f6c23"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73a40d4fcd35fdedce07b5885905753d5d4edf413fbe53544dd871f27d48bd4f"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5a49e8473b1ab1228302ed27365ea0fadd4bf44bc0f9e73fe38e10fdd3d6b4fc"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:6547b27698b5b3bbfc5210233bd9523de849b2bb8a0329cd754c9308fc8a05ce"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:107d4af989831d7b091e382d192955679ec07a9209996bf8090f1f539ffc5804"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:eb1d71643e4154398b02e88a42fc8b29db8c44ce4134cf0f4474bfc5cb5d4dac"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:b6ff91356354b7ff3bd208adcf875056d3d886ed7cef90c571aef2ab8a554b12"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-win32.whl", hash = 
"sha256:96f51489ac187f4bab588cf51f9ff2d40b6d170ac9a4270ffaed535c8404256b"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-win_amd64.whl", hash = "sha256:618db68745682f64cedc96ca93707805d1f3a031747b5a0d8e150cfd5055ae4d"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6557af9e0d23f46b8cd56f8af08eaac72d2e3c632ac8d5cf4e20215a8dca7cea"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8280f9dae4adb5889ce0bb3ec6a541bf05434db5f9ab7673078c00713d148365"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:b595e71c51657f9ee3235db8b53d0b57c09eee74dfb5b77edff0e46d2218dc02"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:51064ee7938526bab92acd049d41a1dc797422256086b39c08bafeffb9d304c6"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-win32.whl", hash = "sha256:8afcb6f4064d234a43fea108859942d9795c4060ed0fbd9082b0f280181a15c1"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-win_amd64.whl", hash = "sha256:e49947d583fe4d29af528677e4f0aa21f5e535ca2ae69c48270ebebd0d8843c0"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:9e865835e36dfbb1873b65e722ea627c096c11b05f796831e3a9b542926e979e"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:276936d41111a501cf4a1a0543e25449108d87e9f8c94714f7660eaea89ae5fe"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:c7adb1f69a80573698c2def5ead584138ca00fff4ad9785a4b0b2bf927ba308d"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:465c999ef30b1c7525f81330184121521418a67189053bcf585824d833c05b66"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-win32.whl", hash = "sha256:aa0554495fe06172b550098909be8db79b5accdf6ffb59611900bea345df5eba"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-win_amd64.whl", hash = "sha256:15c0bcd3c14f4086701c33a9e87e2c7ceb3bcb4a246cd88ec54a49cf2a5bd1a6"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fe7fe11019fc3e6600819775a7d55abc5446dda07e9795f5954fdbf8a49e1c37"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c898b3ebcc9eae7b36bd0b4bbbafce2d8076680f6868bcbacee2d39a7a9726a7"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:072766c3bd09294d716b2d114d46ffc5ccf8ea0b714a4e1c48253014b771c6bb"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:26c5ca9d09f0e21b8671a32f7d83caad5be1f6ff45eef5ec2f6fd0db85fc5dc0"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-win32.whl", hash = "sha256:b70bad2f1a5bd3460746c3fb3ab69e4e0eb5f59d977a23f9b66e5bdc74d97b86"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-win_amd64.whl", hash = "sha256:83469ad15262402b0e0974e612546bc0b05f379b5aa9072ebf66d0f8fef16bea"}, - {file = "SQLAlchemy-1.3.19.tar.gz", hash = "sha256:3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:fd3b96f8c705af8e938eaa99cbd8fd1450f632d38cad55e7367c33b263bf98ec"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:29cccc9606750fe10c5d0e8bd847f17a97f3850b8682aef1f56f5d5e1a5a64b1"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:927ce09e49bff3104459e1451ce82983b0a3062437a07d883a4c66f0b344c9b5"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-win32.whl", hash = "sha256:b4b0e44d586cd64b65b507fa116a3814a1a53d55dce4836d7c1a6eb2823ff8d1"}, + {file = 
"SQLAlchemy-1.3.23-cp27-cp27m-win_amd64.whl", hash = "sha256:6b8b8c80c7f384f06825612dd078e4a31f0185e8f1f6b8c19e188ff246334205"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9e9c25522933e569e8b53ccc644dc993cab87e922fb7e142894653880fdd419d"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a0e306e9bb76fd93b29ae3a5155298e4c1b504c7cbc620c09c20858d32d16234"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:6c9e6cc9237de5660bcddea63f332428bb83c8e2015c26777281f7ffbd2efb84"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:94f667d86be82dd4cb17d08de0c3622e77ca865320e0b95eae6153faa7b4ecaf"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:751934967f5336a3e26fc5993ccad1e4fee982029f9317eb6153bc0bc3d2d2da"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:63677d0c08524af4c5893c18dbe42141de7178001360b3de0b86217502ed3601"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-win32.whl", hash = "sha256:ddfb511e76d016c3a160910642d57f4587dc542ce5ee823b0d415134790eeeb9"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-win_amd64.whl", hash = "sha256:040bdfc1d76a9074717a3f43455685f781c581f94472b010cd6c4754754e1862"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:d1a85dfc5dee741bf49cb9b6b6b8d2725a268e4992507cf151cba26b17d97c37"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:639940bbe1108ac667dcffc79925db2966826c270112e9159439ab6bb14f8d80"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e8a1750b44ad6422ace82bf3466638f1aa0862dbb9689690d5f2f48cce3476c8"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e5bb3463df697279e5459a7316ad5a60b04b0107f9392e88674d0ece70e9cf70"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-win32.whl", hash = "sha256:e273367f4076bd7b9a8dc2e771978ef2bfd6b82526e80775a7db52bff8ca01dd"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-win_amd64.whl", hash = "sha256:ac2244e64485c3778f012951fdc869969a736cd61375fde6096d08850d8be729"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:23927c3981d1ec6b4ea71eb99d28424b874d9c696a21e5fbd9fa322718be3708"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d90010304abb4102123d10cbad2cdf2c25a9f2e66a50974199b24b468509bad5"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a8bfc1e1afe523e94974132d7230b82ca7fa2511aedde1f537ec54db0399541a"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:269990b3ab53cb035d662dcde51df0943c1417bdab707dc4a7e4114a710504b4"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-win32.whl", hash = "sha256:fdd2ed7395df8ac2dbb10cefc44737b66c6a5cd7755c92524733d7a443e5b7e2"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-win_amd64.whl", hash = "sha256:6a939a868fdaa4b504e8b9d4a61f21aac11e3fecc8a8214455e144939e3d2aea"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:24f9569e82a009a09ce2d263559acb3466eba2617203170e4a0af91e75b4f075"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2578dbdbe4dbb0e5126fb37ffcd9793a25dcad769a95f171a2161030bea850ff"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1fe5d8d39118c2b018c215c37b73fd6893c3e1d4895be745ca8ff6eb83333ed3"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-manylinux2014_aarch64.whl", 
hash = "sha256:c7dc052432cd5d060d7437e217dd33c97025287f99a69a50e2dc1478dd610d64"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-win32.whl", hash = "sha256:ecce8c021894a77d89808222b1ff9687ad84db54d18e4bd0500ca766737faaf6"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:37b83bf81b4b85dda273aaaed5f35ea20ad80606f672d94d2218afc565fb0173"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:8be835aac18ec85351385e17b8665bd4d63083a7160a017bef3d640e8e65cadb"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6ec1044908414013ebfe363450c22f14698803ce97fbb47e53284d55c5165848"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:eab063a70cca4a587c28824e18be41d8ecc4457f8f15b2933584c6c6cccd30f0"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:baeb451ee23e264de3f577fee5283c73d9bbaa8cb921d0305c0bbf700094b65b"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-win32.whl", hash = "sha256:94208867f34e60f54a33a37f1c117251be91a47e3bfdb9ab8a7847f20886ad06"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:f4d972139d5000105fcda9539a76452039434013570d6059993120dc2a65e447"}, + {file = "SQLAlchemy-1.3.23.tar.gz", hash = "sha256:6fca33672578666f657c131552c4ef8979c1606e494f78cd5199742dfb26918b"}, +] +statsmodels = [ + {file = "statsmodels-0.12.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c1d98ce2072f5e772cbf91d05475490368da5d3ee4a3150062330c7b83221ceb"}, + {file = "statsmodels-0.12.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:4184487e9c281acad3d0bda19445c69db292f0dbb18f25ebf56a7966a0a28eef"}, + {file = "statsmodels-0.12.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:37e107fa11299090ed90f93c7172162b850c28fd09999937b971926813e887c5"}, + {file = "statsmodels-0.12.2-cp36-none-win32.whl", hash = "sha256:5d3e7333e1c5b234797ed57c3d1533371374c1e1e7e7ed54d27805611f96e2d5"}, + {file = "statsmodels-0.12.2-cp36-none-win_amd64.whl", hash = "sha256:aaf3c75fd22cb9dcf9c1b28f8ae87521310870f4dd8a6a4f1010f1e46d992377"}, + {file = "statsmodels-0.12.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c48b7cbb37a651bb1cd23614abc10f447845ad3c3a713bf74e2aad20cfc94ae7"}, + {file = "statsmodels-0.12.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a3bd3922463dda8ad33e5e5075d2080e9e012aeb2032b5cdaeea9b79c2472000"}, + {file = "statsmodels-0.12.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:43de84bc08c8b9f778502aed7a476d6e68674e6878718e533b07d569cf0927a9"}, + {file = "statsmodels-0.12.2-cp37-none-win32.whl", hash = "sha256:0197855aa1d40c42532d6a75b4ca72e30826a50d90ec3047a404f9702d8b814f"}, + {file = "statsmodels-0.12.2-cp37-none-win_amd64.whl", hash = "sha256:93273aa1c31caf59bcce9790ca4c3f54fdc45a37c61084d06f1ba4fbe56e7752"}, + {file = "statsmodels-0.12.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3e94306d4c07e332532ea4911d1f1d1f661c79aa73f22c5bb22e6dd47b40d562"}, + {file = "statsmodels-0.12.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f3a7622f3d0ce2fc204f43b74de4e03e42775609705bf94d656b730482ca935a"}, + {file = "statsmodels-0.12.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:587deb788e7f8f3f866d28e812cf5c082b4d4a2d3f5beea94d0e9699ea71ef22"}, + {file = "statsmodels-0.12.2-cp38-none-win32.whl", hash = "sha256:cbbdf6f708c9a1f1fad5cdea5e4342d6fdb37e42e92288c2cf906b99976ffe15"}, + {file = "statsmodels-0.12.2-cp38-none-win_amd64.whl", hash = "sha256:1fa720e895112a1b04b27002218b0ea7f10dd1d9cffd1c018c88bbfb82520f57"}, + {file = 
"statsmodels-0.12.2-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:c3782ce846a52862ac72f89d22b6b1ca13d877bc593872309228a6f05d934321"}, + {file = "statsmodels-0.12.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8f93cb3f7d87c1fc7e51b3b239371c25a17a0a8e782467fdf4788cfef600724a"}, + {file = "statsmodels-0.12.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f61f33f64760a22100b6b146217823f73cfedd251c9bdbd58453ca94e63326c7"}, + {file = "statsmodels-0.12.2-cp39-none-win32.whl", hash = "sha256:3aab85174444f1bcad1e9218a3d3db08f0f86eeb97985236ca8605a0a39ce305"}, + {file = "statsmodels-0.12.2-cp39-none-win_amd64.whl", hash = "sha256:94d3632d56c13eebebaefb52bd4b43144ad5a131337b57842f46db826fa7d2d3"}, + {file = "statsmodels-0.12.2.tar.gz", hash = "sha256:8ad7a7ae7cdd929095684118e3b05836c0ccb08b6a01fe984159475d174a1b10"}, ] stevedore = [ - {file = "stevedore-3.2.2-py3-none-any.whl", hash = "sha256:5e1ab03eaae06ef6ce23859402de785f08d97780ed774948ef16c4652c41bc62"}, - {file = "stevedore-3.2.2.tar.gz", hash = "sha256:f845868b3a3a77a2489d226568abe7328b5c2d4f6a011cc759dfa99144a521f0"}, + {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, + {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, ] terminado = [ - {file = "terminado-0.9.1-py3-none-any.whl", hash = "sha256:c55f025beb06c2e2669f7ba5a04f47bb3304c30c05842d4981d8f0fc9ab3b4e3"}, - {file = "terminado-0.9.1.tar.gz", hash = "sha256:3da72a155b807b01c9e8a5babd214e052a0a45a975751da3521a1c3381ce6d76"}, + {file = "terminado-0.9.2-py3-none-any.whl", hash = "sha256:23a053e06b22711269563c8bb96b36a036a86be8b5353e85e804f89b84aaa23f"}, + {file = "terminado-0.9.2.tar.gz", hash = "sha256:89e6d94b19e4bc9dce0ffd908dfaf55cc78a9bf735934e915a4a96f65ac9704c"}, ] testfixtures = [ - {file = "testfixtures-6.14.2-py2.py3-none-any.whl", hash = "sha256:816557888877f498081c1b5c572049b4a2ddffedb77401308ff4cdc1bb9147b7"}, - {file = "testfixtures-6.14.2.tar.gz", hash = "sha256:14d9907390f5f9c7189b3d511b64f34f1072d07cc13b604a57e1bb79029376e3"}, + {file = "testfixtures-6.17.1-py2.py3-none-any.whl", hash = "sha256:9ed31e83f59619e2fa17df053b241e16e0608f4580f7b5a9333a0c9bdcc99137"}, + {file = "testfixtures-6.17.1.tar.gz", hash = "sha256:5ec3a0dd6f71cc4c304fbc024a10cc293d3e0b852c868014b9f233203e149bda"}, ] testpath = [ {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, {file = "testpath-0.4.4.tar.gz", hash = "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e"}, ] +text-unidecode = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] toml = [ - {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, - {file = "toml-0.10.1.tar.gz", hash = "sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tornado = [ - {file = "tornado-6.0.4-cp35-cp35m-win32.whl", hash = 
"sha256:5217e601700f24e966ddab689f90b7ea4bd91ff3357c3600fa1045e26d68e55d"}, - {file = "tornado-6.0.4-cp35-cp35m-win_amd64.whl", hash = "sha256:c98232a3ac391f5faea6821b53db8db461157baa788f5d6222a193e9456e1740"}, - {file = "tornado-6.0.4-cp36-cp36m-win32.whl", hash = "sha256:5f6a07e62e799be5d2330e68d808c8ac41d4a259b9cea61da4101b83cb5dc673"}, - {file = "tornado-6.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c952975c8ba74f546ae6de2e226ab3cc3cc11ae47baf607459a6728585bb542a"}, - {file = "tornado-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:2c027eb2a393d964b22b5c154d1a23a5f8727db6fda837118a776b29e2b8ebc6"}, - {file = "tornado-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:5618f72e947533832cbc3dec54e1dffc1747a5cb17d1fd91577ed14fa0dc081b"}, - {file = "tornado-6.0.4-cp38-cp38-win32.whl", hash = "sha256:22aed82c2ea340c3771e3babc5ef220272f6fd06b5108a53b4976d0d722bcd52"}, - {file = "tornado-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c58d56003daf1b616336781b26d184023ea4af13ae143d9dda65e31e534940b9"}, - {file = "tornado-6.0.4.tar.gz", hash = "sha256:0fe2d45ba43b00a41cd73f8be321a44936dc1aba233dee979f17a042b83eb6dc"}, + {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, + {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, + {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, + {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, + {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, + {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, + {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", 
hash = "sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, + {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, + {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, + {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, + {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, + {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, + {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = "sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, + {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, + {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, + {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, + {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, + {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] traitlets = [ - {file = "traitlets-5.0.4-py3-none-any.whl", hash = "sha256:9664ec0c526e48e7b47b7d14cd6b252efa03e0129011de0a9c1d70315d4309c3"}, - {file = "traitlets-5.0.4.tar.gz", hash = "sha256:86c9351f94f95de9db8a04ad8e892da299a088a64fd283f9f6f18770ae5eae1b"}, + {file = 
"traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, + {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, ] typed-ast = [ - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, - {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, - {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, - {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, - {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, - {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, - {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = 
"sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, + {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, + {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, + {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, + {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, + {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, + {file = "typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, + {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = 
"sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, + {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, + {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] +tzlocal = [ + {file = "tzlocal-2.1-py2.py3-none-any.whl", hash = "sha256:e2cb6c6b5b604af38597403e9852872d7f534962ae2954c7f35efcb1ccacf4a4"}, + {file = "tzlocal-2.1.tar.gz", hash = "sha256:643c97c5294aedc737780a49d9df30889321cbe1204eac2c2ec6134035a92e44"}, +] urllib3 = [ - {file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"}, - {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"}, + {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, + {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, +] +utm = [ + {file = "utm-0.7.0.tar.gz", hash = "sha256:3c9a3650e98bb6eecec535418d0dfd4db8f88c8ceaca112a0ff0787e116566e2"}, ] virtualenv = [ - {file = "virtualenv-20.0.31-py2.py3-none-any.whl", hash = "sha256:e0305af10299a7fb0d69393d8f04cb2965dda9351140d11ac8db4e5e3970451b"}, - {file = "virtualenv-20.0.31.tar.gz", hash = "sha256:43add625c53c596d38f971a465553f6318decc39d98512bc100fa1b1e839c8dc"}, + {file = "virtualenv-20.4.2-py2.py3-none-any.whl", hash = "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3"}, + {file = "virtualenv-20.4.2.tar.gz", hash = "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, @@ -2756,9 +3222,6 @@ wemake-python-styleguide = [ {file = "wemake-python-styleguide-0.14.1.tar.gz", hash = "sha256:e13dc580fa56b7b548de8da170bccb8ddff2d4ab026ca987db8a9893bf8a7b5b"}, {file = "wemake_python_styleguide-0.14.1-py3-none-any.whl", hash = "sha256:73a501e0547275287a2b926515c000cc25026a8bceb9dcc1bf73ef85a223a3c6"}, ] -wrapt = [ - {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, -] xdoctest = [ {file = "xdoctest-0.13.0-py2.py3-none-any.whl", hash = "sha256:de861fd5230a46bd26c054b4981169dd963f813768cb62b62e104e4d2644ac94"}, {file = "xdoctest-0.13.0.tar.gz", hash = "sha256:4f113a430076561a9d7f31af65b5d5acda62ee06b05cb6894264cb9efb8196ac"}, diff --git a/pyproject.toml b/pyproject.toml index 0fa45fc..ac86668 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ target-version = ["py38"] [tool.poetry] name = "urban-meal-delivery" -version = "0.2.0" +version = "0.3.0" authors = ["Alexander Hess "] description = "Optimizing an urban meal delivery platform" @@ -28,18 +28,23 @@ repository = "https://github.com/webartifex/urban-meal-delivery" python = "^3.8" # Package => code developed in *.py files and packaged under 
src/urban_meal_delivery +Shapely = "^1.7.1" alembic = "^1.4.2" click = "^7.1.2" +folium = "^0.12.1" +matplotlib = "^3.3.3" +pandas = "^1.1.0" psycopg2 = "^2.8.5" # adapter for PostgreSQL -python-dotenv = "^0.14.0" +rpy2 = "^3.4.1" sqlalchemy = "^1.3.18" +statsmodels = "^0.12.1" +utm = "^0.7.0" # Jupyter Lab => notebooks with analyses using the developed package # IMPORTANT: must be kept in sync with the "research" extra below jupyterlab = { version="^2.2.2", optional=true } nb_black = { version="^1.0.7", optional=true } numpy = { version="^1.19.1", optional=true } -pandas = { version="^1.1.0", optional=true } pytz = { version="^2020.1", optional=true } [tool.poetry.extras] @@ -47,7 +52,6 @@ research = [ "jupyterlab", "nb_black", "numpy", - "pandas", "pytz", ] @@ -68,14 +72,18 @@ flake8-black = "^0.2.1" flake8-expression-complexity = "^0.0.8" flake8-pytest-style = "^1.2.2" mypy = "^0.782" -pylint = "^2.5.3" wemake-python-styleguide = "^0.14.1" # flake8 plug-in # Test Suite +Faker = "^5.0.1" +factory-boy = "^3.1.0" +geopy = "^2.1.0" packaging = "^20.4" # used to test the packaged version pytest = "^6.0.1" pytest-cov = "^2.10.0" pytest-env = "^0.6.2" +pytest-mock = "^3.5.1" +pytest-randomly = "^3.5.0" xdoctest = { version="^0.13.0", extras=["optional"] } # Documentation @@ -83,4 +91,4 @@ sphinx = "^3.1.2" sphinx-autodoc-typehints = "^1.11.0" [tool.poetry.scripts] -umd = "urban_meal_delivery.console:main" +umd = "urban_meal_delivery.console:cli" diff --git a/notebooks/00_clean_data.ipynb b/research/clean_data.ipynb similarity index 99% rename from notebooks/00_clean_data.ipynb rename to research/clean_data.ipynb index 7290073..3c7fdee 100644 --- a/notebooks/00_clean_data.ipynb +++ b/research/clean_data.ipynb @@ -19,7 +19,7 @@ "- numeric columns are checked for plausibility\n", "- foreign key relationships are strictly enforced\n", "\n", - "The structure of the data can be viewed at the [ORM layer](https://github.com/webartifex/urban-meal-delivery/tree/main/src/urban_meal_delivery/db) in the package." + "The structure of the data can be viewed at the [ORM layer](https://github.com/webartifex/urban-meal-delivery/tree/develop/src/urban_meal_delivery/db) in the package." ] }, { @@ -103,8 +103,7 @@ "metadata": {}, "outputs": [], "source": [ - "_engine = db.make_engine()\n", - "connection = _engine.connect()" + "connection = db.connection" ] }, { diff --git a/research/papers/demand-forecasting b/research/papers/demand-forecasting new file mode 160000 index 0000000..9ee3396 --- /dev/null +++ b/research/papers/demand-forecasting @@ -0,0 +1 @@ +Subproject commit 9ee3396a24ce20c9886b4cde5cfe2665fd5a8102 diff --git a/research/r_dependencies.ipynb b/research/r_dependencies.ipynb new file mode 100644 index 0000000..e2e1dc6 --- /dev/null +++ b/research/r_dependencies.ipynb @@ -0,0 +1,1868 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## R Dependencies\n", + "\n", + "This notebook installs R and, into a project-local sub-folder, the R packages needed to perform all the calculations throughout this project.\n", + "\n", + "**Note:** This notebook requires sudo privileges to install R and assumes an Ubuntu/Debian-based system!\n", + "\n", + "**Important:** Once any notebook first imports anything from the `rpy2` site package, a new R process is spawned and used to run all calls to R via `rpy2`.
In order for this process to use the project-local sub-folder for the R dependencies, an environment variable `R_LIBS` must be set (with the path to this sub-folder) **before** any interaction with `rpy2`." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "import shutil" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Obtain the sudo password." + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdin", + "output_type": "stream", + "text": [ + " ············\n" + ] + } + ], + "source": [ + "password = getpass.getpass()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Install the packages *r-base r-base-dev libcurl4-openssl-dev libxml2-dev patchelf*" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "os.system(\n", + " f\"echo {password} | sudo -S apt-get -y install\"\n", + " \" r-base r-base-dev libcurl4-openssl-dev libxml2-dev patchelf\"\n", + ");" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a (hidden) folder in the project's root directory to install the R libraries into." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/webartifex/repos/urban-meal-delivery\n" + ] + } + ], + "source": [ + "%cd .." + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "# Sanity check to see if the R_LIBS variable is set.\n", + "assert os.getenv(\"R_LIBS\")\n", + "# Expand the R_LIBS path to an absolute path.\n", + "r_libs_path = os.path.join(os.getcwd(), os.environ[\"R_LIBS\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "# Create a fresh folder for the R dependencies.\n", + "try:\n", + " shutil.rmtree(r_libs_path)\n", + "except FileNotFoundError:\n", + " pass\n", + "os.mkdir(r_libs_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "R version 4.0.2 (2020-06-22) -- \"Taking Off Again\"\n", + "Copyright (C) 2020 The R Foundation for Statistical Computing\n", + "Platform: x86_64-pc-linux-gnu (64-bit)\n", + "\n", + "R is free software and comes with ABSOLUTELY NO WARRANTY.\n", + "You are welcome to redistribute it under certain conditions.\n", + "Type 'license()' or 'licence()' for distribution details.\n", + "\n", + " Natural language support but running in an English locale\n", + "\n", + "R is a collaborative project with many contributors.\n", + "Type 'contributors()' for more information and\n", + "'citation()' on how to cite R or R packages in publications.\n", + "\n", + "Type 'demo()' for some demos, 'help()' for on-line help, or\n", + "'help.start()' for an HTML browser interface to help.\n", + "Type 'q()' to quit R.\n", + "\n", + "> install.packages('forecast')\n", + "Installing package into ‘/home/webartifex/repos/urban-meal-delivery/.cache/r_libs’\n", + "(as ‘lib’ is unspecified)\n", + "also installing the dependencies ‘prettyunits’, ‘rprojroot’, ‘pkgbuild’, ‘rstudioapi’, ‘diffobj’, ‘rematch2’, ‘brio’, ‘callr’, ‘desc’, ‘evaluate’, ‘jsonlite’, ‘pkgload’, ‘praise’, ‘processx’, ‘ps’, ‘waldo’, ‘assertthat’, ‘utf8’, 
‘testthat’, ‘farver’, ‘labeling’, ‘lifecycle’, ‘munsell’, ‘R6’, ‘RColorBrewer’, ‘viridisLite’, ‘cli’, ‘crayon’, ‘ellipsis’, ‘fansi’, ‘pillar’, ‘pkgconfig’, ‘vctrs’, ‘xts’, ‘TTR’, ‘curl’, ‘digest’, ‘glue’, ‘gtable’, ‘isoband’, ‘rlang’, ‘scales’, ‘tibble’, ‘withr’, ‘quadprog’, ‘quantmod’, ‘colorspace’, ‘fracdiff’, ‘ggplot2’, ‘lmtest’, ‘magrittr’, ‘Rcpp’, ‘timeDate’, ‘tseries’, ‘urca’, ‘zoo’, ‘RcppArmadillo’\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/prettyunits_1.1.1.tar.gz'\n", + "Content type 'application/x-gzip' length 10366 bytes (10 KB)\n", + "==================================================\n", + "downloaded 10 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rprojroot_2.0.2.tar.gz'\n", + "Content type 'application/x-gzip' length 59967 bytes (58 KB)\n", + "==================================================\n", + "downloaded 58 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pkgbuild_1.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 30383 bytes (29 KB)\n", + "==================================================\n", + "downloaded 29 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rstudioapi_0.13.tar.gz'\n", + "Content type 'application/x-gzip' length 110472 bytes (107 KB)\n", + "==================================================\n", + "downloaded 107 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/diffobj_0.3.3.tar.gz'\n", + "Content type 'application/x-gzip' length 472233 bytes (461 KB)\n", + "==================================================\n", + "downloaded 461 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rematch2_2.1.2.tar.gz'\n", + "Content type 'application/x-gzip' length 13366 bytes (13 KB)\n", + "==================================================\n", + "downloaded 13 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/brio_1.1.0.tar.gz'\n", + "Content type 'application/x-gzip' length 11610 bytes (11 KB)\n", + "==================================================\n", + "downloaded 11 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/callr_3.5.1.tar.gz'\n", + "Content type 'application/x-gzip' length 77905 bytes (76 KB)\n", + "==================================================\n", + "downloaded 76 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/desc_1.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 65612 bytes (64 KB)\n", + "==================================================\n", + "downloaded 64 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/evaluate_0.14.tar.gz'\n", + "Content type 'application/x-gzip' length 24206 bytes (23 KB)\n", + "==================================================\n", + "downloaded 23 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/jsonlite_1.7.2.tar.gz'\n", + "Content type 'application/x-gzip' length 421716 bytes (411 KB)\n", + "==================================================\n", + "downloaded 411 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pkgload_1.1.0.tar.gz'\n", + "Content type 'application/x-gzip' length 58046 bytes (56 KB)\n", + "==================================================\n", + "downloaded 56 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/praise_1.0.0.tar.gz'\n", + "Content type 'application/x-gzip' length 6100 bytes\n", + "==================================================\n", + "downloaded 6100 bytes\n", + "\n", + "trying URL 
'https://cloud.r-project.org/src/contrib/processx_3.4.5.tar.gz'\n", + "Content type 'application/x-gzip' length 135121 bytes (131 KB)\n", + "==================================================\n", + "downloaded 131 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/ps_1.5.0.tar.gz'\n", + "Content type 'application/x-gzip' length 115131 bytes (112 KB)\n", + "==================================================\n", + "downloaded 112 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/waldo_0.2.3.tar.gz'\n", + "Content type 'application/x-gzip' length 25726 bytes (25 KB)\n", + "==================================================\n", + "downloaded 25 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/assertthat_0.2.1.tar.gz'\n", + "Content type 'application/x-gzip' length 12742 bytes (12 KB)\n", + "==================================================\n", + "downloaded 12 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/utf8_1.1.4.tar.gz'\n", + "Content type 'application/x-gzip' length 218882 bytes (213 KB)\n", + "==================================================\n", + "downloaded 213 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/testthat_3.0.1.tar.gz'\n", + "Content type 'application/x-gzip' length 678199 bytes (662 KB)\n", + "==================================================\n", + "downloaded 662 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/farver_2.0.3.tar.gz'\n", + "Content type 'application/x-gzip' length 1279579 bytes (1.2 MB)\n", + "==================================================\n", + "downloaded 1.2 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/labeling_0.4.2.tar.gz'\n", + "Content type 'application/x-gzip' length 10156 bytes\n", + "==================================================\n", + "downloaded 10156 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/lifecycle_0.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 164455 bytes (160 KB)\n", + "==================================================\n", + "downloaded 160 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/munsell_0.5.0.tar.gz'\n", + "Content type 'application/x-gzip' length 182653 bytes (178 KB)\n", + "==================================================\n", + "downloaded 178 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/R6_2.5.0.tar.gz'\n", + "Content type 'application/x-gzip' length 63361 bytes (61 KB)\n", + "==================================================\n", + "downloaded 61 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/RColorBrewer_1.1-2.tar.gz'\n", + "Content type 'application/x-gzip' length 11532 bytes (11 KB)\n", + "==================================================\n", + "downloaded 11 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/viridisLite_0.3.0.tar.gz'\n", + "Content type 'application/x-gzip' length 44019 bytes (42 KB)\n", + "==================================================\n", + "downloaded 42 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/cli_2.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 120676 bytes (117 KB)\n", + "==================================================\n", + "downloaded 117 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/crayon_1.3.4.tar.gz'\n", + "Content type 'application/x-gzip' length 658694 bytes (643 KB)\n", + 
"==================================================\n", + "downloaded 643 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/ellipsis_0.3.1.tar.gz'\n", + "Content type 'application/x-gzip' length 7582 bytes\n", + "==================================================\n", + "downloaded 7582 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/fansi_0.4.1.tar.gz'\n", + "Content type 'application/x-gzip' length 270906 bytes (264 KB)\n", + "==================================================\n", + "downloaded 264 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pillar_1.4.7.tar.gz'\n", + "Content type 'application/x-gzip' length 113345 bytes (110 KB)\n", + "==================================================\n", + "downloaded 110 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pkgconfig_2.0.3.tar.gz'\n", + "Content type 'application/x-gzip' length 6080 bytes\n", + "==================================================\n", + "downloaded 6080 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/vctrs_0.3.6.tar.gz'\n", + "Content type 'application/x-gzip' length 778016 bytes (759 KB)\n", + "==================================================\n", + "downloaded 759 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/xts_0.12.1.tar.gz'\n", + "Content type 'application/x-gzip' length 517875 bytes (505 KB)\n", + "==================================================\n", + "downloaded 505 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/TTR_0.24.2.tar.gz'\n", + "Content type 'application/x-gzip' length 314035 bytes (306 KB)\n", + "==================================================\n", + "downloaded 306 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/curl_4.3.tar.gz'\n", + "Content type 'application/x-gzip' length 673779 bytes (657 KB)\n", + "==================================================\n", + "downloaded 657 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/digest_0.6.27.tar.gz'\n", + "Content type 'application/x-gzip' length 164373 bytes (160 KB)\n", + "==================================================\n", + "downloaded 160 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/glue_1.4.2.tar.gz'\n", + "Content type 'application/x-gzip' length 99049 bytes (96 KB)\n", + "==================================================\n", + "downloaded 96 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/gtable_0.3.0.tar.gz'\n", + "Content type 'application/x-gzip' length 368081 bytes (359 KB)\n", + "==================================================\n", + "downloaded 359 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/isoband_0.2.3.tar.gz'\n", + "Content type 'application/x-gzip' length 1902568 bytes (1.8 MB)\n", + "==================================================\n", + "downloaded 1.8 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rlang_0.4.10.tar.gz'\n", + "Content type 'application/x-gzip' length 915685 bytes (894 KB)\n", + "==================================================\n", + "downloaded 894 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/scales_1.1.1.tar.gz'\n", + "Content type 'application/x-gzip' length 515201 bytes (503 KB)\n", + "==================================================\n", + "downloaded 503 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/tibble_3.0.4.tar.gz'\n", + "Content type 
'application/x-gzip' length 255457 bytes (249 KB)\n", + "==================================================\n", + "downloaded 249 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/withr_2.3.0.tar.gz'\n", + "Content type 'application/x-gzip' length 91443 bytes (89 KB)\n", + "==================================================\n", + "downloaded 89 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/quadprog_1.5-8.tar.gz'\n", + "Content type 'application/x-gzip' length 36141 bytes (35 KB)\n", + "==================================================\n", + "downloaded 35 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/quantmod_0.4.18.tar.gz'\n", + "Content type 'application/x-gzip' length 154512 bytes (150 KB)\n", + "==================================================\n", + "downloaded 150 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/colorspace_2.0-0.tar.gz'\n", + "Content type 'application/x-gzip' length 2203295 bytes (2.1 MB)\n", + "==================================================\n", + "downloaded 2.1 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/fracdiff_1.5-1.tar.gz'\n", + "Content type 'application/x-gzip' length 63764 bytes (62 KB)\n", + "==================================================\n", + "downloaded 62 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/ggplot2_3.3.3.tar.gz'\n", + "Content type 'application/x-gzip' length 3058840 bytes (2.9 MB)\n", + "==================================================\n", + "downloaded 2.9 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/lmtest_0.9-38.tar.gz'\n", + "Content type 'application/x-gzip' length 227052 bytes (221 KB)\n", + "==================================================\n", + "downloaded 221 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/magrittr_2.0.1.tar.gz'\n", + "Content type 'application/x-gzip' length 265580 bytes (259 KB)\n", + "==================================================\n", + "downloaded 259 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/Rcpp_1.0.5.tar.gz'\n", + "Content type 'application/x-gzip' length 2950521 bytes (2.8 MB)\n", + "==================================================\n", + "downloaded 2.8 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/timeDate_3043.102.tar.gz'\n", + "Content type 'application/x-gzip' length 314656 bytes (307 KB)\n", + "==================================================\n", + "downloaded 307 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/tseries_0.10-48.tar.gz'\n", + "Content type 'application/x-gzip' length 170342 bytes (166 KB)\n", + "==================================================\n", + "downloaded 166 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/urca_1.3-0.tar.gz'\n", + "Content type 'application/x-gzip' length 682935 bytes (666 KB)\n", + "==================================================\n", + "downloaded 666 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/zoo_1.8-8.tar.gz'\n", + "Content type 'application/x-gzip' length 849487 bytes (829 KB)\n", + "==================================================\n", + "downloaded 829 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/RcppArmadillo_0.10.1.2.2.tar.gz'\n", + "Content type 'application/x-gzip' length 1647570 bytes (1.6 MB)\n", + "==================================================\n", + "downloaded 1.6 MB\n", + "\n", + "trying URL 
'https://cloud.r-project.org/src/contrib/forecast_8.13.tar.gz'\n", + "Content type 'application/x-gzip' length 796389 bytes (777 KB)\n", + "==================================================\n", + "downloaded 777 KB\n", + "\n", + "* installing *source* package ‘prettyunits’ ...\n", + "** package ‘prettyunits’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (prettyunits)\n", + "* installing *source* package ‘rprojroot’ ...\n", + "** package ‘rprojroot’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (rprojroot)\n", + "* installing *source* package ‘rstudioapi’ ...\n", + "** package ‘rstudioapi’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (rstudioapi)\n", + "* installing *source* package ‘brio’ ...\n", + "** package ‘brio’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c brio.c -o brio.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c file_line_endings.c -o file_line_endings.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read_file.c -o read_file.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read_file_raw.c -o read_file_raw.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read_lines.c -o read_lines.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c write_file.c -o write_file.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c write_lines.c -o write_lines.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o brio.so brio.o file_line_endings.o init.o read_file.o read_file_raw.o read_lines.o write_file.o write_lines.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-brio/00new/brio/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (brio)\n", + "* installing *source* package ‘evaluate’ ...\n", + "** package ‘evaluate’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (evaluate)\n", + "* installing *source* package ‘jsonlite’ ...\n", + "** package ‘jsonlite’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c base64.c -o base64.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c collapse_array.c -o collapse_array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c collapse_object.c -o collapse_object.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
+ "[per-package gcc/gfortran compile and link commands and routine install-check output trimmed]\n",
+ "in method for ‘asJSON’ with signature ‘\"blob\"’: no definition for class “blob”\n",
+ "* DONE (jsonlite)\n",
+ "* DONE (praise)\n",
+ "* DONE (ps)\n",
+ "* DONE (assertthat)\n",
+ "* DONE (utf8)\n",
+ "* DONE (farver)\n",
+ "* DONE (labeling)\n",
+ "* DONE (R6)\n",
+ "* DONE (RColorBrewer)\n",
+ "* DONE (viridisLite)\n",
+ "* DONE (crayon)\n",
+ "* DONE (fansi)\n",
+ "* DONE (pkgconfig)\n",
+ "Found pkg-config cflags and libs!\n",
+ "Using PKG_CFLAGS=-I/usr/include/x86_64-linux-gnu\n",
+ "Using PKG_LIBS=-lcurl\n",
+ "* DONE (curl)\n",
+ "* DONE (digest)\n",
+ "* DONE (glue)\n",
+ "* DONE (gtable)\n",
+ "* DONE (rlang)\n",
+ "* DONE (withr)\n",
+ "* DONE (quadprog)\n",
+ "* DONE (colorspace)\n",
+ "* DONE (fracdiff)\n",
+ "* DONE (magrittr)\n",
+ "* DONE (Rcpp)\n",
+ "Creating a generic function for ‘sample’ from package ‘base’ in package ‘timeDate’\n",
+ "Creating a generic function for ‘getDataPart’ from package ‘methods’ in package ‘timeDate’\n",
+ "Creating a generic function for ‘abline’ from package ‘graphics’ in package ‘timeDate’\n",
+ "* DONE (timeDate)\n",
+ "* installing *source* package ‘urca’ ...\n",
+ "UnitRootMacKinnon.f: Warning: Fortran 2018 deleted feature: Shared DO termination labels 21, 24, and 5 (lines 502, 505, and 524)\n",
+ "UnitRootMacKinnon.f: Warning: Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE, labels 3, 11, 12, and 14 (lines 570, 595, 596, and 602)\n",
+ "* DONE (urca)\n",
+ "* installing *source* package ‘zoo’ ...\n",
-I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c coredata.c -o coredata.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c lag.c -o lag.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o zoo.so coredata.o init.o lag.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-zoo/00new/zoo/libs\n", + "** R\n", + "** demo\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (zoo)\n", + "* installing *source* package ‘diffobj’ ...\n", + "** package ‘diffobj’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c diff.c -o diff.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c diffobj.c -o diffobj.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o diffobj.so diff.o diffobj.o init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-diffobj/00new/diffobj/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (diffobj)\n", + "* installing *source* package ‘desc’ ...\n", + "** package ‘desc’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (desc)\n", + "* installing *source* package ‘processx’ ...\n", + "** package ‘processx’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -Wall tools/px.c -o tools/px\n", + "gcc -std=gnu99 -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g supervisor/supervisor.c supervisor/utils.c \\\n", + " -o supervisor/supervisor\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c base64.c -o base64.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c client.c -o client.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c errors.c -o errors.o\n", + "gcc -std=gnu99 -shared -L\"/usr/lib/R/lib\" -Wl,-Bsymbolic-functions -Wl,-z,relro -o client.so base64.o client.o errors.o -L\"/usr/lib/R/lib\" -lR\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c poll.c -o poll.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c processx-connection.c -o processx-connection.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c processx-vector.c -o processx-vector.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c create-time.c -o create-time.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/childlist.c -o unix/childlist.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/connection.c -o unix/connection.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/processx.c -o unix/processx.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/sigchld.c -o unix/sigchld.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/utils.c -o unix/utils.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/named_pipe.c -o unix/named_pipe.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cleancall.c -o cleancall.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o processx.so init.o poll.o errors.o processx-connection.o processx-vector.o create-time.o base64.o unix/childlist.o unix/connection.o unix/processx.o unix/sigchld.o unix/utils.o unix/named_pipe.o cleancall.o -L/usr/lib/R/lib -lR\n", + "installing via 'install.libs.R' to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-processx/00new/processx\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (processx)\n", + "* installing *source* package ‘lifecycle’ ...\n", + "** package ‘lifecycle’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (lifecycle)\n", + "* installing *source* package ‘munsell’ ...\n", + "** package ‘munsell’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (munsell)\n", + "* installing *source* package ‘cli’ ...\n", + "** package ‘cli’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (cli)\n", + "* installing *source* package ‘ellipsis’ ...\n", + "** package ‘ellipsis’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dots.c -o dots.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o ellipsis.so dots.o init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-ellipsis/00new/ellipsis/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (ellipsis)\n", + "* installing *source* package ‘xts’ ...\n", + "** package ‘xts’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c add_class.c -o add_class.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c any.c -o any.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c attr.c -o attr.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c binsearch.c -o binsearch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c coredata.c -o coredata.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c diff.c -o diff.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dimnames.c -o dimnames.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c endpoints.c -o endpoints.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c extract_col.c -o extract_col.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c isOrdered.c -o isOrdered.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c isXts.c -o isXts.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c merge.c -o merge.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c na.c -o na.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c period_apply.c -o period_apply.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c period_arithmetic.c -o period_arithmetic.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c period_quantile.c -o period_quantile.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rbind.c -o rbind.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rollfun.c -o rollfun.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c runSum.c -o runSum.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c startofyear.c -o startofyear.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subset.c -o subset.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subset.old.c -o subset.old.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c toperiod.c -o toperiod.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c totalcols.c -o totalcols.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c tryXts.c -o tryXts.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unique.time.c -o unique.time.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o xts.so add_class.o any.o attr.o binsearch.o coredata.o diff.o dimnames.o endpoints.o extract_col.o init.o isOrdered.o isXts.o merge.o na.o period_apply.o period_arithmetic.o period_quantile.o rbind.o rollfun.o runSum.o startofyear.o subset.o subset.old.o toperiod.o totalcols.o tryXts.o unique.time.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-xts/00new/xts/libs\n", + "** R\n", + "** data\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (xts)\n", + "* installing *source* package ‘lmtest’ ...\n", + "** package ‘lmtest’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gfortran -fno-optimize-sibling-calls -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -c pan.f -o pan.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o lmtest.so init.o pan.o -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-lmtest/00new/lmtest/libs\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (lmtest)\n", + "* installing *source* package ‘RcppArmadillo’ ...\n", + "** package ‘RcppArmadillo’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "checking whether the C++ compiler works... yes\n", + "checking for C++ compiler default output file name... a.out\n", + "checking for suffix of executables... \n", + "checking whether we are cross compiling... no\n", + "checking for suffix of object files... o\n", + "checking whether we are using the GNU C++ compiler... yes\n", + "checking whether g++ -std=gnu++11 accepts -g... yes\n", + "checking how to run the C++ preprocessor... g++ -std=gnu++11 -E\n", + "checking whether we are using the GNU C++ compiler... (cached) yes\n", + "checking whether g++ -std=gnu++11 accepts -g... (cached) yes\n", + "checking whether we have a suitable tempdir... /tmp/user/1000\n", + "checking whether R CMD SHLIB can already compile programs using OpenMP... 
yes\n", + "checking LAPACK_LIBS... system LAPACK found\n", + "configure: creating ./config.status\n", + "config.status: creating inst/include/RcppArmadilloConfigGenerated.h\n", + "config.status: creating src/Makevars\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c RcppArmadillo.cpp -o RcppArmadillo.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c RcppExports.cpp -o RcppExports.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fastLm.cpp -o fastLm.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o RcppArmadillo.so RcppArmadillo.o RcppExports.o fastLm.o -llapack -lblas -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-RcppArmadillo/00new/RcppArmadillo/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (RcppArmadillo)\n", + "* installing *source* package ‘callr’ ...\n", + "** package ‘callr’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (callr)\n", + "* installing *source* package ‘vctrs’ ...\n", + "** package ‘vctrs’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c altrep-rle.c -o altrep-rle.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c arg-counter.c -o arg-counter.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c arg.c -o arg.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c bind.c -o bind.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c c-unchop.c -o c-unchop.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c c.c -o c.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c callables.c -o callables.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cast-bare.c -o cast-bare.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cast-dispatch.c -o cast-dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cast.c -o cast.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c compare.c -o compare.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c complete.c -o complete.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c conditions.c -o conditions.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dictionary.c -o dictionary.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dim.c -o dim.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c equal.c -o equal.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fields.c -o fields.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fill.c -o fill.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c group.c -o group.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c growable.c -o growable.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c hash.c -o hash.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c names.c -o names.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-groups.c -o order-groups.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-radix.c -o order-radix.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-sortedness.c -o order-sortedness.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-truelength.c -o order-truelength.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c poly-op.c -o poly-op.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c proxy-restore.c -o proxy-restore.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c proxy.c -o proxy.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c ptype2-dispatch.c -o ptype2-dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rep.c -o rep.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c runs.c -o runs.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c shape.c -o shape.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c size-common.c -o size-common.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c size.c -o size.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-array.c -o slice-array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-assign-array.c -o slice-assign-array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-assign.c -o slice-assign.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-chop.c -o slice-chop.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice.c -o slice.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c split.c -o split.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subscript-loc.c -o subscript-loc.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subscript.c -o subscript.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c translate.c -o translate.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-data-frame.c -o type-data-frame.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-date-time.c -o type-date-time.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-factor.c -o type-factor.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-info.c -o type-info.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-tibble.c -o type-tibble.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type.c -o type.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type2.c -o type2.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c typeof2-s3.c -o typeof2-s3.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c typeof2.c -o typeof2.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unspecified.c -o unspecified.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils-dispatch.c -o utils-dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils-rlang.c -o utils-rlang.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils.c -o utils.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c version.c -o version.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o vctrs.so altrep-rle.o arg-counter.o arg.o bind.o c-unchop.o c.o callables.o cast-bare.o cast-dispatch.o cast.o compare.o complete.o conditions.o dictionary.o dim.o equal.o fields.o fill.o group.o growable.o hash.o init.o names.o order-groups.o order-radix.o order-sortedness.o order-truelength.o poly-op.o proxy-restore.o proxy.o ptype2-dispatch.o rep.o runs.o shape.o size-common.o size.o slice-array.o slice-assign-array.o slice-assign.o slice-chop.o slice.o split.o subscript-loc.o subscript.o translate.o type-data-frame.o type-date-time.o type-factor.o type-info.o type-tibble.o type.o type2.o typeof2-s3.o typeof2.o unspecified.o utils-dispatch.o utils-rlang.o utils.o version.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-vctrs/00new/vctrs/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (vctrs)\n", + "* installing *source* package ‘TTR’ ...\n", + "** package ‘TTR’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c adjRatios.c -o adjRatios.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c aroon.c -o aroon.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c moving_averages.c -o moving_averages.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c percent_rank.c -o percent_rank.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c runfun.c -o runfun.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c sar.c -o sar.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c wilderSum.c -o wilderSum.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c zigzag.c -o zigzag.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o TTR.so adjRatios.o aroon.o init.o moving_averages.o percent_rank.o runfun.o sar.o wilderSum.o zigzag.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-TTR/00new/TTR/libs\n", + "** R\n", + "** data\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (TTR)\n", + "* installing *source* package ‘scales’ ...\n", + "** package ‘scales’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (scales)\n", + "* installing *source* package ‘pkgbuild’ ...\n", + "** package ‘pkgbuild’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (pkgbuild)\n", + "* installing *source* package ‘pillar’ ...\n", + "** package ‘pillar’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (pillar)\n", + "* installing *source* package ‘quantmod’ ...\n", + "** package ‘quantmod’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** demo\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (quantmod)\n", + "* installing *source* package ‘pkgload’ ...\n", + "** package ‘pkgload’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g 
-O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unlock.c -o unlock.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o pkgload.so unlock.o -L/usr/lib/R/lib -lR\n", + "* DONE (pkgload)\n", + "[... build log condensed: the source packages ‘tibble’, ‘tseries’, ‘rematch2’, ‘waldo’, ‘testthat’, ‘isoband’, and ‘ggplot2’ were each unpacked, compiled with the same gcc/g++/gfortran flags as above, byte-compiled, and finished with ‘* DONE’; gfortran emitted only ‘Fortran 2018 deleted feature’ warnings for labelled DO termination statements in tseries’ dsumsl.f ...]\n", + "* installing *source* package ‘forecast’ ...\n", + "** package ‘forecast’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "[... analogous compiler invocations for calcBATS.cpp, calcTBATS.cpp, etsTargetFunction.cpp, etsTargetFunctionWrapper.cpp, etscalc.c, etspolyroot.c, makeBATSMatrices.cpp, makeTBATSMatrices.cpp, and registerDynamicSymbol.c condensed ...]\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=.
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c updateMatrices.cpp -o updateMatrices.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c updateTBATSMatrices.cpp -o updateTBATSMatrices.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o forecast.so calcBATS.o calcTBATS.o etsTargetFunction.o etsTargetFunctionWrapper.o etscalc.o etspolyroot.o makeBATSMatrices.o makeTBATSMatrices.o registerDynamicSymbol.o updateMatrices.o updateTBATSMatrices.o -llapack -lblas -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-forecast/00new/forecast/libs\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (forecast)\n", + "\n", + "The downloaded source packages are in\n", + "\t‘/tmp/user/1000/RtmpzOhFwP/downloaded_packages’\n", + "> \n", + "> \n" + ] + } + ], + "source": [ + "!R -e \"install.packages('forecast')\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Lastly, check if the `urban_meal_delivery.init_r` module can be imported (works only if all R dependencies can be loaded)." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "from urban_meal_delivery import init_r" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/setup.cfg b/setup.cfg index 5bbd00d..47924d4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -89,16 +89,33 @@ extend-ignore = # Comply with black's style. # Source: https://github.com/psf/black/blob/master/docs/compatible_configs.md#flake8 E203, W503, WPS348, + # Google's Python Style Guide is not reStructuredText + # until after being processed by Sphinx Napoleon. + # Source: https://github.com/peterjc/flake8-rst-docstrings/issues/17 + RST201,RST203,RST210,RST213,RST301, + # String constant over-use is checked visually by the programmer. + WPS226, + # Allow underscores in numbers. + WPS303, # f-strings are ok. WPS305, # Classes should not have to specify a base class. WPS306, + # Let's be modern: The Walrus is ok. + WPS332, + # Let's not worry about the number of noqa's. + WPS402, # Putting logic into __init__.py files may be justified. 
WPS412, # Allow multiple assignment, e.g., x = y = 123 WPS429, + # There are no magic numbers. + WPS432, per-file-ignores = + # The top level of a sub-package is intended to import a lot. + **/__init__.py: + F401,WPS201, docs/conf.py: # Allow shadowing built-ins and reading __*__ variables. WPS125,WPS609, @@ -108,14 +125,12 @@ per-file-ignores = migrations/versions/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, + # Do not worry about SQL injection here. + S608, # File names of revisions are ok. WPS114,WPS118, # Revisions may have too many expressions. WPS204,WPS213, - # No overuse of string constants (e.g., 'RESTRICT'). - WPS226, - # Too many noqa's are ok. - WPS402, noxfile.py: # Type annotations are not strictly enforced. ANN0, ANN2, @@ -123,38 +138,70 @@ per-file-ignores = WPS202, # TODO (isort): Remove after simplifying the nox session "lint". WPS213, - # No overuse of string constants (e.g., '--version'). - WPS226, - # The noxfile is rather long => allow many noqa's. - WPS402, src/urban_meal_delivery/configuration.py: # Allow upper case class variables within classes. WPS115, - # Numbers are normal in config files. - WPS432, - src/urban_meal_delivery/db/addresses.py: - WPS226, - src/urban_meal_delivery/db/orders.py: - WPS226, + src/urban_meal_delivery/console/forecasts.py: + # The module is not too complex. + WPS232, + src/urban_meal_delivery/db/customers.py: + # The module is not too complex. + WPS232, + src/urban_meal_delivery/db/restaurants.py: + # The module is not too complex. + WPS232, + src/urban_meal_delivery/forecasts/methods/decomposition.py: + # The module is not too complex. + WPS232, + src/urban_meal_delivery/forecasts/methods/extrapolate_season.py: + # The module is not too complex. + WPS232, + src/urban_meal_delivery/forecasts/models/tactical/horizontal.py: + # The many noqa's are ok. + WPS403, + src/urban_meal_delivery/forecasts/timify.py: + # No SQL injection as the inputs come from a safe source. + S608, + # The many noqa's are ok. + WPS403, tests/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, + # The `Meta` classes inside the factory_boy models do not need docstrings. + D106, # `assert` statements are ok in the test suite. S101, + # The `random` module is not used for cryptography. + S311, # Shadowing outer scopes occurs naturally with mocks. WPS442, + # Test names may be longer than 40 characters. + WPS118, # Modules may have many test cases. WPS202,WPS204,WPS214, - # No overuse of string constants (e.g., '__version__'). - WPS226, - # Numbers are normal in test cases as expected results. - WPS432, + # Do not check for Jones complexity in the test suite. + WPS221, + # "Private" methods are really just a convention for + # fixtures without a return value. + WPS338, + # We do not care about the number of "# noqa"s in the test suite. + WPS402, + # Allow closures. + WPS430, + # When testing, it is normal to use implementation details. + WPS437, # Explicitly set mccabe's maximum complexity to 10 as recommended by # Thomas McCabe, the inventor of the McCabe complexity, and the NIST. # Source: https://en.wikipedia.org/wiki/Cyclomatic_complexity#Limiting_complexity_during_development max-complexity = 10 +# Allow more than wemake-python-styleguide's 5 local variables per function. +max-local-variables = 8 + +# Allow more than wemake-python-styleguide's 7 methods per class. +max-methods = 12 + # Comply with black's style.
# Source: https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length max-line-length = 88 @@ -166,6 +213,7 @@ show-source = true # wemake-python-styleguide's settings # =================================== allowed-domain-names = + data, obj, param, result, @@ -217,53 +265,28 @@ single_line_exclusions = typing [mypy] cache_dir = .cache/mypy -[mypy-dotenv] +[mypy-folium.*] +ignore_missing_imports = true +[mypy-matplotlib.*] ignore_missing_imports = true [mypy-nox.*] ignore_missing_imports = true +[mypy-numpy.*] +ignore_missing_imports = true [mypy-packaging] ignore_missing_imports = true +[mypy-pandas] +ignore_missing_imports = true [mypy-pytest] ignore_missing_imports = true +[mypy-rpy2.*] +ignore_missing_imports = true [mypy-sqlalchemy.*] ignore_missing_imports = true - - -[pylint.FORMAT] -# Comply with black's style. -max-line-length = 88 - -[pylint.MESSAGES CONTROL] -disable = - # We use TODO's to indicate locations in the source base - # that must be worked on in the near future. - fixme, - # Too many false positives and cannot be disabled within a file. - # Source: https://github.com/PyCQA/pylint/issues/214 - duplicate-code, - # Comply with black's style. - bad-continuation, bad-whitespace, - # ===================== - # flake8 de-duplication - # Source: https://pylint.pycqa.org/en/latest/faq.html#i-am-using-another-popular-linter-alongside-pylint-which-messages-should-i-disable-to-avoid-duplicates - # ===================== - # mccabe - too-many-branches, - # pep8-naming - bad-classmethod-argument, bad-mcs-classmethod-argument, - invalid-name, no-self-argument, - # pycodestyle - bad-indentation, bare-except, line-too-long, missing-final-newline, - multiple-statements, trailing-whitespace, unnecessary-semicolon, unneeded-not, - # pydocstyle - missing-class-docstring, missing-function-docstring, missing-module-docstring, - # pyflakes - undefined-variable, unused-import, unused-variable, - # wemake-python-styleguide - redefined-outer-name, - -[pylint.REPORTS] -score = no +[mypy-statsmodels.*] +ignore_missing_imports = true +[mypy-utm.*] +ignore_missing_imports = true [tool:pytest] @@ -273,5 +296,9 @@ cache_dir = .cache/pytest console_output_style = count env = TESTING=true +filterwarnings = + ignore:::patsy.* markers = - e2e: integration tests, inlc., for example, tests touching a database + db: (integration) tests touching the database + e2e: non-db and non-r integration tests + r: (integration) tests using rpy2 diff --git a/src/urban_meal_delivery/__init__.py b/src/urban_meal_delivery/__init__.py index 943ba9b..b2f39fe 100644 --- a/src/urban_meal_delivery/__init__.py +++ b/src/urban_meal_delivery/__init__.py @@ -5,11 +5,13 @@ Example: >>> umd.__version__ != '0.0.0' True """ +# The config object must come before all other project-internal imports. +from urban_meal_delivery.configuration import config # isort:skip -import os as _os from importlib import metadata as _metadata -from urban_meal_delivery import configuration as _configuration +from urban_meal_delivery import db +from urban_meal_delivery import forecasts try: @@ -24,14 +26,3 @@ else: __author__ = _pkg_info['author'] __pkg_name__ = _pkg_info['name'] __version__ = _pkg_info['version'] - - -# Global `config` object to be used in the package. -config: _configuration.Config = _configuration.make_config( - 'testing' if _os.getenv('TESTING') else 'production', -) - - -# Import `db` down here as it depends on `config`. 
-# pylint:disable=wrong-import-position -from urban_meal_delivery import db # noqa:E402,F401 isort:skip diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 0e6eefa..8e43cf5 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -13,11 +13,6 @@ import random import string import warnings -import dotenv - - -dotenv.load_dotenv() - def random_schema_name() -> str: """Generate a random PostgreSQL schema name for testing.""" @@ -31,14 +26,43 @@ class Config: """Configuration that applies in all situations.""" - # pylint:disable=too-few-public-methods + # Application-specific settings + # ----------------------------- + # Date after which the real-life data is discarded. CUTOFF_DAY = datetime.datetime(2017, 2, 1) # If a scheduled pre-order is made within this # time horizon, we treat it as an ad-hoc order. QUASI_AD_HOC_LIMIT = datetime.timedelta(minutes=45) + # Operating hours of the platform. + SERVICE_START = 11 + SERVICE_END = 23 + + # Side lengths (in meters) for which pixel grids are created. + # They are the basis for the aggregated demand forecasts. + GRID_SIDE_LENGTHS = [707, 1000, 1414] + + # Time steps (in minutes) used to aggregate the + # individual orders into time series. + TIME_STEPS = [60] + + # Training horizons (in full weeks) used to train the forecasting models. + # For now, we only use 8 weeks as that was the best-performing one in + # a previous study (note:4f79e8fa). + TRAIN_HORIZONS = [8] + + # The demand forecasting methods used in the simulations. + FORECASTING_METHODS = ['hets', 'rtarima'] + + # Colors for the visualizations in `folium`. + RESTAURANT_COLOR = 'red' + CUSTOMER_COLOR = 'blue' + + # Implementation-specific settings + # -------------------------------- + DATABASE_URI = os.getenv('DATABASE_URI') # The PostgreSQL schema that holds the tables with the original data. @@ -50,6 +74,8 @@ class Config: ALEMBIC_TABLE = 'alembic_version' ALEMBIC_TABLE_SCHEMA = 'public' + R_LIBS_PATH = os.getenv('R_LIBS') + def __repr__(self) -> str: """Non-literal text representation.""" return '' @@ -58,16 +84,12 @@ class Config: class ProductionConfig(Config): """Configuration for the real dataset.""" - # pylint:disable=too-few-public-methods - TESTING = False class TestingConfig(Config): """Configuration for the test suite.""" - # pylint:disable=too-few-public-methods - TESTING = True DATABASE_URI = os.getenv('DATABASE_URI_TESTING') or Config.DATABASE_URI @@ -78,7 +100,7 @@ def make_config(env: str = 'production') -> Config: """Create a new `Config` object. Args: - env: either 'production' or 'testing'; defaults to the first + env: either 'production' or 'testing' Returns: config: a namespace with all configurations @@ -86,7 +108,8 @@ def make_config(env: str = 'production') -> Config: Raises: ValueError: if `env` is not as specified """ # noqa:DAR203 - config: Config + config: Config # otherwise mypy is confused + if env.strip().lower() == 'production': config = ProductionConfig() elif env.strip().lower() == 'testing': @@ -95,7 +118,19 @@ def make_config(env: str = 'production') -> Config: raise ValueError("Must be either 'production' or 'testing'") # Without a PostgreSQL database the package cannot work. + # As pytest sets the "TESTING" environment variable explicitly, + # the warning is only emitted if the code is not run by pytest. + # We see the bad configuration immediately as all "db" tests fail.
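+ # Illustrative environment setup (the variable names are the ones read above; + # the values shown are hypothetical examples): + # export DATABASE_URI=postgresql://<user>:<password>@localhost:5432/<database> + # export R_LIBS=.r_libs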
+ if config.DATABASE_URI is None and not os.getenv('TESTING'): warnings.warn('Bad configuration: no DATABASE_URI set in the environment') + # Some functionality requires R and a few R packages to be installed. + # To ensure isolation and reproducibility, the project keeps the R dependencies + # in a project-local folder that must be set in the environment. + if config.R_LIBS_PATH is None and not os.getenv('TESTING'): + warnings.warn('Bad configuration: no R_LIBS set in the environment') + return config + + +config = make_config('testing' if os.getenv('TESTING') else 'production') diff --git a/src/urban_meal_delivery/console/__init__.py b/src/urban_meal_delivery/console/__init__.py new file mode 100644 index 0000000..baa089c --- /dev/null +++ b/src/urban_meal_delivery/console/__init__.py @@ -0,0 +1,11 @@ +"""Provide CLI scripts for the project.""" + +from urban_meal_delivery.console import forecasts +from urban_meal_delivery.console import gridify +from urban_meal_delivery.console import main + + +cli = main.entry_point + +cli.add_command(forecasts.tactical_heuristic, name='tactical-forecasts') +cli.add_command(gridify.gridify) diff --git a/src/urban_meal_delivery/console/decorators.py b/src/urban_meal_delivery/console/decorators.py new file mode 100644 index 0000000..ef416dd --- /dev/null +++ b/src/urban_meal_delivery/console/decorators.py @@ -0,0 +1,37 @@ +"""Utils for the CLI scripts.""" + +import functools +import os +import subprocess # noqa:S404 +import sys +from typing import Any, Callable + +import click + + +def db_revision(rev: str) -> Callable: # pragma: no cover -> easy to check visually + """A decorator ensuring the database is at a given revision.""" + + def decorator(func: Callable) -> Callable: + @functools.wraps(func) + def ensure(*args: Any, **kwargs: Any) -> Any: # noqa:WPS430 + """Do not execute the `func` if the revision does not match.""" + if not os.getenv('TESTING'): + result = subprocess.run( # noqa:S603,S607 + ['alembic', 'current'], + capture_output=True, + check=False, + encoding='utf8', + ) + + if not result.stdout.startswith(rev): + click.echo( + click.style(f'Database is not at revision {rev}', fg='red'), + ) + sys.exit(1) + + return func(*args, **kwargs) + + return ensure + + return decorator diff --git a/src/urban_meal_delivery/console/forecasts.py b/src/urban_meal_delivery/console/forecasts.py new file mode 100644 index 0000000..1a7bcf9 --- /dev/null +++ b/src/urban_meal_delivery/console/forecasts.py @@ -0,0 +1,144 @@ +"""CLI script to forecast demand. + +The main purpose of this script is to pre-populate the `db.Forecast` table +with demand predictions such that they can readily be used by the +predictive routing algorithms. +""" + +import datetime as dt +import sys + +import click +from sqlalchemy import func +from sqlalchemy.orm import exc as orm_exc + +from urban_meal_delivery import config +from urban_meal_delivery import db +from urban_meal_delivery.console import decorators +from urban_meal_delivery.forecasts import timify + + +@click.command() +@click.argument('city', default='Paris', type=str) +@click.argument('side_length', default=1000, type=int) +@click.argument('time_step', default=60, type=int) +@click.argument('train_horizon', default=8, type=int) +@decorators.db_revision('8bfb928a31f8') +def tactical_heuristic( # noqa:C901,WPS213,WPS216,WPS231 + city: str, side_length: int, time_step: int, train_horizon: int, +) -> None: # pragma: no cover + """Predict demand for all pixels and days in a city.
+ + This command makes demand `Forecast`s for all `Pixel`s and days + for tactical purposes with the heuristic specified in + `urban_meal_delivery.forecasts.timify.OrderHistory.choose_tactical_model()`. + + According to this heuristic, there is exactly one `Forecast` per + `Pixel` and time step (e.g., hour of the day with 60-minute time steps) + given the lengths of the training horizon and the time step. That is so + because the heuristic chooses only the most promising forecasting `*Model`. + + All `Forecast`s are persisted to the database so that they can be readily + used by the predictive routing algorithms. + + This command first checks which `Forecast`s still need to be made + and then does its work. So, it can be interrupted at any point in + time and then simply continues where it left off the next time it + is executed. + + Important: In a future revision, this command may need to be adapted such + that it does not simply obtain the last time step for which a `Forecast` + was made and continue from there. The reason is that another future command + may make predictions using all available forecasting `*Model`s per `Pixel` + and time step. + + Arguments: + + CITY: one of "Bordeaux", "Lyon", or "Paris" (=default) + + SIDE_LENGTH: of a pixel in the grid; defaults to `1000` + + TIME_STEP: length of one time step in minutes; defaults to `60` + + TRAIN_HORIZON: length of the training horizon; defaults to `8` + """ # noqa:D412,D417,RST215 + # Input validation. + + try: + city_obj = ( + db.session.query(db.City).filter_by(name=city.title()).one() # noqa:WPS221 + ) + except orm_exc.NoResultFound: + click.echo('CITY must be one of "Paris", "Lyon", or "Bordeaux"') + sys.exit(1) + + for grid in city_obj.grids: + if grid.side_length == side_length: + break + else: + click.echo(f'SIDE_LENGTH must be in {config.GRID_SIDE_LENGTHS}') + sys.exit(1) + + if time_step not in config.TIME_STEPS: + click.echo(f'TIME_STEP must be in {config.TIME_STEPS}') + sys.exit(1) + + if train_horizon not in config.TRAIN_HORIZONS: + click.echo(f'TRAIN_HORIZON must be in {config.TRAIN_HORIZONS}') + sys.exit(1) + + click.echo( + 'Parameters: ' + + f'city="{city}", grid.side_length={side_length}, ' + + f'time_step={time_step}, train_horizon={train_horizon}', + ) + + # Load the historic order data. + order_history = timify.OrderHistory(grid=grid, time_step=time_step) # noqa:WPS441 + order_history.aggregate_orders() + + # Run the tactical heuristic. + + for pixel in grid.pixels: # noqa:WPS441 + # Important: this check may need to be adapted once further + # commands are added that make `Forecast`s without the heuristic! + # Continue with forecasting on the day the last prediction was made ... + last_predict_at = ( # noqa:ECE001 + db.session.query(func.max(db.Forecast.start_at)) + .filter(db.Forecast.pixel == pixel) + .first() + )[0] + # ... or start `train_horizon` weeks after the first `Order` + # if no `Forecast`s are in the database yet. + if last_predict_at is None: + predict_day = order_history.first_order_at(pixel_id=pixel.id).date() + predict_day += dt.timedelta(weeks=train_horizon) + else: + predict_day = last_predict_at.date() + + # Go over all days in chronological order ... + while predict_day <= order_history.last_order_at(pixel_id=pixel.id).date(): + # ... and choose the most promising `*Model` for that day.
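+ # For example, with the default `time_step=60` and the platform's + # `SERVICE_START=11` / `SERVICE_END=23`, this yields twelve hourly + # `Forecast`s per pixel and day: 11:00, 12:00, ..., 22:00.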
+ model = order_history.choose_tactical_model( + pixel_id=pixel.id, predict_day=predict_day, train_horizon=train_horizon, + ) + click.echo( + f'Predicting pixel #{pixel.id} in {city} ' + + f'for {predict_day} with {model.name}', + ) + + # Only loop over the time steps corresponding to working hours. + predict_at = dt.datetime( + predict_day.year, + predict_day.month, + predict_day.day, + config.SERVICE_START, + ) + while predict_at.hour < config.SERVICE_END: + model.make_forecast( + pixel=pixel, predict_at=predict_at, train_horizon=train_horizon, + ) + + predict_at += dt.timedelta(minutes=time_step) + + predict_day += dt.timedelta(days=1) diff --git a/src/urban_meal_delivery/console/gridify.py b/src/urban_meal_delivery/console/gridify.py new file mode 100644 index 0000000..3024f14 --- /dev/null +++ b/src/urban_meal_delivery/console/gridify.py @@ -0,0 +1,48 @@ +"""CLI script to create pixel grids.""" + +import click + +from urban_meal_delivery import config +from urban_meal_delivery import db +from urban_meal_delivery.console import decorators + + +@click.command() +@decorators.db_revision('e86290e7305e') +def gridify() -> None: # pragma: no cover note:b1f68d24 + """Create grids for all cities. + + This command creates grids with pixels of various + side lengths (specified in `urban_meal_delivery.config`). + + Pixels are only generated if they contain at least one + (pickup or delivery) address. + + All data are persisted to the database. + """ + cities = db.session.query(db.City).all() + click.echo(f'{len(cities)} cities retrieved from the database') + + for city in cities: + click.echo(f'\nCreating grids for {city.name}') + + for side_length in config.GRID_SIDE_LENGTHS: + click.echo(f'Creating grid with a side length of {side_length} meters') + + grid = db.Grid.gridify(city=city, side_length=side_length) + db.session.add(grid) + + click.echo(f' -> created {len(grid.pixels)} pixels') + + # The number of assigned addresses is the same across different `side_length`s. + db.session.flush() # necessary for the query to work + n_assigned = ( + db.session.query(db.AddressPixelAssociation) + .filter(db.AddressPixelAssociation.grid_id == grid.id) + .count() + ) + click.echo( + f'=> assigned {n_assigned} out of {len(city.addresses)} addresses in {city.name}', # noqa:E501 + ) + + db.session.commit() diff --git a/src/urban_meal_delivery/console.py b/src/urban_meal_delivery/console/main.py similarity index 80% rename from src/urban_meal_delivery/console.py rename to src/urban_meal_delivery/console/main.py index 0141370..8acb4c3 100644 --- a/src/urban_meal_delivery/console.py +++ b/src/urban_meal_delivery/console/main.py @@ -1,14 +1,14 @@ -"""Provide CLI scripts for the project.""" +"""The entry point for all CLI scripts in the project.""" from typing import Any import click -from click.core import Context +from click import core as cli_core import urban_meal_delivery -def show_version(ctx: Context, _param: Any, value: bool) -> None: +def show_version(ctx: cli_core.Context, _param: Any, value: bool) -> None: """Show the package's version.""" # If --version / -V is NOT passed in, # continue with the command. 
@@ -24,7 +24,7 @@ def show_version(ctx: Context, _param: Any, value: bool) -> None: ctx.exit() -@click.command() +@click.group() @click.option( '--version', '-V', @@ -33,5 +33,5 @@ def show_version(ctx: Context, _param: Any, value: bool) -> None: is_eager=True, expose_value=False, ) -def main() -> None: +def entry_point() -> None: """The urban-meal-delivery research project.""" diff --git a/src/urban_meal_delivery/db/__init__.py b/src/urban_meal_delivery/db/__init__.py index 8b9f0b4..ecd9fa1 100644 --- a/src/urban_meal_delivery/db/__init__.py +++ b/src/urban_meal_delivery/db/__init__.py @@ -1,11 +1,16 @@ """Provide the ORM models and a connection to the database.""" -from urban_meal_delivery.db.addresses import Address # noqa:F401 -from urban_meal_delivery.db.cities import City # noqa:F401 -from urban_meal_delivery.db.connection import make_engine # noqa:F401 -from urban_meal_delivery.db.connection import make_session_factory # noqa:F401 -from urban_meal_delivery.db.couriers import Courier # noqa:F401 -from urban_meal_delivery.db.customers import Customer # noqa:F401 -from urban_meal_delivery.db.meta import Base # noqa:F401 -from urban_meal_delivery.db.orders import Order # noqa:F401 -from urban_meal_delivery.db.restaurants import Restaurant # noqa:F401 +from urban_meal_delivery.db.addresses import Address +from urban_meal_delivery.db.addresses_pixels import AddressPixelAssociation +from urban_meal_delivery.db.cities import City +from urban_meal_delivery.db.connection import connection +from urban_meal_delivery.db.connection import engine +from urban_meal_delivery.db.connection import session +from urban_meal_delivery.db.couriers import Courier +from urban_meal_delivery.db.customers import Customer +from urban_meal_delivery.db.forecasts import Forecast +from urban_meal_delivery.db.grids import Grid +from urban_meal_delivery.db.meta import Base +from urban_meal_delivery.db.orders import Order +from urban_meal_delivery.db.pixels import Pixel +from urban_meal_delivery.db.restaurants import Restaurant diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index d9bfa48..8ce7193 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -1,31 +1,35 @@ -"""Provide the ORM's Address model.""" +"""Provide the ORM's `Address` model.""" +from __future__ import annotations + +from typing import Any + +import folium import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql from sqlalchemy.ext import hybrid from urban_meal_delivery.db import meta +from urban_meal_delivery.db import utils class Address(meta.Base): - """An Address of a Customer or a Restaurant on the UDP.""" + """An address of a `Customer` or a `Restaurant` on the UDP.""" __tablename__ = 'addresses' # Columns id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) # noqa:WPS125 - _primary_id = sa.Column('primary_id', sa.Integer, nullable=False, index=True) + primary_id = sa.Column(sa.Integer, nullable=False, index=True) created_at = sa.Column(sa.DateTime, nullable=False) - place_id = sa.Column( - sa.Unicode(length=120), nullable=False, index=True, # noqa:WPS432 - ) + place_id = sa.Column(sa.Unicode(length=120), nullable=False, index=True) latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) - _city_id = sa.Column('city_id', sa.SmallInteger, nullable=False, index=True) - city_name = sa.Column('city', sa.Unicode(length=25), 
nullable=False) # noqa:WPS432 + city_id = sa.Column(sa.SmallInteger, nullable=False, index=True) + city_name = sa.Column('city', sa.Unicode(length=25), nullable=False) zip_code = sa.Column(sa.Integer, nullable=False, index=True) - street = sa.Column(sa.Unicode(length=80), nullable=False) # noqa:WPS432 + street = sa.Column(sa.Unicode(length=80), nullable=False) floor = sa.Column(sa.SmallInteger) # Constraints @@ -43,6 +47,8 @@ class Address(meta.Base): '-180 <= longitude AND longitude <= 180', name='longitude_between_180_degrees', ), + # Needed by a `ForeignKeyConstraint` in `AddressPixelAssociation`. + sa.UniqueConstraint('id', 'city_id'), sa.CheckConstraint( '30000 <= zip_code AND zip_code <= 99999', name='valid_zip_code', ), @@ -51,18 +57,21 @@ class Address(meta.Base): # Relationships city = orm.relationship('City', back_populates='addresses') - restaurant = orm.relationship('Restaurant', back_populates='address', uselist=False) + restaurants = orm.relationship('Restaurant', back_populates='address') orders_picked_up = orm.relationship( 'Order', back_populates='pickup_address', - foreign_keys='[Order._pickup_address_id]', + foreign_keys='[Order.pickup_address_id]', ) - orders_delivered = orm.relationship( 'Order', back_populates='delivery_address', - foreign_keys='[Order._delivery_address_id]', + foreign_keys='[Order.delivery_address_id]', ) + pixels = orm.relationship('AddressPixelAssociation', back_populates='address') + + # We do not implement a `.__init__()` method and leave that to SQLAlchemy. + # Instead, we use `hasattr()` to check for uninitialized attributes. grep:b1f68d24 def __repr__(self) -> str: """Non-literal text representation.""" @@ -72,11 +81,85 @@ class Address(meta.Base): @hybrid.hybrid_property def is_primary(self) -> bool: - """If an Address object is the earliest one entered at its location. + """If an `Address` object is the earliest one entered at its location. Street addresses may have been entered several times with different versions/spellings of the street name and/or different floors. - `is_primary` indicates the first in a group of addresses. + `.is_primary` indicates the first in a group of `Address` objects. """ - return self.id == self._primary_id + return self.id == self.primary_id + + @property + def location(self) -> utils.Location: + """The location of the address. + + The returned `Location` object relates to `.city.southwest`. + + See also the `.x` and `.y` properties that are shortcuts for + `.location.x` and `.location.y`. + + Implementation detail: This property is cached as none of the + underlying attributes used to calculate the value are expected to change. + """ + if not hasattr(self, '_location'): # noqa:WPS421 note:b1f68d24 + self._location = utils.Location(self.latitude, self.longitude) + self._location.relate_to(self.city.southwest) + return self._location + + @property + def x(self) -> int: # noqa:WPS111 + """The relative x-coordinate within the `.city` in meters. + + On the implied x-y plane, the `.city`'s southwest corner is the origin. + + Shortcut for `.location.x`. + """ + return self.location.x + + @property + def y(self) -> int: # noqa:WPS111 + """The relative y-coordinate within the `.city` in meters. + + On the implied x-y plane, the `.city`'s southwest corner is the origin. + + Shortcut for `.location.y`. + """ + return self.location.y + + def clear_map(self) -> Address: # pragma: no cover + """Shortcut to the `.city.clear_map()` method.
+ + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.city.map` object.""" + return self.city.map + + def draw(self, **kwargs: Any) -> folium.Map: # pragma: no cover + """Draw the address on the `.city.map`. + + By default, addresses are shown as black dots. + Use `**kwargs` to overwrite that. + + Args: + **kwargs: passed on to `folium.Circle()`; overwrite default settings + + Returns: + `.city.map` for convenience in interactive usage + """ + defaults = { + 'color': 'black', + 'popup': f'{self.street}, {self.zip_code} {self.city_name}', + } + defaults.update(kwargs) + + marker = folium.Circle((self.latitude, self.longitude), **defaults) + marker.add_to(self.city.map) + + return self.map diff --git a/src/urban_meal_delivery/db/addresses_pixels.py b/src/urban_meal_delivery/db/addresses_pixels.py new file mode 100644 index 0000000..293bde7 --- /dev/null +++ b/src/urban_meal_delivery/db/addresses_pixels.py @@ -0,0 +1,56 @@ +"""Model for the many-to-many relationship between `Address` and `Pixel` objects.""" + +import sqlalchemy as sa +from sqlalchemy import orm + +from urban_meal_delivery.db import meta + + +class AddressPixelAssociation(meta.Base): + """Association pattern between `Address` and `Pixel`. + + This approach is needed here mainly because it implicitly + updates the `city_id` and `grid_id` columns. + + Further info: + https://docs.sqlalchemy.org/en/stable/orm/basic_relationships.html#association-object # noqa:E501 + """ + + __tablename__ = 'addresses_pixels' + + # Columns + address_id = sa.Column(sa.Integer, primary_key=True) + city_id = sa.Column(sa.SmallInteger, nullable=False) + grid_id = sa.Column(sa.SmallInteger, nullable=False) + pixel_id = sa.Column(sa.Integer, primary_key=True) + + # Constraints + __table_args__ = ( + # An `Address` can only be on a `Grid` ... + sa.ForeignKeyConstraint( + ['address_id', 'city_id'], + ['addresses.id', 'addresses.city_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + # ... if their `.city` attributes match. + sa.ForeignKeyConstraint( + ['grid_id', 'city_id'], + ['grids.id', 'grids.city_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + # Each `Address` can only be on a `Grid` once. + sa.UniqueConstraint('address_id', 'grid_id'), + # An association must reference an existing `Grid`-`Pixel` pair.
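+ # (Together with the two composite keys above, which share `city_id`, + # this guarantees that the `Address`, the `Grid`, and the `Pixel` + # all belong to the same city.)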
+ sa.ForeignKeyConstraint( + ['pixel_id', 'grid_id'], + ['pixels.id', 'pixels.grid_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + ) + + # Relationships + address = orm.relationship('Address', back_populates='pixels') + pixel = orm.relationship('Pixel', back_populates='addresses') diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index 00305b2..b6cf4e0 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,16 +1,20 @@ -"""Provide the ORM's City model.""" +"""Provide the ORM's `City` model.""" -from typing import Dict +from __future__ import annotations +import folium import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta +from urban_meal_delivery.db import utils class City(meta.Base): - """A City where the UDP operates in.""" + """A city in which the UDP operates.""" __tablename__ = 'cities' @@ -22,62 +26,227 @@ class City(meta.Base): kml = sa.Column(sa.UnicodeText, nullable=False) # Google Maps related columns - _center_latitude = sa.Column( - 'center_latitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _center_longitude = sa.Column( - 'center_longitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _northeast_latitude = sa.Column( - 'northeast_latitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _northeast_longitude = sa.Column( - 'northeast_longitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _southwest_latitude = sa.Column( - 'southwest_latitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _southwest_longitude = sa.Column( - 'southwest_longitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) + center_latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + center_longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + northeast_latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + northeast_longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + southwest_latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + southwest_longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) initial_zoom = sa.Column(sa.SmallInteger, nullable=False) # Relationships addresses = orm.relationship('Address', back_populates='city') + grids = orm.relationship('Grid', back_populates='city') + + # We do not implement a `.__init__()` method and leave that to SQLAlchemy. + # Instead, we use `hasattr()` to check for uninitialized attributes. grep:d334120e def __repr__(self) -> str: """Non-literal text representation.""" return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name) @property - def location(self) -> Dict[str, float]: - """GPS location of the city's center. + def center(self) -> utils.Location: + """Location of the city's center. - Example: - {"latitude": 48.856614, "longitude": 2.3522219} + Implementation detail: This property is cached as none of the + underlying attributes used to calculate the value are expected to change. """ - return { - 'latitude': self._center_latitude, - 'longitude': self._center_longitude, - } + if not hasattr(self, '_center'): # noqa:WPS421 note:d334120e + self._center = utils.Location(self.center_latitude, self.center_longitude) + return self._center @property - def viewport(self) -> Dict[str, Dict[str, float]]: - """Google Maps viewport of the city.
+ def northeast(self) -> utils.Location: + """The city's northeast corner of the Google Maps viewport. - Example: - { - 'northeast': {'latitude': 48.9021449, 'longitude': 2.4699208}, - 'southwest': {'latitude': 48.815573, 'longitude': 2.225193}, - } - """ # noqa:RST203 - return { - 'northeast': { - 'latitude': self._northeast_latitude, - 'longitude': self._northeast_longitude, - }, - 'southwest': { - 'latitude': self._southwest_latitude, - 'longitude': self._southwest_longitude, - }, + Implementation detail: This property is cached as none of the + underlying attributes used to calculate the value are expected to change. + """ + if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e + self._northeast = utils.Location( + self.northeast_latitude, self.northeast_longitude, + ) + + return self._northeast + + @property + def southwest(self) -> utils.Location: + """The city's southwest corner of the Google Maps viewport. + + Implementation detail: This property is cached as none of the + underlying attributes used to calculate the value are expected to change. + """ + if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e + self._southwest = utils.Location( + self.southwest_latitude, self.southwest_longitude, + ) + + return self._southwest + + @property + def total_x(self) -> int: + """The horizontal distance from the city's west to east end in meters. + + The city borders refer to the Google Maps viewport. + """ + return self.northeast.easting - self.southwest.easting + + @property + def total_y(self) -> int: + """The vertical distance from the city's south to north end in meters. + + The city borders refer to the Google Maps viewport. + """ + return self.northeast.northing - self.southwest.northing + + def clear_map(self) -> City: # pragma: no cover + """Create a new `folium.Map` object aligned with the city's viewport. + + The map is available via the `.map` property. Note that it is a + mutable object that is changed from various places in the code base. + + Returns: + self: enabling method chaining + """ # noqa:DAR203 + self._map = folium.Map( + location=[self.center_latitude, self.center_longitude], + zoom_start=self.initial_zoom, + ) + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """A `folium.Map` object aligned with the city's viewport. + + See docstring for `.clear_map()` for further info. + """ + if not hasattr(self, '_map'): # noqa:WPS421 note:d334120e + self.clear_map() + + return self._map + + def draw_restaurants( # noqa:WPS231 + self, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw all restaurants on the `.map`. + + Args: + order_counts: show the number of orders + + Returns: + `.map` for convenience in interactive usage + """ + # Obtain all primary `Address`es in the city that host `Restaurant`s. + addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Restaurant, db.Address.id == db.Restaurant.address_id) + .filter(db.Address.city == self) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in addresses: + # Show the restaurant's name if there is only one. + # Otherwise, list all the restaurants' ID's.
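+ # (e.g., a tooltip reading "Restaurants #12, #345"; these IDs are + # made up for illustration)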
+ restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join(db.Address, db.Restaurant.address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .all() + ) + if len(restaurants) == 1: + tooltip = f'{restaurants[0].name} (#{restaurants[0].id})' # noqa:WPS221 + else: + tooltip = 'Restaurants ' + ', '.join( # noqa:WPS336 + f'#{restaurant.id}' for restaurant in restaurants + ) + + if order_counts: + # Calculate the number of orders for ALL restaurants ... + n_orders = ( # noqa:ECE001 + db.session.query(db.Order.id) + .join(db.Address, db.Order.pickup_address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + # ... and adjust the size of the red dot on the `.map`. + if n_orders >= 1000: + radius = 20 # noqa:WPS220 + elif n_orders >= 500: + radius = 15 # noqa:WPS220 + elif n_orders >= 100: + radius = 10 # noqa:WPS220 + elif n_orders >= 10: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + tooltip += f' | n_orders={n_orders}' # noqa:WPS336 + + address.draw( + radius=radius, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=tooltip, + ) + + else: + address.draw( + radius=1, color=config.RESTAURANT_COLOR, tooltip=tooltip, + ) + + return self.map + + def draw_zip_codes(self) -> folium.Map: # pragma: no cover + """Draw all addresses on the `.map`, colorized by their `.zip_code`. + + This does not make a distinction between restaurant and customer addresses. + Also, due to the high memory usage, the number of orders is not calculated. + + Returns: + `.map` for convenience in interactive usage + """ + # First, create a color map with distinct colors for each zip code. + all_zip_codes = sorted( + row[0] + for row in db.session.execute( + sa.text( + f""" -- # noqa:S608 + SELECT DISTINCT + zip_code + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {self.id}; + """, + ), + ) + ) + cmap = utils.make_random_cmap(len(all_zip_codes), bright=False) + colors = { + code: utils.rgb_to_hex(*cmap(index)) + for index, code in enumerate(all_zip_codes) } + + # Second, draw every address on the `.map`. + for address in self.addresses: + # Non-primary addresses are covered by primary ones anyway. + if not address.is_primary: + continue + + marker = folium.Circle( # noqa:WPS317 + (address.latitude, address.longitude), + color=colors[address.zip_code], + radius=1, + ) + marker.add_to(self.map) + + return self.map diff --git a/src/urban_meal_delivery/db/connection.py b/src/urban_meal_delivery/db/connection.py index 460ef9d..9c50709 100644 --- a/src/urban_meal_delivery/db/connection.py +++ b/src/urban_meal_delivery/db/connection.py @@ -1,17 +1,28 @@ -"""Provide connection utils for the ORM layer.""" +"""Provide connection utils for the ORM layer. + +This module defines fully configured `engine`, `connection`, and `session` +objects to be used as globals within the `urban_meal_delivery` package. + +If a database is not guaranteed to be available, they are set to `None`. +That is the case on the CI server. +""" + +import os import sqlalchemy as sa -from sqlalchemy import engine +from sqlalchemy import engine as engine_mod from sqlalchemy import orm import urban_meal_delivery -def make_engine() -> engine.Engine: # pragma: no cover - """Provide a configured Engine object.""" - return sa.create_engine(urban_meal_delivery.config.DATABASE_URI) +if os.getenv('TESTING'): + # Specify the types explicitly to make mypy happy.
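+ # With no database available on the CI server, the globals stay unset; + # the test suite presumably wires up its own engine, connection, and session.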
+ engine: engine_mod.Engine = None + connection: engine_mod.Connection = None + session: orm.Session = None - -def make_session_factory() -> orm.Session: # pragma: no cover - """Provide a configured Session factory.""" - return orm.sessionmaker(bind=make_engine()) +else: # pragma: no cover + engine = sa.create_engine(urban_meal_delivery.config.DATABASE_URI) + connection = engine.connect() + session = orm.sessionmaker(bind=connection)() diff --git a/src/urban_meal_delivery/db/couriers.py b/src/urban_meal_delivery/db/couriers.py index be065a5..a4c85ca 100644 --- a/src/urban_meal_delivery/db/couriers.py +++ b/src/urban_meal_delivery/db/couriers.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Courier model.""" +"""Provide the ORM's `Courier` model.""" import sqlalchemy as sa from sqlalchemy import orm @@ -8,9 +8,7 @@ from urban_meal_delivery.db import meta class Courier(meta.Base): - """A Courier working for the UDP.""" - - # pylint:disable=too-few-public-methods + """A courier working for the UDP.""" __tablename__ = 'couriers' diff --git a/src/urban_meal_delivery/db/customers.py b/src/urban_meal_delivery/db/customers.py index e96361a..f6d59c2 100644 --- a/src/urban_meal_delivery/db/customers.py +++ b/src/urban_meal_delivery/db/customers.py @@ -1,15 +1,18 @@ -"""Provide the ORM's Customer model.""" +"""Provide the ORM's `Customer` model.""" +from __future__ import annotations + +import folium import sqlalchemy as sa from sqlalchemy import orm +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta class Customer(meta.Base): - """A Customer of the UDP.""" - - # pylint:disable=too-few-public-methods + """A customer of the UDP.""" __tablename__ = 'customers' @@ -24,3 +27,155 @@ class Customer(meta.Base): # Relationships orders = orm.relationship('Order', back_populates='customer') + + def clear_map(self) -> Customer: # pragma: no cover + """Shortcut to the `...city.clear_map()` method. + + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.orders[0].pickup_address.city.clear_map() # noqa:WPS219 + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `...city.map` object.""" + return self.orders[0].pickup_address.city.map # noqa:WPS219 + + def draw( # noqa:C901,WPS210,WPS231 + self, restaurants: bool = True, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw all the customer's delivery addresses on the `...city.map`. + + By default, the pickup locations (= restaurants) are also shown. + + Args: + restaurants: show the pickup locations + order_counts: show both the number of pickups at the restaurants + and the number of deliveries at the customer's delivery addresses; + the former is only shown if `restaurants=True` + + Returns: + `...city.map` for convenience in interactive usage + """ + # Note: a `Customer` may have more than one delivery `Address`es. + # That is not true for `Restaurant`s after the data cleaning. + + # Obtain all primary `Address`es where + # at least one delivery was made to `self`. 
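+        # The two-step pattern below (collect the distinct `primary_id`s
+        # first, then load those `Address` rows) recurs in
+        # `City.draw_restaurants()`, `Restaurant.draw()`, and
+        # `Pixel.draw()`: it collapses duplicate addresses onto their
+        # primary record before anything is drawn.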
+ delivery_addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Order, db.Address.id == db.Order.delivery_address_id) + .filter(db.Order.customer_id == self.id) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in delivery_addresses: + if order_counts: + n_orders = ( # noqa:ECE001 + db.session.query(db.Order) + .join(db.Address, db.Order.delivery_address_id == db.Address.id) + .filter(db.Order.customer_id == self.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + if n_orders >= 25: + radius = 20 # noqa:WPS220 + elif n_orders >= 10: + radius = 15 # noqa:WPS220 + elif n_orders >= 5: + radius = 10 # noqa:WPS220 + elif n_orders > 1: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + address.draw( + radius=radius, + color=config.CUSTOMER_COLOR, + fill_color=config.CUSTOMER_COLOR, + fill_opacity=0.3, + tooltip=f'n_orders={n_orders}', + ) + + else: + address.draw( + radius=1, color=config.CUSTOMER_COLOR, + ) + + if restaurants: + pickup_addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Order, db.Address.id == db.Order.pickup_address_id) + .filter(db.Order.customer_id == self.id) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in pickup_addresses: # noqa:WPS440 + # Show the restaurant's name if there is only one. + # Otherwise, list all the restaurants' ID's. + # We cannot show the `Order.restaurant.name` due to the aggregation. + restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join(db.Address, db.Restaurant.address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) # noqa:WPS441 + .all() + ) + if len(restaurants) == 1: # type:ignore + tooltip = ( + f'{restaurants[0].name} (#{restaurants[0].id})' # type:ignore + ) + else: + tooltip = 'Restaurants ' + ', '.join( # noqa:WPS336 + f'#{restaurant.id}' for restaurant in restaurants # type:ignore + ) + + if order_counts: + n_orders = ( # noqa:ECE001 + db.session.query(db.Order) + .join(db.Address, db.Order.pickup_address_id == db.Address.id) + .filter(db.Order.customer_id == self.id) + .filter(db.Address.primary_id == address.id) # noqa:WPS441 + .count() + ) + if n_orders >= 25: + radius = 20 # noqa:WPS220 + elif n_orders >= 10: + radius = 15 # noqa:WPS220 + elif n_orders >= 5: + radius = 10 # noqa:WPS220 + elif n_orders > 1: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + tooltip += f' | n_orders={n_orders}' # noqa:WPS336 + + address.draw( # noqa:WPS441 + radius=radius, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=tooltip, + ) + + else: + address.draw( # noqa:WPS441 + radius=1, color=config.RESTAURANT_COLOR, tooltip=tooltip, + ) + + return self.map diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py new file mode 100644 index 0000000..a85fa74 --- /dev/null +++ b/src/urban_meal_delivery/db/forecasts.py @@ -0,0 +1,231 @@ +"""Provide the ORM's `Forecast` model.""" + +from __future__ import annotations + +import math +from typing import List + +import pandas as pd +import sqlalchemy as sa +from sqlalchemy import orm +from sqlalchemy.dialects import postgresql + +from urban_meal_delivery.db import meta + + +class Forecast(meta.Base): + """A demand forecast for a `.pixel` and `.time_step` pair. 
+
+    This table is denormalized on purpose to keep things simple. In particular,
+    the `.model` and `.actual` columns hold redundant values.
+    """
+
+    __tablename__ = 'forecasts'
+
+    # Columns
+    id = sa.Column(sa.Integer, primary_key=True, autoincrement=True)  # noqa:WPS125
+    pixel_id = sa.Column(sa.Integer, nullable=False, index=True)
+    start_at = sa.Column(sa.DateTime, nullable=False)
+    time_step = sa.Column(sa.SmallInteger, nullable=False)
+    train_horizon = sa.Column(sa.SmallInteger, nullable=False)
+    model = sa.Column(sa.Unicode(length=20), nullable=False)
+    # We also store the actual order counts for convenient retrieval.
+    # A `UniqueConstraint` below ensures that the redundant values that
+    # are to be expected are consistent across rows.
+    actual = sa.Column(sa.SmallInteger, nullable=False)
+    # Raw `.prediction`s are stored as `float`s (possibly negative).
+    # The rounding is then done on the fly if required.
+    prediction = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False)
+    # The confidence intervals are treated like the `.prediction`s
+    # but they may be nullable as some methods do not calculate them.
+    low80 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True)
+    high80 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True)
+    low95 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True)
+    high95 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True)
+
+    # Constraints
+    __table_args__ = (
+        sa.ForeignKeyConstraint(
+            ['pixel_id'], ['pixels.id'], onupdate='RESTRICT', ondelete='RESTRICT',
+        ),
+        sa.CheckConstraint(
+            """
+                NOT (
+                    EXTRACT(HOUR FROM start_at) < 11
+                    OR
+                    EXTRACT(HOUR FROM start_at) > 22
+                )
+            """,
+            name='start_at_must_be_within_operating_hours',
+        ),
+        sa.CheckConstraint(
+            'CAST(EXTRACT(MINUTES FROM start_at) AS INTEGER) % 15 = 0',
+            name='start_at_minutes_must_be_quarters_of_the_hour',
+        ),
+        sa.CheckConstraint(
+            'EXTRACT(SECONDS FROM start_at) = 0', name='start_at_allows_no_seconds',
+        ),
+        sa.CheckConstraint(
+            'CAST(EXTRACT(MICROSECONDS FROM start_at) AS INTEGER) % 1000000 = 0',
+            name='start_at_allows_no_microseconds',
+        ),
+        sa.CheckConstraint('time_step > 0', name='time_step_must_be_positive'),
+        sa.CheckConstraint(
+            'train_horizon > 0', name='training_horizon_must_be_positive',
+        ),
+        sa.CheckConstraint('actual >= 0', name='actuals_must_be_non_negative'),
+        sa.CheckConstraint(
+            """
+                NOT (
+                    low80 IS NULL AND high80 IS NOT NULL
+                    OR
+                    low80 IS NOT NULL AND high80 IS NULL
+                    OR
+                    low95 IS NULL AND high95 IS NOT NULL
+                    OR
+                    low95 IS NOT NULL AND high95 IS NULL
+                )
+            """,
+            name='ci_upper_and_lower_bounds',
+        ),
+        sa.CheckConstraint(
+            """
+                NOT (
+                    prediction < low80
+                    OR
+                    prediction < low95
+                    OR
+                    prediction > high80
+                    OR
+                    prediction > high95
+                )
+            """,
+            name='prediction_must_be_within_ci',
+        ),
+        sa.CheckConstraint(
+            """
+                NOT (
+                    low80 > high80
+                    OR
+                    low95 > high95
+                )
+            """,
+            name='ci_upper_bound_greater_than_lower_bound',
+        ),
+        sa.CheckConstraint(
+            """
+                NOT (
+                    low80 < low95
+                    OR
+                    high80 > high95
+                )
+            """,
+            name='ci95_must_be_wider_than_ci80',
+        ),
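+        # For example, the time-related constraints above admit
+        # '2016-03-01 11:00:00' and '2016-03-01 22:45:00' as `start_at`
+        # values but reject '2016-03-01 10:45:00' (outside the operating
+        # hours) and '2016-03-01 11:07:00' (not a quarter of the hour).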
+        # There can be only one prediction per forecasting setting.
+        sa.UniqueConstraint(
+            'pixel_id', 'start_at', 'time_step', 'train_horizon', 'model',
+        ),
+    )
+
+    # Relationships
+    pixel = orm.relationship('Pixel', back_populates='forecasts')
+
+    def __repr__(self) -> str:
+        """Non-literal text representation."""
+        return '<{cls}: {prediction} for pixel ({n_x}|{n_y}) at {start_at}>'.format(
+            cls=self.__class__.__name__,
+            prediction=self.prediction,
+            n_x=self.pixel.n_x,
+            n_y=self.pixel.n_y,
+            start_at=self.start_at,
+        )
+
+    @classmethod
+    def from_dataframe(  # noqa:WPS210,WPS211
+        cls,
+        pixel: db.Pixel,
+        time_step: int,
+        train_horizon: int,
+        model: str,
+        data: pd.DataFrame,
+    ) -> List[db.Forecast]:
+        """Convert results from the forecasting `*Model`s into `Forecast` objects.
+
+        This is an alternative constructor method.
+
+        Background: The functions in `urban_meal_delivery.forecasts.methods`
+        return `pd.DataFrame`s with "start_at" (i.e., `pd.Timestamp` objects)
+        values in the index and five columns "prediction", "low80", "high80",
+        "low95", and "high95" with `np.float` values. The `*Model.predict()`
+        methods in `urban_meal_delivery.forecasts.models` then add an "actual"
+        column. This constructor converts these results into ORM models.
+        Also, the `np.float` values are cast as plain `float` ones as
+        otherwise SQLAlchemy and the database would complain.
+
+        Args:
+            pixel: in which the forecast is made
+            time_step: length of one time step in minutes
+            train_horizon: length of the training horizon in weeks
+            model: name of the forecasting model
+            data: a `pd.DataFrame` as described above (i.e.,
+                with the six columns holding `float`s)
+
+        Returns:
+            forecasts: the `data` as `Forecast` objects
+        """  # noqa:RST215
+        forecasts = []
+
+        for timestamp_idx in data.index:
+            start_at = timestamp_idx.to_pydatetime()
+            actual = int(data.loc[timestamp_idx, 'actual'])
+            prediction = round(data.loc[timestamp_idx, 'prediction'], 5)
+
+            # Explicit type casting. SQLAlchemy does not convert
+            # `float('NaN')`s into plain `None`s.
+
+            low80 = data.loc[timestamp_idx, 'low80']
+            high80 = data.loc[timestamp_idx, 'high80']
+            low95 = data.loc[timestamp_idx, 'low95']
+            high95 = data.loc[timestamp_idx, 'high95']
+
+            if math.isnan(low80):
+                low80 = None
+            else:
+                low80 = round(low80, 5)
+
+            if math.isnan(high80):
+                high80 = None
+            else:
+                high80 = round(high80, 5)
+
+            if math.isnan(low95):
+                low95 = None
+            else:
+                low95 = round(low95, 5)
+
+            if math.isnan(high95):
+                high95 = None
+            else:
+                high95 = round(high95, 5)
+
+            forecasts.append(
+                cls(
+                    pixel=pixel,
+                    start_at=start_at,
+                    time_step=time_step,
+                    train_horizon=train_horizon,
+                    model=model,
+                    actual=actual,
+                    prediction=prediction,
+                    low80=low80,
+                    high80=high80,
+                    low95=low95,
+                    high95=high95,
+                ),
+            )
+
+        return forecasts
+
+
+from urban_meal_delivery import db  # noqa:E402  isort:skip
diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py
new file mode 100644
index 0000000..dac6e48
--- /dev/null
+++ b/src/urban_meal_delivery/db/grids.py
@@ -0,0 +1,137 @@
+"""Provide the ORM's `Grid` model."""
+
+from __future__ import annotations
+
+from typing import Any
+
+import folium
+import sqlalchemy as sa
+from sqlalchemy import orm
+
+from urban_meal_delivery import db
+from urban_meal_delivery.db import meta
+
+
+class Grid(meta.Base):
+    """A grid of `Pixel`s to partition a `City`.
+
+    A grid is characterized by the uniform size of the `Pixel`s it contains.
+    That is configured via the `Grid.side_length` attribute.
+ """ + + __tablename__ = 'grids' + + # Columns + id = sa.Column( # noqa:WPS125 + sa.SmallInteger, primary_key=True, autoincrement=True, + ) + city_id = sa.Column(sa.SmallInteger, nullable=False) + side_length = sa.Column(sa.SmallInteger, nullable=False, unique=True) + + # Constraints + __table_args__ = ( + sa.ForeignKeyConstraint( + ['city_id'], ['cities.id'], onupdate='RESTRICT', ondelete='RESTRICT', + ), + # Each `Grid`, characterized by its `.side_length`, + # may only exists once for a given `.city`. + sa.UniqueConstraint('city_id', 'side_length'), + # Needed by a `ForeignKeyConstraint` in `address_pixel_association`. + sa.UniqueConstraint('id', 'city_id'), + ) + + # Relationships + city = orm.relationship('City', back_populates='grids') + pixels = orm.relationship('Pixel', back_populates='grid') + + def __repr__(self) -> str: + """Non-literal text representation.""" + return '<{cls}: {area} sqr. km>'.format( + cls=self.__class__.__name__, area=self.pixel_area, + ) + + # Convenience properties + @property + def pixel_area(self) -> float: + """The area of a `Pixel` on the grid in square kilometers.""" + return round((self.side_length ** 2) / 1_000_000, 1) + + @classmethod + def gridify(cls, city: db.City, side_length: int) -> db.Grid: # noqa:WPS210 + """Create a fully populated `Grid` for a `city`. + + The `Grid` contains only `Pixel`s that have at least one + `Order.pickup_address`. `Address` objects outside the `.city`'s + viewport are discarded. + + Args: + city: city for which the grid is created + side_length: the length of a square `Pixel`'s side + + Returns: + grid: including `grid.pixels` with the associated `city.addresses` + """ + grid = cls(city=city, side_length=side_length) + + # `Pixel`s grouped by `.n_x`-`.n_y` coordinates. + pixels = {} + + pickup_addresses = ( # noqa:ECE:001 + db.session.query(db.Address) + .join(db.Order, db.Address.id == db.Order.pickup_address_id) + .filter(db.Address.city == city) + .all() + ) + + for address in pickup_addresses: + # Check if an `address` is not within the `city`'s viewport, ... + not_within_city_viewport = ( + address.x < 0 + or address.x > city.total_x + or address.y < 0 + or address.y > city.total_y + ) + # ... and, if so, the `address` does not belong to any `Pixel`. + if not_within_city_viewport: + continue + + # Determine which `pixel` the `address` belongs to ... + n_x, n_y = address.x // side_length, address.y // side_length + # ... and create a new `Pixel` object if necessary. + if (n_x, n_y) not in pixels: + pixels[(n_x, n_y)] = db.Pixel(grid=grid, n_x=n_x, n_y=n_y) + pixel = pixels[(n_x, n_y)] + + # Create an association between the `address` and `pixel`. + assoc = db.AddressPixelAssociation(address=address, pixel=pixel) + pixel.addresses.append(assoc) + + return grid + + def clear_map(self) -> Grid: # pragma: no cover + """Shortcut to the `.city.clear_map()` method. + + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.city.map` object.""" + return self.city.map + + def draw(self, **kwargs: Any) -> folium.Map: # pragma: no cover + """Draw all pixels in the grid. 
+ + Args: + **kwargs: passed on to `Pixel.draw()` + + Returns: + `.city.map` for convenience in interactive usage + """ + for pixel in self.pixels: + pixel.draw(**kwargs) + + return self.map diff --git a/src/urban_meal_delivery/db/orders.py b/src/urban_meal_delivery/db/orders.py index 5bb617c..0b4550b 100644 --- a/src/urban_meal_delivery/db/orders.py +++ b/src/urban_meal_delivery/db/orders.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Order model.""" +"""Provide the ORM's `Order` model.""" import datetime @@ -10,14 +10,14 @@ from urban_meal_delivery.db import meta class Order(meta.Base): # noqa:WPS214 - """An Order by a Customer of the UDP.""" + """An order by a `Customer` of the UDP.""" __tablename__ = 'orders' # Generic columns id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) # noqa:WPS125 _delivery_id = sa.Column('delivery_id', sa.Integer, index=True, unique=True) - _customer_id = sa.Column('customer_id', sa.Integer, nullable=False, index=True) + customer_id = sa.Column(sa.Integer, nullable=False, index=True) placed_at = sa.Column(sa.DateTime, nullable=False, index=True) ad_hoc = sa.Column(sa.Boolean, nullable=False) scheduled_delivery_at = sa.Column(sa.DateTime, index=True) @@ -33,9 +33,7 @@ class Order(meta.Base): # noqa:WPS214 total = sa.Column(sa.Integer, nullable=False) # Restaurant-related columns - _restaurant_id = sa.Column( - 'restaurant_id', sa.SmallInteger, nullable=False, index=True, - ) + restaurant_id = sa.Column(sa.SmallInteger, nullable=False, index=True) restaurant_notified_at = sa.Column(sa.DateTime) restaurant_notified_at_corrected = sa.Column(sa.Boolean, index=True) restaurant_confirmed_at = sa.Column(sa.DateTime) @@ -45,7 +43,7 @@ class Order(meta.Base): # noqa:WPS214 estimated_prep_buffer = sa.Column(sa.Integer, nullable=False, index=True) # Dispatch-related columns - _courier_id = sa.Column('courier_id', sa.Integer, index=True) + courier_id = sa.Column(sa.Integer, index=True) dispatch_at = sa.Column(sa.DateTime) dispatch_at_corrected = sa.Column(sa.Boolean, index=True) courier_notified_at = sa.Column(sa.DateTime) @@ -55,9 +53,7 @@ class Order(meta.Base): # noqa:WPS214 utilization = sa.Column(sa.SmallInteger, nullable=False) # Pickup-related columns - _pickup_address_id = sa.Column( - 'pickup_address_id', sa.Integer, nullable=False, index=True, - ) + pickup_address_id = sa.Column(sa.Integer, nullable=False, index=True) reached_pickup_at = sa.Column(sa.DateTime) pickup_at = sa.Column(sa.DateTime) pickup_at_corrected = sa.Column(sa.Boolean, index=True) @@ -66,9 +62,7 @@ class Order(meta.Base): # noqa:WPS214 left_pickup_at_corrected = sa.Column(sa.Boolean, index=True) # Delivery-related columns - _delivery_address_id = sa.Column( - 'delivery_address_id', sa.Integer, nullable=False, index=True, - ) + delivery_address_id = sa.Column(sa.Integer, nullable=False, index=True) reached_delivery_at = sa.Column(sa.DateTime) delivery_at = sa.Column(sa.DateTime) delivery_at_corrected = sa.Column(sa.Boolean, index=True) @@ -85,12 +79,6 @@ class Order(meta.Base): # noqa:WPS214 sa.ForeignKeyConstraint( ['customer_id'], ['customers.id'], onupdate='RESTRICT', ondelete='RESTRICT', ), - sa.ForeignKeyConstraint( - ['restaurant_id'], - ['restaurants.id'], - onupdate='RESTRICT', - ondelete='RESTRICT', - ), sa.ForeignKeyConstraint( ['courier_id'], ['couriers.id'], onupdate='RESTRICT', ondelete='RESTRICT', ), @@ -100,6 +88,14 @@ class Order(meta.Base): # noqa:WPS214 onupdate='RESTRICT', ondelete='RESTRICT', ), + sa.ForeignKeyConstraint( + # This foreign key ensures that there is 
only + # one `.pickup_address` per `.restaurant` + ['restaurant_id', 'pickup_address_id'], + ['restaurants.id', 'restaurants.address_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), sa.ForeignKeyConstraint( ['delivery_address_id'], ['addresses.id'], @@ -308,29 +304,33 @@ class Order(meta.Base): # noqa:WPS214 # Relationships customer = orm.relationship('Customer', back_populates='orders') - restaurant = orm.relationship('Restaurant', back_populates='orders') + restaurant = orm.relationship( + 'Restaurant', + back_populates='orders', + primaryjoin='Restaurant.id == Order.restaurant_id', + ) courier = orm.relationship('Courier', back_populates='orders') pickup_address = orm.relationship( 'Address', back_populates='orders_picked_up', - foreign_keys='[Order._pickup_address_id]', + foreign_keys='[Order.pickup_address_id]', ) delivery_address = orm.relationship( 'Address', back_populates='orders_delivered', - foreign_keys='[Order._delivery_address_id]', + foreign_keys='[Order.delivery_address_id]', ) # Convenience properties @property def scheduled(self) -> bool: - """Inverse of Order.ad_hoc.""" + """Inverse of `.ad_hoc`.""" return not self.ad_hoc @property def completed(self) -> bool: - """Inverse of Order.cancelled.""" + """Inverse of `.cancelled`.""" return not self.cancelled @property @@ -353,9 +353,9 @@ class Order(meta.Base): # noqa:WPS214 @property def time_to_accept(self) -> datetime.timedelta: - """Time until a courier accepted an order. + """Time until the `.courier` accepted the order. - This adds the time it took the UDP to notify a courier. + This measures the time it took the UDP to notify the `.courier` after dispatch. """ if not self.dispatch_at: raise RuntimeError('dispatch_at is not set') @@ -365,9 +365,9 @@ class Order(meta.Base): # noqa:WPS214 @property def time_to_react(self) -> datetime.timedelta: - """Time a courier took to accept an order. + """Time the `.courier` took to accept an order. - This time is a subset of Order.time_to_accept. + A subset of `.time_to_accept`. """ if not self.courier_notified_at: raise RuntimeError('courier_notified_at is not set') @@ -377,7 +377,7 @@ class Order(meta.Base): # noqa:WPS214 @property def time_to_pickup(self) -> datetime.timedelta: - """Time from a courier's acceptance to arrival at the pickup location.""" + """Time from the `.courier`'s acceptance to arrival at `.pickup_address`.""" if not self.courier_accepted_at: raise RuntimeError('courier_accepted_at is not set') if not self.reached_pickup_at: @@ -386,7 +386,7 @@ class Order(meta.Base): # noqa:WPS214 @property def time_at_pickup(self) -> datetime.timedelta: - """Time a courier stayed at the pickup location.""" + """Time the `.courier` stayed at the `.pickup_address`.""" if not self.reached_pickup_at: raise RuntimeError('reached_pickup_at is not set') if not self.pickup_at: @@ -405,13 +405,13 @@ class Order(meta.Base): # noqa:WPS214 @property def courier_early(self) -> datetime.timedelta: - """Time by which a courier is early for pickup. + """Time by which the `.courier` is early for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the courier is on time or late. + `datetime.timedelta(seconds=0)` if the `.courier` is on time or late. - Goes together with Order.courier_late. + Goes together with `.courier_late`. 
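+
+        For example, with `scheduled_pickup_at` at 12:00 and
+        `reached_pickup_at` at 12:07, `courier_early` is `timedelta(0)`
+        and `courier_late` is `timedelta(minutes=7)`.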
""" return max( datetime.timedelta(), self.scheduled_pickup_at - self.reached_pickup_at, @@ -419,13 +419,13 @@ class Order(meta.Base): # noqa:WPS214 @property def courier_late(self) -> datetime.timedelta: - """Time by which a courier is late for pickup. + """Time by which the `.courier` is late for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the courier is on time or early. + `datetime.timedelta(seconds=0)` if the `.courier` is on time or early. - Goes together with Order.courier_early. + Goes together with `.courier_early`. """ return max( datetime.timedelta(), self.reached_pickup_at - self.scheduled_pickup_at, @@ -433,31 +433,31 @@ class Order(meta.Base): # noqa:WPS214 @property def restaurant_early(self) -> datetime.timedelta: - """Time by which a restaurant is early for pickup. + """Time by which the `.restaurant` is early for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the restaurant is on time or late. + `datetime.timedelta(seconds=0)` if the `.restaurant` is on time or late. - Goes together with Order.restaurant_late. + Goes together with `.restaurant_late`. """ return max(datetime.timedelta(), self.scheduled_pickup_at - self.pickup_at) @property def restaurant_late(self) -> datetime.timedelta: - """Time by which a restaurant is late for pickup. + """Time by which the `.restaurant` is late for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the restaurant is on time or early. + `datetime.timedelta(seconds=0)` if the `.restaurant` is on time or early. - Goes together with Order.restaurant_early. + Goes together with `.restaurant_early`. """ return max(datetime.timedelta(), self.pickup_at - self.scheduled_pickup_at) @property def time_to_delivery(self) -> datetime.timedelta: - """Time a courier took from pickup to delivery location.""" + """Time the `.courier` took from `.pickup_address` to `.delivery_address`.""" if not self.pickup_at: raise RuntimeError('pickup_at is not set') if not self.reached_delivery_at: @@ -466,7 +466,7 @@ class Order(meta.Base): # noqa:WPS214 @property def time_at_delivery(self) -> datetime.timedelta: - """Time a courier stayed at the delivery location.""" + """Time the `.courier` stayed at the `.delivery_address`.""" if not self.reached_delivery_at: raise RuntimeError('reached_delivery_at is not set') if not self.delivery_at: @@ -475,20 +475,20 @@ class Order(meta.Base): # noqa:WPS214 @property def courier_waited_at_delivery(self) -> datetime.timedelta: - """Time a courier waited at the delivery location.""" + """Time the `.courier` waited at the `.delivery_address`.""" if self._courier_waited_at_delivery: return self.time_at_delivery return datetime.timedelta() @property def delivery_early(self) -> datetime.timedelta: - """Time by which a scheduled order was early. + """Time by which a `.scheduled` order was early. - Measured relative to Order.scheduled_delivery_at. + Measured relative to `.scheduled_delivery_at`. - 0 if the delivery is on time or late. + `datetime.timedelta(seconds=0)` if the delivery is on time or late. - Goes together with Order.delivery_late. + Goes together with `.delivery_late`. """ if not self.scheduled: raise AttributeError('Makes sense only for scheduled orders') @@ -496,13 +496,13 @@ class Order(meta.Base): # noqa:WPS214 @property def delivery_late(self) -> datetime.timedelta: - """Time by which a scheduled order was late. 
+ """Time by which a `.scheduled` order was late. - Measured relative to Order.scheduled_delivery_at. + Measured relative to `.scheduled_delivery_at`. - 0 if the delivery is on time or early. + `datetime.timedelta(seconds=0)` if the delivery is on time or early. - Goes together with Order.delivery_early. + Goes together with `.delivery_early`. """ if not self.scheduled: raise AttributeError('Makes sense only for scheduled orders') @@ -510,7 +510,7 @@ class Order(meta.Base): # noqa:WPS214 @property def total_time(self) -> datetime.timedelta: - """Time from order placement to delivery for an ad-hoc order.""" + """Time from order placement to delivery for an `.ad_hoc` order.""" if self.scheduled: raise AttributeError('Scheduled orders have no total_time') if self.cancelled: diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py new file mode 100644 index 0000000..9461d4d --- /dev/null +++ b/src/urban_meal_delivery/db/pixels.py @@ -0,0 +1,261 @@ +"""Provide the ORM's `Pixel` model.""" + +from __future__ import annotations + +from typing import List + +import folium +import sqlalchemy as sa +import utm +from sqlalchemy import orm + +from urban_meal_delivery import config +from urban_meal_delivery import db +from urban_meal_delivery.db import meta +from urban_meal_delivery.db import utils + + +class Pixel(meta.Base): + """A pixel in a `Grid`. + + Square pixels aggregate `Address` objects within a `City`. + Every `Address` belongs to exactly one `Pixel` in a `Grid`. + + Every `Pixel` has a unique `n_x`-`n_y` coordinate within the `Grid`. + """ + + __tablename__ = 'pixels' + + # Columns + id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) # noqa:WPS125 + grid_id = sa.Column(sa.SmallInteger, nullable=False, index=True) + n_x = sa.Column(sa.SmallInteger, nullable=False, index=True) + n_y = sa.Column(sa.SmallInteger, nullable=False, index=True) + + # Constraints + __table_args__ = ( + sa.ForeignKeyConstraint( + ['grid_id'], ['grids.id'], onupdate='RESTRICT', ondelete='RESTRICT', + ), + sa.CheckConstraint('0 <= n_x', name='n_x_is_positive'), + sa.CheckConstraint('0 <= n_y', name='n_y_is_positive'), + # Needed by a `ForeignKeyConstraint` in `AddressPixelAssociation`. + sa.UniqueConstraint('id', 'grid_id'), + # Each coordinate within the same `grid` is used at most once. + sa.UniqueConstraint('grid_id', 'n_x', 'n_y'), + ) + + # Relationships + grid = orm.relationship('Grid', back_populates='pixels') + addresses = orm.relationship('AddressPixelAssociation', back_populates='pixel') + forecasts = orm.relationship('Forecast', back_populates='pixel') + + def __repr__(self) -> str: + """Non-literal text representation.""" + return '<{cls}: ({x}|{y})>'.format( + cls=self.__class__.__name__, x=self.n_x, y=self.n_y, + ) + + # Convenience properties + + @property + def side_length(self) -> int: + """The length of one side of a pixel in meters.""" + return self.grid.side_length + + @property + def area(self) -> float: + """The area of a pixel in square kilometers.""" + return self.grid.pixel_area + + @property + def northeast(self) -> utils.Location: + """The pixel's northeast corner, relative to `.grid.city.southwest`. + + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. + """ + if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e + # The origin is the southwest corner of the `.grid.city`'s viewport. 
+            easting_origin = self.grid.city.southwest.easting
+            northing_origin = self.grid.city.southwest.northing
+
+            # `+1` as otherwise we get the pixel's `.southwest` corner.
+            easting = easting_origin + ((self.n_x + 1) * self.side_length)
+            northing = northing_origin + ((self.n_y + 1) * self.side_length)
+            zone, band = self.grid.city.southwest.zone_details
+            latitude, longitude = utm.to_latlon(easting, northing, zone, band)
+
+            self._northeast = utils.Location(latitude, longitude)
+            self._northeast.relate_to(self.grid.city.southwest)
+
+        return self._northeast
+
+    @property
+    def southwest(self) -> utils.Location:
+        """The pixel's southwest corner, relative to `.grid.city.southwest`.
+
+        Implementation detail: This property is cached as none of the
+        underlying attributes to calculate the value are to be changed.
+        """
+        if not hasattr(self, '_southwest'):  # noqa:WPS421  note:d334120e
+            # The origin is the southwest corner of the `.grid.city`'s viewport.
+            easting_origin = self.grid.city.southwest.easting
+            northing_origin = self.grid.city.southwest.northing
+
+            easting = easting_origin + (self.n_x * self.side_length)
+            northing = northing_origin + (self.n_y * self.side_length)
+            zone, band = self.grid.city.southwest.zone_details
+            latitude, longitude = utm.to_latlon(easting, northing, zone, band)
+
+            self._southwest = utils.Location(latitude, longitude)
+            self._southwest.relate_to(self.grid.city.southwest)
+
+        return self._southwest
+
+    @property
+    def restaurants(self) -> List[db.Restaurant]:  # pragma: no cover
+        """Obtain all `Restaurant`s in `self`."""
+        if not hasattr(self, '_restaurants'):  # noqa:WPS421  note:d334120e
+            self._restaurants = (  # noqa:ECE001
+                db.session.query(db.Restaurant)
+                .join(
+                    db.AddressPixelAssociation,
+                    db.Restaurant.address_id == db.AddressPixelAssociation.address_id,
+                )
+                .filter(db.AddressPixelAssociation.pixel_id == self.id)
+                .all()
+            )
+
+        return self._restaurants
+
+    def clear_map(self) -> Pixel:  # pragma: no cover
+        """Shortcut to the `.grid.city.clear_map()` method.
+
+        Returns:
+            self: enabling method chaining
+        """  # noqa:D402,DAR203
+        self.grid.city.clear_map()
+        return self
+
+    @property  # pragma: no cover
+    def map(self) -> folium.Map:  # noqa:WPS125
+        """Shortcut to the `.grid.city.map` object."""
+        return self.grid.city.map
+
+    def draw(  # noqa:C901,WPS210,WPS231
+        self, restaurants: bool = True, order_counts: bool = False,  # pragma: no cover
+    ) -> folium.Map:
+        """Draw the pixel on the `.grid.city.map`.
+
+        Args:
+            restaurants: include the restaurants
+            order_counts: show the number of orders at a restaurant
+
+        Returns:
+            `.grid.city.map` for convenience in interactive usage
+        """
+        bounds = (
+            (self.southwest.latitude, self.southwest.longitude),
+            (self.northeast.latitude, self.northeast.longitude),
+        )
+        info_text = f'Pixel({self.n_x}|{self.n_y})'
+
+        # Make the `Pixel`s look like a checkerboard.
+        if (self.n_x + self.n_y) % 2:
+            color = '#808000'
+        else:
+            color = '#ff8c00'
+
+        marker = folium.Rectangle(
+            bounds=bounds,
+            color='gray',
+            opacity=0.2,
+            weight=5,
+            fill_color=color,
+            fill_opacity=0.2,
+            popup=info_text,
+            tooltip=info_text,
+        )
+        marker.add_to(self.grid.city.map)
+
+        if restaurants:
+            # Obtain all primary `Address`es in the city that host `Restaurant`s
+            # and are in the `self` `Pixel`.
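+            # This query mirrors the one in `City.draw_restaurants()`,
+            # with an extra join on `AddressPixelAssociation` to keep
+            # only the addresses that lie within `self`.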
+ addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + ( + db.session.query(db.Address.primary_id) + .join( + db.Restaurant, + db.Address.id == db.Restaurant.address_id, + ) + .join( + db.AddressPixelAssociation, + db.Address.id == db.AddressPixelAssociation.address_id, + ) + .filter(db.AddressPixelAssociation.pixel_id == self.id) + ) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in addresses: + # Show the restaurant's name if there is only one. + # Otherwise, list all the restaurants' ID's. + restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join(db.Address, db.Restaurant.address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .all() + ) + if len(restaurants) == 1: # type:ignore + tooltip = ( + f'{restaurants[0].name} (#{restaurants[0].id})' # type:ignore + ) + else: + tooltip = 'Restaurants ' + ', '.join( # noqa:WPS336 + f'#{restaurant.id}' for restaurant in restaurants # type:ignore + ) + + if order_counts: + # Calculate the number of orders for ALL restaurants ... + n_orders = ( # noqa:ECE001 + db.session.query(db.Order.id) + .join(db.Address, db.Order.pickup_address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + # ... and adjust the size of the red dot on the `.map`. + if n_orders >= 1000: + radius = 20 # noqa:WPS220 + elif n_orders >= 500: + radius = 15 # noqa:WPS220 + elif n_orders >= 100: + radius = 10 # noqa:WPS220 + elif n_orders >= 10: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + tooltip += f' | n_orders={n_orders}' # noqa:WPS336 + + address.draw( + radius=radius, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=tooltip, + ) + + else: + address.draw( + radius=1, color=config.RESTAURANT_COLOR, tooltip=tooltip, + ) + + return self.map diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index 4531d09..cf02e53 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -1,15 +1,23 @@ -"""Provide the ORM's Restaurant model.""" +"""Provide the ORM's `Restaurant` model.""" +from __future__ import annotations + +import folium import sqlalchemy as sa from sqlalchemy import orm +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta class Restaurant(meta.Base): - """A Restaurant selling meals on the UDP.""" + """A restaurant selling meals on the UDP. - # pylint:disable=too-few-public-methods + In the historic dataset, a `Restaurant` may have changed its `Address` + throughout its life time. The ORM model only stores the current one, + which in most cases is also the only one. + """ __tablename__ = 'restaurants' @@ -18,8 +26,8 @@ class Restaurant(meta.Base): sa.SmallInteger, primary_key=True, autoincrement=False, ) created_at = sa.Column(sa.DateTime, nullable=False) - name = sa.Column(sa.Unicode(length=45), nullable=False) # noqa:WPS432 - _address_id = sa.Column('address_id', sa.Integer, nullable=False, index=True) + name = sa.Column(sa.Unicode(length=45), nullable=False) + address_id = sa.Column(sa.Integer, nullable=False, index=True) estimated_prep_duration = sa.Column(sa.SmallInteger, nullable=False) # Constraints @@ -31,12 +39,103 @@ class Restaurant(meta.Base): '0 <= estimated_prep_duration AND estimated_prep_duration <= 2400', name='realistic_estimated_prep_duration', ), + # Needed by a `ForeignKeyConstraint` in `Order`. 
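+        # Composite foreign keys may only reference column sets that are
+        # guaranteed to be unique on the target table; so, the
+        # ('restaurant_id', 'pickup_address_id') key in `Order` requires
+        # this extra constraint.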
+ sa.UniqueConstraint('id', 'address_id'), ) # Relationships - address = orm.relationship('Address', back_populates='restaurant') + address = orm.relationship('Address', back_populates='restaurants') orders = orm.relationship('Order', back_populates='restaurant') def __repr__(self) -> str: """Non-literal text representation.""" return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name) + + def clear_map(self) -> Restaurant: # pragma: no cover + """Shortcut to the `.address.city.clear_map()` method. + + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.address.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.address.city.map` object.""" + return self.address.city.map + + def draw( # noqa:WPS231 + self, customers: bool = True, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw the restaurant on the `.address.city.map`. + + By default, the restaurant's delivery locations are also shown. + + Args: + customers: show the restaurant's delivery locations + order_counts: show the number of orders at the delivery locations; + only useful if `customers=True` + + Returns: + `.address.city.map` for convenience in interactive usage + """ + if customers: + # Obtain all primary `Address`es in the city that + # received at least one delivery from `self`. + delivery_addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Order, db.Address.id == db.Order.delivery_address_id) + .filter(db.Order.restaurant_id == self.id) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in delivery_addresses: + if order_counts: + n_orders = ( # noqa:ECE001 + db.session.query(db.Order) + .join(db.Address, db.Order.delivery_address_id == db.Address.id) + .filter(db.Order.restaurant_id == self.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + if n_orders >= 25: + radius = 20 # noqa:WPS220 + elif n_orders >= 10: + radius = 15 # noqa:WPS220 + elif n_orders >= 5: + radius = 10 # noqa:WPS220 + elif n_orders > 1: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + address.draw( + radius=radius, + color=config.CUSTOMER_COLOR, + fill_color=config.CUSTOMER_COLOR, + fill_opacity=0.3, + tooltip=f'n_orders={n_orders}', + ) + + else: + address.draw( + radius=1, color=config.CUSTOMER_COLOR, + ) + + self.address.draw( + radius=20, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=f'{self.name} (#{self.id}) | n_orders={len(self.orders)}', + ) + + return self.map diff --git a/src/urban_meal_delivery/db/utils/__init__.py b/src/urban_meal_delivery/db/utils/__init__.py new file mode 100644 index 0000000..5d6f8b6 --- /dev/null +++ b/src/urban_meal_delivery/db/utils/__init__.py @@ -0,0 +1,5 @@ +"""Utilities used by the ORM models.""" + +from urban_meal_delivery.db.utils.colors import make_random_cmap +from urban_meal_delivery.db.utils.colors import rgb_to_hex +from urban_meal_delivery.db.utils.locations import Location diff --git a/src/urban_meal_delivery/db/utils/colors.py b/src/urban_meal_delivery/db/utils/colors.py new file mode 100644 index 0000000..ad45327 --- /dev/null +++ b/src/urban_meal_delivery/db/utils/colors.py @@ -0,0 +1,69 @@ +"""Utilities for drawing maps with `folium`.""" + +import colorsys + +import numpy as np +from matplotlib import colors + + +def make_random_cmap( + n_colors: int, bright: 
bool = True,  # pragma: no cover
+) -> colors.LinearSegmentedColormap:
+    """Create a random `Colormap` with `n_colors` different colors.
+
+    Args:
+        n_colors: number of different colors; size of the `Colormap`
+        bright: `True` for strong colors, `False` for pastel colors
+
+    Returns:
+        colormap
+    """
+    np.random.seed(42)
+
+    if bright:
+        hsv_colors = [
+            (
+                np.random.uniform(low=0.0, high=1),
+                np.random.uniform(low=0.2, high=1),
+                np.random.uniform(low=0.9, high=1),
+            )
+            for _ in range(n_colors)
+        ]
+
+        rgb_colors = []
+        for color in hsv_colors:
+            rgb_colors.append(colorsys.hsv_to_rgb(*color))
+
+    else:
+        low = 0.0
+        high = 0.66
+
+        rgb_colors = [
+            (
+                np.random.uniform(low=low, high=high),
+                np.random.uniform(low=low, high=high),
+                np.random.uniform(low=low, high=high),
+            )
+            for _ in range(n_colors)
+        ]
+
+    return colors.LinearSegmentedColormap.from_list(
+        'random_color_map', rgb_colors, N=n_colors,
+    )
+
+
+def rgb_to_hex(*args: float) -> str:  # pragma: no cover
+    """Convert RGB colors into hexadecimal notation.
+
+    Args:
+        *args: values between `0` and `1` for the three RGB channels
+
+    Returns:
+        hexadecimal_representation
+    """
+    red, green, blue = (
+        int(255 * args[0]),
+        int(255 * args[1]),
+        int(255 * args[2]),
+    )
+    return f'#{red:02x}{green:02x}{blue:02x}'  # noqa:WPS221
diff --git a/src/urban_meal_delivery/db/utils/locations.py b/src/urban_meal_delivery/db/utils/locations.py
new file mode 100644
index 0000000..b6ef41e
--- /dev/null
+++ b/src/urban_meal_delivery/db/utils/locations.py
@@ -0,0 +1,142 @@
+"""A `Location` class to unify working with coordinates."""
+
+from __future__ import annotations
+
+from typing import Optional, Tuple
+
+import utm
+
+
+class Location:
+    """A location represented in WGS84 and UTM coordinates.
+
+    WGS84:
+        - "conventional" system with latitude-longitude pairs
+        - models the earth as an ellipsoid and locates points in 3D
+
+    UTM:
+        - the Universal Transverse Mercator system
+        - projects WGS84 coordinates onto a 2D map
+        - can be used for visualizations and calculations directly
+        - distances are in meters
+
+    Further info on how WGS84 and UTM are related:
+        https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system
+    """
+
+    def __init__(self, latitude: float, longitude: float) -> None:
+        """Create a location from a WGS84-conforming `latitude`-`longitude` pair."""
+        # The SQLAlchemy columns come as `Decimal`s due to the `DOUBLE_PRECISION`.
+        self._latitude = float(latitude)
+        self._longitude = float(longitude)
+
+        easting, northing, zone, band = utm.from_latlon(self._latitude, self._longitude)
+
+        # `.easting` and `.northing` as `int`s are precise enough.
+        self._easting = int(easting)
+        self._northing = int(northing)
+        self._zone = zone
+        self._band = band.upper()
+
+        self._normalized_easting: Optional[int] = None
+        self._normalized_northing: Optional[int] = None
+
+    def __repr__(self) -> str:
+        """A non-literal text representation in the UTM system.
+
+        Convention is {ZONE} {EASTING} {NORTHING}.
+
+        Example:
+            `'<Location: 31U 452400 5411000>'`
+        """
+        return f'<Location: {self.zone} {self.easting} {self.northing}>'  # noqa:WPS221
+
+    @property
+    def latitude(self) -> float:
+        """The latitude of the location in degrees (WGS84).
+
+        Between -90 and +90 degrees.
+        """
+        return self._latitude
+
+    @property
+    def longitude(self) -> float:
+        """The longitude of the location in degrees (WGS84).
+
+        Between -180 and +180 degrees.
+ """ + return self._longitude + + @property + def easting(self) -> int: + """The easting of the location in meters (UTM).""" + return self._easting + + @property + def northing(self) -> int: + """The northing of the location in meters (UTM).""" + return self._northing + + @property + def zone(self) -> str: + """The UTM zone of the location.""" + return f'{self._zone}{self._band}' + + @property + def zone_details(self) -> Tuple[int, str]: + """The UTM zone of the location as the zone number and the band.""" + return (self._zone, self._band) + + def __eq__(self, other: object) -> bool: + """Check if two `Location` objects are the same location.""" + if not isinstance(other, Location): + return NotImplemented + + if self.zone != other.zone: + raise ValueError('locations must be in the same zone, including the band') + + return (self.easting, self.northing) == (other.easting, other.northing) + + @property + def x(self) -> int: # noqa:WPS111 + """The `.easting` of the location in meters, relative to some origin. + + The origin, which defines the `(0, 0)` coordinate, is set with `.relate_to()`. + """ + if self._normalized_easting is None: + raise RuntimeError('an origin to relate to must be set first') + + return self._normalized_easting + + @property + def y(self) -> int: # noqa:WPS111 + """The `.northing` of the location in meters, relative to some origin. + + The origin, which defines the `(0, 0)` coordinate, is set with `.relate_to()`. + """ + if self._normalized_northing is None: + raise RuntimeError('an origin to relate to must be set first') + + return self._normalized_northing + + def relate_to(self, other: Location) -> None: + """Make the origin in the lower-left corner relative to `other`. + + The `.x` and `.y` properties are the `.easting` and `.northing` values + of `self` minus the ones from `other`. So, `.x` and `.y` make up a + Cartesian coordinate system where the `other` origin is `(0, 0)`. + + To prevent semantic errors in calculations based on the `.x` and `.y` + properties, the `other` origin may only be set once! + """ + if self._normalized_easting is not None: + raise RuntimeError('the `other` origin may only be set once') + + if not isinstance(other, Location): + raise TypeError('`other` is not a `Location` object') + + if self.zone != other.zone: + raise ValueError('`other` must be in the same zone, including the band') + + self._normalized_easting = self.easting - other.easting + self._normalized_northing = self.northing - other.northing diff --git a/src/urban_meal_delivery/forecasts/__init__.py b/src/urban_meal_delivery/forecasts/__init__.py new file mode 100644 index 0000000..2dcd196 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/__init__.py @@ -0,0 +1,29 @@ +"""Demand forecasting utilities. + +This sub-package is divided into further sub-packages and modules as follows: + +`methods` contains various time series related statistical methods, implemented +as plain `function` objects that are used to predict into the future given a +time series of historic order counts. The methods are context-agnostic, meaning +that they only take and return `pd.Series/DataFrame`s holding numbers and +are not concerned with how these numbers were generated or what they mean. +Some functions, like `arima.predict()` or `ets.predict()` wrap functions called +in R using the `rpy2` library. Others, like `extrapolate_season.predict()`, are +written in plain Python. 
+ +`timify` defines an `OrderHistory` class that abstracts away the communication +with the database and provides `pd.Series` objects with the order counts that +are fed into the `methods`. In particular, it uses SQL statements behind the +scenes to calculate the historic order counts on a per-`Pixel` level. Once the +data is loaded from the database, an `OrderHistory` instance provides various +ways to slice out, or generate, different kinds of order time series (e.g., +"horizontal" vs. "vertical" time series). + +`models` defines various forecasting `*Model`s that combine a given kind of +time series with one of the forecasting `methods`. For example, the ETS method +applied to a horizontal time series is implemented in the `HorizontalETSModel`. +""" + +from urban_meal_delivery.forecasts import methods +from urban_meal_delivery.forecasts import models +from urban_meal_delivery.forecasts import timify diff --git a/src/urban_meal_delivery/forecasts/methods/__init__.py b/src/urban_meal_delivery/forecasts/methods/__init__.py new file mode 100644 index 0000000..5690e79 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/__init__.py @@ -0,0 +1,6 @@ +"""Various forecasting methods implemented as functions.""" + +from urban_meal_delivery.forecasts.methods import arima +from urban_meal_delivery.forecasts.methods import decomposition +from urban_meal_delivery.forecasts.methods import ets +from urban_meal_delivery.forecasts.methods import extrapolate_season diff --git a/src/urban_meal_delivery/forecasts/methods/arima.py b/src/urban_meal_delivery/forecasts/methods/arima.py new file mode 100644 index 0000000..3abd60e --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/arima.py @@ -0,0 +1,76 @@ +"""A wrapper around R's "auto.arima" function.""" + +import pandas as pd +from rpy2 import robjects +from rpy2.robjects import pandas2ri + + +def predict( + training_ts: pd.Series, + forecast_interval: pd.DatetimeIndex, + *, + frequency: int, + seasonal_fit: bool = False, +) -> pd.DataFrame: + """Predict with an automatically chosen ARIMA model. + + Note: The function does not check if the `forecast_interval` + extends the `training_ts`'s interval without a gap! + + Args: + training_ts: past observations to be fitted + forecast_interval: interval into which the `training_ts` is forecast; + its length becomes the step size `h` in the forecasting model in R + frequency: frequency of the observations in the `training_ts` + seasonal_fit: if a seasonal ARIMA model should be fitted + + Returns: + predictions: point forecasts (i.e., the "prediction" column) and + confidence intervals (i.e, the four "low/high80/95" columns) + + Raises: + ValueError: if `training_ts` contains `NaN` values + """ + # Initialize R only if necessary as it is tested only in nox's + # "ci-tests-slow" session and "ci-tests-fast" should not fail. + from urban_meal_delivery import init_r # noqa:F401,WPS433 + + # Re-seed R every time it is used to ensure reproducibility. + robjects.r('set.seed(42)') + + if training_ts.isnull().any(): + raise ValueError('`training_ts` must not contain `NaN` values') + + # Copy the data from Python to R. + robjects.globalenv['data'] = robjects.r['ts']( + pandas2ri.py2rpy(training_ts), frequency=frequency, + ) + + seasonal = 'TRUE' if bool(seasonal_fit) else 'FALSE' + n_steps_ahead = len(forecast_interval) + + # Make the predictions in R. 
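+    # The f-string below evaluates an R snippet along the lines of
+    #     as.data.frame(forecast(auto.arima(data, ...), h = 8))
+    # where `data` was copied into R's global environment above and
+    # `h` is the number of time steps to be predicted.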
+ result = robjects.r( + f""" + as.data.frame( + forecast( + auto.arima(data, approximation = TRUE, seasonal = {seasonal:s}), + h = {n_steps_ahead:d} + ) + ) + """, + ) + + # Convert the results into a nice `pd.DataFrame` with the right `.index`. + forecasts = pandas2ri.rpy2py(result) + forecasts.index = forecast_interval + + return forecasts.round(5).rename( + columns={ + 'Point Forecast': 'prediction', + 'Lo 80': 'low80', + 'Hi 80': 'high80', + 'Lo 95': 'low95', + 'Hi 95': 'high95', + }, + ) diff --git a/src/urban_meal_delivery/forecasts/methods/decomposition.py b/src/urban_meal_delivery/forecasts/methods/decomposition.py new file mode 100644 index 0000000..3be8582 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/decomposition.py @@ -0,0 +1,181 @@ +"""Seasonal-trend decomposition procedure based on LOESS (STL). + +This module defines a `stl()` function that wraps R's STL decomposition function +using the `rpy2` library. +""" + +import math + +import pandas as pd +from rpy2 import robjects +from rpy2.robjects import pandas2ri + + +def stl( # noqa:C901,WPS210,WPS211,WPS231 + time_series: pd.Series, + *, + frequency: int, + ns: int, + nt: int = None, + nl: int = None, + ds: int = 0, + dt: int = 1, + dl: int = 1, + js: int = None, + jt: int = None, + jl: int = None, + ni: int = 2, + no: int = 0, # noqa:WPS110 +) -> pd.DataFrame: + """Decompose a time series into seasonal, trend, and residual components. + + This is a Python wrapper around the corresponding R function. + + Further info on the STL method: + https://www.nniiem.ru/file/news/2016/stl-statistical-model.pdf + https://otexts.com/fpp2/stl.html + + Further info on the R's "stl" function: + https://www.rdocumentation.org/packages/stats/versions/3.6.2/topics/stl + + Args: + time_series: time series with a `DateTime` based index; + must not contain `NaN` values + frequency: frequency of the observations in the `time_series` + ns: smoothing parameter for the seasonal component + (= window size of the seasonal smoother); + must be odd and `>= 7` so that the seasonal component is smooth; + the greater `ns`, the smoother the seasonal component; + so, this is a hyper-parameter optimized in accordance with the application + nt: smoothing parameter for the trend component + (= window size of the trend smoother); + must be odd and `>= (1.5 * frequency) / [1 - (1.5 / ns)]`; + the latter threshold is the default value; + the greater `nt`, the smoother the trend component + nl: smoothing parameter for the low-pass filter; + must be odd and `>= frequency`; + the least odd number `>= frequency` is the default + ds: degree of locally fitted polynomial in seasonal smoothing; + must be `0` or `1` + dt: degree of locally fitted polynomial in trend smoothing; + must be `0` or `1` + dl: degree of locally fitted polynomial in low-pass smoothing; + must be `0` or `1` + js: number of steps by which the seasonal smoother skips ahead + and then linearly interpolates between observations; + if set to `1`, the smoother is evaluated at all points; + to make the STL decomposition faster, increase this value; + by default, `js` is the smallest integer `>= 0.1 * ns` + jt: number of steps by which the trend smoother skips ahead + and then linearly interpolates between observations; + if set to `1`, the smoother is evaluated at all points; + to make the STL decomposition faster, increase this value; + by default, `jt` is the smallest integer `>= 0.1 * nt` + jl: number of steps by which the low-pass smoother skips ahead + and then linearly interpolates 
between observations; + if set to `1`, the smoother is evaluated at all points; + to make the STL decomposition faster, increase this value; + by default, `jl` is the smallest integer `>= 0.1 * nl` + ni: number of iterations of the inner loop that updates the + seasonal and trend components; + usually, a low value (e.g., `2`) suffices + no: number of iterations of the outer loop that handles outliers; + also known as the "robustness" loop; + if no outliers need to be handled, set `no=0`; + otherwise, `no=5` or `no=10` combined with `ni=1` is a good choice + + Returns: + result: a DataFrame with three columns ("seasonal", "trend", and "residual") + providing time series of the individual components + + Raises: + ValueError: some argument does not adhere to the specifications above + """ + # Validate all arguments and set default values. + + if time_series.isnull().any(): + raise ValueError('`time_series` must not contain `NaN` values') + + if ns % 2 == 0 or ns < 7: + raise ValueError('`ns` must be odd and `>= 7`') + + default_nt = math.ceil((1.5 * frequency) / (1 - (1.5 / ns))) + if nt is not None: + if nt % 2 == 0 or nt < default_nt: + raise ValueError( + '`nt` must be odd and `>= (1.5 * frequency) / [1 - (1.5 / ns)]`, ' + + 'which is {0}'.format(default_nt), + ) + else: + nt = default_nt + if nt % 2 == 0: # pragma: no cover => hard to construct edge case + nt += 1 + + if nl is not None: + if nl % 2 == 0 or nl < frequency: + raise ValueError('`nl` must be odd and `>= frequency`') + elif frequency % 2 == 0: + nl = frequency + 1 + else: # pragma: no cover => hard to construct edge case + nl = frequency + + if ds not in {0, 1}: + raise ValueError('`ds` must be either `0` or `1`') + if dt not in {0, 1}: + raise ValueError('`dt` must be either `0` or `1`') + if dl not in {0, 1}: + raise ValueError('`dl` must be either `0` or `1`') + + if js is not None: + if js <= 0: + raise ValueError('`js` must be positive') + else: + js = math.ceil(ns / 10) + + if jt is not None: + if jt <= 0: + raise ValueError('`jt` must be positive') + else: + jt = math.ceil(nt / 10) + + if jl is not None: + if jl <= 0: + raise ValueError('`jl` must be positive') + else: + jl = math.ceil(nl / 10) + + if ni <= 0: + raise ValueError('`ni` must be positive') + + if no < 0: + raise ValueError('`no` must be non-negative') + elif no > 0: + robust = True + else: + robust = False + + # Initialize R only if necessary as it is tested only in nox's + # "ci-tests-slow" session and "ci-tests-fast" should not fail. + from urban_meal_delivery import init_r # noqa:F401,WPS433 + + # Re-seed R every time it is used to ensure reproducibility. + robjects.r('set.seed(42)') + + # Call the STL function in R. + ts = robjects.r['ts'](pandas2ri.py2rpy(time_series), frequency=frequency) + result = robjects.r['stl']( + ts, ns, ds, nt, dt, nl, dl, js, jt, jl, robust, ni, no, # noqa:WPS221 + ) + + # Unpack the result to a `pd.DataFrame`. 
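+    # R's `stl()` returns a list object whose first element, the
+    # "time.series" matrix, holds the "seasonal", "trend", and
+    # "remainder" columns; `result[0]` extracts that matrix.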
+    result = pandas2ri.rpy2py(result[0])
+    result = pd.DataFrame(
+        data={
+            'seasonal': result[:, 0],
+            'trend': result[:, 1],
+            'residual': result[:, 2],
+        },
+        index=time_series.index,
+    )
+
+    return result.round(5)
diff --git a/src/urban_meal_delivery/forecasts/methods/ets.py b/src/urban_meal_delivery/forecasts/methods/ets.py
new file mode 100644
index 0000000..5b70aef
--- /dev/null
+++ b/src/urban_meal_delivery/forecasts/methods/ets.py
@@ -0,0 +1,77 @@
+"""A wrapper around R's "ets" function."""
+
+import pandas as pd
+from rpy2 import robjects
+from rpy2.robjects import pandas2ri
+
+
+def predict(
+    training_ts: pd.Series,
+    forecast_interval: pd.DatetimeIndex,
+    *,
+    frequency: int,
+    seasonal_fit: bool = False,
+) -> pd.DataFrame:
+    """Predict with an automatically calibrated ETS model.
+
+    Note: The function does not check if the `forecast_interval`
+    extends the `training_ts`'s interval without a gap!
+
+    Args:
+        training_ts: past observations to be fitted
+        forecast_interval: interval into which the `training_ts` is forecast;
+            its length becomes the step size `h` in the forecasting model in R
+        frequency: frequency of the observations in the `training_ts`
+        seasonal_fit: whether a "ZZZ" (seasonal) or a "ZZN" (non-seasonal)
+            type ETS model should be fitted
+
+    Returns:
+        predictions: point forecasts (i.e., the "prediction" column) and
+            confidence intervals (i.e., the four "low/high80/95" columns)
+
+    Raises:
+        ValueError: if `training_ts` contains `NaN` values
+    """
+    # Initialize R only if necessary as it is tested only in nox's
+    # "ci-tests-slow" session and "ci-tests-fast" should not fail.
+    from urban_meal_delivery import init_r  # noqa:F401,WPS433
+
+    # Re-seed R every time it is used to ensure reproducibility.
+    robjects.r('set.seed(42)')
+
+    if training_ts.isnull().any():
+        raise ValueError('`training_ts` must not contain `NaN` values')
+
+    # Copy the data from Python to R.
+    robjects.globalenv['data'] = robjects.r['ts'](
+        pandas2ri.py2rpy(training_ts), frequency=frequency,
+    )
+
+    model = 'ZZZ' if bool(seasonal_fit) else 'ZZN'
+    n_steps_ahead = len(forecast_interval)
+
+    # Make the predictions in R.
+    result = robjects.r(
+        f"""
+        as.data.frame(
+            forecast(
+                ets(data, model = "{model:s}"),
+                h = {n_steps_ahead:d}
+            )
+        )
+        """,
+    )
+
+    # Convert the results into a nice `pd.DataFrame` with the right `.index`.
+    forecasts = pandas2ri.rpy2py(result)
+    forecasts.index = forecast_interval
+
+    return forecasts.round(5).rename(
+        columns={
+            'Point Forecast': 'prediction',
+            'Lo 80': 'low80',
+            'Hi 80': 'high80',
+            'Lo 95': 'low95',
+            'Hi 95': 'high95',
+        },
+    )
diff --git a/src/urban_meal_delivery/forecasts/methods/extrapolate_season.py b/src/urban_meal_delivery/forecasts/methods/extrapolate_season.py
new file mode 100644
index 0000000..dfbc9c4
--- /dev/null
+++ b/src/urban_meal_delivery/forecasts/methods/extrapolate_season.py
@@ -0,0 +1,72 @@
+"""Forecast by linear extrapolation of a seasonal component."""
+
+import pandas as pd
+from statsmodels.tsa import api as ts_stats
+
+
+def predict(
+    training_ts: pd.Series, forecast_interval: pd.DatetimeIndex, *, frequency: int,
+) -> pd.DataFrame:
+    """Extrapolate a seasonal component with a linear model.
+
+    A naive forecast for each time unit of the day is calculated by linear
+    extrapolation from all observations of the same time of day and on the same
+    day of the week (i.e., same seasonal lag).
+
+    Note: The function does not check if the `forecast_interval`
+    extends the `training_ts`'s interval without a gap!
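+
+    Example: in a vertical time series with 60-minute time steps (i.e.,
+    `frequency` is `7 * 12 = 84`), all Wednesday-noon observations are
+    `84` index positions apart; a straight line is fitted through them
+    and extended into the `forecast_interval`.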
+
+    Args:
+        training_ts: past observations to be fitted;
+            assumed to be a seasonal component after time series decomposition
+        forecast_interval: interval into which the `training_ts` is forecast;
+            its length becomes the number of time steps to be forecast
+        frequency: frequency of the observations in the `training_ts`
+
+    Returns:
+        predictions: point forecasts (i.e., the "prediction" column);
+            includes the four "low/high80/95" columns for the confidence intervals
+            that only contain `NaN` values as this method does not make
+            any statistical assumptions about the time series process
+
+    Raises:
+        ValueError: if `training_ts` contains `NaN` values or some predictions
+            could not be made for time steps in the `forecast_interval`
+    """
+    if training_ts.isnull().any():
+        raise ValueError('`training_ts` must not contain `NaN` values')
+
+    extrapolated_ts = pd.Series(index=forecast_interval, dtype=float)
+    seasonal_lag = frequency * (training_ts.index[1] - training_ts.index[0])
+
+    for lag in range(frequency):
+        # Obtain all `observations` of the same seasonal lag and
+        # fit a straight line through them (= `trend`).
+        observations = training_ts[slice(lag, 999_999_999, frequency)]
+        trend = observations - ts_stats.detrend(observations)
+
+        # Create a point forecast by linear extrapolation
+        # for one or even more time steps ahead.
+        slope = trend[-1] - trend[-2]
+        prediction = trend[-1] + slope
+        idx = observations.index.max() + seasonal_lag
+        while idx <= forecast_interval.max():
+            if idx in forecast_interval:
+                extrapolated_ts.loc[idx] = prediction
+            prediction += slope
+            idx += seasonal_lag
+
+    # Sanity check.
+    if extrapolated_ts.isnull().any():  # pragma: no cover
+        raise ValueError('missing predictions in the `forecast_interval`')
+
+    return pd.DataFrame(
+        data={
+            'prediction': extrapolated_ts.round(5),
+            'low80': float('NaN'),
+            'high80': float('NaN'),
+            'low95': float('NaN'),
+            'high95': float('NaN'),
+        },
+        index=forecast_interval,
+    )
diff --git a/src/urban_meal_delivery/forecasts/models/__init__.py b/src/urban_meal_delivery/forecasts/models/__init__.py
new file mode 100644
index 0000000..c5c905f
--- /dev/null
+++ b/src/urban_meal_delivery/forecasts/models/__init__.py
@@ -0,0 +1,37 @@
+"""Define the forecasting `*Model`s used in this project.
+
+`*Model`s are different from plain forecasting `methods` in that they are tied
+to a given kind of historic order time series, as provided by the `OrderHistory`
+class in the `timify` module. For example, the ARIMA model applied to a vertical
+time series becomes the `VerticalARIMAModel`.
+
+An overview of the `*Model`s used for tactical forecasting can be found in section
+"3.6 Forecasting Models" in the paper "Real-time Demand Forecasting for an Urban
+Delivery Platform" that is part of the `urban-meal-delivery` research project.
+
+For the paper check:
+    https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf
+    https://www.sciencedirect.com/science/article/pii/S1366554520307936
+
+This sub-package is organized as follows. The `base` module defines an abstract
+`ForecastingModelABC` class that unifies how the concrete `*Model`s work.
+While the abstract `.predict()` method returns a `pd.DataFrame` (= basically,
+the result of one of the forecasting `methods`), the concrete `.make_forecast()`
+method converts the results into `Forecast` (=ORM) objects.
+Also, `.make_forecast()` implements a caching strategy where already made +`Forecast`s are loaded from the database instead of calculating them again, +which could be a heavier computation. + +The `tactical` sub-package contains all the `*Model`s used to implement the +UDP's predictive routing strategy. + +A future `planning` sub-package will contain the `*Model`s used to plan the +`Courier`'s shifts a week ahead. +""" # noqa:RST215 + +from urban_meal_delivery.forecasts.models.base import ForecastingModelABC +from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalETSModel +from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalSMAModel +from urban_meal_delivery.forecasts.models.tactical.other import TrivialModel +from urban_meal_delivery.forecasts.models.tactical.realtime import RealtimeARIMAModel +from urban_meal_delivery.forecasts.models.tactical.vertical import VerticalARIMAModel diff --git a/src/urban_meal_delivery/forecasts/models/base.py b/src/urban_meal_delivery/forecasts/models/base.py new file mode 100644 index 0000000..9a9cd72 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/base.py @@ -0,0 +1,116 @@ +"""The abstract blueprint for a forecasting `*Model`.""" + +import abc +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import timify + + +class ForecastingModelABC(abc.ABC): + """An abstract interface of a forecasting `*Model`.""" + + def __init__(self, order_history: timify.OrderHistory) -> None: + """Initialize a new forecasting model. + + Args: + order_history: an abstraction providing the time series data + """ + self._order_history = order_history + + @property + @abc.abstractmethod + def name(self) -> str: + """The name of the model. + + Used to identify `Forecast`s of the same `*Model` in the database. + So, these must be chosen carefully and must not be changed later on! + + Example: "hets" or "varima" for tactical demand forecasting + """ + + @abc.abstractmethod + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Concrete implementation of how a `*Model` makes a prediction. + + This method is called by the unified `*Model.make_forecast()` method, + which implements the caching logic with the database. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actuals, predictions, and possibly 80%/95% confidence intervals; + includes a row for the time step starting at `predict_at` and + may contain further rows for other time steps on the same day + """ # noqa:DAR202 + + def make_forecast( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> db.Forecast: + """Make a forecast for the time step starting at `predict_at`. + + Important: This method uses a unified `predict_at` argument. + Some `*Model`s, in particular vertical ones, are only trained once per + day and then make a prediction for all time steps on that day, and + therefore, work with a `predict_day` argument instead of `predict_at` + behind the scenes. Then, all `Forecast`s are stored into the database + and only the one starting at `predict_at` is returned. 
+ + Args: + pixel: pixel in which the `Forecast` is made + predict_at: time step (i.e., "start_at") to make the `Forecast` for + train_horizon: weeks of historic data used to forecast `predict_at` + + Returns: + actual, prediction, and possibly 80%/95% confidence intervals + for the time step starting at `predict_at` + + # noqa:DAR401 RuntimeError + """ + if ( # noqa:WPS337 + cached_forecast := db.session.query(db.Forecast) # noqa:ECE001,WPS221 + .filter_by(pixel=pixel) + .filter_by(start_at=predict_at) + .filter_by(time_step=self._order_history.time_step) + .filter_by(train_horizon=train_horizon) + .filter_by(model=self.name) + .first() + ) : + return cached_forecast + + # Horizontal and real-time `*Model`s return a `pd.DataFrame` with one + # row corresponding to the time step starting at `predict_at` whereas + # vertical models return several rows, covering all time steps of a day. + predictions = self.predict(pixel, predict_at, train_horizon) + + # Convert the `predictions` into a `list` of `Forecast` objects. + forecasts = db.Forecast.from_dataframe( + pixel=pixel, + time_step=self._order_history.time_step, + train_horizon=train_horizon, + model=self.name, + data=predictions, + ) + + # We persist all `Forecast`s into the database to + # not have to run the same model training again. + db.session.add_all(forecasts) + db.session.commit() + + # The one `Forecast` object asked for must be in `forecasts` + # if the concrete `*Model.predict()` method works correctly; ... + for forecast in forecasts: + if forecast.start_at == predict_at: + return forecast + + # ..., however, we put in a loud error, just in case. + raise RuntimeError( # pragma: no cover + '`Forecast` for `predict_at` was not returned by `*Model.predict()`', + ) diff --git a/src/urban_meal_delivery/forecasts/models/tactical/__init__.py b/src/urban_meal_delivery/forecasts/models/tactical/__init__.py new file mode 100644 index 0000000..df70622 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/__init__.py @@ -0,0 +1,16 @@ +"""Forecasting `*Model`s to predict demand for tactical purposes. + +The `*Model`s in this module predict only a small number (e.g., one) +of time steps into the near future and are used to implement the UDP's +predictive routing strategies. + +They are classified into "horizontal", "vertical", and "real-time" models +with respect to what historic data they are trained on and how often they +are re-trained on the day to be predicted. For the details, check section +"3.6 Forecasting Models" in the paper "Real-time Demand Forecasting for an +Urban Delivery Platform". + +For the paper check: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + https://www.sciencedirect.com/science/article/pii/S1366554520307936 +""" # noqa:RST215 diff --git a/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py b/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py new file mode 100644 index 0000000..3a18d76 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py @@ -0,0 +1,130 @@ +"""Horizontal forecasting `*Model`s to predict demand for tactical purposes. + +Horizontal `*Model`s take the historic order counts only from time steps +corresponding to the same time of day as the one to be predicted (i.e., the +one starting at `predict_at`). Then, they make a prediction for only one day +into the future. Thus, the training time series have a `frequency` of `7`, the +number of days in a week. 
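+
+For example, when predicting the demand for a Monday at noon, the training
+series holds the noon order counts of every day in the preceding
+`train_horizon` weeks.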
+"""  # noqa:RST215
+
+import datetime as dt
+
+import pandas as pd
+
+from urban_meal_delivery import db
+from urban_meal_delivery.forecasts import methods
+from urban_meal_delivery.forecasts.models import base
+
+
+class HorizontalETSModel(base.ForecastingModelABC):
+    """The ETS model applied on a horizontal time series."""
+
+    name = 'hets'
+
+    def predict(
+        self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int,
+    ) -> pd.DataFrame:
+        """Predict demand for a time step.
+
+        Args:
+            pixel: pixel in which the prediction is made
+            predict_at: time step (i.e., "start_at") to make the prediction for
+            train_horizon: weeks of historic data used to predict `predict_at`
+
+        Returns:
+            actual order counts (i.e., the "actual" column),
+            point forecasts (i.e., the "prediction" column), and
+            confidence intervals (i.e., the four "low/high80/95" columns);
+            contains one row for the `predict_at` time step
+
+        # noqa:DAR401 RuntimeError
+        """
+        # Generate the historic (and horizontal) order time series.
+        training_ts, frequency, actuals_ts = self._order_history.make_horizontal_ts(
+            pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon,
+        )
+
+        # Sanity check.
+        if frequency != 7:  # pragma: no cover
+            raise RuntimeError('`frequency` should be `7`')
+
+        # Make `predictions` with the seasonal ETS method ("ZZZ" model).
+        predictions = methods.ets.predict(
+            training_ts=training_ts,
+            forecast_interval=actuals_ts.index,
+            frequency=frequency,  # `== 7`, the number of weekdays
+            seasonal_fit=True,  # because there was no decomposition before
+        )
+
+        predictions.insert(loc=0, column='actual', value=actuals_ts)
+
+        # Sanity checks.
+        if predictions.isnull().any().any():  # pragma: no cover
+            raise RuntimeError('missing predictions in hets model')
+        if predict_at not in predictions.index:  # pragma: no cover
+            raise RuntimeError('missing prediction for `predict_at`')
+
+        return predictions
+
+
+class HorizontalSMAModel(base.ForecastingModelABC):
+    """A simple moving average model applied on a horizontal time series."""
+
+    name = 'hsma'
+
+    def predict(
+        self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int,
+    ) -> pd.DataFrame:
+        """Predict demand for a time step.
+
+        Args:
+            pixel: pixel in which the prediction is made
+            predict_at: time step (i.e., "start_at") to make the prediction for
+            train_horizon: weeks of historic data used to predict `predict_at`
+
+        Returns:
+            actual order counts (i.e., the "actual" column) and
+            point forecasts (i.e., the "prediction" column);
+            this model does not support confidence intervals;
+            contains one row for the `predict_at` time step
+
+        # noqa:DAR401 RuntimeError
+        """
+        # Generate the historic (and horizontal) order time series.
+        training_ts, frequency, actuals_ts = self._order_history.make_horizontal_ts(
+            pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon,
+        )
+
+        # Sanity checks.
+        if frequency != 7:  # pragma: no cover
+            raise RuntimeError('`frequency` should be `7`')
+        if len(actuals_ts) != 1:  # pragma: no cover
+            raise RuntimeError(
+                'the hsma model can only predict one step into the future',
+            )
+
+        # The "prediction" is simply the mean of the `training_ts`.
+        # As the `training_ts` covers only full week horizons,
+        # no adjustment regarding the weekly seasonality is needed.
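+        # E.g., with an 8-week training horizon, this is the mean over
+        # `7 * 8 = 56` observations of the same time of day.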
+ predictions = pd.DataFrame( + data={ + 'actual': actuals_ts, + 'prediction': training_ts.values.mean(), + 'low80': float('NaN'), + 'high80': float('NaN'), + 'low95': float('NaN'), + 'high95': float('NaN'), + }, + index=actuals_ts.index, + ) + + # Sanity checks. + if ( # noqa:WPS337 + predictions[['actual', 'prediction']].isnull().any().any() + ): # pragma: no cover + + raise RuntimeError('missing predictions in hsma model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/src/urban_meal_delivery/forecasts/models/tactical/other.py b/src/urban_meal_delivery/forecasts/models/tactical/other.py new file mode 100644 index 0000000..b439957 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/other.py @@ -0,0 +1,75 @@ +"""Forecasting `*Model`s to predict demand for tactical purposes ... + +... that cannot be classified into either "horizontal", "vertical", +or "real-time". +""" # noqa:RST215 + +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts.models import base + + +class TrivialModel(base.ForecastingModelABC): + """A trivial model predicting `0` demand. + + No need to distinguish between a "horizontal", "vertical", or + "real-time" model here as all give the same prediction for all time steps. + """ + + name = 'trivial' + + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Predict demand for a time step. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actual order counts (i.e., the "actual" column) and + point forecasts (i.e., the "prediction" column); + this model does not support confidence intervals; + contains one row for the `predict_at` time step + + # noqa:DAR401 RuntimeError + """ + # Generate the historic order time series mainly to check if a valid + # `training_ts` exists (i.e., the demand history is long enough). + _, frequency, actuals_ts = self._order_history.make_horizontal_ts( + pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon, + ) + + # Sanity checks. + if frequency != 7: # pragma: no cover + raise RuntimeError('`frequency` should be `7`') + if len(actuals_ts) != 1: # pragma: no cover + raise RuntimeError( + 'the trivial model can only predict one step into the future', + ) + + # The "prediction" is simply `0.0`. + predictions = pd.DataFrame( + data={ + 'actual': actuals_ts, + 'prediction': 0.0, + 'low80': float('NaN'), + 'high80': float('NaN'), + 'low95': float('NaN'), + 'high95': float('NaN'), + }, + index=actuals_ts.index, + ) + + # Sanity checks. + if predictions['actual'].isnull().any(): # pragma: no cover + raise RuntimeError('missing actuals in trivial model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/src/urban_meal_delivery/forecasts/models/tactical/realtime.py b/src/urban_meal_delivery/forecasts/models/tactical/realtime.py new file mode 100644 index 0000000..bf30ee5 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/realtime.py @@ -0,0 +1,117 @@ +"""Real-time forecasting `*Model`s to predict demand for tactical purposes. 
+
+Real-time `*Model`s take order counts of all time steps in the training data
+and make a prediction for only one time step on the day to be predicted (i.e.,
+the one starting at `predict_at`). Thus, the training time series have a
+`frequency` of the number of weekdays, `7`, times the number of time steps on
+a day. For example, with 60-minute time steps and operating hours from 11 am
+to 11 pm (= 12 time steps per day), the `frequency` becomes `7 * 12`, which
+is `84`. Real-time `*Model`s train the forecasting `methods` on a seasonally
+decomposed time series internally.
+"""  # noqa:RST215
+
+import datetime as dt
+
+import pandas as pd
+
+from urban_meal_delivery import db
+from urban_meal_delivery.forecasts import methods
+from urban_meal_delivery.forecasts.models import base
+
+
+class RealtimeARIMAModel(base.ForecastingModelABC):
+    """The ARIMA model applied on a real-time time series."""
+
+    name = 'rtarima'
+
+    def predict(
+        self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int,
+    ) -> pd.DataFrame:
+        """Predict demand for a time step.
+
+        Args:
+            pixel: pixel in which the prediction is made
+            predict_at: time step (i.e., "start_at") to make the prediction for
+            train_horizon: weeks of historic data used to predict `predict_at`
+
+        Returns:
+            actual order counts (i.e., the "actual" column),
+            point forecasts (i.e., the "prediction" column), and
+            confidence intervals (i.e., the four "low/high80/95" columns);
+            contains one row for the `predict_at` time step
+
+        # noqa:DAR401 RuntimeError
+        """
+        # Generate the historic (and real-time) order time series.
+        training_ts, frequency, actuals_ts = self._order_history.make_realtime_ts(
+            pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon,
+        )
+
+        # Decompose the `training_ts` to make predictions for the seasonal
+        # component and the seasonally adjusted observations separately.
+        decomposed_training_ts = methods.decomposition.stl(
+            time_series=training_ts,
+            frequency=frequency,
+            # "Periodic" `ns` parameter => same seasonal component value
+            # for observations of the same lag.
+            ns=999,
+        )
+
+        # Make predictions for the seasonal component by linear extrapolation.
+        seasonal_predictions = methods.extrapolate_season.predict(
+            training_ts=decomposed_training_ts['seasonal'],
+            forecast_interval=actuals_ts.index,
+            frequency=frequency,
+        )
+
+        # Make predictions with the ARIMA model on the seasonally adjusted time series.
+        seasonally_adjusted_predictions = methods.arima.predict(
+            training_ts=(
+                decomposed_training_ts['trend'] + decomposed_training_ts['residual']
+            ),
+            forecast_interval=actuals_ts.index,
+            # Because the seasonality was taken out before,
+            # the `training_ts` has, by definition, a `frequency` of `1`.
+            frequency=1,
+            seasonal_fit=False,
+        )
+
+        # The overall `predictions` are the sum of the separate predictions above.
+        # As the linear extrapolation of the seasonal component has no
+        # confidence interval, we put the one from the ARIMA model around
+        # the extrapolated seasonal component.
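+        # That is, each interval bound is the sum of the extrapolated seasonal
+        # point forecast and the respective ARIMA bound, so the interval width
+        # stems from the ARIMA model alone.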
+        predictions = pd.DataFrame(
+            data={
+                'actual': actuals_ts,
+                'prediction': (
+                    seasonal_predictions['prediction']  # noqa:WPS204
+                    + seasonally_adjusted_predictions['prediction']
+                ),
+                'low80': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['low80']
+                ),
+                'high80': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['high80']
+                ),
+                'low95': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['low95']
+                ),
+                'high95': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['high95']
+                ),
+            },
+            index=actuals_ts.index,
+        )
+
+        # Sanity checks.
+        if len(predictions) != 1:  # pragma: no cover
+            raise RuntimeError('real-time models should predict exactly one time step')
+        if predictions.isnull().any().any():  # pragma: no cover
+            raise RuntimeError('missing predictions in rtarima model')
+        if predict_at not in predictions.index:  # pragma: no cover
+            raise RuntimeError('missing prediction for `predict_at`')
+
+        return predictions
diff --git a/src/urban_meal_delivery/forecasts/models/tactical/vertical.py b/src/urban_meal_delivery/forecasts/models/tactical/vertical.py
new file mode 100644
index 0000000..caf4317
--- /dev/null
+++ b/src/urban_meal_delivery/forecasts/models/tactical/vertical.py
@@ -0,0 +1,119 @@
+"""Vertical forecasting `*Model`s to predict demand for tactical purposes.
+
+Vertical `*Model`s take order counts of all time steps in the training data
+and make a prediction for all time steps on the day to be predicted at once.
+Thus, the training time series have a `frequency` of the number of weekdays,
+`7`, times the number of time steps on a day. For example, with 60-minute time
+steps and operating hours from 11 am to 11 pm (= 12 time steps per day), the
+`frequency` becomes `7 * 12`, which is `84`. Vertical `*Model`s train the
+forecasting `methods` on a seasonally decomposed time series internally.
+"""  # noqa:RST215
+
+import datetime as dt
+
+import pandas as pd
+
+from urban_meal_delivery import db
+from urban_meal_delivery.forecasts import methods
+from urban_meal_delivery.forecasts.models import base
+
+
+class VerticalARIMAModel(base.ForecastingModelABC):
+    """The ARIMA model applied on a vertical time series."""
+
+    name = 'varima'
+
+    def predict(
+        self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int,
+    ) -> pd.DataFrame:
+        """Predict demand for a time step.
+
+        Args:
+            pixel: pixel in which the prediction is made
+            predict_at: time step (i.e., "start_at") to make the prediction for
+            train_horizon: weeks of historic data used to predict `predict_at`
+
+        Returns:
+            actual order counts (i.e., the "actual" column),
+            point forecasts (i.e., the "prediction" column), and
+            confidence intervals (i.e., the four "low/high80/95" columns);
+            contains several rows, including one for the `predict_at` time step
+
+        # noqa:DAR401 RuntimeError
+        """
+        # Generate the historic (and vertical) order time series.
+        training_ts, frequency, actuals_ts = self._order_history.make_vertical_ts(
+            pixel_id=pixel.id,
+            predict_day=predict_at.date(),
+            train_horizon=train_horizon,
+        )
+
+        # Decompose the `training_ts` to make predictions for the seasonal
+        # component and the seasonally adjusted observations separately.
+        decomposed_training_ts = methods.decomposition.stl(
+            time_series=training_ts,
+            frequency=frequency,
+            # "Periodic" `ns` parameter => same seasonal component value
+            # for observations of the same lag.
+            ns=999,
+        )
+
+        # Make predictions for the seasonal component by linear extrapolation.
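+        # (This mirrors the procedure in `RealtimeARIMAModel.predict()`, only
+        # applied to a vertical time series covering whole days.)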
+        seasonal_predictions = methods.extrapolate_season.predict(
+            training_ts=decomposed_training_ts['seasonal'],
+            forecast_interval=actuals_ts.index,
+            frequency=frequency,
+        )
+
+        # Make predictions with the ARIMA model on the seasonally adjusted time series.
+        seasonally_adjusted_predictions = methods.arima.predict(
+            training_ts=(
+                decomposed_training_ts['trend'] + decomposed_training_ts['residual']
+            ),
+            forecast_interval=actuals_ts.index,
+            # Because the seasonality was taken out before,
+            # the `training_ts` has, by definition, a `frequency` of `1`.
+            frequency=1,
+            seasonal_fit=False,
+        )
+
+        # The overall `predictions` are the sum of the separate predictions above.
+        # As the linear extrapolation of the seasonal component has no
+        # confidence interval, we put the one from the ARIMA model around
+        # the extrapolated seasonal component.
+        predictions = pd.DataFrame(
+            data={
+                'actual': actuals_ts,
+                'prediction': (
+                    seasonal_predictions['prediction']  # noqa:WPS204
+                    + seasonally_adjusted_predictions['prediction']
+                ),
+                'low80': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['low80']
+                ),
+                'high80': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['high80']
+                ),
+                'low95': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['low95']
+                ),
+                'high95': (
+                    seasonal_predictions['prediction']
+                    + seasonally_adjusted_predictions['high95']
+                ),
+            },
+            index=actuals_ts.index,
+        )
+
+        # Sanity checks.
+        if len(predictions) <= 1:  # pragma: no cover
+            raise RuntimeError('vertical models should predict several time steps')
+        if predictions.isnull().any().any():  # pragma: no cover
+            raise RuntimeError('missing predictions in varima model')
+        if predict_at not in predictions.index:  # pragma: no cover
+            raise RuntimeError('missing prediction for `predict_at`')
+
+        return predictions
diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py
new file mode 100644
index 0000000..b0b6497
--- /dev/null
+++ b/src/urban_meal_delivery/forecasts/timify.py
@@ -0,0 +1,560 @@
+"""Obtain and work with time series data."""
+
+from __future__ import annotations
+
+import datetime as dt
+from typing import Tuple
+
+import pandas as pd
+import sqlalchemy as sa
+
+from urban_meal_delivery import config
+from urban_meal_delivery import db
+from urban_meal_delivery.forecasts import models
+
+
+class OrderHistory:
+    """Generate time series from the `Order` model in the database.
+
+    The purpose of this class is to abstract away the management of the order
+    data in memory and the slicing of the data into various kinds of time series.
+    """
+
+    def __init__(self, grid: db.Grid, time_step: int) -> None:
+        """Initialize a new `OrderHistory` object.
+
+        Args:
+            grid: pixel grid used to aggregate orders spatially
+            time_step: interval length (in minutes) into which orders are aggregated
+
+        # noqa:DAR401 RuntimeError
+        """
+        self._grid = grid
+        self._time_step = time_step
+
+        # Number of daily time steps must be a whole multiple of `time_step` length.
+        n_daily_time_steps = (
+            60 * (config.SERVICE_END - config.SERVICE_START) / time_step
+        )
+        if n_daily_time_steps != int(n_daily_time_steps):  # pragma: no cover
+            raise RuntimeError('Internal error: configuration has invalid TIME_STEPS')
+        self._n_daily_time_steps = int(n_daily_time_steps)
+
+        # The `_data` are populated by `.aggregate_orders()`.
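+        # They are loaded lazily upon first access of the `.totals` property.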
+ self._data = None + + @property + def time_step(self) -> int: + """The length of one time step.""" + return self._time_step + + @property + def totals(self) -> pd.DataFrame: + """The order totals by `Pixel` and `.time_step`. + + The returned object should not be mutated! + + Returns: + order_totals: a one-column `DataFrame` with a `MultiIndex` of the + "pixel_id"s and "start_at"s (i.e., beginnings of the intervals); + the column with data is "n_orders" + """ + if self._data is None: + self._data = self.aggregate_orders() + + return self._data + + def aggregate_orders(self) -> pd.DataFrame: # pragma: no cover + """Generate and load all order totals from the database.""" + # `data` is probably missing "pixel_id"-"start_at" pairs. + # This happens when there is no demand in the `Pixel` in the given `time_step`. + data = pd.read_sql_query( + sa.text( + f""" -- # noqa:WPS221 + SELECT + pixel_id, + start_at, + COUNT(*) AS n_orders + FROM ( + SELECT + pixel_id, + placed_at_without_seconds - minutes_to_be_cut AS start_at + FROM ( + SELECT + pixels.pixel_id, + DATE_TRUNC('MINUTE', orders.placed_at) + AS placed_at_without_seconds, + (( + EXTRACT(MINUTES FROM orders.placed_at)::INTEGER + % {self._time_step} + )::TEXT || ' MINUTES')::INTERVAL + AS minutes_to_be_cut + FROM ( + SELECT + id, + placed_at, + pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + INNER JOIN ( + SELECT + id AS address_id + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {self._grid.city.id} + ) AS in_city + ON orders.pickup_address_id = in_city.address_id + WHERE + ad_hoc IS TRUE + ) AS + orders + INNER JOIN ( + SELECT + address_id, + pixel_id + FROM + {config.CLEAN_SCHEMA}.addresses_pixels + WHERE + grid_id = {self._grid.id} + AND + city_id = {self._grid.city.id} -- -> sanity check + ) AS pixels + ON orders.pickup_address_id = pixels.address_id + ) AS placed_at_aggregated_into_start_at + ) AS pixel_start_at_combinations + GROUP BY + pixel_id, + start_at + ORDER BY + pixel_id, + start_at; + """, + ), # noqa:WPS355 + con=db.connection, + index_col=['pixel_id', 'start_at'], + ) + + if data.empty: + return data + + # Calculate the first and last "start_at" value ... + start_day = data.index.levels[1].min().date() + start = dt.datetime( + start_day.year, start_day.month, start_day.day, config.SERVICE_START, + ) + end_day = data.index.levels[1].max().date() + end = dt.datetime(end_day.year, end_day.month, end_day.day, config.SERVICE_END) + # ... and all possible `tuple`s of "pixel_id"-"start_at" combinations. + # The "start_at" values must lie within the operating hours. + gen = ( + (pixel_id, start_at) + for pixel_id in sorted(data.index.levels[0]) + for start_at in pd.date_range(start, end, freq=f'{self._time_step}T') + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + # Re-index `data` filling in `0`s where there is no demand. + index = pd.MultiIndex.from_tuples(gen) + index.names = ['pixel_id', 'start_at'] + + return data.reindex(index, fill_value=0) + + def first_order_at(self, pixel_id: int) -> dt.datetime: + """Get the time step with the first order in a pixel. 
+ + Args: + pixel_id: pixel for which to get the first order + + Returns: + minimum "start_at" from when orders take place + + Raises: + LookupError: `pixel_id` not in `grid` + + # noqa:DAR401 RuntimeError + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + first_order = intra_pixel[intra_pixel['n_orders'] > 0].index.min() + + # Sanity check: without an `Order`, the `Pixel` should not exist. + if first_order is pd.NaT: # pragma: no cover + raise RuntimeError('no orders in the pixel') + + # Return a proper `datetime.datetime` object. + return dt.datetime( + first_order.year, + first_order.month, + first_order.day, + first_order.hour, + first_order.minute, + ) + + def last_order_at(self, pixel_id: int) -> dt.datetime: + """Get the time step with the last order in a pixel. + + Args: + pixel_id: pixel for which to get the last order + + Returns: + maximum "start_at" from when orders take place + + Raises: + LookupError: `pixel_id` not in `grid` + + # noqa:DAR401 RuntimeError + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + last_order = intra_pixel[intra_pixel['n_orders'] > 0].index.max() + + # Sanity check: without an `Order`, the `Pixel` should not exist. + if last_order is pd.NaT: # pragma: no cover + raise RuntimeError('no orders in the pixel') + + # Return a proper `datetime.datetime` object. + return dt.datetime( + last_order.year, + last_order.month, + last_order.day, + last_order.hour, + last_order.minute, + ) + + def make_horizontal_ts( # noqa:WPS210 + self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, + ) -> Tuple[pd.Series, int, pd.Series]: + """Slice a horizontal time series out of the `.totals`. + + Create a time series covering `train_horizon` weeks that can be used + for training a forecasting model to predict the demand at `predict_at`. + + For explanation of the terms "horizontal", "vertical", and "real-time" + in the context of time series, see section 3.2 in the following paper: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + + Args: + pixel_id: pixel in which the time series is aggregated + predict_at: time step (i.e., "start_at") for which a prediction is made + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + training time series, frequency, actual order count at `predict_at` + + Raises: + LookupError: `pixel_id` not in `grid` or `predict_at` not in `.totals` + RuntimeError: desired time series slice is not entirely in `.totals` + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + if predict_at >= config.CUTOFF_DAY: # pragma: no cover + raise RuntimeError('Internal error: cannot predict beyond the given data') + + # The first and last training day are just before the `predict_at` day + # and span exactly `train_horizon` weeks covering only the times of the + # day equal to the hour/minute of `predict_at`. 
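+        # Example: for `predict_at` = Monday, 2016-07-25, noon, and a
+        # `train_horizon` of `2`, the slice holds the noon time steps of
+        # 2016-07-11 (a Monday) through 2016-07-24 (a Sunday), i.e.,
+        # `7 * 2 = 14` observations.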
+        first_train_day = predict_at.date() - dt.timedelta(weeks=train_horizon)
+        first_start_at = dt.datetime(
+            first_train_day.year,
+            first_train_day.month,
+            first_train_day.day,
+            predict_at.hour,
+            predict_at.minute,
+        )
+        last_train_day = predict_at.date() - dt.timedelta(days=1)
+        last_start_at = dt.datetime(
+            last_train_day.year,
+            last_train_day.month,
+            last_train_day.day,
+            predict_at.hour,
+            predict_at.minute,
+        )
+
+        # The frequency is the number of weekdays.
+        frequency = 7
+
+        # Take only the counts at the `predict_at` time.
+        training_ts = intra_pixel.loc[
+            first_start_at : last_start_at : self._n_daily_time_steps,  # type:ignore
+            'n_orders',
+        ]
+        if len(training_ts) != frequency * train_horizon:
+            raise RuntimeError('Not enough historic data for `predict_at`')
+
+        actuals_ts = intra_pixel.loc[[predict_at], 'n_orders']
+        if not len(actuals_ts):  # pragma: no cover
+            raise LookupError('`predict_at` is not in the order history')
+
+        return training_ts, frequency, actuals_ts
+
+    def make_vertical_ts(  # noqa:WPS210
+        self, pixel_id: int, predict_day: dt.date, train_horizon: int,
+    ) -> Tuple[pd.Series, int, pd.Series]:
+        """Slice a vertical time series out of the `.totals`.
+
+        Create a time series covering `train_horizon` weeks that can be used
+        for training a forecasting model to predict the demand on the `predict_day`.
+
+        For explanation of the terms "horizontal", "vertical", and "real-time"
+        in the context of time series, see section 3.2 in the following paper:
+            https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf
+
+        Args:
+            pixel_id: pixel in which the time series is aggregated
+            predict_day: day for which predictions are made
+            train_horizon: weeks of historic data used to predict `predict_day`
+
+        Returns:
+            training time series, frequency, actual order counts on `predict_day`
+
+        Raises:
+            LookupError: `pixel_id` not in `grid` or `predict_day` not in `.totals`
+            RuntimeError: desired time series slice is not entirely in `.totals`
+        """
+        try:
+            intra_pixel = self.totals.loc[pixel_id]
+        except KeyError:
+            raise LookupError('The `pixel_id` is not in the `grid`') from None
+
+        if predict_day >= config.CUTOFF_DAY.date():  # pragma: no cover
+            raise RuntimeError('Internal error: cannot predict beyond the given data')
+
+        # The first and last training day are just before the `predict_day`
+        # and span exactly `train_horizon` weeks covering all times of the day.
+        first_train_day = predict_day - dt.timedelta(weeks=train_horizon)
+        first_start_at = dt.datetime(
+            first_train_day.year,
+            first_train_day.month,
+            first_train_day.day,
+            config.SERVICE_START,
+            0,
+        )
+        last_train_day = predict_day - dt.timedelta(days=1)
+        last_start_at = dt.datetime(
+            last_train_day.year,
+            last_train_day.month,
+            last_train_day.day,
+            config.SERVICE_END,  # subtract one `time_step` below
+            0,
+        ) - dt.timedelta(minutes=self._time_step)
+
+        # The frequency is the number of weekdays times the number of daily time steps.
+        frequency = 7 * self._n_daily_time_steps
+
+        # Take all the counts between `first_train_day` and `last_train_day`.
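+        # With 60-minute time steps, that is `7 * 12 = 84` observations per
+        # training week.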
+ training_ts = intra_pixel.loc[ + first_start_at:last_start_at, # type:ignore + 'n_orders', + ] + if len(training_ts) != frequency * train_horizon: + raise RuntimeError('Not enough historic data for `predict_day`') + + first_prediction_at = dt.datetime( + predict_day.year, + predict_day.month, + predict_day.day, + config.SERVICE_START, + 0, + ) + last_prediction_at = dt.datetime( + predict_day.year, + predict_day.month, + predict_day.day, + config.SERVICE_END, # subtract one `time_step` below + 0, + ) - dt.timedelta(minutes=self._time_step) + + actuals_ts = intra_pixel.loc[ + first_prediction_at:last_prediction_at, # type:ignore + 'n_orders', + ] + if not len(actuals_ts): # pragma: no cover + raise LookupError('`predict_day` is not in the order history') + + return training_ts, frequency, actuals_ts + + def make_realtime_ts( # noqa:WPS210 + self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, + ) -> Tuple[pd.Series, int, pd.Series]: + """Slice a vertical real-time time series out of the `.totals`. + + Create a time series covering `train_horizon` weeks that can be used + for training a forecasting model to predict the demand at `predict_at`. + + For explanation of the terms "horizontal", "vertical", and "real-time" + in the context of time series, see section 3.2 in the following paper: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + + Args: + pixel_id: pixel in which the time series is aggregated + predict_at: time step (i.e., "start_at") for which a prediction is made + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + training time series, frequency, actual order count at `predict_at` + + Raises: + LookupError: `pixel_id` not in `grid` or `predict_at` not in `.totals` + RuntimeError: desired time series slice is not entirely in `.totals` + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + if predict_at >= config.CUTOFF_DAY: # pragma: no cover + raise RuntimeError('Internal error: cannot predict beyond the given data') + + # The first and last training day are just before the `predict_at` day + # and span exactly `train_horizon` weeks covering all times of the day, + # including times on the `predict_at` day that are earlier than `predict_at`. + first_train_day = predict_at.date() - dt.timedelta(weeks=train_horizon) + first_start_at = dt.datetime( + first_train_day.year, + first_train_day.month, + first_train_day.day, + config.SERVICE_START, + 0, + ) + # Predicting the first time step on the `predict_at` day is a corner case. + # Then, the previous day is indeed the `last_train_day`. Predicting any + # other time step implies that the `predict_at` day is the `last_train_day`. + # `last_train_time` is the last "start_at" before the one being predicted. + if predict_at.hour == config.SERVICE_START: + last_train_day = predict_at.date() - dt.timedelta(days=1) + last_train_time = dt.time(config.SERVICE_END, 0) + else: + last_train_day = predict_at.date() + last_train_time = predict_at.time() + last_start_at = dt.datetime( + last_train_day.year, + last_train_day.month, + last_train_day.day, + last_train_time.hour, + last_train_time.minute, + ) - dt.timedelta(minutes=self._time_step) + + # The frequency is the number of weekdays times the number of daily time steps. 
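+        # E.g., `84` for 60-minute and `168` for 30-minute time steps.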
+        frequency = 7 * self._n_daily_time_steps
+
+        # Take all the counts between `first_train_day` and `last_train_day`,
+        # including the ones on the `predict_at` day prior to `predict_at`.
+        training_ts = intra_pixel.loc[
+            first_start_at:last_start_at,  # type:ignore
+            'n_orders',
+        ]
+        n_time_steps_on_predict_day = (
+            (
+                predict_at
+                - dt.datetime(
+                    predict_at.year,
+                    predict_at.month,
+                    predict_at.day,
+                    config.SERVICE_START,
+                    0,
+                )
+            ).seconds
+            // 60  # -> minutes
+            // self._time_step
+        )
+        if len(training_ts) != frequency * train_horizon + n_time_steps_on_predict_day:
+            raise RuntimeError('Not enough historic data for `predict_at`')
+
+        actuals_ts = intra_pixel.loc[[predict_at], 'n_orders']
+        if not len(actuals_ts):  # pragma: no cover
+            raise LookupError('`predict_at` is not in the order history')
+
+        return training_ts, frequency, actuals_ts
+
+    def avg_daily_demand(
+        self, pixel_id: int, predict_day: dt.date, train_horizon: int,
+    ) -> float:
+        """Calculate the average daily demand (ADD) for a `Pixel`.
+
+        The ADD is defined as the average number of daily `Order`s in a
+        `Pixel` within the training horizon preceding the `predict_day`.
+
+        The ADD is primarily used for the rule-based heuristic to determine
+        the best forecasting model for a `Pixel` on the `predict_day`.
+
+        Implementation note: To calculate the ADD, the order counts are
+        generated as a vertical time series. This is necessary as we need to
+        include all time steps of the days before the `predict_day` and
+        no time step of the `predict_day` itself.
+
+        Args:
+            pixel_id: pixel for which the ADD is calculated
+            predict_day: the day after the `train_horizon` over which the ADD
+                is calculated
+            train_horizon: time horizon over which the ADD is calculated
+
+        Returns:
+            average number of orders per day
+        """
+        training_ts, _, _ = self.make_vertical_ts(  # noqa:WPS434
+            pixel_id=pixel_id, predict_day=predict_day, train_horizon=train_horizon,
+        )
+
+        first_day = training_ts.index.min().date()
+        last_day = training_ts.index.max().date()
+        # `+1` as both `first_day` and `last_day` are included.
+        n_days = (last_day - first_day).days + 1
+
+        return round(training_ts.sum() / n_days, 1)
+
+    def choose_tactical_model(
+        self, pixel_id: int, predict_day: dt.date, train_horizon: int,
+    ) -> models.ForecastingModelABC:
+        """Choose the most promising forecasting `*Model` for tactical purposes.
+
+        The rules are deduced from "Table 1: Top-3 models by ..." in the article
+        "Real-time demand forecasting for an urban delivery platform", the first
+        research paper published for this `urban-meal-delivery` project.
+
+        According to the research findings in that article, the best model is a
+        function of the average daily demand (ADD) and the length of the
+        training horizon.
+
+        For the paper check:
+            https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf
+            https://www.sciencedirect.com/science/article/pii/S1366554520307936
+
+        Args:
+            pixel_id: pixel for which a forecasting `*Model` is chosen
+            predict_day: day for which demand is to be predicted with the `*Model`
+            train_horizon: time horizon available for training the `*Model`
+
+        Returns:
+            most promising forecasting `*Model`
+
+        # noqa:DAR401 RuntimeError
+        """  # noqa:RST215
+        add = self.avg_daily_demand(
+            pixel_id=pixel_id, predict_day=predict_day, train_horizon=train_horizon,
+        )
+
+        # For now, we only make forecasts with 8 weeks
+        # as the training horizon (note:4f79e8fa).
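+        # The thresholds below follow "Table 1" in the paper:
+        # ADD >= 25 => "high demand"; 10 <= ADD < 25 => "medium demand";
+        # 2.5 <= ADD < 10 => "low demand"; ADD < 2.5 => "no demand".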
+        if train_horizon == 8:
+            if add >= 25:  # = "high demand"
+                return models.HorizontalETSModel(order_history=self)
+            elif add >= 10:  # = "medium demand"
+                return models.HorizontalETSModel(order_history=self)
+            elif add >= 2.5:  # = "low demand"
+                return models.HorizontalSMAModel(order_history=self)
+
+            # = "no demand"
+            return models.TrivialModel(order_history=self)
+
+        raise RuntimeError(
+            'no rule for the given average daily demand and training horizon',
+        )
diff --git a/src/urban_meal_delivery/init_r.py b/src/urban_meal_delivery/init_r.py
new file mode 100644
index 0000000..189a0dc
--- /dev/null
+++ b/src/urban_meal_delivery/init_r.py
@@ -0,0 +1,28 @@
+"""Initialize the R dependencies.
+
+The purpose of this module is to import all the R packages that are installed
+into a sub-folder (see `config.R_LIBS_PATH`) in the project's root directory.
+
+The Jupyter notebook "research/r_dependencies.ipynb" can be used to install all
+R dependencies on an Ubuntu/Debian based system.
+"""
+
+from rpy2.rinterface_lib import callbacks as rcallbacks
+from rpy2.robjects import packages as rpackages
+
+
+# Suppress R's messages to stdout and stderr.
+# Source: https://stackoverflow.com/a/63220287
+rcallbacks.consolewrite_print = lambda msg: None  # pragma: no cover
+rcallbacks.consolewrite_warnerror = lambda msg: None  # pragma: no cover
+
+
+# For clarity and convenience, re-raise the error that results from missing R
+# dependencies with clearer instructions as to how to deal with it.
+try:  # noqa:WPS229
+    rpackages.importr('forecast')
+    rpackages.importr('zoo')
+
+except rpackages.PackageNotInstalledError:  # pragma: no cover
+    msg = 'See the "research/r_dependencies.ipynb" notebook!'
+    raise rpackages.PackageNotInstalledError(msg) from None
diff --git a/tests/config.py b/tests/config.py
new file mode 100644
index 0000000..2af0d60
--- /dev/null
+++ b/tests/config.py
@@ -0,0 +1,34 @@
+"""Globals used when testing."""
+
+import datetime as dt
+
+from urban_meal_delivery import config
+
+
+# The day on which most test cases take place.
+YEAR, MONTH, DAY = 2016, 7, 1
+
+# The hour when most test cases take place.
+NOON = 12
+
+# `START` and `END` constitute a 57-day time span, 8 full weeks plus 1 day.
+# That implies a maximum `train_horizon` of `8` as that needs full 7-day weeks.
+START = dt.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0)
+_end = START + dt.timedelta(days=56)  # `56` as `START` is not included
+END = dt.datetime(_end.year, _end.month, _end.day, config.SERVICE_END, 0)
+
+# Default time steps (in minutes), for example, for `OrderHistory` objects.
+LONG_TIME_STEP = 60
+SHORT_TIME_STEP = 30
+TIME_STEPS = (SHORT_TIME_STEP, LONG_TIME_STEP)
+# The `frequency` of vertical time series is the number of days in a week, 7,
+# times the number of time steps per day. With 12 operating hours (11 am - 11 pm)
+# the `frequency`s are 84 and 168 for the `LONG/SHORT_TIME_STEP`s.
+VERTICAL_FREQUENCY_LONG = 7 * 12
+VERTICAL_FREQUENCY_SHORT = 7 * 24
+
+# Default training horizons, for example, for
+# `OrderHistory.make_horizontal_ts()`.
+LONG_TRAIN_HORIZON = 8
+SHORT_TRAIN_HORIZON = 2
+TRAIN_HORIZONS = (SHORT_TRAIN_HORIZON, LONG_TRAIN_HORIZON)
diff --git a/tests/conftest.py b/tests/conftest.py
index 1b91688..b7bafd5 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,12 +1,116 @@
-"""Utils for testing the entire package."""
+"""Fixtures for testing the entire package.
+
+The ORM-related fixtures are placed here too as some integration tests
+in the CLI layer need access to the database.
+"""
 import os
+import pytest
+import sqlalchemy as sa
+from alembic import command as migrations_cmd
+from alembic import config as migrations_config
+from sqlalchemy import orm
+
+from tests.db import fake_data
 from urban_meal_delivery import config
+from urban_meal_delivery import db
+
+# The TESTING environment variable is set
+# in setup.cfg in pytest's config section.
 if not os.getenv('TESTING'):
     raise RuntimeError('Tests must be executed with TESTING set in the environment')
 if not config.TESTING:
     raise RuntimeError('The testing configuration was not loaded')
+
+
+@pytest.fixture(scope='session', params=['all_at_once', 'sequentially'])
+def db_connection(request):
+    """Create all tables given the ORM models.
+
+    The tables are put into a distinct PostgreSQL schema
+    that is removed after all tests are over.
+
+    The database connection used to do that is yielded.
+
+    There are two modes for this fixture:
+
+    - "all_at_once": build up the tables all at once with MetaData.create_all()
+    - "sequentially": build up the tables sequentially with `alembic upgrade head`
+
+    This ensures that Alembic's migration files are consistent.
+    """
+    # We need a fresh database connection for each of the two `params`.
+    # Otherwise, the first test of the parameter that runs second would fail.
+    engine = sa.create_engine(config.DATABASE_URI)
+    connection = engine.connect()
+
+    # Monkey patch the package's global `engine` and `connection` objects,
+    # in case they are used somewhere in the code base.
+    db.engine = engine
+    db.connection = connection
+
+    if request.param == 'all_at_once':
+        connection.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};')
+        db.Base.metadata.create_all(connection)
+    else:
+        cfg = migrations_config.Config('alembic.ini')
+        migrations_cmd.upgrade(cfg, 'head')
+
+    try:
+        yield connection
+
+    finally:
+        connection.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;')
+
+        if request.param == 'sequentially':
+            tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}'
+            connection.execute(
+                f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};',
+            )
+
+        connection.close()
+
+
+@pytest.fixture
+def db_session(db_connection):
+    """A SQLAlchemy session that rolls back everything after a test case."""
+    # Begin the outermost transaction
+    # that is rolled back at the end of each test case.
+    transaction = db_connection.begin()
+
+    # Create a session bound to the same connection as the `transaction`.
+    # Using any other session would not result in the rollback.
+    session = orm.sessionmaker()(bind=db_connection)
+
+    # Monkey patch the package's global `session` object,
+    # which is used heavily in the code base.
+    db.session = session
+
+    try:
+        yield session
+
+    finally:
+        session.close()
+        transaction.rollback()
+
+
+# Import the fixtures from the `fake_data` sub-package.
+
+make_address = fake_data.make_address
+make_courier = fake_data.make_courier
+make_customer = fake_data.make_customer
+make_order = fake_data.make_order
+make_restaurant = fake_data.make_restaurant
+
+address = fake_data.address
+city = fake_data.city
+city_data = fake_data.city_data
+courier = fake_data.courier
+customer = fake_data.customer
+order = fake_data.order
+restaurant = fake_data.restaurant
+grid = fake_data.grid
+pixel = fake_data.pixel
diff --git a/tests/console/__init__.py b/tests/console/__init__.py
new file mode 100644
index 0000000..49b8d86
--- /dev/null
+++ b/tests/console/__init__.py
@@ -0,0 +1,5 @@
+"""Test the CLI scripts in the urban-meal-delivery package.
+ +Some tests require a database. Therefore, the corresponding code is excluded +from coverage reporting with "pragma: no cover" (grep:b1f68d24). +""" diff --git a/tests/console/conftest.py b/tests/console/conftest.py new file mode 100644 index 0000000..d6c2e59 --- /dev/null +++ b/tests/console/conftest.py @@ -0,0 +1,10 @@ +"""Fixture for testing the CLI scripts.""" + +import pytest +from click import testing as click_testing + + +@pytest.fixture +def cli() -> click_testing.CliRunner: + """Initialize Click's CLI Test Runner.""" + return click_testing.CliRunner() diff --git a/tests/console/test_gridify.py b/tests/console/test_gridify.py new file mode 100644 index 0000000..515d153 --- /dev/null +++ b/tests/console/test_gridify.py @@ -0,0 +1,48 @@ +"""Integration test for the `urban_meal_delivery.console.gridify` module.""" + +import pytest + +import urban_meal_delivery +from urban_meal_delivery import db +from urban_meal_delivery.console import gridify + + +@pytest.mark.db +def test_two_pixels_with_two_addresses( # noqa:WPS211 + cli, db_session, monkeypatch, city, make_address, make_restaurant, make_order, +): + """Two `Address` objects in distinct `Pixel` objects. + + This is roughly the same test case as + `tests.db.test_grids.test_two_pixels_with_two_addresses`. + The difference is that the result is written to the database. + """ + # Create two `Address` objects in distinct `Pixel`s. + # One `Address` in the lower-left `Pixel`, ... + address1 = make_address(latitude=48.8357377, longitude=2.2517412) + # ... and another one in the upper-right one. + address2 = make_address(latitude=48.8898312, longitude=2.4357622) + + # Locate a `Restaurant` at the two `Address` objects and + # place one `Order` for each of them so that the `Address` + # objects are used as `Order.pickup_address`s. + restaurant1 = make_restaurant(address=address1) + restaurant2 = make_restaurant(address=address2) + order1 = make_order(restaurant=restaurant1) + order2 = make_order(restaurant=restaurant2) + + db_session.add(order1) + db_session.add(order2) + db_session.commit() + + side_length = max(city.total_x // 2, city.total_y // 2) + 1 + + # Hack the configuration regarding the grids to be created. + monkeypatch.setattr(urban_meal_delivery.config, 'GRID_SIDE_LENGTHS', [side_length]) + + result = cli.invoke(gridify.gridify) + + assert result.exit_code == 0 + + assert db_session.query(db.Grid).count() == 1 + assert db_session.query(db.Pixel).count() == 2 diff --git a/tests/test_console.py b/tests/console/test_main.py similarity index 61% rename from tests/test_console.py rename to tests/console/test_main.py index 00c721f..8832239 100644 --- a/tests/test_console.py +++ b/tests/console/test_main.py @@ -1,34 +1,31 @@ -"""Test the package's `umd` command-line client.""" +"""Test the package's top-level `umd` CLI command.""" import click import pytest -from click import testing as click_testing -from urban_meal_delivery import console +from urban_meal_delivery.console import main class TestShowVersion: - """Test console.show_version(). + """Test `console.main.show_version()`. The function is used as a callback to a click command option. - show_version() prints the name and version of the installed package to + `show_version()` prints the name and version of the installed package to stdout. The output looks like this: "{pkg_name}, version {version}". Development (= non-final) versions are indicated by appending a " (development)" to the output. 
""" - # pylint:disable=no-self-use - @pytest.fixture def ctx(self) -> click.Context: - """Context around the console.main Command.""" - return click.Context(console.main) + """Context around the `main.entry_point` Command.""" + return click.Context(main.entry_point) def test_no_version(self, capsys, ctx): - """The the early exit branch without any output.""" - console.show_version(ctx, _param='discarded', value=False) + """Test the early exit branch without any output.""" + main.show_version(ctx, _param='discarded', value=False) captured = capsys.readouterr() @@ -37,10 +34,10 @@ class TestShowVersion: def test_final_version(self, capsys, ctx, monkeypatch): """For final versions, NO "development" warning is emitted.""" version = '1.2.3' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) with pytest.raises(click.exceptions.Exit): - console.show_version(ctx, _param='discarded', value=True) + main.show_version(ctx, _param='discarded', value=True) captured = capsys.readouterr() @@ -49,37 +46,29 @@ class TestShowVersion: def test_develop_version(self, capsys, ctx, monkeypatch): """For develop versions, a warning thereof is emitted.""" version = '1.2.3.dev0' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) with pytest.raises(click.exceptions.Exit): - console.show_version(ctx, _param='discarded', value=True) + main.show_version(ctx, _param='discarded', value=True) captured = capsys.readouterr() assert captured.out.strip().endswith(f', version {version} (development)') -class TestCLI: - """Test the `umd` CLI utility. +class TestCLIWithoutCommand: + """Test the `umd` CLI utility, invoked without any specific command. The test cases are integration tests. Therefore, they are not considered for coverage reporting. """ - # pylint:disable=no-self-use - - @pytest.fixture - def cli(self) -> click_testing.CliRunner: - """Initialize Click's CLI Test Runner.""" - return click_testing.CliRunner() - @pytest.mark.no_cover def test_no_options(self, cli): """Exit with 0 status code and no output if run without options.""" - result = cli.invoke(console.main) + result = cli.invoke(main.entry_point) assert result.exit_code == 0 - assert result.output == '' # The following test cases validate the --version / -V option. 
@@ -90,9 +79,9 @@ class TestCLI: def test_final_version(self, cli, monkeypatch, option): """For final versions, NO "development" warning is emitted.""" version = '1.2.3' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) - result = cli.invoke(console.main, option) + result = cli.invoke(main.entry_point, option) assert result.exit_code == 0 assert result.output.strip().endswith(f', version {version}') @@ -102,9 +91,9 @@ class TestCLI: def test_develop_version(self, cli, monkeypatch, option): """For develop versions, a warning thereof is emitted.""" version = '1.2.3.dev0' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) - result = cli.invoke(console.main, option) + result = cli.invoke(main.entry_point, option) assert result.exit_code == 0 assert result.output.strip().endswith(f', version {version} (development)') diff --git a/tests/db/conftest.py b/tests/db/conftest.py deleted file mode 100644 index 2508161..0000000 --- a/tests/db/conftest.py +++ /dev/null @@ -1,263 +0,0 @@ -"""Utils for testing the ORM layer.""" - -import datetime - -import pytest -from alembic import command as migrations_cmd -from alembic import config as migrations_config - -from urban_meal_delivery import config -from urban_meal_delivery import db - - -@pytest.fixture(scope='session', params=['all_at_once', 'sequentially']) -def db_engine(request): - """Create all tables given the ORM models. - - The tables are put into a distinct PostgreSQL schema - that is removed after all tests are over. - - The engine used to do that is yielded. - - There are two modes for this fixture: - - - "all_at_once": build up the tables all at once with MetaData.create_all() - - "sequentially": build up the tables sequentially with `alembic upgrade head` - - This ensures that Alembic's migration files are consistent. - """ - engine = db.make_engine() - - if request.param == 'all_at_once': - engine.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};') - db.Base.metadata.create_all(engine) - else: - cfg = migrations_config.Config('alembic.ini') - migrations_cmd.upgrade(cfg, 'head') - - try: - yield engine - - finally: - engine.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;') - - if request.param == 'sequentially': - tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}' - engine.execute( - f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};', - ) - - -@pytest.fixture -def db_session(db_engine): - """A SQLAlchemy session that rolls back everything after a test case.""" - connection = db_engine.connect() - # Begin the outer most transaction - # that is rolled back at the end of the test. - transaction = connection.begin() - # Create a session bound on the same connection as the transaction. - # Using any other session would not work. - Session = db.make_session_factory() # noqa:N806 - session = Session(bind=connection) - - try: - yield session - - finally: - session.close() - transaction.rollback() - connection.close() - - -@pytest.fixture -def address_data(): - """The data for an Address object in Paris.""" - return { - 'id': 1, - '_primary_id': 1, # => "itself" - 'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5), - 'place_id': 'ChIJxSr71vZt5kcRoFHY4caCCxw', - 'latitude': 48.85313, - 'longitude': 2.37461, - '_city_id': 1, - 'city_name': 'St. 
German', - 'zip_code': '75011', - 'street': '42 Rue De Charonne', - 'floor': None, - } - - -@pytest.fixture -def address(address_data, city): - """An Address object.""" - address = db.Address(**address_data) - address.city = city - return address - - -@pytest.fixture -def address2_data(): - """The data for an Address object in Paris.""" - return { - 'id': 2, - '_primary_id': 2, # => "itself" - 'created_at': datetime.datetime(2020, 1, 2, 4, 5, 6), - 'place_id': 'ChIJs-9a6QZy5kcRY8Wwk9Ywzl8', - 'latitude': 48.852196, - 'longitude': 2.373937, - '_city_id': 1, - 'city_name': 'Paris', - 'zip_code': '75011', - 'street': 'Rue De Charonne 3', - 'floor': 2, - } - - -@pytest.fixture -def address2(address2_data, city): - """An Address object.""" - address2 = db.Address(**address2_data) - address2.city = city - return address2 - - -@pytest.fixture -def city_data(): - """The data for the City object modeling Paris.""" - return { - 'id': 1, - 'name': 'Paris', - 'kml': " ...", - '_center_latitude': 48.856614, - '_center_longitude': 2.3522219, - '_northeast_latitude': 48.9021449, - '_northeast_longitude': 2.4699208, - '_southwest_latitude': 48.815573, - '_southwest_longitude': 2.225193, - 'initial_zoom': 12, - } - - -@pytest.fixture -def city(city_data): - """A City object.""" - return db.City(**city_data) - - -@pytest.fixture -def courier_data(): - """The data for a Courier object.""" - return { - 'id': 1, - 'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5), - 'vehicle': 'bicycle', - 'historic_speed': 7.89, - 'capacity': 100, - 'pay_per_hour': 750, - 'pay_per_order': 200, - } - - -@pytest.fixture -def courier(courier_data): - """A Courier object.""" - return db.Courier(**courier_data) - - -@pytest.fixture -def customer_data(): - """The data for the Customer object.""" - return {'id': 1} - - -@pytest.fixture -def customer(customer_data): - """A Customer object.""" - return db.Customer(**customer_data) - - -@pytest.fixture -def order_data(): - """The data for an ad-hoc Order object.""" - return { - 'id': 1, - '_delivery_id': 1, - '_customer_id': 1, - 'placed_at': datetime.datetime(2020, 1, 2, 11, 55, 11), - 'ad_hoc': True, - 'scheduled_delivery_at': None, - 'scheduled_delivery_at_corrected': None, - 'first_estimated_delivery_at': datetime.datetime(2020, 1, 2, 12, 35, 0), - 'cancelled': False, - 'cancelled_at': None, - 'cancelled_at_corrected': None, - 'sub_total': 2000, - 'delivery_fee': 250, - 'total': 2250, - '_restaurant_id': 1, - 'restaurant_notified_at': datetime.datetime(2020, 1, 2, 12, 5, 5), - 'restaurant_notified_at_corrected': False, - 'restaurant_confirmed_at': datetime.datetime(2020, 1, 2, 12, 5, 25), - 'restaurant_confirmed_at_corrected': False, - 'estimated_prep_duration': 900, - 'estimated_prep_duration_corrected': False, - 'estimated_prep_buffer': 480, - '_courier_id': 1, - 'dispatch_at': datetime.datetime(2020, 1, 2, 12, 5, 1), - 'dispatch_at_corrected': False, - 'courier_notified_at': datetime.datetime(2020, 1, 2, 12, 6, 2), - 'courier_notified_at_corrected': False, - 'courier_accepted_at': datetime.datetime(2020, 1, 2, 12, 6, 17), - 'courier_accepted_at_corrected': False, - 'utilization': 50, - '_pickup_address_id': 1, - 'reached_pickup_at': datetime.datetime(2020, 1, 2, 12, 16, 21), - 'pickup_at': datetime.datetime(2020, 1, 2, 12, 18, 1), - 'pickup_at_corrected': False, - 'pickup_not_confirmed': False, - 'left_pickup_at': datetime.datetime(2020, 1, 2, 12, 19, 45), - 'left_pickup_at_corrected': False, - '_delivery_address_id': 2, - 'reached_delivery_at': datetime.datetime(2020, 1, 2, 
12, 27, 33), - 'delivery_at': datetime.datetime(2020, 1, 2, 12, 29, 55), - 'delivery_at_corrected': False, - 'delivery_not_confirmed': False, - '_courier_waited_at_delivery': False, - 'logged_delivery_distance': 500, - 'logged_avg_speed': 7.89, - 'logged_avg_speed_distance': 490, - } - - -@pytest.fixture -def order( # noqa:WPS211 pylint:disable=too-many-arguments - order_data, customer, restaurant, courier, address, address2, -): - """An Order object.""" - order = db.Order(**order_data) - order.customer = customer - order.restaurant = restaurant - order.courier = courier - order.pickup_address = address - order.delivery_address = address2 - return order - - -@pytest.fixture -def restaurant_data(): - """The data for the Restaurant object.""" - return { - 'id': 1, - 'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5), - 'name': 'Vevay', - '_address_id': 1, - 'estimated_prep_duration': 1000, - } - - -@pytest.fixture -def restaurant(restaurant_data, address): - """A Restaurant object.""" - restaurant = db.Restaurant(**restaurant_data) - restaurant.address = address - return restaurant diff --git a/tests/db/fake_data/__init__.py b/tests/db/fake_data/__init__.py new file mode 100644 index 0000000..80a7be3 --- /dev/null +++ b/tests/db/fake_data/__init__.py @@ -0,0 +1,16 @@ +"""Fixtures for testing the ORM layer with fake data.""" + +from tests.db.fake_data.fixture_makers import make_address +from tests.db.fake_data.fixture_makers import make_courier +from tests.db.fake_data.fixture_makers import make_customer +from tests.db.fake_data.fixture_makers import make_order +from tests.db.fake_data.fixture_makers import make_restaurant +from tests.db.fake_data.static_fixtures import address +from tests.db.fake_data.static_fixtures import city +from tests.db.fake_data.static_fixtures import city_data +from tests.db.fake_data.static_fixtures import courier +from tests.db.fake_data.static_fixtures import customer +from tests.db.fake_data.static_fixtures import grid +from tests.db.fake_data.static_fixtures import order +from tests.db.fake_data.static_fixtures import pixel +from tests.db.fake_data.static_fixtures import restaurant diff --git a/tests/db/fake_data/factories.py b/tests/db/fake_data/factories.py new file mode 100644 index 0000000..46f2ff3 --- /dev/null +++ b/tests/db/fake_data/factories.py @@ -0,0 +1,378 @@ +"""Factories to create instances for the SQLAlchemy models.""" + +import datetime as dt +import random +import string + +import factory +import faker +from factory import alchemy +from geopy import distance + +from tests import config as test_config +from urban_meal_delivery import db + + +def _random_timespan( # noqa:WPS211 + *, + min_hours=0, + min_minutes=0, + min_seconds=0, + max_hours=0, + max_minutes=0, + max_seconds=0, +): + """A randomized `timedelta` object between the specified arguments.""" + total_min_seconds = min_hours * 3600 + min_minutes * 60 + min_seconds + total_max_seconds = max_hours * 3600 + max_minutes * 60 + max_seconds + return dt.timedelta(seconds=random.randint(total_min_seconds, total_max_seconds)) + + +def _early_in_the_morning(): + """A randomized `datetime` object early in the morning.""" + early = dt.datetime(test_config.YEAR, test_config.MONTH, test_config.DAY, 3, 0) + return early + _random_timespan(max_hours=2) + + +class AddressFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Address` model.""" + + class Meta: + model = db.Address + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + 
created_at = factory.LazyFunction(_early_in_the_morning) + + # When testing, all addresses are considered primary ones. + # As non-primary addresses have no different behavior and + # the property is only kept from the original dataset for + # completeness' sake, that is OK to do. + primary_id = factory.LazyAttribute(lambda obj: obj.id) + + # Mimic a Google Maps Place ID with just random characters. + place_id = factory.LazyFunction( + lambda: ''.join(random.choice(string.ascii_lowercase) for _ in range(20)), + ) + + # Place the addresses somewhere in downtown Paris. + latitude = factory.Faker('coordinate', center=48.855, radius=0.01) + longitude = factory.Faker('coordinate', center=2.34, radius=0.03) + # city -> set by the `make_address` fixture as there is only one `city` + city_name = 'Paris' + zip_code = factory.LazyFunction(lambda: random.randint(75001, 75020)) + street = factory.Faker('street_address', locale='fr_FR') + + +class CourierFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Courier` model.""" + + class Meta: + model = db.Courier + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + created_at = factory.LazyFunction(_early_in_the_morning) + vehicle = 'bicycle' + historic_speed = 7.89 + capacity = 100 + pay_per_hour = 750 + pay_per_order = 200 + + +class CustomerFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Customer` model.""" + + class Meta: + model = db.Customer + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + + +_restaurant_names = faker.Faker() + + +class RestaurantFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Restaurant` model.""" + + class Meta: + model = db.Restaurant + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + created_at = factory.LazyFunction(_early_in_the_morning) + name = factory.LazyFunction( + lambda: f"{_restaurant_names.first_name()}'s Restaurant", + ) + # address -> set by the `make_restaurant` fixture as there is only one `city` + estimated_prep_duration = 1000 + + +class AdHocOrderFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Order` model. + + This factory creates ad-hoc `Order`s, while the `ScheduledOrderFactory` + below creates pre-orders. They are split into two classes mainly + because the logic regarding how the timestamps are calculated from + each other differs. + + See the docstring in the contained `Params` class for + flags to adapt how the `Order` is created. + """ + + class Meta: + model = db.Order + sqlalchemy_get_or_create = ('id',) + + class Params: + """Define flags that overwrite some attributes. + + The `factory.Trait` objects in this class are executed after all + the normal attributes in the `OrderFactory` classes have been evaluated. + + Flags: + cancel_before_pickup + cancel_after_pickup + """ + + # Timestamps after `cancelled_at` are discarded + # by the `post_generation` hook at the end of the `OrderFactory`.
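+ # (A `factory.Trait` only takes effect when its flag is passed in, e.g. + # `AdHocOrderFactory.build(cancel_before_pickup=True)`; the attributes it + # declares then override the defaults defined further below.)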
+ cancel_ = factory.Trait( # noqa:WPS120 -> leading underscore does not work + cancelled=True, cancelled_at_corrected=False, + ) + cancel_before_pickup = factory.Trait( + cancel_=True, + cancelled_at=factory.LazyAttribute( + lambda obj: obj.dispatch_at + + _random_timespan( + max_seconds=(obj.pickup_at - obj.dispatch_at).total_seconds(), + ), + ), + ) + cancel_after_pickup = factory.Trait( + cancel_=True, + cancelled_at=factory.LazyAttribute( + lambda obj: obj.pickup_at + + _random_timespan( + max_seconds=(obj.delivery_at - obj.pickup_at).total_seconds(), + ), + ), + ) + + # Generic attributes + id = factory.Sequence(lambda num: num) # noqa:WPS125 + # customer -> set by the `make_order` fixture for better control + + # Attributes regarding the specialization of an `Order`: ad-hoc or scheduled. + # Ad-hoc `Order`s are placed between 11.45 and 14.15. + placed_at = factory.LazyFunction( + lambda: dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 11, 45, + ) + + _random_timespan(max_hours=2, max_minutes=30), + ) + ad_hoc = True + scheduled_delivery_at = None + scheduled_delivery_at_corrected = None + # Without statistical info, we assume an ad-hoc `Order` delivered after 45 minutes. + first_estimated_delivery_at = factory.LazyAttribute( + lambda obj: obj.placed_at + dt.timedelta(minutes=45), + ) + + # Attributes regarding the cancellation of an `Order`. + # May be overwritten with the `cancel_before_pickup` or `cancel_after_pickup` flags. + cancelled = False + cancelled_at = None + cancelled_at_corrected = None + + # Price-related attributes -> sample realistic prices + sub_total = factory.LazyFunction(lambda: 100 * random.randint(15, 25)) + delivery_fee = 250 + total = factory.LazyAttribute(lambda obj: obj.sub_total + obj.delivery_fee) + + # Restaurant-related attributes + # restaurant -> set by the `make_order` fixture for better control + restaurant_notified_at = factory.LazyAttribute( + lambda obj: obj.placed_at + _random_timespan(min_seconds=30, max_seconds=90), + ) + restaurant_notified_at_corrected = False + restaurant_confirmed_at = factory.LazyAttribute( + lambda obj: obj.restaurant_notified_at + + _random_timespan(min_seconds=30, max_seconds=150), + ) + restaurant_confirmed_at_corrected = False + # Use the database defaults of the historic data. + estimated_prep_duration = 900 + estimated_prep_duration_corrected = False + estimated_prep_buffer = 480 + + # Dispatch-related columns + # courier -> set by the `make_order` fixture for better control + dispatch_at = factory.LazyAttribute( + lambda obj: obj.placed_at + _random_timespan(min_seconds=600, max_seconds=1080), + ) + dispatch_at_corrected = False + courier_notified_at = factory.LazyAttribute( + lambda obj: obj.dispatch_at + + _random_timespan(min_seconds=100, max_seconds=140), + ) + courier_notified_at_corrected = False + courier_accepted_at = factory.LazyAttribute( + lambda obj: obj.courier_notified_at + + _random_timespan(min_seconds=15, max_seconds=45), + ) + courier_accepted_at_corrected = False + # Sample a realistic utilization. 
+ utilization = factory.LazyFunction(lambda: random.choice([50, 60, 70, 80, 90, 100])) + + # Pickup-related attributes + # pickup_address -> aligned with `restaurant.address` by the `make_order` fixture + reached_pickup_at = factory.LazyAttribute( + lambda obj: obj.courier_accepted_at + + _random_timespan(min_seconds=300, max_seconds=600), + ) + pickup_at = factory.LazyAttribute( + lambda obj: obj.reached_pickup_at + + _random_timespan(min_seconds=120, max_seconds=600), + ) + pickup_at_corrected = False + pickup_not_confirmed = False + left_pickup_at = factory.LazyAttribute( + lambda obj: obj.pickup_at + _random_timespan(min_seconds=60, max_seconds=180), + ) + left_pickup_at_corrected = False + + # Delivery-related attributes + # delivery_address -> set by the `make_order` fixture as there is only one `city` + reached_delivery_at = factory.LazyAttribute( + lambda obj: obj.left_pickup_at + + _random_timespan(min_seconds=240, max_seconds=480), + ) + delivery_at = factory.LazyAttribute( + lambda obj: obj.reached_delivery_at + + _random_timespan(min_seconds=240, max_seconds=660), + ) + delivery_at_corrected = False + delivery_not_confirmed = False + _courier_waited_at_delivery = factory.LazyAttribute( + lambda obj: False if obj.delivery_at else None, + ) + + # Statistical attributes -> calculate realistic stats + logged_delivery_distance = factory.LazyAttribute( + lambda obj: distance.great_circle( # noqa:WPS317 + (obj.pickup_address.latitude, obj.pickup_address.longitude), + (obj.delivery_address.latitude, obj.delivery_address.longitude), + ).meters, + ) + logged_avg_speed = factory.LazyAttribute( # noqa:ECE001 + lambda obj: round( + ( + obj.logged_avg_speed_distance + / (obj.delivery_at - obj.pickup_at).total_seconds() + ), + 2, + ), + ) + logged_avg_speed_distance = factory.LazyAttribute( + lambda obj: 0.95 * obj.logged_delivery_distance, + ) + + @factory.post_generation + def post( # noqa:C901,WPS231 + obj, create, extracted, **kwargs, # noqa:B902,N805 + ): + """Discard timestamps that occur after cancellation.""" + if obj.cancelled: + if obj.cancelled_at <= obj.restaurant_notified_at: + obj.restaurant_notified_at = None + obj.restaurant_notified_at_corrected = None + if obj.cancelled_at <= obj.restaurant_confirmed_at: + obj.restaurant_confirmed_at = None + obj.restaurant_confirmed_at_corrected = None + if obj.cancelled_at <= obj.dispatch_at: + obj.dispatch_at = None + obj.dispatch_at_corrected = None + if obj.cancelled_at <= obj.courier_notified_at: + obj.courier_notified_at = None + obj.courier_notified_at_corrected = None + if obj.cancelled_at <= obj.courier_accepted_at: + obj.courier_accepted_at = None + obj.courier_accepted_at_corrected = None + if obj.cancelled_at <= obj.reached_pickup_at: + obj.reached_pickup_at = None + if obj.cancelled_at <= obj.pickup_at: + obj.pickup_at = None + obj.pickup_at_corrected = None + obj.pickup_not_confirmed = None + if obj.cancelled_at <= obj.left_pickup_at: + obj.left_pickup_at = None + obj.left_pickup_at_corrected = None + if obj.cancelled_at <= obj.reached_delivery_at: + obj.reached_delivery_at = None + if obj.cancelled_at <= obj.delivery_at: + obj.delivery_at = None + obj.delivery_at_corrected = None + obj.delivery_not_confirmed = None + obj._courier_waited_at_delivery = None + + +class ScheduledOrderFactory(AdHocOrderFactory): + """Create instances of the `db.Order` model. + + This class takes care of the various timestamps for pre-orders. + + Pre-orders are placed long before the test day's lunch time starts. 
+ All timestamps are relative to either `.dispatch_at` or `.restaurant_notified_at` + and calculated backwards from `.scheduled_delivery_at`. + """ + + # Attributes regarding the specialization of an `Order`: ad-hoc or scheduled. + placed_at = factory.LazyFunction(_early_in_the_morning) + ad_hoc = False + # Discrete `datetime` objects in the "core" lunch time are enough. + scheduled_delivery_at = factory.LazyFunction( + lambda: random.choice( + [ + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 0, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 15, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 30, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 45, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 13, 0, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 13, 15, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 13, 30, + ), + ], + ), + ) + scheduled_delivery_at_corrected = False + # Assume the `Order` is on time. + first_estimated_delivery_at = factory.LazyAttribute( + lambda obj: obj.scheduled_delivery_at, + ) + + # Restaurant-related attributes + restaurant_notified_at = factory.LazyAttribute( + lambda obj: obj.scheduled_delivery_at + - _random_timespan(min_minutes=45, max_minutes=50), + ) + + # Dispatch-related attributes + dispatch_at = factory.LazyAttribute( + lambda obj: obj.scheduled_delivery_at + - _random_timespan(min_minutes=40, max_minutes=45), + ) diff --git a/tests/db/fake_data/fixture_makers.py b/tests/db/fake_data/fixture_makers.py new file mode 100644 index 0000000..9a5419b --- /dev/null +++ b/tests/db/fake_data/fixture_makers.py @@ -0,0 +1,105 @@ +"""Fixture factories for testing the ORM layer with fake data.""" + +import pytest + +from tests.db.fake_data import factories + + +@pytest.fixture +def make_address(city): + """Replaces `AddressFactory.build()`: Create an `Address` in the `city`.""" + # Reset the identifiers before every test. + factories.AddressFactory.reset_sequence(1) + + def func(**kwargs): + """Create an `Address` object in the `city`.""" + return factories.AddressFactory.build(city=city, **kwargs) + + return func + + +@pytest.fixture +def make_courier(): + """Replaces `CourierFactory.build()`: Create a `Courier`.""" + # Reset the identifiers before every test. + factories.CourierFactory.reset_sequence(1) + + def func(**kwargs): + """Create a new `Courier` object.""" + return factories.CourierFactory.build(**kwargs) + + return func + + +@pytest.fixture +def make_customer(): + """Replaces `CustomerFactory.build()`: Create a `Customer`.""" + # Reset the identifiers before every test. + factories.CustomerFactory.reset_sequence(1) + + def func(**kwargs): + """Create a new `Customer` object.""" + return factories.CustomerFactory.build(**kwargs) + + return func + + +@pytest.fixture +def make_restaurant(make_address): + """Replaces `RestaurantFactory.build()`: Create a `Restaurant`.""" + # Reset the identifiers before every test. + factories.RestaurantFactory.reset_sequence(1) + + def func(address=None, **kwargs): + """Create a new `Restaurant` object. + + If no `address` is provided, a new `Address` is created. 
+ """ + if address is None: + address = make_address() + + return factories.RestaurantFactory.build(address=address, **kwargs) + + return func + + +@pytest.fixture +def make_order(make_address, make_courier, make_customer, make_restaurant): + """Replaces `OrderFactory.build()`: Create a `Order`.""" + # Reset the identifiers before every test. + factories.AdHocOrderFactory.reset_sequence(1) + + def func(scheduled=False, restaurant=None, courier=None, **kwargs): + """Create a new `Order` object. + + Each `Order` is made by a new `Customer` with a unique `Address` for delivery. + + Args: + scheduled: if an `Order` is a pre-order + restaurant: who receives the `Order`; defaults to a new `Restaurant` + courier: who delivered the `Order`; defaults to a new `Courier` + kwargs: additional keyword arguments forwarded to the `OrderFactory` + + Returns: + order + """ + if scheduled: + factory_cls = factories.ScheduledOrderFactory + else: + factory_cls = factories.AdHocOrderFactory + + if restaurant is None: + restaurant = make_restaurant() + if courier is None: + courier = make_courier() + + return factory_cls.build( + customer=make_customer(), # assume a unique `Customer` per order + restaurant=restaurant, + courier=courier, + pickup_address=restaurant.address, # no `Address` history + delivery_address=make_address(), # unique `Customer` => new `Address` + **kwargs, + ) + + return func diff --git a/tests/db/fake_data/static_fixtures.py b/tests/db/fake_data/static_fixtures.py new file mode 100644 index 0000000..60d4181 --- /dev/null +++ b/tests/db/fake_data/static_fixtures.py @@ -0,0 +1,70 @@ +"""Fake data for testing the ORM layer.""" + +import pytest + +from urban_meal_delivery import db + + +@pytest.fixture +def city_data(): + """The data for the one and only `City` object as a `dict`.""" + return { + 'id': 1, + 'name': 'Paris', + 'kml': " ...", + 'center_latitude': 48.856614, + 'center_longitude': 2.3522219, + 'northeast_latitude': 48.9021449, + 'northeast_longitude': 2.4699208, + 'southwest_latitude': 48.815573, + 'southwest_longitude': 2.225193, + 'initial_zoom': 12, + } + + +@pytest.fixture +def city(city_data): + """The one and only `City` object.""" + return db.City(**city_data) + + +@pytest.fixture +def address(make_address): + """An `Address` object in the `city`.""" + return make_address() + + +@pytest.fixture +def courier(make_courier): + """A `Courier` object.""" + return make_courier() + + +@pytest.fixture +def customer(make_customer): + """A `Customer` object.""" + return make_customer() + + +@pytest.fixture +def restaurant(address, make_restaurant): + """A `Restaurant` object located at the `address`.""" + return make_restaurant(address=address) + + +@pytest.fixture +def order(make_order, restaurant): + """An `Order` object for the `restaurant`.""" + return make_order(restaurant=restaurant) + + +@pytest.fixture +def grid(city): + """A `Grid` with a pixel area of 1 square kilometer.""" + return db.Grid(city=city, side_length=1000) + + +@pytest.fixture +def pixel(grid): + """The `Pixel` in the lower-left corner of the `grid`.""" + return db.Pixel(id=1, grid=grid, n_x=0, n_y=0) diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index ffb5618..ab49855 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -1,141 +1,154 @@ -"""Test the ORM's Address model.""" +"""Test the ORM's `Address` model.""" import pytest +import sqlalchemy as sqla from sqlalchemy import exc as sa_exc -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db 
+from urban_meal_delivery.db import utils class TestSpecialMethods: - """Test special methods in Address.""" + """Test special methods in `Address`.""" - # pylint:disable=no-self-use - - def test_create_address(self, address_data): - """Test instantiation of a new Address object.""" - result = db.Address(**address_data) - - assert result is not None - - def test_text_representation(self, address_data): - """Address has a non-literal text representation.""" - address = db.Address(**address_data) - street = address_data['street'] - city_name = address_data['city_name'] + def test_create_address(self, address): + """Test instantiation of a new `Address` object.""" + assert address is not None + def test_text_representation(self, address): + """`Address` has a non-literal text representation.""" result = repr(address) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Address.""" + """Test the database constraints defined in `Address`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, address): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Address).count() == 0 - def test_insert_into_database(self, address, db_session): - """Insert an instance into the database.""" db_session.add(address) db_session.commit() - def test_dublicate_primary_key(self, address, address_data, city, db_session): - """Can only add a record once.""" + assert db_session.query(db.Address).count() == 1 + + def test_delete_a_referenced_address(self, db_session, address, make_address): + """Remove a record that is referenced with a FK.""" db_session.add(address) + # Fake another_address that has the same `.primary_id` as `address`. + db_session.add(make_address(primary_id=address.id)) db_session.commit() - another_address = db.Address(**address_data) - another_address.city = city - db_session.add(another_address) + db_session.delete(address) - with pytest.raises(orm_exc.FlushError): + with pytest.raises( + sa_exc.IntegrityError, match='fk_addresses_to_addresses_via_primary_id', + ): db_session.commit() - def test_delete_a_referenced_address(self, address, address_data, db_session): + def test_delete_a_referenced_city(self, db_session, address): """Remove a record that is referenced with a FK.""" db_session.add(address) db_session.commit() - # Fake a second address that belongs to the same primary address. - address_data['id'] += 1 - another_address = db.Address(**address_data) - db_session.add(another_address) - db_session.commit() + # Must delete without ORM as otherwise an UPDATE statement is emitted. 
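+ # (Deleting the `City` through the session instead would first null out + # the dependent FK columns with an UPDATE, so the constraint under test + # would never fire; `sqla.delete()` emits a plain DELETE statement.)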
+ stmt = sqla.delete(db.City).where(db.City.id == address.city.id) - with pytest.raises(sa_exc.IntegrityError): - db_session.execute( - db.Address.__table__.delete().where( # noqa:WPS609 - db.Address.id == address.id, - ), - ) - - def test_delete_a_referenced_city(self, address, city, db_session): - """Remove a record that is referenced with a FK.""" - db_session.add(address) - db_session.commit() - - with pytest.raises(sa_exc.IntegrityError): - db_session.execute( - db.City.__table__.delete().where(db.City.id == city.id), # noqa:WPS609 - ) + with pytest.raises( + sa_exc.IntegrityError, match='fk_addresses_to_cities_via_city_id', + ): + db_session.execute(stmt) @pytest.mark.parametrize('latitude', [-91, 91]) - def test_invalid_latitude(self, address, db_session, latitude): + def test_invalid_latitude(self, db_session, address, latitude): """Insert an instance with invalid data.""" address.latitude = latitude db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises( + sa_exc.IntegrityError, match='latitude_between_90_degrees', + ): db_session.commit() @pytest.mark.parametrize('longitude', [-181, 181]) - def test_invalid_longitude(self, address, db_session, longitude): + def test_invalid_longitude(self, db_session, address, longitude): """Insert an instance with invalid data.""" address.longitude = longitude db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises( + sa_exc.IntegrityError, match='longitude_between_180_degrees', + ): db_session.commit() @pytest.mark.parametrize('zip_code', [-1, 0, 9999, 100000]) - def test_invalid_zip_code(self, address, db_session, zip_code): + def test_invalid_zip_code(self, db_session, address, zip_code): """Insert an instance with invalid data.""" address.zip_code = zip_code db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='valid_zip_code'): db_session.commit() @pytest.mark.parametrize('floor', [-1, 41]) - def test_invalid_floor(self, address, db_session, floor): + def test_invalid_floor(self, db_session, address, floor): """Insert an instance with invalid data.""" address.floor = floor db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_floor'): db_session.commit() class TestProperties: - """Test properties in Address.""" + """Test properties in `Address`.""" - # pylint:disable=no-self-use - - def test_is_primary(self, address_data): - """Test Address.is_primary property.""" - address = db.Address(**address_data) + def test_is_primary(self, address): + """Test `Address.is_primary` property.""" + assert address.id == address.primary_id result = address.is_primary assert result is True - def test_is_not_primary(self, address_data): - """Test Address.is_primary property.""" - address_data['_primary_id'] = 999 - address = db.Address(**address_data) + def test_is_not_primary(self, address): + """Test `Address.is_primary` property.""" + address.primary_id = 999 result = address.is_primary assert result is False + + def test_location(self, address): + """Test `Address.location` property.""" + latitude = float(address.latitude) + longitude = float(address.longitude) + + result = address.location + + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(latitude) + assert result.longitude == pytest.approx(longitude) + + def test_location_is_cached(self, address): + """Test `Address.location` property.""" + result1 = 
address.location + result2 = address.location + + assert result1 is result2 + + def test_x_is_positive(self, address): + """Test `Address.x` property.""" + result = address.x + + assert result > 0 + + def test_y_is_positive(self, address): + """Test `Address.y` property.""" + result = address.y + + assert result > 0 diff --git a/tests/db/test_addresses_pixels.py b/tests/db/test_addresses_pixels.py new file mode 100644 index 0000000..d5beadd --- /dev/null +++ b/tests/db/test_addresses_pixels.py @@ -0,0 +1,135 @@ +"""Test the ORM's `AddressPixelAssociation` model. + +Implementation notes: + The test suite has 100% coverage without the test cases in this module. + That is because the `AddressPixelAssociation` model is imported into the + `urban_meal_delivery.db` namespace so that the `Address` and `Pixel` models + can find it upon initialization. Yet, none of the other unit tests run any + code associated with it. Therefore, we test it here as non-e2e tests and do + not measure its coverage. +""" + +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from urban_meal_delivery import db + + +@pytest.fixture +def assoc(address, pixel): + """An association between `address` and `pixel`.""" + return db.AddressPixelAssociation(address=address, pixel=pixel) + + +@pytest.mark.no_cover +class TestSpecialMethods: + """Test special methods in `AddressPixelAssociation`.""" + + def test_create_an_address_pixel_association(self, assoc): + """Test instantiation of a new `AddressPixelAssociation` object.""" + assert assoc is not None + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `AddressPixelAssociation`. + + The foreign keys to `City` and `Grid` are tested via INSERT and not + DELETE statements as the latter would already fail because of foreign + keys defined in `Address` and `Pixel`. + """ + + def test_insert_into_database(self, db_session, assoc): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.AddressPixelAssociation).count() == 0 + + db_session.add(assoc) + db_session.commit() + + assert db_session.query(db.AddressPixelAssociation).count() == 1 + + def test_delete_a_referenced_address(self, db_session, assoc): + """Remove a record that is referenced with a FK.""" + db_session.add(assoc) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. + stmt = sqla.delete(db.Address).where(db.Address.id == assoc.address.id) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_addresses_via_address_id_city_id', + ): + db_session.execute(stmt) + + def test_reference_an_invalid_city(self, db_session, address, pixel): + """Insert a record with an invalid foreign key.""" + db_session.add(address) + db_session.add(pixel) + db_session.commit() + + # Must insert without ORM as otherwise SQLAlchemy figures out + # that something is wrong before any query is sent to the database.
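+ # (Through the ORM, the FK values would be derived from the related + # `Address` and `Pixel` objects, so a bogus `city_id` could never reach + # the database; the Core `insert()` sends the raw values as-is.)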
+ stmt = sqla.insert(db.AddressPixelAssociation).values( + address_id=address.id, + city_id=999, + grid_id=pixel.grid.id, + pixel_id=pixel.id, + ) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_addresses_via_address_id_city_id', + ): + db_session.execute(stmt) + + def test_reference_an_invalid_grid(self, db_session, address, pixel): + """Insert a record with an invalid foreign key.""" + db_session.add(address) + db_session.add(pixel) + db_session.commit() + + # Must insert without ORM as otherwise SQLAlchemy figures out + # that something is wrong before any query is sent to the database. + stmt = sqla.insert(db.AddressPixelAssociation).values( + address_id=address.id, + city_id=address.city.id, + grid_id=999, + pixel_id=pixel.id, + ) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_grids_via_grid_id_city_id', + ): + db_session.execute(stmt) + + def test_delete_a_referenced_pixel(self, db_session, assoc): + """Remove a record that is referenced with a FK.""" + db_session.add(assoc) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. + stmt = sqla.delete(db.Pixel).where(db.Pixel.id == assoc.pixel.id) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_pixels_via_pixel_id_grid_id', + ): + db_session.execute(stmt) + + def test_put_an_address_on_a_grid_twice(self, db_session, address, assoc, pixel): + """Insert a record that violates a unique constraint.""" + db_session.add(assoc) + db_session.commit() + + # Create a neighboring `Pixel` and put the same `address` as in `pixel` in it. + neighbor = db.Pixel(grid=pixel.grid, n_x=pixel.n_x, n_y=pixel.n_y + 1) + another_assoc = db.AddressPixelAssociation(address=address, pixel=neighbor) + + db_session.add(another_assoc) + + with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): + db_session.commit() diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index 50a7ecb..840b2dd 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -1,99 +1,96 @@ -"""Test the ORM's City model.""" +"""Test the ORM's `City` model.""" import pytest -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db +from urban_meal_delivery.db import utils class TestSpecialMethods: - """Test special methods in City.""" + """Test special methods in `City`.""" - # pylint:disable=no-self-use - - def test_create_city(self, city_data): - """Test instantiation of a new City object.""" - result = db.City(**city_data) - - assert result is not None - - def test_text_representation(self, city_data): - """City has a non-literal text representation.""" - city = db.City(**city_data) - name = city_data['name'] + def test_create_city(self, city): + """Test instantiation of a new `City` object.""" + assert city is not None + def test_text_representation(self, city): + """`City` has a non-literal text representation.""" result = repr(city) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in City.""" + """Test the database constraints defined in `City`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, city): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.City).count() == 0 - def test_insert_into_database(self, city, db_session): - """Insert an instance into the database.""" db_session.add(city) db_session.commit() 
- def test_dublicate_primary_key(self, city, city_data, db_session): - """Can only add a record once.""" - db_session.add(city) - db_session.commit() - - another_city = db.City(**city_data) - db_session.add(another_city) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() + assert db_session.query(db.City).count() == 1 class TestProperties: - """Test properties in City.""" + """Test properties in `City`.""" - # pylint:disable=no-self-use + def test_center(self, city, city_data): + """Test `City.center` property.""" + result = city.center - def test_location_data(self, city_data): - """Test City.location property.""" - city = db.City(**city_data) + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(city_data['center_latitude']) + assert result.longitude == pytest.approx(city_data['center_longitude']) - result = city.location + def test_center_is_cached(self, city): + """Test `City.center` property.""" + result1 = city.center + result2 = city.center - assert isinstance(result, dict) - assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data['_center_latitude']) - assert result['longitude'] == pytest.approx(city_data['_center_longitude']) + assert result1 is result2 - def test_viewport_data_overall(self, city_data): - """Test City.viewport property.""" - city = db.City(**city_data) + def test_northeast(self, city, city_data): + """Test `City.northeast` property.""" + result = city.northeast - result = city.viewport + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(city_data['northeast_latitude']) + assert result.longitude == pytest.approx(city_data['northeast_longitude']) - assert isinstance(result, dict) - assert len(result) == 2 + def test_northeast_is_cached(self, city): + """Test `City.northeast` property.""" + result1 = city.northeast + result2 = city.northeast - def test_viewport_data_northeast(self, city_data): - """Test City.viewport property.""" - city = db.City(**city_data) + assert result1 is result2 - result = city.viewport['northeast'] + def test_southwest(self, city, city_data): + """Test `City.southwest` property.""" + result = city.southwest - assert isinstance(result, dict) - assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data['_northeast_latitude']) - assert result['longitude'] == pytest.approx(city_data['_northeast_longitude']) + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(city_data['southwest_latitude']) + assert result.longitude == pytest.approx(city_data['southwest_longitude']) - def test_viewport_data_southwest(self, city_data): - """Test City.viewport property.""" - city = db.City(**city_data) + def test_southwest_is_cached(self, city): + """Test `City.southwest` property.""" + result1 = city.southwest + result2 = city.southwest - result = city.viewport['southwest'] + assert result1 is result2 - assert isinstance(result, dict) - assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data['_southwest_latitude']) - assert result['longitude'] == pytest.approx(city_data['_southwest_longitude']) + def test_total_x(self, city): + """Test `City.total_x` property.""" + result = city.total_x + + assert result > 18_000 + + def test_total_y(self, city): + """Test `City.total_y` property.""" + result = city.total_y + + assert result > 9_000 diff --git a/tests/db/test_couriers.py b/tests/db/test_couriers.py index a3ba103..5376cae 100644 --- a/tests/db/test_couriers.py +++ 
b/tests/db/test_couriers.py @@ -1,125 +1,107 @@ -"""Test the ORM's Courier model.""" +"""Test the ORM's `Courier` model.""" import pytest from sqlalchemy import exc as sa_exc -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Courier.""" + """Test special methods in `Courier`.""" - # pylint:disable=no-self-use - - def test_create_courier(self, courier_data): - """Test instantiation of a new Courier object.""" - result = db.Courier(**courier_data) - - assert result is not None - - def test_text_representation(self, courier_data): - """Courier has a non-literal text representation.""" - courier_data['id'] = 1 - courier = db.Courier(**courier_data) - id_ = courier_data['id'] + def test_create_courier(self, courier): + """Test instantiation of a new `Courier` object.""" + assert courier is not None + def test_text_representation(self, courier): + """`Courier` has a non-literal text representation.""" result = repr(courier) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Courier.""" + """Test the database constraints defined in `Courier`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, courier): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Courier).count() == 0 - def test_insert_into_database(self, courier, db_session): - """Insert an instance into the database.""" db_session.add(courier) db_session.commit() - def test_dublicate_primary_key(self, courier, courier_data, db_session): - """Can only add a record once.""" - db_session.add(courier) - db_session.commit() + assert db_session.query(db.Courier).count() == 1 - another_courier = db.Courier(**courier_data) - db_session.add(another_courier) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() - - def test_invalid_vehicle(self, courier, db_session): + def test_invalid_vehicle(self, db_session, courier): """Insert an instance with invalid data.""" courier.vehicle = 'invalid' db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='available_vehicle_types'): db_session.commit() - def test_negative_speed(self, courier, db_session): + def test_negative_speed(self, db_session, courier): """Insert an instance with invalid data.""" courier.historic_speed = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_speed'): db_session.commit() - def test_unrealistic_speed(self, courier, db_session): + def test_unrealistic_speed(self, db_session, courier): """Insert an instance with invalid data.""" courier.historic_speed = 999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_speed'): db_session.commit() - def test_negative_capacity(self, courier, db_session): + def test_negative_capacity(self, db_session, courier): """Insert an instance with invalid data.""" courier.capacity = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='capacity_under_200_liters'): db_session.commit() - def test_too_much_capacity(self, courier, db_session): + def test_too_much_capacity(self, db_session, courier): """Insert an instance with invalid data.""" courier.capacity = 999 db_session.add(courier) - 
with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='capacity_under_200_liters'): db_session.commit() - def test_negative_pay_per_hour(self, courier, db_session): + def test_negative_pay_per_hour(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_hour = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_hour'): db_session.commit() - def test_too_much_pay_per_hour(self, courier, db_session): + def test_too_much_pay_per_hour(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_hour = 9999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_hour'): db_session.commit() - def test_negative_pay_per_order(self, courier, db_session): + def test_negative_pay_per_order(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_order = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_order'): db_session.commit() - def test_too_much_pay_per_order(self, courier, db_session): + def test_too_much_pay_per_order(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_order = 999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_order'): db_session.commit() diff --git a/tests/db/test_customer.py b/tests/db/test_customer.py index 487a11c..f9ef15a 100644 --- a/tests/db/test_customer.py +++ b/tests/db/test_customer.py @@ -1,51 +1,34 @@ -"""Test the ORM's Customer model.""" +"""Test the ORM's `Customer` model.""" import pytest -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Customer.""" + """Test special methods in `Customer`.""" - # pylint:disable=no-self-use - - def test_create_customer(self, customer_data): - """Test instantiation of a new Customer object.""" - result = db.Customer(**customer_data) - - assert result is not None - - def test_text_representation(self, customer_data): - """Customer has a non-literal text representation.""" - customer = db.Customer(**customer_data) - id_ = customer_data['id'] + def test_create_customer(self, customer): + """Test instantiation of a new `Customer` object.""" + assert customer is not None + def test_text_representation(self, customer): + """`Customer` has a non-literal text representation.""" result = repr(customer) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Customer.""" + """Test the database constraints defined in `Customer`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, customer): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Customer).count() == 0 - def test_insert_into_database(self, customer, db_session): - """Insert an instance into the database.""" db_session.add(customer) db_session.commit() - def test_dublicate_primary_key(self, customer, customer_data, db_session): - """Can only add a record once.""" - db_session.add(customer) - db_session.commit() - - another_customer = db.Customer(**customer_data) - db_session.add(another_customer) - - with 
pytest.raises(orm_exc.FlushError): - db_session.commit() + assert db_session.query(db.Customer).count() == 1 diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py new file mode 100644 index 0000000..ff37dda --- /dev/null +++ b/tests/db/test_forecasts.py @@ -0,0 +1,505 @@ +"""Test the ORM's `Forecast` model.""" + +import datetime as dt + +import pandas as pd +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from tests import config as test_config +from urban_meal_delivery import db + + +MODEL = 'hets' + + +@pytest.fixture +def forecast(pixel): + """A `forecast` made in the `pixel` at `NOON`.""" + start_at = dt.datetime( + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + ) + + return db.Forecast( + pixel=pixel, + start_at=start_at, + time_step=test_config.LONG_TIME_STEP, + train_horizon=test_config.LONG_TRAIN_HORIZON, + model=MODEL, + actual=12, + prediction=12.3, + low80=1.23, + high80=123.4, + low95=0.123, + high95=1234.5, + ) + + +class TestSpecialMethods: + """Test special methods in `Forecast`.""" + + def test_create_forecast(self, forecast): + """Test instantiation of a new `Forecast` object.""" + assert forecast is not None + + def test_text_representation(self, forecast): + """`Forecast` has a non-literal text representation.""" + result = repr(forecast) + + assert ( + result + == f'' # noqa:E501 + ) + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `Forecast`.""" + + def test_insert_into_database(self, db_session, forecast): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Forecast).count() == 0 + + db_session.add(forecast) + db_session.commit() + + assert db_session.query(db.Forecast).count() == 1 + + def test_delete_a_referenced_pixel(self, db_session, forecast): + """Remove a record that is referenced with a FK.""" + db_session.add(forecast) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. 
+ stmt = sqla.delete(db.Pixel).where(db.Pixel.id == forecast.pixel.id) + + with pytest.raises( + sa_exc.IntegrityError, match='fk_forecasts_to_pixels_via_pixel_id', + ): + db_session.execute(stmt) + + @pytest.mark.parametrize('hour', [10, 23]) + def test_invalid_start_at_outside_operating_hours( + self, db_session, forecast, hour, + ): + """Insert an instance with invalid data.""" + forecast.start_at = dt.datetime( + forecast.start_at.year, + forecast.start_at.month, + forecast.start_at.day, + hour, + ) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='within_operating_hours', + ): + db_session.commit() + + def test_invalid_start_at_not_quarter_of_hour(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.start_at += dt.timedelta(minutes=1) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='must_be_quarters_of_the_hour', + ): + db_session.commit() + + def test_invalid_start_at_seconds_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.start_at += dt.timedelta(seconds=1) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='no_seconds', + ): + db_session.commit() + + def test_invalid_start_at_microseconds_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.start_at += dt.timedelta(microseconds=1) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='no_microseconds', + ): + db_session.commit() + + @pytest.mark.parametrize('value', [-1, 0]) + def test_positive_time_step(self, db_session, forecast, value): + """Insert an instance with invalid data.""" + forecast.time_step = value + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='time_step_must_be_positive', + ): + db_session.commit() + + @pytest.mark.parametrize('value', [-1, 0]) + def test_positive_train_horizon(self, db_session, forecast, value): + """Insert an instance with invalid data.""" + forecast.train_horizon = value + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='training_horizon_must_be_positive', + ): + db_session.commit() + + def test_non_negative_actuals(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.actual = -1 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='actuals_must_be_non_negative', + ): + db_session.commit() + + def test_set_prediction_without_ci(self, db_session, forecast): + """Sanity check to see that the check constraint ... + + ... "prediction_must_be_within_ci" is not triggered. 
+ """ + forecast.low80 = None + forecast.high80 = None + forecast.low95 = None + forecast.high95 = None + + db_session.add(forecast) + db_session.commit() + + def test_ci80_with_missing_low(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.high80 is not None + + forecast.low80 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_ci95_with_missing_low(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.high95 is not None + + forecast.low95 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_ci80_with_missing_high(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + + forecast.high80 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_ci95_with_missing_high(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + + forecast.high95 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_prediction_smaller_than_low80_with_ci95_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + assert forecast.high95 is not None + + forecast.prediction = forecast.low80 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_smaller_than_low80_without_ci95_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low95 = None + forecast.high95 = None + + forecast.prediction = forecast.low80 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_smaller_than_low95_with_ci80_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.high80 is not None + + forecast.prediction = forecast.low95 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_smaller_than_low95_without_ci80_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low80 = None + forecast.high80 = None + + forecast.prediction = forecast.low95 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high80_with_ci95_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + assert forecast.high95 is not None + + forecast.prediction = forecast.high80 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high80_without_ci95_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low95 = None + forecast.high95 = None + + 
forecast.prediction = forecast.high80 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high95_with_ci80_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.high80 is not None + + forecast.prediction = forecast.high95 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high95_without_ci80_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low80 = None + forecast.high80 = None + + forecast.prediction = forecast.high95 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_ci80_upper_bound_greater_than_lower_bound(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.high80 is not None + + # Do not trigger the "ci95_must_be_wider_than_ci80" constraint. + forecast.low95 = None + forecast.high95 = None + + forecast.low80, forecast.high80 = ( # noqa:WPS414 + forecast.high80, + forecast.low80, + ) + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_bound_greater_than_lower_bound', + ): + db_session.commit() + + def test_ci95_upper_bound_greater_than_lower_bound(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + assert forecast.high95 is not None + + # Do not trigger the "ci95_must_be_wider_than_ci80" constraint. 
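+        # (Otherwise, the swapped 95% bounds would also make the 95% CI
+        #  narrower than the 80% CI.)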
+ forecast.low80 = None + forecast.high80 = None + + forecast.low95, forecast.high95 = ( # noqa:WPS414 + forecast.high95, + forecast.low95, + ) + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_bound_greater_than_lower_bound', + ): + db_session.commit() + + def test_ci95_is_wider_than_ci80_at_low_end(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.low95 is not None + + forecast.low80, forecast.low95 = (forecast.low95, forecast.low80) # noqa:WPS414 + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci95_must_be_wider_than_ci80', + ): + db_session.commit() + + def test_ci95_is_wider_than_ci80_at_high_end(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.high80 is not None + assert forecast.high95 is not None + + forecast.high80, forecast.high95 = ( # noqa:WPS414 + forecast.high95, + forecast.high80, + ) + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci95_must_be_wider_than_ci80', + ): + db_session.commit() + + def test_two_predictions_for_same_forecasting_setting(self, db_session, forecast): + """Insert a record that violates a unique constraint.""" + db_session.add(forecast) + db_session.commit() + + another_forecast = db.Forecast( + pixel=forecast.pixel, + start_at=forecast.start_at, + time_step=forecast.time_step, + train_horizon=forecast.train_horizon, + model=forecast.model, + actual=forecast.actual, + prediction=2, + low80=1, + high80=3, + low95=0, + high95=4, + ) + db_session.add(another_forecast) + + with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): + db_session.commit() + + +class TestFromDataFrameConstructor: + """Test the alternative `Forecast.from_dataframe()` constructor.""" + + @pytest.fixture + def prediction_data(self): + """A `pd.DataFrame` as returned by `*Model.predict()` ... + + ... and used as the `data` argument to `Forecast.from_dataframe()`. + + We assume the `data` come from some vertical forecasting `*Model` + and contain several rows (= `3` in this example) corresponding + to different time steps centered around `NOON`. 
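+
+        The columns are `actual`, `prediction`, `low80`, `high80`,
+        `low95`, and `high95`; the index holds the `start_at` values.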
+ """ + noon_start_at = dt.datetime( + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + ) + + index = pd.Index( + [ + noon_start_at - dt.timedelta(minutes=test_config.LONG_TIME_STEP), + noon_start_at, + noon_start_at + dt.timedelta(minutes=test_config.LONG_TIME_STEP), + ], + ) + index.name = 'start_at' + + return pd.DataFrame( + data={ + 'actual': (11, 12, 13), + 'prediction': (11.3, 12.3, 13.3), + 'low80': (1.123, 1.23, 1.323), + 'high80': (112.34, 123.4, 132.34), + 'low95': (0.1123, 0.123, 0.1323), + 'high95': (1123.45, 1234.5, 1323.45), + }, + index=index, + ) + + def test_convert_dataframe_into_orm_objects(self, pixel, prediction_data): + """Call `Forecast.from_dataframe()`.""" + forecasts = db.Forecast.from_dataframe( + pixel=pixel, + time_step=test_config.LONG_TIME_STEP, + train_horizon=test_config.LONG_TRAIN_HORIZON, + model=MODEL, + data=prediction_data, + ) + + assert len(forecasts) == 3 + for forecast in forecasts: + assert isinstance(forecast, db.Forecast) + + @pytest.mark.db + def test_persist_predictions_into_database( + self, db_session, pixel, prediction_data, + ): + """Call `Forecast.from_dataframe()` and persist the results.""" + forecasts = db.Forecast.from_dataframe( + pixel=pixel, + time_step=test_config.LONG_TIME_STEP, + train_horizon=test_config.LONG_TRAIN_HORIZON, + model=MODEL, + data=prediction_data, + ) + + db_session.add_all(forecasts) + db_session.commit() diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py new file mode 100644 index 0000000..2babf25 --- /dev/null +++ b/tests/db/test_grids.py @@ -0,0 +1,239 @@ +"""Test the ORM's `Grid` model.""" + +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from urban_meal_delivery import db + + +class TestSpecialMethods: + """Test special methods in `Grid`.""" + + def test_create_grid(self, grid): + """Test instantiation of a new `Grid` object.""" + assert grid is not None + + def test_text_representation(self, grid): + """`Grid` has a non-literal text representation.""" + result = repr(grid) + + assert result == f'' + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `Grid`.""" + + def test_insert_into_database(self, db_session, grid): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Grid).count() == 0 + + db_session.add(grid) + db_session.commit() + + assert db_session.query(db.Grid).count() == 1 + + def test_delete_a_referenced_city(self, db_session, grid): + """Remove a record that is referenced with a FK.""" + db_session.add(grid) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. + stmt = sqla.delete(db.City).where(db.City.id == grid.city.id) + + with pytest.raises( + sa_exc.IntegrityError, match='fk_grids_to_cities_via_city_id', + ): + db_session.execute(stmt) + + def test_two_grids_with_identical_side_length(self, db_session, grid): + """Insert a record that violates a unique constraint.""" + db_session.add(grid) + db_session.commit() + + # Create a `Grid` with the same `.side_length` in the same `.city`. 
+        another_grid = db.Grid(city=grid.city, side_length=grid.side_length)
+        db_session.add(another_grid)
+
+        with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'):
+            db_session.commit()
+
+
+class TestProperties:
+    """Test properties in `Grid`."""
+
+    def test_pixel_area(self, grid):
+        """Test `Grid.pixel_area` property."""
+        result = grid.pixel_area
+
+        assert result == 1.0
+
+
+class TestGridification:
+    """Test the `Grid.gridify()` constructor."""
+
+    @pytest.fixture
+    def addresses_mock(self, mocker, monkeypatch):
+        """A `Mock` whose `.return_value` is to be set ...
+
+        ... to the addresses that are gridified. The addresses are
+        all treated as `Order.pickup_address` attributes of some orders.
+        """
+        mock = mocker.Mock()
+        query = (  # noqa:ECE001
+            mock.query.return_value.join.return_value.filter.return_value.all  # noqa:E501,WPS219
+        )
+        monkeypatch.setattr(db, 'session', mock)
+
+        return query
+
+    @pytest.mark.no_cover
+    def test_no_pixel_without_addresses(self, city, addresses_mock):
+        """Without orders, there are no `Pixel` objects on the `grid`.
+
+        This test case skips the `for`-loop inside `Grid.gridify()`.
+        """
+        addresses_mock.return_value = []
+
+        # The chosen `side_length` would result in one `Pixel` if there were orders.
+        # `+1` as otherwise there would be a second pixel in one direction.
+        side_length = max(city.total_x, city.total_y) + 1
+
+        result = db.Grid.gridify(city=city, side_length=side_length)
+
+        assert isinstance(result, db.Grid)
+        assert len(result.pixels) == 0  # noqa:WPS507
+
+    def test_one_pixel_with_one_address(self, city, order, addresses_mock):
+        """At the very least, there must be one `Pixel` ...
+
+        ... if the `side_length` is greater than both the
+        horizontal and vertical distances of the viewport.
+        """
+        addresses_mock.return_value = [order.pickup_address]
+
+        # `+1` as otherwise there would be a second pixel in one direction.
+        side_length = max(city.total_x, city.total_y) + 1
+
+        result = db.Grid.gridify(city=city, side_length=side_length)
+
+        assert isinstance(result, db.Grid)
+        assert len(result.pixels) == 1
+
+    def test_one_pixel_with_two_addresses(self, city, make_order, addresses_mock):
+        """At the very least, there must be one `Pixel` ...
+
+        ... if the `side_length` is greater than both the
+        horizontal and vertical distances of the viewport.
+
+        This test case is necessary as `test_one_pixel_with_one_address`
+        does not make `Grid.gridify()` re-use an already created `Pixel`
+        object internally.
+        """
+        orders = [make_order(), make_order()]
+        addresses_mock.return_value = [order.pickup_address for order in orders]
+
+        # `+1` as otherwise there would be a second pixel in one direction.
+        side_length = max(city.total_x, city.total_y) + 1
+
+        result = db.Grid.gridify(city=city, side_length=side_length)
+
+        assert isinstance(result, db.Grid)
+        assert len(result.pixels) == 1
+
+    def test_no_pixel_with_one_address_too_far_south(self, city, order, addresses_mock):
+        """An `address` outside the `city`'s viewport is discarded."""
+        # Move the `address` just below `city.southwest`.
+        order.pickup_address.latitude = city.southwest.latitude - 0.1
+        addresses_mock.return_value = [order.pickup_address]
+
+        # `+1` as otherwise there would be a second pixel in one direction.
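+        # (With `side_length == total_x`, `city.total_x // side_length` would
+        #  be `1`, putting a second pixel in the x-direction.)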
+ side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 0 # noqa:WPS507 + + @pytest.mark.no_cover + def test_no_pixel_with_one_address_too_far_west(self, city, order, addresses_mock): + """An `address` outside the `city`'s viewport is discarded. + + This test is a logical sibling to + `test_no_pixel_with_one_address_too_far_south` and therefore redundant. + """ + # Move the `address` just left to `city.southwest`. + order.pickup_address.longitude = city.southwest.longitude - 0.1 + addresses_mock.return_value = [order.pickup_address] + + # `+1` as otherwise there would be a second pixel in one direction. + side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 0 # noqa:WPS507 + + @pytest.mark.no_cover + def test_two_pixels_with_two_addresses(self, city, make_address, addresses_mock): + """Two `Address` objects in distinct `Pixel` objects. + + This test is more of a sanity check. + """ + # Create two `Address` objects in distinct `Pixel`s. + addresses_mock.return_value = [ + # One `Address` in the lower-left `Pixel`, ... + make_address(latitude=48.8357377, longitude=2.2517412), + # ... and another one in the upper-right one. + make_address(latitude=48.8898312, longitude=2.4357622), + ] + + side_length = max(city.total_x // 2, city.total_y // 2) + 1 + + # By assumption of the test data. + n_pixels_x = (city.total_x // side_length) + 1 + n_pixels_y = (city.total_y // side_length) + 1 + assert n_pixels_x * n_pixels_y == 4 + + # Create a `Grid` with at most four `Pixel`s. + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 2 + + @pytest.mark.db + @pytest.mark.no_cover + @pytest.mark.parametrize('side_length', [250, 500, 1_000, 2_000, 4_000, 8_000]) + def test_make_random_grids( # noqa:WPS211,WPS218 + self, db_session, city, make_address, make_restaurant, make_order, side_length, + ): + """With 100 random `Address` objects, a grid must have ... + + ... between 1 and a deterministic upper bound of `Pixel` objects. + + This test creates confidence that the created `Grid` + objects adhere to the database constraints. + """ + addresses = [make_address() for _ in range(100)] + restaurants = [make_restaurant(address=address) for address in addresses] + orders = [make_order(restaurant=restaurant) for restaurant in restaurants] + db_session.add_all(orders) + + n_pixels_x = (city.total_x // side_length) + 1 + n_pixels_y = (city.total_y // side_length) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert 1 <= len(result.pixels) <= n_pixels_x * n_pixels_y + + # Sanity checks for `Pixel.southwest` and `Pixel.northeast`. 
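+        # (The `< 2` tolerance absorbs sub-two-meter rounding errors from the
+        #  latitude-longitude to UTM conversions.)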
+        for pixel in result.pixels:
+            assert abs(pixel.southwest.x - pixel.n_x * side_length) < 2
+            assert abs(pixel.southwest.y - pixel.n_y * side_length) < 2
+            assert abs(pixel.northeast.x - (pixel.n_x + 1) * side_length) < 2
+            assert abs(pixel.northeast.y - (pixel.n_y + 1) * side_length) < 2
+
+        db_session.add(result)
+        db_session.commit()
diff --git a/tests/db/test_orders.py b/tests/db/test_orders.py
index fa36072..653038a 100644
--- a/tests/db/test_orders.py
+++ b/tests/db/test_orders.py
@@ -1,57 +1,40 @@
-"""Test the ORM's Order model."""
+"""Test the ORM's `Order` model."""
 
 import datetime
+import random
 
 import pytest
-from sqlalchemy.orm import exc as orm_exc
 
 from urban_meal_delivery import db
 
 
 class TestSpecialMethods:
-    """Test special methods in Order."""
+    """Test special methods in `Order`."""
 
-    # pylint:disable=no-self-use
-
-    def test_create_order(self, order_data):
-        """Test instantiation of a new Order object."""
-        result = db.Order(**order_data)
-
-        assert result is not None
-
-    def test_text_representation(self, order_data):
-        """Order has a non-literal text representation."""
-        order = db.Order(**order_data)
-        id_ = order_data['id']
+    def test_create_order(self, order):
+        """Test instantiation of a new `Order` object."""
+        assert order is not None
 
+    def test_text_representation(self, order):
+        """`Order` has a non-literal text representation."""
         result = repr(order)
 
-        assert result == f'<Order(#{id_})>'
+        assert result == f'<Order(#{order.id})>'
 
 
-@pytest.mark.e2e
+@pytest.mark.db
 @pytest.mark.no_cover
 class TestConstraints:
-    """Test the database constraints defined in Order."""
+    """Test the database constraints defined in `Order`."""
 
-    # pylint:disable=no-self-use
+    def test_insert_into_database(self, db_session, order):
+        """Insert an instance into the (empty) database."""
+        assert db_session.query(db.Order).count() == 0
 
-    def test_insert_into_database(self, order, db_session):
-        """Insert an instance into the database."""
         db_session.add(order)
         db_session.commit()
 
-    def test_dublicate_primary_key(self, order, order_data, city, db_session):
-        """Can only add a record once."""
-        db_session.add(order)
-        db_session.commit()
-
-        another_order = db.Order(**order_data)
-        another_order.city = city
-        db_session.add(another_order)
-
-        with pytest.raises(orm_exc.FlushError):
-            db_session.commit()
+        assert db_session.query(db.Order).count() == 1
 
     # TODO (order-constraints): the various Foreign Key and Check Constraints
     # should be tested eventually. This is not of highest importance as
@@ -59,339 +42,429 @@ class TestConstraints:
 
 
 class TestProperties:
-    """Test properties in Order."""
+    """Test properties in `Order`.
 
-    # pylint:disable=no-self-use,too-many-public-methods
+    The `order` fixture uses the defaults specified in `factories.OrderFactory`
+    and provided by the `make_order` fixture.
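+
+    Most tests below only tweak the single attribute under test.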
+ """ - def test_is_not_scheduled(self, order_data): - """Test Order.scheduled property.""" - order = db.Order(**order_data) + def test_is_ad_hoc(self, order): + """Test `Order.scheduled` property.""" + assert order.ad_hoc is True result = order.scheduled assert result is False - def test_is_scheduled(self, order_data): - """Test Order.scheduled property.""" - order_data['ad_hoc'] = False - order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0) - order_data['scheduled_delivery_at_corrected'] = False - order = db.Order(**order_data) + def test_is_scheduled(self, make_order): + """Test `Order.scheduled` property.""" + order = make_order(scheduled=True) + assert order.ad_hoc is False result = order.scheduled assert result is True - def test_is_completed(self, order_data): - """Test Order.completed property.""" - order = db.Order(**order_data) - + def test_is_completed(self, order): + """Test `Order.completed` property.""" result = order.completed assert result is True - def test_is_not_completed(self, order_data): - """Test Order.completed property.""" - order_data['cancelled'] = True - order_data['cancelled_at'] = datetime.datetime(2020, 1, 2, 12, 15, 0) - order_data['cancelled_at_corrected'] = False - order = db.Order(**order_data) + def test_is_not_completed1(self, make_order): + """Test `Order.completed` property.""" + order = make_order(cancel_before_pickup=True) + assert order.cancelled is True result = order.completed assert result is False - def test_is_corrected(self, order_data): - """Test Order.corrected property.""" - order_data['dispatch_at_corrected'] = True - order = db.Order(**order_data) + def test_is_not_completed2(self, make_order): + """Test `Order.completed` property.""" + order = make_order(cancel_after_pickup=True) + assert order.cancelled is True + + result = order.completed + + assert result is False + + def test_is_not_corrected(self, order): + """Test `Order.corrected` property.""" + # By default, the `OrderFactory` sets all `.*_corrected` attributes to `False`. 
+ result = order.corrected + + assert result is False + + @pytest.mark.parametrize( + 'column', + [ + 'scheduled_delivery_at', + 'cancelled_at', + 'restaurant_notified_at', + 'restaurant_confirmed_at', + 'dispatch_at', + 'courier_notified_at', + 'courier_accepted_at', + 'pickup_at', + 'left_pickup_at', + 'delivery_at', + ], + ) + def test_is_corrected(self, order, column): + """Test `Order.corrected` property.""" + setattr(order, f'{column}_corrected', True) result = order.corrected assert result is True - def test_time_to_accept_no_dispatch_at(self, order_data): - """Test Order.time_to_accept property.""" - order_data['dispatch_at'] = None - order = db.Order(**order_data) + def test_time_to_accept_no_dispatch_at(self, order): + """Test `Order.time_to_accept` property.""" + order.dispatch_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_accept) - def test_time_to_accept_no_courier_accepted(self, order_data): - """Test Order.time_to_accept property.""" - order_data['courier_accepted_at'] = None - order = db.Order(**order_data) + def test_time_to_accept_no_courier_accepted(self, order): + """Test `Order.time_to_accept` property.""" + order.courier_accepted_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_accept) - def test_time_to_accept_success(self, order_data): - """Test Order.time_to_accept property.""" - order = db.Order(**order_data) - + def test_time_to_accept_success(self, order): + """Test `Order.time_to_accept` property.""" result = order.time_to_accept - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_to_react_no_courier_notified(self, order_data): - """Test Order.time_to_react property.""" - order_data['courier_notified_at'] = None - order = db.Order(**order_data) + def test_time_to_react_no_courier_notified(self, order): + """Test `Order.time_to_react` property.""" + order.courier_notified_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_react) - def test_time_to_react_no_courier_accepted(self, order_data): - """Test Order.time_to_react property.""" - order_data['courier_accepted_at'] = None - order = db.Order(**order_data) + def test_time_to_react_no_courier_accepted(self, order): + """Test `Order.time_to_react` property.""" + order.courier_accepted_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_react) - def test_time_to_react_success(self, order_data): - """Test Order.time_to_react property.""" - order = db.Order(**order_data) - + def test_time_to_react_success(self, order): + """Test `Order.time_to_react` property.""" result = order.time_to_react - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_to_pickup_no_reached_pickup_at(self, order_data): - """Test Order.time_to_pickup property.""" - order_data['reached_pickup_at'] = None - order = db.Order(**order_data) + def test_time_to_pickup_no_reached_pickup_at(self, order): + """Test `Order.time_to_pickup` property.""" + order.reached_pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_pickup) - def test_time_to_pickup_no_courier_accepted(self, order_data): - """Test Order.time_to_pickup property.""" - order_data['courier_accepted_at'] = None - order = db.Order(**order_data) + def test_time_to_pickup_no_courier_accepted(self, order): + """Test `Order.time_to_pickup` property.""" + order.courier_accepted_at = None with pytest.raises(RuntimeError, match='not set'): 
int(order.time_to_pickup) - def test_time_to_pickup_success(self, order_data): - """Test Order.time_to_pickup property.""" - order = db.Order(**order_data) - + def test_time_to_pickup_success(self, order): + """Test `Order.time_to_pickup` property.""" result = order.time_to_pickup - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_at_pickup_no_reached_pickup_at(self, order_data): - """Test Order.time_at_pickup property.""" - order_data['reached_pickup_at'] = None - order = db.Order(**order_data) + def test_time_at_pickup_no_reached_pickup_at(self, order): + """Test `Order.time_at_pickup` property.""" + order.reached_pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_at_pickup) - def test_time_at_pickup_no_pickup_at(self, order_data): - """Test Order.time_at_pickup property.""" - order_data['pickup_at'] = None - order = db.Order(**order_data) + def test_time_at_pickup_no_pickup_at(self, order): + """Test `Order.time_at_pickup` property.""" + order.pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_at_pickup) - def test_time_at_pickup_success(self, order_data): - """Test Order.time_at_pickup property.""" - order = db.Order(**order_data) - + def test_time_at_pickup_success(self, order): + """Test `Order.time_at_pickup` property.""" result = order.time_at_pickup - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_scheduled_pickup_at_no_restaurant_notified( # noqa:WPS118 - self, order_data, - ): - """Test Order.scheduled_pickup_at property.""" - order_data['restaurant_notified_at'] = None - order = db.Order(**order_data) + def test_scheduled_pickup_at_no_restaurant_notified(self, order): # noqa:WPS118 + """Test `Order.scheduled_pickup_at` property.""" + order.restaurant_notified_at = None with pytest.raises(RuntimeError, match='not set'): int(order.scheduled_pickup_at) - def test_scheduled_pickup_at_no_est_prep_duration(self, order_data): # noqa:WPS118 - """Test Order.scheduled_pickup_at property.""" - order_data['estimated_prep_duration'] = None - order = db.Order(**order_data) + def test_scheduled_pickup_at_no_est_prep_duration(self, order): # noqa:WPS118 + """Test `Order.scheduled_pickup_at` property.""" + order.estimated_prep_duration = None with pytest.raises(RuntimeError, match='not set'): int(order.scheduled_pickup_at) - def test_scheduled_pickup_at_success(self, order_data): - """Test Order.scheduled_pickup_at property.""" - order = db.Order(**order_data) - + def test_scheduled_pickup_at_success(self, order): + """Test `Order.scheduled_pickup_at` property.""" result = order.scheduled_pickup_at - assert isinstance(result, datetime.datetime) + assert order.placed_at < result < order.delivery_at - def test_if_courier_early_at_pickup(self, order_data): - """Test Order.courier_early property.""" - order = db.Order(**order_data) + def test_courier_is_early_at_pickup(self, order): + """Test `Order.courier_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 999_999 result = order.courier_early assert bool(result) is True - def test_if_courier_late_at_pickup(self, order_data): - """Test Order.courier_late property.""" - # Opposite of test case before. - order = db.Order(**order_data) + def test_courier_is_not_early_at_pickup(self, order): + """Test `Order.courier_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. 
+ order.estimated_prep_duration = 1 + + result = order.courier_early + + assert bool(result) is False + + def test_courier_is_late_at_pickup(self, order): + """Test `Order.courier_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.courier_late + + assert bool(result) is True + + def test_courier_is_not_late_at_pickup(self, order): + """Test `Order.courier_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 999_999 result = order.courier_late assert bool(result) is False - def test_if_restaurant_early_at_pickup(self, order_data): - """Test Order.restaurant_early property.""" - order = db.Order(**order_data) + def test_restaurant_early_at_pickup(self, order): + """Test `Order.restaurant_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 999_999 result = order.restaurant_early assert bool(result) is True - def test_if_restaurant_late_at_pickup(self, order_data): - """Test Order.restaurant_late property.""" - # Opposite of test case before. - order = db.Order(**order_data) + def test_restaurant_is_not_early_at_pickup(self, order): + """Test `Order.restaurant_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.restaurant_early + + assert bool(result) is False + + def test_restaurant_is_late_at_pickup(self, order): + """Test `Order.restaurant_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.restaurant_late + + assert bool(result) is True + + def test_restaurant_is_not_late_at_pickup(self, order): + """Test `Order.restaurant_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. 
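+        # (A huge prep duration pushes the scheduled pickup far into the
+        #  future, so the restaurant cannot be late.)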
+        order.estimated_prep_duration = 999_999
 
         result = order.restaurant_late
 
         assert bool(result) is False
 
-    def test_time_to_delivery_no_reached_delivery_at(self, order_data):  # noqa:WPS118
-        """Test Order.time_to_delivery property."""
-        order_data['reached_delivery_at'] = None
-        order = db.Order(**order_data)
+    def test_time_to_delivery_no_reached_delivery_at(self, order):  # noqa:WPS118
+        """Test `Order.time_to_delivery` property."""
+        order.reached_delivery_at = None
 
         with pytest.raises(RuntimeError, match='not set'):
             int(order.time_to_delivery)
 
-    def test_time_to_delivery_no_pickup_at(self, order_data):
-        """Test Order.time_to_delivery property."""
-        order_data['pickup_at'] = None
-        order = db.Order(**order_data)
+    def test_time_to_delivery_no_pickup_at(self, order):
+        """Test `Order.time_to_delivery` property."""
+        order.pickup_at = None
 
         with pytest.raises(RuntimeError, match='not set'):
             int(order.time_to_delivery)
 
-    def test_time_to_delivery_success(self, order_data):
-        """Test Order.time_to_delivery property."""
-        order = db.Order(**order_data)
-
+    def test_time_to_delivery_success(self, order):
+        """Test `Order.time_to_delivery` property."""
        result = order.time_to_delivery
 
-        assert isinstance(result, datetime.timedelta)
+        assert result > datetime.timedelta(0)
 
-    def test_time_at_delivery_no_reached_delivery_at(self, order_data):  # noqa:WPS118
-        """Test Order.time_at_delivery property."""
-        order_data['reached_delivery_at'] = None
-        order = db.Order(**order_data)
+    def test_time_at_delivery_no_reached_delivery_at(self, order):  # noqa:WPS118
+        """Test `Order.time_at_delivery` property."""
+        order.reached_delivery_at = None
 
         with pytest.raises(RuntimeError, match='not set'):
             int(order.time_at_delivery)
 
-    def test_time_at_delivery_no_delivery_at(self, order_data):
-        """Test Order.time_at_delivery property."""
-        order_data['delivery_at'] = None
-        order = db.Order(**order_data)
+    def test_time_at_delivery_no_delivery_at(self, order):
+        """Test `Order.time_at_delivery` property."""
+        order.delivery_at = None
 
         with pytest.raises(RuntimeError, match='not set'):
             int(order.time_at_delivery)
 
-    def test_time_at_delivery_success(self, order_data):
-        """Test Order.time_at_delivery property."""
-        order = db.Order(**order_data)
-
+    def test_time_at_delivery_success(self, order):
+        """Test `Order.time_at_delivery` property."""
        result = order.time_at_delivery
 
-        assert isinstance(result, datetime.timedelta)
+        assert result > datetime.timedelta(0)
 
-    def test_courier_waited_at_delviery(self, order_data):
-        """Test Order.courier_waited_at_delivery property."""
-        order_data['_courier_waited_at_delivery'] = True
-        order = db.Order(**order_data)
+    def test_courier_waited_at_delivery(self, order):
+        """Test `Order.courier_waited_at_delivery` property."""
+        order._courier_waited_at_delivery = True
 
-        result = int(order.courier_waited_at_delivery.total_seconds())
+        result = order.courier_waited_at_delivery.total_seconds()
 
         assert result > 0
 
-    def test_courier_did_not_wait_at_delivery(self, order_data):
-        """Test Order.courier_waited_at_delivery property."""
-        order_data['_courier_waited_at_delivery'] = False
-        order = db.Order(**order_data)
+    def test_courier_did_not_wait_at_delivery(self, order):
+        """Test `Order.courier_waited_at_delivery` property."""
+        order._courier_waited_at_delivery = False
 
-        result = int(order.courier_waited_at_delivery.total_seconds())
+        result = order.courier_waited_at_delivery.total_seconds()
 
         assert result == 0
 
-    def test_if_delivery_early_success(self, order_data):
-        """Test Order.delivery_early property."""
-        order_data['ad_hoc'] = False
-        order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0)
-        order_data['scheduled_delivery_at_corrected'] = False
-        order = db.Order(**order_data)
+    def test_ad_hoc_order_cannot_be_early(self, order):
+        """Test `Order.delivery_early` property."""
+        # By default, the `OrderFactory` creates ad-hoc orders.
+        with pytest.raises(AttributeError, match='scheduled'):
+            int(order.delivery_early)
+
+    def test_scheduled_order_delivered_early(self, make_order):
+        """Test `Order.delivery_early` property."""
+        order = make_order(scheduled=True)
+        # Schedule the order to a lot later.
+        order.scheduled_delivery_at += datetime.timedelta(hours=2)
 
         result = order.delivery_early
 
         assert bool(result) is True
 
-    def test_if_delivery_early_failure(self, order_data):
-        """Test Order.delivery_early property."""
-        order = db.Order(**order_data)
+    def test_scheduled_order_not_delivered_early(self, make_order):
+        """Test `Order.delivery_early` property."""
+        order = make_order(scheduled=True)
+        # Schedule the order to a lot earlier.
+        order.scheduled_delivery_at -= datetime.timedelta(hours=2)
 
-        with pytest.raises(AttributeError, match='scheduled'):
-            int(order.delivery_early)
+        result = order.delivery_early
 
-    def test_if_delivery_late_success(self, order_data):
-        """Test Order.delivery_late property."""
-        order_data['ad_hoc'] = False
-        order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0)
-        order_data['scheduled_delivery_at_corrected'] = False
-        order = db.Order(**order_data)
+        assert bool(result) is False
+
+    def test_ad_hoc_order_cannot_be_late(self, order):
+        """Test `Order.delivery_late` property."""
+        # By default, the `OrderFactory` creates ad-hoc orders.
+        with pytest.raises(AttributeError, match='scheduled'):
+            int(order.delivery_late)
+
+    def test_scheduled_order_delivered_late(self, make_order):
+        """Test `Order.delivery_late` property."""
+        order = make_order(scheduled=True)
+        # Schedule the order to a lot earlier.
+        order.scheduled_delivery_at -= datetime.timedelta(hours=2)
+
+        result = order.delivery_late
+
+        assert bool(result) is True
+
+    def test_scheduled_order_not_delivered_late(self, make_order):
+        """Test `Order.delivery_late` property."""
+        order = make_order(scheduled=True)
+        # Schedule the order to a lot later.
+        order.scheduled_delivery_at += datetime.timedelta(hours=2)
 
         result = order.delivery_late
 
         assert bool(result) is False
 
-    def test_if_delivery_late_failure(self, order_data):
-        """Test Order.delivery_late property."""
-        order = db.Order(**order_data)
-
-        with pytest.raises(AttributeError, match='scheduled'):
-            int(order.delivery_late)
-
-    def test_no_total_time_for_pre_order(self, order_data):
-        """Test Order.total_time property."""
-        order_data['ad_hoc'] = False
-        order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0)
-        order_data['scheduled_delivery_at_corrected'] = False
-        order = db.Order(**order_data)
+    def test_no_total_time_for_scheduled_order(self, make_order):
+        """Test `Order.total_time` property."""
+        order = make_order(scheduled=True)
 
         with pytest.raises(AttributeError, match='Scheduled'):
             int(order.total_time)
 
-    def test_no_total_time_for_cancelled_order(self, order_data):
-        """Test Order.total_time property."""
-        order_data['cancelled'] = True
-        order_data['cancelled_at'] = datetime.datetime(2020, 1, 2, 12, 15, 0)
-        order_data['cancelled_at_corrected'] = False
-        order = db.Order(**order_data)
+    def test_no_total_time_for_cancelled_order(self, make_order):
+        """Test `Order.total_time` property."""
+        order = make_order(cancel_before_pickup=True)
 
         with pytest.raises(RuntimeError, match='Cancelled'):
             int(order.total_time)
 
-    def test_total_time_success(self, order_data):
-        """Test Order.total_time property."""
-        order = db.Order(**order_data)
-
+    def test_total_time_success(self, order):
+        """Test `Order.total_time` property."""
        result = order.total_time
 
-        assert isinstance(result, datetime.timedelta)
+        assert result > datetime.timedelta(0)
+
+
+@pytest.mark.db
+@pytest.mark.no_cover
+def test_make_random_orders(  # noqa:C901,WPS211,WPS213,WPS231
+    db_session, make_address, make_courier, make_restaurant, make_order,
+):
+    """Sanity check all the `make_*` fixtures.
+
+    Ensure that all generated `Address`, `Courier`, `Customer`, `Restaurant`,
+    and `Order` objects adhere to the database constraints.
+    """  # noqa:D202
+    # Generate a large number of `Order`s to obtain a large variance of data.
+    for _ in range(1_000):  # noqa:WPS122
+
+        # Ad-hoc `Order`s are far more common than pre-orders.
+        scheduled = random.choice([True, False, False, False, False])
+
+        # Randomly pass an `address` argument to `make_restaurant()` and
+        # a `restaurant` argument to `make_order()`.
+        if random.random() < 0.5:
+            address = random.choice([None, make_address()])
+            restaurant = make_restaurant(address=address)
+        else:
+            restaurant = None
+
+        # Randomly pass a `courier` argument to `make_order()`.
+        courier = random.choice([None, make_courier()])
+
+        # A tiny fraction of `Order`s get cancelled.
+        if random.random() < 0.05:
+            if random.random() < 0.5:
+                cancel_before_pickup, cancel_after_pickup = True, False
+            else:
+                cancel_before_pickup, cancel_after_pickup = False, True
+        else:
+            cancel_before_pickup, cancel_after_pickup = False, False
+
+        # Write all the generated objects to the database.
+        # This should already trigger an `IntegrityError` if the data are flawed.
+        order = make_order(
+            scheduled=scheduled,
+            restaurant=restaurant,
+            courier=courier,
+            cancel_before_pickup=cancel_before_pickup,
+            cancel_after_pickup=cancel_after_pickup,
+        )
+        db_session.add(order)
+
+    db_session.commit()
diff --git a/tests/db/test_pixels.py b/tests/db/test_pixels.py
new file mode 100644
index 0000000..317ce56
--- /dev/null
+++ b/tests/db/test_pixels.py
@@ -0,0 +1,152 @@
+"""Test the ORM's `Pixel` model."""
+
+import pytest
+import sqlalchemy as sqla
+from sqlalchemy import exc as sa_exc
+
+from urban_meal_delivery import db
+
+
+class TestSpecialMethods:
+    """Test special methods in `Pixel`."""
+
+    def test_create_pixel(self, pixel):
+        """Test instantiation of a new `Pixel` object."""
+        assert pixel is not None
+
+    def test_text_representation(self, pixel):
+        """`Pixel` has a non-literal text representation."""
+        result = repr(pixel)
+
+        assert result == f'<Pixel: ({pixel.n_x}|{pixel.n_y})>'
+
+
+@pytest.mark.db
+@pytest.mark.no_cover
+class TestConstraints:
+    """Test the database constraints defined in `Pixel`."""
+
+    def test_insert_into_database(self, db_session, pixel):
+        """Insert an instance into the (empty) database."""
+        assert db_session.query(db.Pixel).count() == 0
+
+        db_session.add(pixel)
+        db_session.commit()
+
+        assert db_session.query(db.Pixel).count() == 1
+
+    def test_delete_a_referenced_grid(self, db_session, pixel):
+        """Remove a record that is referenced with a FK."""
+        db_session.add(pixel)
+        db_session.commit()
+
+        # Must delete without ORM as otherwise an UPDATE statement is emitted.
+        stmt = sqla.delete(db.Grid).where(db.Grid.id == pixel.grid.id)
+
+        with pytest.raises(
+            sa_exc.IntegrityError, match='fk_pixels_to_grids_via_grid_id',
+        ):
+            db_session.execute(stmt)
+
+    def test_negative_n_x(self, db_session, pixel):
+        """Insert an instance with invalid data."""
+        pixel.n_x = -1
+        db_session.add(pixel)
+
+        with pytest.raises(sa_exc.IntegrityError, match='n_x_is_positive'):
+            db_session.commit()
+
+    def test_negative_n_y(self, db_session, pixel):
+        """Insert an instance with invalid data."""
+        pixel.n_y = -1
+        db_session.add(pixel)
+
+        with pytest.raises(sa_exc.IntegrityError, match='n_y_is_positive'):
+            db_session.commit()
+
+    def test_non_unique_coordinates_within_a_grid(self, db_session, pixel):
+        """Insert an instance with invalid data."""
+        another_pixel = db.Pixel(grid=pixel.grid, n_x=pixel.n_x, n_y=pixel.n_y)
+        db_session.add(another_pixel)
+
+        with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'):
+            db_session.commit()
+
+
+class TestProperties:
+    """Test properties in `Pixel`."""
+
+    def test_side_length(self, pixel):
+        """Test `Pixel.side_length` property."""
+        result = pixel.side_length
+
+        assert result == 1_000
+
+    def test_area(self, pixel):
+        """Test `Pixel.area` property."""
+        result = pixel.area
+
+        assert result == 1.0
+
+    def test_northeast(self, pixel):
+        """Test `Pixel.northeast` property."""
+        result = pixel.northeast
+
+        assert abs(result.x - pixel.side_length) < 2
+        assert abs(result.y - pixel.side_length) < 2
+
+    def test_northeast_is_cached(self, pixel):
+        """Test `Pixel.northeast` property."""
+        result1 = pixel.northeast
+        result2 = pixel.northeast
+
+        assert result1 is result2
+
+    def test_southwest(self, pixel):
+        """Test `Pixel.southwest` property."""
+        result = pixel.southwest
+
+        assert abs(result.x) < 2
+        assert abs(result.y) < 2
+
+    def test_southwest_is_cached(self, pixel):
+        """Test `Pixel.southwest` property."""
+        result1 = pixel.southwest
+        result2 = pixel.southwest
+
+        assert result1 is result2
+
+    @pytest.fixture
+    def _restaurants_mock(self, mocker, monkeypatch, restaurant):
+        """A `Mock` whose `.return_value` is `[restaurant]`."""
+        mock = mocker.Mock()
+        query = (  # noqa:ECE001
+            mock.query.return_value.join.return_value.filter.return_value.all  # noqa:E501,WPS219
+        )
+        query.return_value = [restaurant]
+        monkeypatch.setattr(db, 'session', mock)
+
+    @pytest.mark.usefixtures('_restaurants_mock')
+    def test_restaurants(self, pixel, restaurant):
+        """Test `Pixel.restaurants` property."""
+        result = pixel.restaurants
+
+        assert result == [restaurant]
+
+    @pytest.mark.usefixtures('_restaurants_mock')
+    def test_restaurants_is_cached(self, pixel):
+        """Test `Pixel.restaurants` property."""
+        result1 = pixel.restaurants
+        result2 = pixel.restaurants
+
+        assert result1 is result2
+
+    @pytest.mark.db
+    def test_restaurants_with_db(self, pixel):
+        """Test `Pixel.restaurants` property.
+
+        This is a trivial integration test.
+        """
+        result = pixel.restaurants
+
+        assert not result  # = empty `list`
diff --git a/tests/db/test_restaurants.py b/tests/db/test_restaurants.py
index 4662346..a641bce 100644
--- a/tests/db/test_restaurants.py
+++ b/tests/db/test_restaurants.py
@@ -1,80 +1,69 @@
-"""Test the ORM's Restaurant model."""
+"""Test the ORM's `Restaurant` model."""
 
 import pytest
+import sqlalchemy as sqla
 from sqlalchemy import exc as sa_exc
-from sqlalchemy.orm import exc as orm_exc
 
 from urban_meal_delivery import db
 
 
 class TestSpecialMethods:
-    """Test special methods in Restaurant."""
+    """Test special methods in `Restaurant`."""
 
-    # pylint:disable=no-self-use
-
-    def test_create_restaurant(self, restaurant_data):
-        """Test instantiation of a new Restaurant object."""
-        result = db.Restaurant(**restaurant_data)
-
-        assert result is not None
-
-    def test_text_representation(self, restaurant_data):
-        """Restaurant has a non-literal text representation."""
-        restaurant = db.Restaurant(**restaurant_data)
-        name = restaurant_data['name']
+    def test_create_restaurant(self, restaurant):
+        """Test instantiation of a new `Restaurant` object."""
+        assert restaurant is not None
 
+    def test_text_representation(self, restaurant):
+        """`Restaurant` has a non-literal text representation."""
        result = repr(restaurant)
 
-        assert result == f'<Restaurant({name})>'
+        assert result == f'<Restaurant({restaurant.name})>'
 
 
-@pytest.mark.e2e
+@pytest.mark.db
 @pytest.mark.no_cover
 class TestConstraints:
-    """Test the database constraints defined in Restaurant."""
+    """Test the database constraints defined in `Restaurant`."""
 
-    # pylint:disable=no-self-use
+    def test_insert_into_database(self, db_session, restaurant):
+        """Insert an instance into the (empty) database."""
+        assert db_session.query(db.Restaurant).count() == 0
 
-    def test_insert_into_database(self, restaurant, db_session):
-        """Insert an instance into the database."""
         db_session.add(restaurant)
         db_session.commit()
 
-    def test_dublicate_primary_key(self, restaurant, restaurant_data, db_session):
-        """Can only add a record once."""
-        db_session.add(restaurant)
-        db_session.commit()
+        assert db_session.query(db.Restaurant).count() == 1
 
-        another_restaurant = db.Restaurant(**restaurant_data)
-        db_session.add(another_restaurant)
-
-        with pytest.raises(orm_exc.FlushError):
-            db_session.commit()
-
-    def test_delete_a_referenced_address(self, restaurant, address, db_session):
+    def test_delete_a_referenced_address(self, db_session, restaurant):
         """Remove a record that is referenced with a FK."""
         db_session.add(restaurant)
         db_session.commit()
 
-        with pytest.raises(sa_exc.IntegrityError):
-            db_session.execute(
-                db.Address.__table__.delete().where(  # noqa:WPS609
-                    db.Address.id == address.id,
-                ),
-            )
+        # Must delete without ORM as otherwise an UPDATE statement is emitted.
+        stmt = sqla.delete(db.Address).where(db.Address.id == restaurant.address.id)
 
-    def test_negative_prep_duration(self, restaurant, db_session):
+        with pytest.raises(
+            sa_exc.IntegrityError, match='fk_restaurants_to_addresses_via_address_id',
+        ):
+            db_session.execute(stmt)
+
+    def test_negative_prep_duration(self, db_session, restaurant):
         """Insert an instance with invalid data."""
         restaurant.estimated_prep_duration = -1
         db_session.add(restaurant)
 
-        with pytest.raises(sa_exc.IntegrityError):
+        with pytest.raises(
+            sa_exc.IntegrityError, match='realistic_estimated_prep_duration',
+        ):
             db_session.commit()
 
-    def test_too_high_prep_duration(self, restaurant, db_session):
+    def test_too_high_prep_duration(self, db_session, restaurant):
         """Insert an instance with invalid data."""
         restaurant.estimated_prep_duration = 2500
         db_session.add(restaurant)
 
-        with pytest.raises(sa_exc.IntegrityError):
+        with pytest.raises(
+            sa_exc.IntegrityError, match='realistic_estimated_prep_duration',
+        ):
             db_session.commit()
diff --git a/tests/db/utils/__init__.py b/tests/db/utils/__init__.py
new file mode 100644
index 0000000..4a95f0a
--- /dev/null
+++ b/tests/db/utils/__init__.py
@@ -0,0 +1 @@
+"""Test the utilities for the ORM layer."""
diff --git a/tests/db/utils/test_locations.py b/tests/db/utils/test_locations.py
new file mode 100644
index 0000000..8eb0263
--- /dev/null
+++ b/tests/db/utils/test_locations.py
@@ -0,0 +1,195 @@
+"""Test the `Location` class."""
+
+import pytest
+
+from urban_meal_delivery.db import utils
+
+
+# All tests take place in Paris.
+MIN_EASTING, MAX_EASTING = 443_100, 461_200
+MIN_NORTHING, MAX_NORTHING = 5_407_200, 5_416_800
+ZONE = '31U'
+
+
+@pytest.fixture
+def location(address):
+    """A `Location` object based off the `address` fixture."""
+    obj = utils.Location(address.latitude, address.longitude)
+
+    assert obj.zone == ZONE  # sanity check
+
+    return obj
+
+
+@pytest.fixture
+def faraway_location():
+    """A `Location` object far away from the `location`."""
+    obj = utils.Location(latitude=0, longitude=0)
+
+    assert obj.zone != ZONE  # sanity check
+
+    return obj
+
+
+@pytest.fixture
+def origin(city):
+    """A `Location` object based off the one and only `city`."""
+    obj = city.southwest
+
+    assert obj.zone == ZONE  # sanity check
+
+    return obj
+
+
+class TestSpecialMethods:
+    """Test special methods in `Location`."""
+
+    def test_create_utm_coordinates(self, location):
+        """Test instantiation of a new `Location` object."""
+        assert location is not None
+
+    def test_text_representation(self, location):
+        """The text representation is a non-literal."""
+        result = repr(location)
+
+        assert result.startswith('<Location:')
+
+    @pytest.mark.e2e
+    def test_coordinates_in_the_text_representation(self, location):
+        """Test the UTM convention in the non-literal text `repr()`.
+
+        Example Format:
+        `'<Location: {zone} {easting} {northing}>'`
+        """
+        result = repr(location)
+
+        parts = result.split(' ')
+        zone = parts[1]
+        easting = int(parts[2])
+        northing = int(parts[3][:-1])  # strip the ending ">"
+
+        assert zone == location.zone
+        assert MIN_EASTING < easting < MAX_EASTING
+        assert MIN_NORTHING < northing < MAX_NORTHING
+
+    def test_compare_utm_coordinates_to_different_data_type(self, location):
+        """Test `Location.__eq__()`."""
+        result = location == object()
+
+        assert result is False
+
+    def test_compare_utm_coordinates_to_far_away_coordinates(
+        self, location, faraway_location,
+    ):
+        """Test `Location.__eq__()`."""
+        with pytest.raises(ValueError, match='must be in the same zone'):
+            bool(location == faraway_location)
+
+    def test_compare_utm_coordinates_to_equal_coordinates(self, location, address):
+        """Test `Location.__eq__()`."""
+        same_location = utils.Location(address.latitude, address.longitude)
+
+        result = location == same_location
+
+        assert result is True
+
+    def test_compare_utm_coordinates_to_themselves(self, location):
+        """Test `Location.__eq__()`."""
+        result = location == location  # noqa:WPS312
+
+        assert result is True
+
+    def test_compare_utm_coordinates_to_different_coordinates(self, location, origin):
+        """Test `Location.__eq__()`."""
+        result = location == origin
+
+        assert result is False
+
+
+class TestProperties:
+    """Test properties in `Location`."""
+
+    def test_latitude(self, location, address):
+        """Test `Location.latitude` property."""
+        result = location.latitude
+
+        assert result == pytest.approx(float(address.latitude))
+
+    def test_longitude(self, location, address):
+        """Test `Location.longitude` property."""
+        result = location.longitude
+
+        assert result == pytest.approx(float(address.longitude))
+
+    def test_easting(self, location):
+        """Test `Location.easting` property."""
+        result = location.easting
+
+        assert MIN_EASTING < result < MAX_EASTING
+
+    def test_northing(self, location):
+        """Test `Location.northing` property."""
+        result = location.northing
+
+        assert MIN_NORTHING < result < MAX_NORTHING
+
+    def test_zone(self, location):
+        """Test `Location.zone` property."""
+        result = location.zone
+
+        assert result == ZONE
+
+    def test_zone_details(self, location):
+        """Test `Location.zone_details` property."""
+        result = location.zone_details
+
+        zone, band = result
+        assert ZONE == f'{zone}{band}'
+
+
+class TestRelateTo:
+    """Test the `Location.relate_to()` method and the `.x` and `.y` properties."""
+
+    def test_run_relate_to_twice(self, location, origin):
+        """The `.relate_to()` method must only be run once."""
+        location.relate_to(origin)
+
+        with pytest.raises(RuntimeError, match='once'):
+            location.relate_to(origin)
+
+    def test_call_relate_to_with_wrong_other_type(self, location):
+        """`other` must be another `Location`."""
+        with pytest.raises(TypeError, match='Location'):
+            location.relate_to(object())
+
+    def test_call_relate_to_with_far_away_other(
+        self, location, faraway_location,
+    ):
+        """The `other` origin must be in the same UTM zone."""
+        with pytest.raises(ValueError, match='must be in the same zone'):
+            location.relate_to(faraway_location)
+
+    def test_access_x_without_origin(self, location):
+        """`.relate_to()` must be called before `.x` can be accessed."""
+        with pytest.raises(RuntimeError, match='origin to relate to must be set'):
+            int(location.x)
+
+    def test_access_y_without_origin(self, location):
+        """`.relate_to()` must be called before `.y` can be accessed."""
+        with pytest.raises(RuntimeError, match='origin to relate to must be set'):
+            int(location.y)
+
+    def test_origin_must_be_lower_left_when_relating_to_oneself(self, location):
+        """`.x` and `.y` must be `== (0, 0)` when oneself is the origin."""
+        location.relate_to(location)
+
+        assert (location.x, location.y) == (0, 0)
+
+    @pytest.mark.e2e
+    def test_x_and_y_must_not_be_lower_left_for_address_in_city(self, location, origin):
+        """`.x` and `.y` must be `> (0, 0)` when the origin is the `city`'s southwest corner."""
+        location.relate_to(origin)
+
+        assert location.x > 0
+        assert location.y > 0
diff --git a/tests/forecasts/__init__.py b/tests/forecasts/__init__.py
new file mode 100644
index 0000000..5d46e8b
--- /dev/null
+++ b/tests/forecasts/__init__.py
@@ -0,0 +1 @@
+"""Tests for the `urban_meal_delivery.forecasts` sub-package."""
diff --git a/tests/forecasts/conftest.py b/tests/forecasts/conftest.py
new file mode 100644
index 0000000..f258a3c
--- /dev/null
+++ b/tests/forecasts/conftest.py
@@ -0,0 +1,138 @@
+"""Fixtures for testing the `urban_meal_delivery.forecasts` sub-package."""
+
+import datetime as dt
+
+import pandas as pd
+import pytest
+
+from tests import config as test_config
+from urban_meal_delivery import config
+from urban_meal_delivery.forecasts import timify
+
+
+@pytest.fixture
+def horizontal_datetime_index():
+    """A `pd.Index` with `DateTime` values.
+
+    The times resemble a horizontal time series with a `frequency` of `7`.
+    All observations take place at `NOON`.
+    """
+    first_start_at = dt.datetime(
+        test_config.YEAR, test_config.MONTH, test_config.DAY, test_config.NOON, 0,
+    )
+
+    gen = (
+        start_at
+        for start_at in pd.date_range(first_start_at, test_config.END, freq='D')
+    )
+
+    index = pd.Index(gen)
+    index.name = 'start_at'
+
+    # Sanity check.
+    # `+1` as both the `START` and `END` day are included.
+    n_days = (test_config.END - test_config.START).days + 1
+    assert len(index) == n_days
+
+    return index
+
+
+@pytest.fixture
+def horizontal_no_demand(horizontal_datetime_index):
+    """A horizontal time series with order totals: no demand."""
+    return pd.Series(0, index=horizontal_datetime_index, name='n_orders')
+
+
+@pytest.fixture
+def vertical_datetime_index():
+    """A `pd.Index` with `DateTime` values.
+
+    The times resemble a vertical time series with a
+    `frequency` of `7` times the number of daily time steps,
+    which is `12` for `LONG_TIME_STEP` values.
+    """
+    gen = (
+        start_at
+        for start_at in pd.date_range(
+            test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T',
+        )
+        if config.SERVICE_START <= start_at.hour < config.SERVICE_END
+    )
+
+    index = pd.Index(gen)
+    index.name = 'start_at'
+
+    # Sanity check: n_days * n_time_steps_per_day.
+    # `+1` as both the `START` and `END` day are included.
+    n_days = (test_config.END - test_config.START).days + 1
+    assert len(index) == n_days * 12
+
+    return index
+
+
+@pytest.fixture
+def vertical_no_demand(vertical_datetime_index):
+    """A vertical time series with order totals: no demand."""
+    return pd.Series(0, index=vertical_datetime_index, name='n_orders')
+
+
+@pytest.fixture
+def good_pixel_id(pixel):
+    """A `pixel_id` that is on the `grid`."""
+    return pixel.id  # `== 1`
+
+
+@pytest.fixture
+def predict_at() -> dt.datetime:
+    """`NOON` on the day to be predicted."""
+    return dt.datetime(
+        test_config.END.year,
+        test_config.END.month,
+        test_config.END.day,
+        test_config.NOON,
+    )
+
+
+@pytest.fixture
+def order_totals(good_pixel_id):
+    """A mock for `OrderHistory.totals`.
+
+    To be a bit more realistic, we sample two pixels on the `grid`.
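+
+    Each pixel gets exactly one order per time step (`'n_orders': 1`).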
+
+ Uses the LONG_TIME_STEP as the length of a time step.
+ """
+ pixel_ids = [good_pixel_id, good_pixel_id + 1]
+
+ gen = (
+ (pixel_id, start_at)
+ for pixel_id in pixel_ids
+ for start_at in pd.date_range(
+ test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T',
+ )
+ if config.SERVICE_START <= start_at.hour < config.SERVICE_END
+ )
+
+ # Build a `MultiIndex` with every `(pixel_id, start_at)` combination;
+ # each time step holds exactly one order.
+ index = pd.MultiIndex.from_tuples(gen)
+ index.names = ['pixel_id', 'start_at']
+
+ df = pd.DataFrame(data={'n_orders': 1}, index=index)
+
+ # Sanity check: n_pixels * n_time_steps_per_day * n_days.
+ # `+1` as both the `START` and `END` day are included.
+ n_days = (test_config.END - test_config.START).days + 1
+ assert len(df) == 2 * 12 * n_days
+
+ return df
+
+
+@pytest.fixture
+def order_history(order_totals, grid):
+ """An `OrderHistory` object that does not need the database.
+
+ Uses the LONG_TIME_STEP as the length of a time step.
+ """
+ oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP)
+ oh._data = order_totals
+
+ return oh
diff --git a/tests/forecasts/methods/__init__.py b/tests/forecasts/methods/__init__.py
new file mode 100644
index 0000000..e767595
--- /dev/null
+++ b/tests/forecasts/methods/__init__.py
@@ -0,0 +1 @@
+"""Tests for the `urban_meal_delivery.forecasts.methods` sub-package."""
diff --git a/tests/forecasts/methods/test_decomposition.py b/tests/forecasts/methods/test_decomposition.py
new file mode 100644
index 0000000..c103c3f
--- /dev/null
+++ b/tests/forecasts/methods/test_decomposition.py
@@ -0,0 +1,243 @@
+"""Test the `stl()` function."""
+
+import math
+
+import pandas as pd
+import pytest
+
+from tests import config as test_config
+from urban_meal_delivery.forecasts.methods import decomposition
+
+
+# The "periodic" `ns` suggested for the STL method.
+NS = 999
+
+
+class TestInvalidArguments:
+ """Test `stl()` with invalid arguments."""
+
+ def test_no_nans_in_time_series(self, vertical_datetime_index):
+ """`stl()` requires a `time_series` without `NaN` values."""
+ time_series = pd.Series(dtype=float, index=vertical_datetime_index)
+
+ with pytest.raises(ValueError, match='`NaN` values'):
+ decomposition.stl(
+ time_series, frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=NS,
+ )
+
+ def test_ns_not_odd(self, vertical_no_demand):
+ """`ns` must be odd and `>= 7`."""
+ with pytest.raises(ValueError, match='`ns`'):
+ decomposition.stl(
+ vertical_no_demand, frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=8,
+ )
+
+ @pytest.mark.parametrize('ns', [-99, -1, 1, 5])
+ def test_ns_smaller_than_seven(self, vertical_no_demand, ns):
+ """`ns` must be odd and `>= 7`."""
+ with pytest.raises(ValueError, match='`ns`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=ns,
+ )
+
+ def test_nt_not_odd(self, vertical_no_demand):
+ """`nt` must be odd and `>= default_nt`."""
+ nt = 200
+ default_nt = math.ceil(
+ (1.5 * test_config.VERTICAL_FREQUENCY_LONG) / (1 - (1.5 / NS)),
+ )
+
+ assert nt > default_nt # sanity check
+
+ with pytest.raises(ValueError, match='`nt`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ nt=nt,
+ )
+
+ @pytest.mark.parametrize('nt', [-99, -1, 0, 1, 99, 125])
+ def test_nt_not_at_least_the_default(self, vertical_no_demand, nt):
+ """`nt` must be odd and `>= default_nt`."""
+ # `default_nt` becomes 127.
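+ # (That is the smallest integer satisfying the STL paper's rule of
+ # thumb `nt >= 1.5 * frequency / (1 - 1.5 / ns)`; with `ns = NS = 999`
+ # and a frequency of `7 * 12`, as the vertical fixtures suggest,
+ # the bound works out to roughly `126.2`.)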
+ default_nt = math.ceil(
+ (1.5 * test_config.VERTICAL_FREQUENCY_LONG) / (1 - (1.5 / NS)),
+ )
+
+ assert nt < default_nt # sanity check
+
+ with pytest.raises(ValueError, match='`nt`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ nt=nt,
+ )
+
+ def test_nl_not_odd(self, vertical_no_demand):
+ """`nl` must be odd and `>= frequency`."""
+ nl = 200
+
+ assert nl > test_config.VERTICAL_FREQUENCY_LONG # sanity check
+
+ with pytest.raises(ValueError, match='`nl`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ nl=nl,
+ )
+
+ def test_nl_not_at_least_the_frequency(self, vertical_no_demand):
+ """`nl` must be odd and `>= frequency`."""
+ nl = 77
+
+ assert nl < test_config.VERTICAL_FREQUENCY_LONG # sanity check
+
+ with pytest.raises(ValueError, match='`nl`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ nl=nl,
+ )
+
+ def test_ds_not_zero_or_one(self, vertical_no_demand):
+ """`ds` must be `0` or `1`."""
+ with pytest.raises(ValueError, match='`ds`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ ds=2,
+ )
+
+ def test_dt_not_zero_or_one(self, vertical_no_demand):
+ """`dt` must be `0` or `1`."""
+ with pytest.raises(ValueError, match='`dt`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ dt=2,
+ )
+
+ def test_dl_not_zero_or_one(self, vertical_no_demand):
+ """`dl` must be `0` or `1`."""
+ with pytest.raises(ValueError, match='`dl`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ dl=2,
+ )
+
+ @pytest.mark.parametrize('js', [-1, 0])
+ def test_js_not_positive(self, vertical_no_demand, js):
+ """`js` must be positive."""
+ with pytest.raises(ValueError, match='`js`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ js=js,
+ )
+
+ @pytest.mark.parametrize('jt', [-1, 0])
+ def test_jt_not_positive(self, vertical_no_demand, jt):
+ """`jt` must be positive."""
+ with pytest.raises(ValueError, match='`jt`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ jt=jt,
+ )
+
+ @pytest.mark.parametrize('jl', [-1, 0])
+ def test_jl_not_positive(self, vertical_no_demand, jl):
+ """`jl` must be positive."""
+ with pytest.raises(ValueError, match='`jl`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ jl=jl,
+ )
+
+ @pytest.mark.parametrize('ni', [-1, 0])
+ def test_ni_not_positive(self, vertical_no_demand, ni):
+ """`ni` must be positive."""
+ with pytest.raises(ValueError, match='`ni`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ ni=ni,
+ )
+
+ def test_no_not_non_negative(self, vertical_no_demand):
+ """`no` must be non-negative."""
+ with pytest.raises(ValueError, match='`no`'):
+ decomposition.stl(
+ vertical_no_demand,
+ frequency=test_config.VERTICAL_FREQUENCY_LONG,
+ ns=NS,
+ no=-1,
+ )
+
+
+@pytest.mark.r
+class TestValidArguments:
+ """Test `stl()` with valid arguments."""
+
+ def test_structure_of_returned_dataframe(self, vertical_no_demand):
+ """`stl()` returns a `pd.DataFrame` with three columns."""
+ result = decomposition.stl(
+ vertical_no_demand, frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=NS,
+ )
+
+ assert isinstance(result,
pd.DataFrame) + assert list(result.columns) == ['seasonal', 'trend', 'residual'] + + # Run the `stl()` function with all possible combinations of arguments, + # including default ones and explicitly set non-default ones. + @pytest.mark.parametrize('nt', [None, 163]) + @pytest.mark.parametrize('nl', [None, 777]) + @pytest.mark.parametrize('ds', [0, 1]) + @pytest.mark.parametrize('dt', [0, 1]) + @pytest.mark.parametrize('dl', [0, 1]) + @pytest.mark.parametrize('js', [None, 1]) + @pytest.mark.parametrize('jt', [None, 1]) + @pytest.mark.parametrize('jl', [None, 1]) + @pytest.mark.parametrize('ni', [2, 3]) + @pytest.mark.parametrize('no', [0, 1]) + def test_decompose_time_series_with_no_demand( # noqa:WPS211,WPS216 + self, vertical_no_demand, nt, nl, ds, dt, dl, js, jt, jl, ni, no, # noqa:WPS110 + ): + """Decomposing a time series with no demand ... + + ... returns a `pd.DataFrame` with three columns holding only `0.0` values. + """ + decomposed = decomposition.stl( + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + nt=nt, + nl=nl, + ds=ds, + dt=dt, + dl=dl, + js=js, + jt=jt, + jl=jl, + ni=ni, + no=no, # noqa:WPS110 + ) + + result = decomposed.sum().sum() + + assert result == 0 diff --git a/tests/forecasts/methods/test_predictions.py b/tests/forecasts/methods/test_predictions.py new file mode 100644 index 0000000..8155d62 --- /dev/null +++ b/tests/forecasts/methods/test_predictions.py @@ -0,0 +1,130 @@ +"""Test all the `*.predict()` functions in the `methods` sub-package.""" + +import datetime as dt + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import config +from urban_meal_delivery.forecasts.methods import arima +from urban_meal_delivery.forecasts.methods import ets +from urban_meal_delivery.forecasts.methods import extrapolate_season + + +@pytest.fixture +def forecast_interval(): + """A `pd.Index` with `DateTime` values ... + + ... that takes place one day after the `START`-`END` horizon and + resembles an entire day (`12` "start_at" values as we use `LONG_TIME_STEP`). 
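+
+ For example, assuming an 11:00-to-23:00 service window and
+ `LONG_TIME_STEP = 60`, the index would hold the twelve timestamps
+ 11:00, 12:00, ..., 22:00 of the day after `END`.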
+ """ + future_day = test_config.END.date() + dt.timedelta(days=1) + first_start_at = dt.datetime( + future_day.year, future_day.month, future_day.day, config.SERVICE_START, 0, + ) + end_of_day = dt.datetime( + future_day.year, future_day.month, future_day.day, config.SERVICE_END, 0, + ) + + gen = ( + start_at + for start_at in pd.date_range( + first_start_at, end_of_day, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + index = pd.Index(gen) + index.name = 'start_at' + + return index + + +@pytest.fixture +def forecast_time_step(): + """A `pd.Index` with one `DateTime` value, resembling `NOON`.""" + future_day = test_config.END.date() + dt.timedelta(days=1) + + start_at = dt.datetime( + future_day.year, future_day.month, future_day.day, test_config.NOON, 0, + ) + + index = pd.Index([start_at]) + index.name = 'start_at' + + return index + + +@pytest.mark.r +@pytest.mark.parametrize( + 'func', [arima.predict, ets.predict, extrapolate_season.predict], +) +class TestMakePredictions: + """Make predictions with `arima.predict()` and `ets.predict()`.""" + + def test_training_data_contains_nan_values( + self, func, vertical_no_demand, forecast_interval, + ): + """`training_ts` must not contain `NaN` values.""" + vertical_no_demand.iloc[0] = pd.NA + + with pytest.raises(ValueError, match='must not contain `NaN`'): + func( + training_ts=vertical_no_demand, + forecast_interval=forecast_interval, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ) + + def test_structure_of_returned_dataframe( + self, func, vertical_no_demand, forecast_interval, + ): + """Both `.predict()` return a `pd.DataFrame` with five columns.""" + result = func( + training_ts=vertical_no_demand, + forecast_interval=forecast_interval, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ) + + assert isinstance(result, pd.DataFrame) + assert list(result.columns) == [ + 'prediction', + 'low80', + 'high80', + 'low95', + 'high95', + ] + + def test_predict_horizontal_time_series_with_no_demand( + self, func, horizontal_no_demand, forecast_time_step, + ): + """Predicting a horizontal time series with no demand ... + + ... returns a `pd.DataFrame` with five columns holding only `0.0` values. + """ + predictions = func( + training_ts=horizontal_no_demand, + forecast_interval=forecast_time_step, + frequency=7, + ) + + result = predictions.sum().sum() + + assert result == 0 + + def test_predict_vertical_time_series_with_no_demand( + self, func, vertical_no_demand, forecast_interval, + ): + """Predicting a vertical time series with no demand ... + + ... returns a `pd.DataFrame` with five columns holding only `0.0` values. 
+ """ + predictions = func( + training_ts=vertical_no_demand, + forecast_interval=forecast_interval, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ) + + result = predictions.sum().sum() + + assert result == 0 diff --git a/tests/forecasts/test_models.py b/tests/forecasts/test_models.py new file mode 100644 index 0000000..ef24d3c --- /dev/null +++ b/tests/forecasts/test_models.py @@ -0,0 +1,172 @@ +"""Tests for the `urban_meal_delivery.forecasts.models` sub-package.""" + + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import models + + +MODELS = ( + models.HorizontalETSModel, + models.HorizontalSMAModel, + models.RealtimeARIMAModel, + models.VerticalARIMAModel, + models.TrivialModel, +) + + +@pytest.mark.parametrize('model_cls', MODELS) +class TestGenericForecastingModelProperties: + """Test everything all concrete `*Model`s have in common. + + The test cases here replace testing the `ForecastingModelABC` class on its own. + + As uncertainty is in the nature of forecasting, we do not test the individual + point forecasts or confidence intervals themselves. Instead, we confirm + that all the `*Model`s adhere to the `ForecastingModelABC` generically. + So, these test cases are more like integration tests conceptually. + + Also, note that some `methods.*.predict()` functions use R behind the scenes. + """ # noqa:RST215 + + def test_create_model(self, model_cls, order_history): + """Test instantiation of a new and concrete `*Model` object.""" + model = model_cls(order_history=order_history) + + assert model is not None + + def test_model_has_a_name(self, model_cls, order_history): + """Access the `*Model.name` property.""" + model = model_cls(order_history=order_history) + + result = model.name + + assert isinstance(result, str) + + unique_model_names = set() + + def test_each_model_has_a_unique_name(self, model_cls, order_history): + """The `*Model.name` values must be unique across all `*Model`s. + + Important: this test case has a side effect that is visible + across the different parametrized versions of this case! + """ # noqa:RST215 + model = model_cls(order_history=order_history) + + assert model.name not in self.unique_model_names + + self.unique_model_names.add(model.name) + + @pytest.mark.r + def test_make_prediction_structure( + self, model_cls, order_history, pixel, predict_at, + ): + """`*Model.predict()` returns a `pd.DataFrame` ... + + ... with known columns. + """ # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.predict( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, pd.DataFrame) + assert list(result.columns) == [ + 'actual', + 'prediction', + 'low80', + 'high80', + 'low95', + 'high95', + ] + + @pytest.mark.r + def test_make_prediction_for_given_time_step( + self, model_cls, order_history, pixel, predict_at, + ): + """`*Model.predict()` returns a row for ... + + ... the time step starting at `predict_at`. + """ # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.predict( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert predict_at in result.index + + @pytest.mark.r + def test_make_prediction_contains_actual_values( + self, model_cls, order_history, pixel, predict_at, + ): + """`*Model.predict()` returns a `pd.DataFrame` ... + + ... 
where the "actual" and "prediction" columns must not be empty. + """ # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.predict( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert not result['actual'].isnull().any() + assert not result['prediction'].isnull().any() + + @pytest.mark.db + @pytest.mark.r + def test_make_forecast( # noqa:WPS211 + self, db_session, model_cls, order_history, pixel, predict_at, + ): + """`*Model.make_forecast()` returns a `Forecast` object.""" # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.make_forecast( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, db.Forecast) + assert result.pixel == pixel + assert result.start_at == predict_at + assert result.train_horizon == test_config.LONG_TRAIN_HORIZON + + @pytest.mark.db + @pytest.mark.r + def test_make_forecast_is_cached( # noqa:WPS211 + self, db_session, model_cls, order_history, pixel, predict_at, + ): + """`*Model.make_forecast()` caches the `Forecast` object.""" # noqa:RST215 + model = model_cls(order_history=order_history) + + assert db_session.query(db.Forecast).count() == 0 + + result1 = model.make_forecast( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + n_cached_forecasts = db_session.query(db.Forecast).count() + assert n_cached_forecasts >= 1 + + result2 = model.make_forecast( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert n_cached_forecasts == db_session.query(db.Forecast).count() + + assert result1 == result2 diff --git a/tests/forecasts/timify/__init__.py b/tests/forecasts/timify/__init__.py new file mode 100644 index 0000000..167675d --- /dev/null +++ b/tests/forecasts/timify/__init__.py @@ -0,0 +1 @@ +"""Tests for the `urban_meal_delivery.forecasts.timify` module.""" diff --git a/tests/forecasts/timify/test_aggregate_orders.py b/tests/forecasts/timify/test_aggregate_orders.py new file mode 100644 index 0000000..325db74 --- /dev/null +++ b/tests/forecasts/timify/test_aggregate_orders.py @@ -0,0 +1,386 @@ +"""Test the `OrderHistory.aggregate_orders()` method.""" + +import datetime + +import pytest + +from tests import config as test_config +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import timify + + +@pytest.mark.db +class TestAggregateOrders: + """Test the `OrderHistory.aggregate_orders()` method. + + The test cases are integration tests that model realistic scenarios. + """ + + @pytest.fixture + def addresses_mock(self, mocker, monkeypatch): + """A `Mock` whose `.return_value` are to be set ... + + ... to the addresses that are gridified. The addresses are + all considered `Order.pickup_address` attributes for some orders. + + Note: This fixture also exists in `tests.db.test_grids`. + """ + mock = mocker.Mock() + query = ( # noqa:ECE001 + mock.query.return_value.join.return_value.filter.return_value.all # noqa:E501,WPS219 + ) + monkeypatch.setattr(db, 'session', mock) + + return query + + @pytest.fixture + def one_pixel_grid(self, db_session, city, restaurant, addresses_mock): + """A persisted `Grid` with one `Pixel`. + + `restaurant` must be a dependency as otherwise the `restaurant.address` + is not put into the database as an `Order.pickup_address`. + """ + addresses_mock.return_value = [restaurant.address] + + # `+1` as otherwise there would be a second pixel in one direction. 
+ side_length = max(city.total_x, city.total_y) + 1 + grid = db.Grid.gridify(city=city, side_length=side_length) + db_session.add(grid) + + assert len(grid.pixels) == 1 # sanity check + + return grid + + def test_no_orders(self, db_session, one_pixel_grid, restaurant): + """Edge case that does not occur for real-life data.""" + db_session.commit() + assert len(restaurant.orders) == 0 # noqa:WPS507 sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() + + assert len(result) == 0 # noqa:WPS507 + + def test_evenly_distributed_ad_hoc_orders( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """12 ad-hoc orders, one per operating hour.""" + # Create one order per hour and 12 orders in total. + for hour in range(11, 23): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 12 # sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() + + # The resulting `DataFrame` has 12 rows holding `1`s. + assert len(result) == 12 + assert result['n_orders'].min() == 1 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 12 + + def test_evenly_distributed_ad_hoc_orders_with_no_demand_late( # noqa:WPS218 + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """10 ad-hoc orders, one per hour, no orders after 21.""" + # Create one order per hour and 10 orders in total. + for hour in range(11, 21): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 10 # sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() + + # Even though there are only 10 orders, there are 12 rows in the `DataFrame`. + # That is so as `0`s are filled in for hours without any demand at the end. + assert len(result) == 12 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result.iloc[:10]['n_orders'].sum() == 10 + assert result.iloc[10:]['n_orders'].sum() == 0 + + def test_one_ad_hoc_order_every_other_hour( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """6 ad-hoc orders, one every other hour.""" + # Create one order every other hour. + for hour in range(11, 23, 2): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 6 # sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() + + # The resulting `DataFrame` has 12 rows, 6 holding `0`s, and 6 holding `1`s. + assert len(result) == 12 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 6 + + def test_one_ad_hoc_and_one_pre_order( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """1 ad-hoc and 1 scheduled order. + + The scheduled order is discarded. 
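+
+ (Hence, the order totals computed below sum up to `1`, not `2`.)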
+ """ + ad_hoc_order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 11, 11, + ), + ) + db_session.add(ad_hoc_order) + + pre_order = make_order( + scheduled=True, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 9, 0, + ), + scheduled_delivery_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 0, + ), + ) + db_session.add(pre_order) + + db_session.commit() + + assert len(restaurant.orders) == 2 # sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() + + # The resulting `DataFrame` has 12 rows, 11 holding `0`s, and one holding a `1`. + assert len(result) == 12 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 1 + + def test_evenly_distributed_ad_hoc_orders_with_half_hour_time_steps( # noqa:WPS218 + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """12 ad-hoc orders, one per hour, with 30 minute time windows. + + In half the time steps, there is no demand. + """ + # Create one order per hour and 10 orders in total. + for hour in range(11, 23): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 12 # sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.SHORT_TIME_STEP, + ) + + result = oh.aggregate_orders() + + # The resulting `DataFrame` has 24 rows for the 24 30-minute time steps. + # The rows' values are `0` and `1` alternating. + assert len(result) == 24 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result.iloc[::2]['n_orders'].sum() == 12 + assert result.iloc[1::2]['n_orders'].sum() == 0 + + def test_ad_hoc_orders_over_two_days( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """First day 12 ad-hoc orders, one per operating hour ... + + ... and 6 orders, one every other hour on the second day. + In total, there are 18 orders. + """ + # Create one order per hour and 12 orders in total. + for hour in range(11, 23): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), + ) + db_session.add(order) + + # Create one order every other hour and 6 orders in total. + for hour in range(11, 23, 2): # noqa:WPS440 + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + test_config.YEAR, + test_config.MONTH, + test_config.DAY + 1, + hour, # noqa:WPS441 + 11, + ), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 18 # sanity check + + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() + + # The resulting `DataFrame` has 24 rows, 12 for each day. 
+ assert len(result) == 24
+ assert result['n_orders'].min() == 0
+ assert result['n_orders'].max() == 1
+ assert result['n_orders'].sum() == 18
+
+ @pytest.fixture
+ def two_pixel_grid( # noqa:WPS211
+ self, db_session, city, make_address, make_restaurant, addresses_mock,
+ ):
+ """A persisted `Grid` with two `Pixel` objects."""
+ # One `Address` in the lower-left `Pixel`, ...
+ address1 = make_address(latitude=48.8357377, longitude=2.2517412)
+ # ... and another one in the upper-right one.
+ address2 = make_address(latitude=48.8898312, longitude=2.4357622)
+
+ addresses_mock.return_value = [address1, address2]
+
+ # Create `Restaurant`s at the two addresses.
+ make_restaurant(address=address1)
+ make_restaurant(address=address2)
+
+ # This side length splits the city into four squares, but only the two
+ # squares containing a `pickup_address` become `Pixel`s.
+ side_length = max(city.total_x // 2, city.total_y // 2) + 1
+
+ grid = db.Grid.gridify(city=city, side_length=side_length)
+
+ db_session.add(grid)
+
+ assert len(grid.pixels) == 2 # sanity check
+
+ return grid
+
+ def test_two_pixels_with_shifted_orders( # noqa:WPS218
+ self, db_session, two_pixel_grid, make_order,
+ ):
+ """One restaurant with one order every other hour ...
+
+ ... and another restaurant with two orders per hour.
+ In total, there are 30 orders.
+ """
+ address1, address2 = two_pixel_grid.city.addresses
+ # Rarely, an `Address` may have several `Restaurant`s in the real dataset.
+ restaurant1, restaurant2 = address1.restaurants[0], address2.restaurants[0]
+
+ # Create one order every other hour for `restaurant1`.
+ for hour in range(11, 23, 2):
+ order = make_order(
+ scheduled=False,
+ restaurant=restaurant1,
+ placed_at=datetime.datetime(
+ test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11,
+ ),
+ )
+ db_session.add(order)
+
+ # Create two orders per hour for `restaurant2`.
+ for hour in range(11, 23): # noqa:WPS440
+ order = make_order(
+ scheduled=False,
+ restaurant=restaurant2,
+ placed_at=datetime.datetime(
+ test_config.YEAR,
+ test_config.MONTH,
+ test_config.DAY,
+ hour, # noqa:WPS441
+ 13,
+ ),
+ )
+ db_session.add(order)
+
+ order = make_order(
+ scheduled=False,
+ restaurant=restaurant2,
+ placed_at=datetime.datetime(
+ test_config.YEAR,
+ test_config.MONTH,
+ test_config.DAY,
+ hour, # noqa:WPS441
+ 14,
+ ),
+ )
+ db_session.add(order)
+
+ db_session.commit()
+
+ # sanity checks
+ assert len(restaurant1.orders) == 6
+ assert len(restaurant2.orders) == 24
+
+ oh = timify.OrderHistory(
+ grid=two_pixel_grid, time_step=test_config.LONG_TIME_STEP,
+ )
+
+ result = oh.aggregate_orders()
+
+ # The resulting `DataFrame` has 24 rows, 12 for each pixel.
+ assert len(result) == 24
+ assert result['n_orders'].min() == 0
+ assert result['n_orders'].max() == 2
+ assert result['n_orders'].sum() == 30
diff --git a/tests/forecasts/timify/test_avg_daily_demand.py b/tests/forecasts/timify/test_avg_daily_demand.py
new file mode 100644
index 0000000..4ad3c15
--- /dev/null
+++ b/tests/forecasts/timify/test_avg_daily_demand.py
@@ -0,0 +1,143 @@
+"""Tests for the `OrderHistory.avg_daily_demand()` and ...
+
+`OrderHistory.choose_tactical_model()` methods.
+
+We test both methods together as they take the same input and are really
+two parts of the same conceptual step.
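+
+The test cases below also pin down the rule-based thresholds: an average
+daily demand of `>= 25.0` (high) or `>= 10.0` (medium) selects the
+horizontal ETS model, `>= 2.5` (low) the horizontal SMA model, and
+anything below that (i.e., no demand) the trivial model.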
+""" + +import pytest + +from tests import config as test_config +from urban_meal_delivery.forecasts import models + + +class TestAverageDailyDemand: + """Tests for the `OrderHistory.avg_daily_demand()` method.""" + + def test_avg_daily_demand_with_constant_demand( + self, order_history, good_pixel_id, predict_at, + ): + """The average daily demand must be the number of time steps ... + + ... if the demand is `1` at each time step. + + Note: The `order_history` fixture assumes `12` time steps per day as it + uses `LONG_TIME_STEP=60` as the length of a time step. + """ + result = order_history.avg_daily_demand( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert result == 12.0 + + def test_avg_daily_demand_with_no_demand( + self, order_history, good_pixel_id, predict_at, + ): + """Without demand, the average daily demand must be `0.0`.""" + order_history._data.loc[:, 'n_orders'] = 0 + + result = order_history.avg_daily_demand( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert result == 0.0 + + +class TestChooseTacticalModel: + """Tests for the `OrderHistory.choose_tactical_model()` method.""" + + def test_best_model_with_high_demand( + self, order_history, good_pixel_id, predict_at, + ): + """With high demand, the average daily demand is `.>= 25.0`.""" + # With 12 time steps per day, the ADD becomes `36.0`. + order_history._data.loc[:, 'n_orders'] = 3 + + result = order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, models.HorizontalETSModel) + + def test_best_model_with_medium_demand( + self, order_history, good_pixel_id, predict_at, + ): + """With medium demand, the average daily demand is `>= 10.0` and `< 25.0`.""" + # With 12 time steps per day, the ADD becomes `24.0`. + order_history._data.loc[:, 'n_orders'] = 2 + + result = order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, models.HorizontalETSModel) + + def test_best_model_with_low_demand( + self, order_history, good_pixel_id, predict_at, + ): + """With low demand, the average daily demand is `>= 2.5` and `< 10.0`.""" + # With 12 time steps per day, the ADD becomes `12.0` ... + data = order_history._data + data.loc[:, 'n_orders'] = 1 + + # ... and we set three additional time steps per day to `0`. 
+ data.loc[ # noqa:ECE001
+ # all `Pixel`s, all `Order`s in time steps starting at 11 am
+ (slice(None), slice(data.index.levels[1][0], None, 12)),
+ 'n_orders',
+ ] = 0
+ data.loc[ # noqa:ECE001
+ # all `Pixel`s, all `Order`s in time steps starting at noon
+ (slice(None), slice(data.index.levels[1][1], None, 12)),
+ 'n_orders',
+ ] = 0
+ data.loc[ # noqa:ECE001
+ # all `Pixel`s, all `Order`s in time steps starting at 1 pm
+ (slice(None), slice(data.index.levels[1][2], None, 12)),
+ 'n_orders',
+ ] = 0
+
+ result = order_history.choose_tactical_model(
+ pixel_id=good_pixel_id,
+ predict_day=predict_at.date(),
+ train_horizon=test_config.LONG_TRAIN_HORIZON,
+ )
+
+ assert isinstance(result, models.HorizontalSMAModel)
+
+ def test_best_model_with_no_demand(
+ self, order_history, good_pixel_id, predict_at,
+ ):
+ """Without demand, the average daily demand is `< 2.5`."""
+ order_history._data.loc[:, 'n_orders'] = 0
+
+ result = order_history.choose_tactical_model(
+ pixel_id=good_pixel_id,
+ predict_day=predict_at.date(),
+ train_horizon=test_config.LONG_TRAIN_HORIZON,
+ )
+
+ assert isinstance(result, models.TrivialModel)
+
+ def test_best_model_for_unknown_train_horizon(
+ self, order_history, good_pixel_id, predict_at, # noqa:RST215
+ ):
+ """For `train_horizon`s not included in the rule-based system ...
+
+ ... the method raises a `RuntimeError`.
+ """
+ with pytest.raises(RuntimeError, match='no rule'):
+ order_history.choose_tactical_model(
+ pixel_id=good_pixel_id,
+ predict_day=predict_at.date(),
+ train_horizon=test_config.SHORT_TRAIN_HORIZON,
+ )
diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py
new file mode 100644
index 0000000..790eec6
--- /dev/null
+++ b/tests/forecasts/timify/test_make_time_series.py
@@ -0,0 +1,399 @@
+"""Test the code generating time series with the order totals.
+
+Unless otherwise noted, each `time_step` is 60 minutes long implying
+12 time steps per day (i.e., we use `LONG_TIME_STEP` by default).
+"""
+
+import datetime
+
+import pandas as pd
+import pytest
+
+from tests import config as test_config
+from urban_meal_delivery import config
+
+
+@pytest.fixture
+def good_predict_at():
+ """A `predict_at` within `START`-`END` and ...
+
+ ... a long enough history so that either `SHORT_TRAIN_HORIZON`
+ or `LONG_TRAIN_HORIZON` works.
+ """
+ return datetime.datetime(
+ test_config.END.year,
+ test_config.END.month,
+ test_config.END.day,
+ test_config.NOON,
+ 0,
+ )
+
+
+@pytest.fixture
+def bad_predict_at():
+ """A `predict_at` within `START`-`END` but ...
+
+ ... without a long enough history, so that neither `SHORT_TRAIN_HORIZON`
+ nor `LONG_TRAIN_HORIZON` works.
+ """ + predict_day = test_config.END - datetime.timedelta(weeks=6, days=1) + return datetime.datetime( + predict_day.year, predict_day.month, predict_day.day, test_config.NOON, 0, + ) + + +class TestMakeHorizontalTimeSeries: + """Test the `OrderHistory.make_horizontal_ts()` method.""" + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): + """A `pixel_id` that is not in the `grid`.""" + with pytest.raises(LookupError): + order_history.make_horizontal_ts( + pixel_id=999_999, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_are_series( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The time series come as a `pd.Series`.""" + result = order_history.make_horizontal_ts( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + assert isinstance(training_ts, pd.Series) + assert training_ts.name == 'n_orders' + assert isinstance(actuals_ts, pd.Series) + assert actuals_ts.name == 'n_orders' + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series must be a multiple of `7` ... + + ... whereas the time series with the actual order counts has only `1` value. + """ + result = order_history.make_horizontal_ts( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + assert len(training_ts) == 7 * train_horizon + assert len(actuals_ts) == 1 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_frequency_is_number_of_weekdays( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The `frequency` must be `7`.""" + result = order_history.make_horizontal_ts( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + _, frequency, _ = result # noqa:WPS434 + + assert frequency == 7 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_no_long_enough_history1( + self, order_history, good_pixel_id, bad_predict_at, train_horizon, + ): + """If the `predict_at` day is too early in the `START`-`END` horizon ... + + ... the history of order totals is not long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_horizontal_ts( + pixel_id=good_pixel_id, + predict_at=bad_predict_at, + train_horizon=train_horizon, + ) + + def test_no_long_enough_history2( + self, order_history, good_pixel_id, good_predict_at, + ): + """If the `train_horizon` is longer than the `START`-`END` horizon ... + + ... the history of order totals can never be long enough. 
+ """ + with pytest.raises(RuntimeError): + order_history.make_horizontal_ts( + pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, + ) + + +class TestMakeVerticalTimeSeries: + """Test the `OrderHistory.make_vertical_ts()` method.""" + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): + """A `pixel_id` that is not in the `grid`.""" + with pytest.raises(LookupError): + order_history.make_vertical_ts( + pixel_id=999_999, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_are_series( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The time series come as `pd.Series`.""" + result = order_history.make_vertical_ts( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + assert isinstance(training_ts, pd.Series) + assert training_ts.name == 'n_orders' + assert isinstance(actuals_ts, pd.Series) + assert actuals_ts.name == 'n_orders' + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series is the product of the ... + + ... weekly time steps (i.e., product of `7` and the number of daily time steps) + and the `train_horizon` in weeks. + + The time series with the actual order counts always holds one observation + per time step of a day. + """ + result = order_history.make_vertical_ts( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert len(training_ts) == 7 * n_daily_time_steps * train_horizon + assert len(actuals_ts) == n_daily_time_steps + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_frequency_is_number_number_of_weekly_time_steps( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The `frequency` is the number of weekly time steps.""" + result = order_history.make_vertical_ts( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + _, frequency, _ = result # noqa:WPS434 + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert frequency == 7 * n_daily_time_steps + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_no_long_enough_history1( + self, order_history, good_pixel_id, bad_predict_at, train_horizon, + ): + """If the `predict_at` day is too early in the `START`-`END` horizon ... + + ... the history of order totals is not long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_vertical_ts( + pixel_id=good_pixel_id, + predict_day=bad_predict_at.date(), + train_horizon=train_horizon, + ) + + def test_no_long_enough_history2( + self, order_history, good_pixel_id, good_predict_at, + ): + """If the `train_horizon` is longer than the `START`-`END` horizon ... + + ... the history of order totals can never be long enough. 
+ """ + with pytest.raises(RuntimeError): + order_history.make_vertical_ts( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=999, + ) + + +class TestMakeRealTimeTimeSeries: + """Test the `OrderHistory.make_realtime_ts()` method.""" + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): + """A `pixel_id` that is not in the `grid`.""" + with pytest.raises(LookupError): + order_history.make_realtime_ts( + pixel_id=999_999, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_are_series( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The time series come as `pd.Series`.""" + result = order_history.make_realtime_ts( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + assert isinstance(training_ts, pd.Series) + assert training_ts.name == 'n_orders' + assert isinstance(actuals_ts, pd.Series) + assert actuals_ts.name == 'n_orders' + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length1( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series is the product of the ... + + ... weekly time steps (i.e., product of `7` and the number of daily time steps) + and the `train_horizon` in weeks; however, this assertion only holds if + we predict the first `time_step` of the day. + + The time series with the actual order counts always holds `1` value. + """ + predict_at = datetime.datetime( + good_predict_at.year, + good_predict_at.month, + good_predict_at.day, + config.SERVICE_START, + 0, + ) + result = order_history.make_realtime_ts( + pixel_id=good_pixel_id, predict_at=predict_at, train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert len(training_ts) == 7 * n_daily_time_steps * train_horizon + assert len(actuals_ts) == 1 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length2( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series is the product of the ... + + ... weekly time steps (i.e., product of `7` and the number of daily time steps) + and the `train_horizon` in weeks; however, this assertion only holds if + we predict the first `time_step` of the day. Predicting any other `time_step` + means that the training time series becomes longer by the number of time steps + before the one being predicted. + + The time series with the actual order counts always holds `1` value. 
+ """ + assert good_predict_at.hour == test_config.NOON + + result = order_history.make_realtime_ts( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_ts, _, actuals_ts = result + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + n_time_steps_before = ( + 60 * (test_config.NOON - config.SERVICE_START) // test_config.LONG_TIME_STEP + ) + + assert ( + len(training_ts) + == 7 * n_daily_time_steps * train_horizon + n_time_steps_before + ) + assert len(actuals_ts) == 1 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_frequency_is_number_number_of_weekly_time_steps( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The `frequency` is the number of weekly time steps.""" + result = order_history.make_realtime_ts( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + _, frequency, _ = result # noqa:WPS434 + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert frequency == 7 * n_daily_time_steps + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_no_long_enough_history1( + self, order_history, good_pixel_id, bad_predict_at, train_horizon, + ): + """If the `predict_at` day is too early in the `START`-`END` horizon ... + + ... the history of order totals is not long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_realtime_ts( + pixel_id=good_pixel_id, + predict_at=bad_predict_at, + train_horizon=train_horizon, + ) + + def test_no_long_enough_history2( + self, order_history, good_pixel_id, good_predict_at, + ): + """If the `train_horizon` is longer than the `START`-`END` horizon ... + + ... the history of order totals can never be long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_realtime_ts( + pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, + ) diff --git a/tests/forecasts/timify/test_order_history.py b/tests/forecasts/timify/test_order_history.py new file mode 100644 index 0000000..657e615 --- /dev/null +++ b/tests/forecasts/timify/test_order_history.py @@ -0,0 +1,92 @@ +"""Test the basic functionalities in the `OrderHistory` class.""" + +import datetime as dt + +import pytest + +from tests import config as test_config +from urban_meal_delivery.forecasts import timify + + +class TestSpecialMethods: + """Test the special methods in `OrderHistory`.""" + + def test_instantiate(self, order_history): + """Test `OrderHistory.__init__()`.""" + assert order_history is not None + + +class TestProperties: + """Test the properties in `OrderHistory`.""" + + @pytest.mark.parametrize('time_step', test_config.TIME_STEPS) + def test_time_step(self, grid, time_step): + """Test `OrderHistory.time_step` property.""" + order_history = timify.OrderHistory(grid=grid, time_step=time_step) + + result = order_history.time_step + + assert result == time_step + + def test_totals(self, order_history, order_totals): + """Test `OrderHistory.totals` property. + + The result of the `OrderHistory.aggregate_orders()` method call + is cached in the `OrderHistory.totals` property. + + Note: `OrderHistory.aggregate_orders()` is not called as + `OrderHistory._data` is already set in the `order_history` fixture. 
+ """ + result = order_history.totals + + assert result is order_totals + + def test_totals_is_cached(self, order_history, monkeypatch): + """Test `OrderHistory.totals` property. + + The result of the `OrderHistory.aggregate_orders()` method call + is cached in the `OrderHistory.totals` property. + + Note: We make `OrderHistory.aggregate_orders()` return a `sentinel` + that is cached into `OrderHistory._data`, which must be unset first. + """ + monkeypatch.setattr(order_history, '_data', None) + sentinel = object() + monkeypatch.setattr(order_history, 'aggregate_orders', lambda: sentinel) + + result1 = order_history.totals + result2 = order_history.totals + + assert result1 is result2 + assert result1 is sentinel + + +class TestMethods: + """Test various methods in `OrderHistory`.""" + + def test_first_order_at_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.first_order_at()` with good input.""" + result = order_history.first_order_at(good_pixel_id) + + assert result == test_config.START + + def test_first_order_at_non_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.first_order_at()` with bad input.""" + with pytest.raises( + LookupError, match='`pixel_id` is not in the `grid`', + ): + order_history.first_order_at(-1) + + def test_last_order_at_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.last_order_at()` with good input.""" + result = order_history.last_order_at(good_pixel_id) + + one_time_step = dt.timedelta(minutes=test_config.LONG_TIME_STEP) + assert result == test_config.END - one_time_step + + def test_last_order_at_non_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.last_order_at()` with bad input.""" + with pytest.raises( + LookupError, match='`pixel_id` is not in the `grid`', + ): + order_history.last_order_at(-1) diff --git a/tests/test_config.py b/tests/test_config.py index 6569161..db15321 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -29,6 +29,9 @@ def test_database_uri_set(env, monkeypatch): monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri) monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', uri) + # Prevent that a warning is emitted for a missing R_LIBS_PATH. + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs') + with pytest.warns(None) as record: configuration.make_config(env) @@ -36,15 +39,88 @@ def test_database_uri_set(env, monkeypatch): @pytest.mark.parametrize('env', envs) -def test_no_database_uri_set(env, monkeypatch): +def test_no_database_uri_set_with_testing_env_var(env, monkeypatch): """Package does not work without DATABASE_URI set in the environment.""" monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', None) monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', None) + monkeypatch.setenv('TESTING', 'true') + + # Prevent that a warning is emitted for a missing R_LIBS_PATH. 
+ monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs')
+
+ with pytest.warns(None) as record:
+ configuration.make_config(env)
+
+ assert len(record) == 0 # noqa:WPS441,WPS507
+
+
+@pytest.mark.parametrize('env', envs)
+def test_no_database_uri_set_without_testing_env_var(env, monkeypatch):
+ """Package does not work without DATABASE_URI set in the environment."""
+ monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', None)
+ monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', None)
+
+ monkeypatch.delenv('TESTING', raising=False)
+
+ # Prevent that a warning is emitted for a missing R_LIBS_PATH.
+ monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs')
+
+ with pytest.warns(UserWarning, match='no DATABASE_URI'):
+ configuration.make_config(env)
+
+
+@pytest.mark.parametrize('env', envs)
+def test_r_libs_path_set(env, monkeypatch):
+ """Package does NOT emit a warning if R_LIBS is set in the environment."""
+ monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs')
+
+ # Prevent that a warning is emitted for a missing DATABASE_URI.
+ uri = 'postgresql://user:password@localhost/db'
+ monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri)
+
+ with pytest.warns(None) as record:
+ configuration.make_config(env)
+
+ assert len(record) == 0 # noqa:WPS441,WPS507
+
+
+@pytest.mark.parametrize('env', envs)
+def test_no_r_libs_path_set_with_testing_env_var(env, monkeypatch):
+ """Package does NOT emit a warning if no R_LIBS is set in the environment ...
+
+ ... when testing.
+ """
+ monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', None)
+ monkeypatch.setenv('TESTING', 'true')
+
+ # Prevent that a warning is emitted for a missing DATABASE_URI.
+ uri = 'postgresql://user:password@localhost/db'
+ monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri)
+
+ with pytest.warns(None) as record:
+ configuration.make_config(env)
+
+ assert len(record) == 0 # noqa:WPS441,WPS507
+
+
+@pytest.mark.parametrize('env', envs)
+def test_no_r_libs_path_set_without_testing_env_var(env, monkeypatch):
+ """Package emits a warning if no R_LIBS is set in the environment ...
+
+ ... when not testing.
+ """
+ monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', None)
+ monkeypatch.delenv('TESTING', raising=False)
+
+ # Prevent that a warning is emitted for a missing DATABASE_URI.
+ uri = 'postgresql://user:password@localhost/db'
+ monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri)
+
+ with pytest.warns(UserWarning, match='no R_LIBS'):
+ configuration.make_config(env)
+
+
 def test_random_testing_schema():
 """CLEAN_SCHEMA is randomized if not set explicitly."""
 result = configuration.random_schema_name()
diff --git a/tests/test_init_r.py b/tests/test_init_r.py
new file mode 100644
index 0000000..be673d6
--- /dev/null
+++ b/tests/test_init_r.py
@@ -0,0 +1,19 @@
+"""Verify that the R packages are installed correctly."""
+
+import pytest
+
+
+@pytest.mark.r
+def test_r_packages_installed():
+ """Import the `urban_meal_delivery.init_r` module.
+
+ Doing this raises a `PackageNotInstalledError` if the
+ mentioned R packages are not importable.
+
+ They must be installed externally. That happens either
+ in the "research/r_dependencies.ipynb" notebook or
+ in the GitHub Actions CI.
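+
+ A minimal sketch of such an import guard, assuming the module relies
+ on `rpy2` (hypothetical code, not necessarily the actual module):
+
+ from rpy2.robjects.packages import importr
+ forecast = importr('forecast') # raises `PackageNotInstalledError`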
+ """ + from urban_meal_delivery import init_r # noqa:WPS433 + + assert init_r is not None diff --git a/tests/test_version.py b/tests/test_version.py index 474b7b1..4ee70b3 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -20,8 +20,6 @@ import urban_meal_delivery class TestPEP404Compliance: """Packaged version identifier is PEP440 compliant.""" - # pylint:disable=no-self-use - @pytest.fixture def parsed_version(self) -> str: """The packaged version.""" @@ -47,8 +45,6 @@ class TestPEP404Compliance: class TestSemanticVersioning: """Packaged version follows a strict subset of semantic versioning.""" - # pylint:disable=no-self-use - version_pattern = re.compile( r'^(0|([1-9]\d*))\.(0|([1-9]\d*))\.(0|([1-9]\d*))(\.dev(0|([1-9]\d*)))?$', )