This commit is contained in:
commit
3393071db3
33 changed files with 4645 additions and 223 deletions
4
.gitignore
vendored
4
.gitignore
vendored
|
@ -1,4 +1,6 @@
|
|||
.cache/
|
||||
*.egg-info/
|
||||
**/*.egg-info/
|
||||
.env
|
||||
**/.ipynb_checkpoints/
|
||||
.python-version
|
||||
.venv/
|
||||
|
|
|
@ -4,18 +4,30 @@ repos:
|
|||
# Run the local formatting, linting, and testing tool chains.
|
||||
- repo: local
|
||||
hooks:
|
||||
- id: local-pre-commit-checks
|
||||
name: Run code formatters and linters
|
||||
entry: poetry run nox -s pre-commit --
|
||||
- id: local-fix-branch-references
|
||||
name: Check for wrong branch references
|
||||
entry: poetry run nox -s fix-branch-references --
|
||||
language: system
|
||||
stages: [commit]
|
||||
types: [text]
|
||||
- id: local-format
|
||||
name: Format the source files
|
||||
entry: poetry run nox -s format --
|
||||
language: system
|
||||
stages: [commit]
|
||||
types: [python]
|
||||
- id: local-pre-merge-checks
|
||||
name: Run the entire test suite
|
||||
entry: poetry run nox -s pre-merge --
|
||||
- id: local-lint
|
||||
name: Lint the source files
|
||||
entry: poetry run nox -s lint --
|
||||
language: system
|
||||
stages: [merge-commit, push]
|
||||
stages: [commit]
|
||||
types: [python]
|
||||
- id: local-test-suite
|
||||
name: Run the entire test suite
|
||||
entry: poetry run nox -s test-suite --
|
||||
language: system
|
||||
stages: [merge-commit]
|
||||
types: [text]
|
||||
# Enable hooks provided by the pre-commit project to
|
||||
# enforce rules that local tools could not that easily.
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
|
|
35
README.md
35
README.md
|
@ -6,11 +6,40 @@ operating in France from January 2016 to January 2017.
|
|||
The goal is to
|
||||
optimize the platform's delivery process involving independent couriers.
|
||||
|
||||
|
||||
## Structure
|
||||
|
||||
The analysis is structured into three aspects
|
||||
that iteratively build on each other.
|
||||
|
||||
## Real-time Demand Forecasting
|
||||
### Real-time Demand Forecasting
|
||||
|
||||
## Predictive Routing
|
||||
### Predictive Routing
|
||||
|
||||
## Shift & Capacity Planning
|
||||
### Shift & Capacity Planning
|
||||
|
||||
|
||||
## Installation & Contribution
|
||||
|
||||
To play with the code developed for the analyses,
|
||||
you can clone the project with [git](https://git-scm.com/)
|
||||
and install the contained `urban-meal-delivery` package
|
||||
and all its dependencies
|
||||
in a [virtual environment](https://docs.python.org/3/tutorial/venv.html)
|
||||
with [poetry](https://python-poetry.org/docs/):
|
||||
|
||||
`git clone https://github.com/webartifex/urban-meal-delivery.git`
|
||||
|
||||
and
|
||||
|
||||
`poetry install --extras research`
|
||||
|
||||
The `--extras` option is necessary as the non-develop dependencies
|
||||
are structured in the [pyproject.toml](https://github.com/webartifex/urban-meal-delivery/blob/main/pyproject.toml) file
|
||||
into dependencies related to only the `urban-meal-delivery` source code package
|
||||
and dependencies used to run the [Jupyter](https://jupyter.org/) environment
|
||||
with the analyses.
|
||||
|
||||
Contributions are welcome.
|
||||
Use the [issues](https://github.com/webartifex/urban-meal-delivery/issues) tab.
|
||||
The project is licensed under the [MIT license](https://github.com/webartifex/urban-meal-delivery/blob/main/LICENSE.txt).
|
||||
|
|
44
alembic.ini
Normal file
44
alembic.ini
Normal file
|
@ -0,0 +1,44 @@
|
|||
[alembic]
|
||||
file_template = rev_%%(year)d%%(month).2d%%(day).2d_%%(hour).2d_%%(rev)s_%%(slug)s
|
||||
script_location = %(here)s/migrations
|
||||
|
||||
[post_write_hooks]
|
||||
hooks=black
|
||||
black.type=console_scripts
|
||||
black.entrypoint=black
|
||||
|
||||
# The following is taken from the default alembic.ini file.
|
||||
|
||||
[loggers]
|
||||
keys = root,sqlalchemy,alembic
|
||||
|
||||
[handlers]
|
||||
keys = console
|
||||
|
||||
[formatters]
|
||||
keys = generic
|
||||
|
||||
[logger_root]
|
||||
level = WARN
|
||||
handlers = console
|
||||
qualname =
|
||||
|
||||
[logger_sqlalchemy]
|
||||
level = WARN
|
||||
handlers =
|
||||
qualname = sqlalchemy.engine
|
||||
|
||||
[logger_alembic]
|
||||
level = INFO
|
||||
handlers =
|
||||
qualname = alembic
|
||||
|
||||
[handler_console]
|
||||
class = StreamHandler
|
||||
args = (sys.stderr,)
|
||||
level = NOTSET
|
||||
formatter = generic
|
||||
|
||||
[formatter_generic]
|
||||
format = %(levelname)-5.5s [%(name)s] %(message)s
|
||||
datefmt = %H:%M:%S
|
4
migrations/README.md
Normal file
4
migrations/README.md
Normal file
|
@ -0,0 +1,4 @@
|
|||
# Database Migrations
|
||||
|
||||
This project uses [alembic](https://alembic.sqlalchemy.org/en/latest)
|
||||
to run the database migrations
|
45
migrations/env.py
Normal file
45
migrations/env.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
"""Configure Alembic's migration environment."""
|
||||
|
||||
import os
|
||||
from logging import config as log_config
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import context
|
||||
|
||||
from urban_meal_delivery import config as umd_config
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
# Disable the --sql option, a.k.a, the "offline mode".
|
||||
if context.is_offline_mode():
|
||||
raise NotImplementedError('The --sql option is not implemented in this project')
|
||||
|
||||
|
||||
# Set up the default Python logger from the alembic.ini file.
|
||||
log_config.fileConfig(context.config.config_file_name)
|
||||
|
||||
|
||||
def include_object(obj, _name, type_, _reflected, _compare_to):
|
||||
"""Only include the clean schema into --autogenerate migrations."""
|
||||
if type_ in {'table', 'column'} and obj.schema != umd_config.DATABASE_SCHEMA:
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
engine = sa.create_engine(umd_config.DATABASE_URI)
|
||||
|
||||
with engine.connect() as connection:
|
||||
context.configure(
|
||||
connection=connection,
|
||||
include_object=include_object,
|
||||
target_metadata=db.Base.metadata,
|
||||
version_table='{alembic_table}{test_schema}'.format(
|
||||
alembic_table=umd_config.ALEMBIC_TABLE,
|
||||
test_schema=(f'_{umd_config.CLEAN_SCHEMA}' if os.getenv('TESTING') else ''),
|
||||
),
|
||||
version_table_schema=umd_config.ALEMBIC_TABLE_SCHEMA,
|
||||
)
|
||||
|
||||
with context.begin_transaction():
|
||||
context.run_migrations()
|
31
migrations/script.py.mako
Normal file
31
migrations/script.py.mako
Normal file
|
@ -0,0 +1,31 @@
|
|||
"""${message}.
|
||||
|
||||
Revision: # ${up_revision} at ${create_date}
|
||||
Revises: # ${down_revision | comma,n}
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
${imports if imports else ""}
|
||||
|
||||
from urban_meal_delivery import configuration
|
||||
|
||||
revision = ${repr(up_revision)}
|
||||
down_revision = ${repr(down_revision)}
|
||||
branch_labels = ${repr(branch_labels)}
|
||||
depends_on = ${repr(depends_on)}
|
||||
|
||||
|
||||
config = configuration.make_config('testing' if os.getenv('TESTING') else 'production')
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""Upgrade to revision ${up_revision}."""
|
||||
${upgrades if upgrades else "pass"}
|
||||
|
||||
|
||||
def downgrade():
|
||||
"""Downgrade to revision ${down_revision}."""
|
||||
${downgrades if downgrades else "pass"}
|
|
@ -0,0 +1,802 @@
|
|||
"""Create the database from scratch.
|
||||
|
||||
Revision: #f11cd76d2f45 at 2020-08-06 23:24:32
|
||||
"""
|
||||
|
||||
import os
|
||||
|
||||
import sqlalchemy as sa
|
||||
from alembic import op
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
from urban_meal_delivery import configuration
|
||||
|
||||
|
||||
revision = 'f11cd76d2f45'
|
||||
down_revision = None
|
||||
branch_labels = None
|
||||
depends_on = None
|
||||
|
||||
|
||||
config = configuration.make_config('testing' if os.getenv('TESTING') else 'production')
|
||||
|
||||
|
||||
def upgrade():
|
||||
"""Upgrade to revision f11cd76d2f45."""
|
||||
op.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};')
|
||||
op.create_table( # noqa:ECE001
|
||||
'cities',
|
||||
sa.Column('id', sa.SmallInteger(), autoincrement=False, nullable=False),
|
||||
sa.Column('name', sa.Unicode(length=10), nullable=False),
|
||||
sa.Column('kml', sa.UnicodeText(), nullable=False),
|
||||
sa.Column('center_latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('center_longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('northeast_latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('northeast_longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('southwest_latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('southwest_longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('initial_zoom', sa.SmallInteger(), nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_cities')),
|
||||
*(
|
||||
[ # noqa:WPS504
|
||||
sa.ForeignKeyConstraint(
|
||||
['id'],
|
||||
[f'{config.ORIGINAL_SCHEMA}.cities.id'],
|
||||
name=op.f('pk_cities_sanity'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
]
|
||||
if not config.TESTING
|
||||
else []
|
||||
),
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_table( # noqa:ECE001
|
||||
'couriers',
|
||||
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('vehicle', sa.Unicode(length=10), nullable=False),
|
||||
sa.Column('speed', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('capacity', sa.SmallInteger(), nullable=False),
|
||||
sa.Column('pay_per_hour', sa.SmallInteger(), nullable=False),
|
||||
sa.Column('pay_per_order', sa.SmallInteger(), nullable=False),
|
||||
sa.CheckConstraint(
|
||||
"vehicle IN ('bicycle', 'motorcycle')",
|
||||
name=op.f('ck_couriers_on_available_vehicle_types'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= capacity AND capacity <= 200',
|
||||
name=op.f('ck_couriers_on_capacity_under_200_liters'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= pay_per_hour AND pay_per_hour <= 1500',
|
||||
name=op.f('ck_couriers_on_realistic_pay_per_hour'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= pay_per_order AND pay_per_order <= 650',
|
||||
name=op.f('ck_couriers_on_realistic_pay_per_order'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= speed AND speed <= 30', name=op.f('ck_couriers_on_realistic_speed'),
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_couriers')),
|
||||
*(
|
||||
[ # noqa:WPS504
|
||||
sa.ForeignKeyConstraint(
|
||||
['id'],
|
||||
[f'{config.ORIGINAL_SCHEMA}.couriers.id'],
|
||||
name=op.f('pk_couriers_sanity'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
]
|
||||
if not config.TESTING
|
||||
else []
|
||||
),
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_table(
|
||||
'customers',
|
||||
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_customers')),
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_table( # noqa:ECE001
|
||||
'addresses',
|
||||
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
|
||||
sa.Column('primary_id', sa.Integer(), nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('place_id', sa.Unicode(length=120), nullable=False), # noqa:WPS432
|
||||
sa.Column('latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
|
||||
sa.Column('city_id', sa.SmallInteger(), nullable=False),
|
||||
sa.Column('city', sa.Unicode(length=25), nullable=False), # noqa:WPS432
|
||||
sa.Column('zip_code', sa.Integer(), nullable=False),
|
||||
sa.Column('street', sa.Unicode(length=80), nullable=False), # noqa:WPS432
|
||||
sa.Column('floor', sa.SmallInteger(), nullable=True),
|
||||
sa.CheckConstraint(
|
||||
'-180 <= longitude AND longitude <= 180',
|
||||
name=op.f('ck_addresses_on_longitude_between_180_degrees'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'-90 <= latitude AND latitude <= 90',
|
||||
name=op.f('ck_addresses_on_latitude_between_90_degrees'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= floor AND floor <= 40', name=op.f('ck_addresses_on_realistic_floor'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'30000 <= zip_code AND zip_code <= 99999',
|
||||
name=op.f('ck_addresses_on_valid_zip_code'),
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['city_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.cities.id'],
|
||||
name=op.f('fk_addresses_to_cities_via_city_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['primary_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.addresses.id'],
|
||||
name=op.f('fk_addresses_to_addresses_via_primary_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_addresses')),
|
||||
*(
|
||||
[ # noqa:WPS504
|
||||
sa.ForeignKeyConstraint(
|
||||
['id'],
|
||||
[f'{config.ORIGINAL_SCHEMA}.addresses.id'],
|
||||
name=op.f('pk_addresses_sanity'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
]
|
||||
if not config.TESTING
|
||||
else []
|
||||
),
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_addresses_on_city_id'),
|
||||
'addresses',
|
||||
['city_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_addresses_on_place_id'),
|
||||
'addresses',
|
||||
['place_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_addresses_on_primary_id'),
|
||||
'addresses',
|
||||
['primary_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_addresses_on_zip_code'),
|
||||
'addresses',
|
||||
['zip_code'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_table( # noqa:ECE001
|
||||
'restaurants',
|
||||
sa.Column('id', sa.SmallInteger(), autoincrement=False, nullable=False),
|
||||
sa.Column('created_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('name', sa.Unicode(length=45), nullable=False), # noqa:WPS432
|
||||
sa.Column('address_id', sa.Integer(), nullable=False),
|
||||
sa.Column('estimated_prep_duration', sa.SmallInteger(), nullable=False),
|
||||
sa.CheckConstraint(
|
||||
'0 <= estimated_prep_duration AND estimated_prep_duration <= 2400',
|
||||
name=op.f('ck_restaurants_on_realistic_estimated_prep_duration'),
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['address_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.addresses.id'],
|
||||
name=op.f('fk_restaurants_to_addresses_via_address_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_restaurants')),
|
||||
*(
|
||||
[ # noqa:WPS504
|
||||
sa.ForeignKeyConstraint(
|
||||
['id'],
|
||||
[f'{config.ORIGINAL_SCHEMA}.businesses.id'],
|
||||
name=op.f('pk_restaurants_sanity'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
]
|
||||
if not config.TESTING
|
||||
else []
|
||||
),
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_restaurants_on_address_id'),
|
||||
'restaurants',
|
||||
['address_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_table( # noqa:ECE001
|
||||
'orders',
|
||||
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
|
||||
sa.Column('delivery_id', sa.Integer(), nullable=True),
|
||||
sa.Column('customer_id', sa.Integer(), nullable=False),
|
||||
sa.Column('placed_at', sa.DateTime(), nullable=False),
|
||||
sa.Column('ad_hoc', sa.Boolean(), nullable=False),
|
||||
sa.Column('scheduled_delivery_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('scheduled_delivery_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('first_estimated_delivery_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('cancelled', sa.Boolean(), nullable=False),
|
||||
sa.Column('cancelled_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('cancelled_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('sub_total', sa.Integer(), nullable=False),
|
||||
sa.Column('delivery_fee', sa.SmallInteger(), nullable=False),
|
||||
sa.Column('total', sa.Integer(), nullable=False),
|
||||
sa.Column('restaurant_id', sa.SmallInteger(), nullable=False),
|
||||
sa.Column('restaurant_notified_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('restaurant_notified_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('restaurant_confirmed_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('restaurant_confirmed_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('estimated_prep_duration', sa.Integer(), nullable=True),
|
||||
sa.Column('estimated_prep_duration_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('estimated_prep_buffer', sa.Integer(), nullable=False),
|
||||
sa.Column('courier_id', sa.Integer(), nullable=True),
|
||||
sa.Column('dispatch_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('dispatch_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('courier_notified_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('courier_notified_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('courier_accepted_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('courier_accepted_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('utilization', sa.SmallInteger(), nullable=False),
|
||||
sa.Column('pickup_address_id', sa.Integer(), nullable=False),
|
||||
sa.Column('reached_pickup_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('pickup_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('pickup_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('pickup_not_confirmed', sa.Boolean(), nullable=True),
|
||||
sa.Column('left_pickup_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('left_pickup_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('delivery_address_id', sa.Integer(), nullable=False),
|
||||
sa.Column('reached_delivery_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('delivery_at', sa.DateTime(), nullable=True),
|
||||
sa.Column('delivery_at_corrected', sa.Boolean(), nullable=True),
|
||||
sa.Column('delivery_not_confirmed', sa.Boolean(), nullable=True),
|
||||
sa.Column('courier_waited_at_delivery', sa.Boolean(), nullable=True),
|
||||
sa.Column('logged_delivery_distance', sa.SmallInteger(), nullable=True),
|
||||
sa.Column('logged_avg_speed', postgresql.DOUBLE_PRECISION(), nullable=True),
|
||||
sa.Column('logged_avg_speed_distance', sa.SmallInteger(), nullable=True),
|
||||
sa.CheckConstraint(
|
||||
'0 <= estimated_prep_buffer AND estimated_prep_buffer <= 900',
|
||||
name=op.f('ck_orders_on_estimated_prep_buffer_between_0_and_900'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= estimated_prep_duration AND estimated_prep_duration <= 2700',
|
||||
name=op.f('ck_orders_on_estimated_prep_duration_between_0_and_2700'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'0 <= utilization AND utilization <= 100',
|
||||
name=op.f('ck_orders_on_utilization_between_0_and_100'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(cancelled_at IS NULL AND cancelled_at_corrected IS NULL) OR (cancelled_at IS NULL AND cancelled_at_corrected IS TRUE) OR (cancelled_at IS NOT NULL AND cancelled_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_1'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(courier_accepted_at IS NULL AND courier_accepted_at_corrected IS NULL) OR (courier_accepted_at IS NULL AND courier_accepted_at_corrected IS TRUE) OR (courier_accepted_at IS NOT NULL AND courier_accepted_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_7'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(courier_notified_at IS NULL AND courier_notified_at_corrected IS NULL) OR (courier_notified_at IS NULL AND courier_notified_at_corrected IS TRUE) OR (courier_notified_at IS NOT NULL AND courier_notified_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_6'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(delivery_at IS NULL AND delivery_at_corrected IS NULL) OR (delivery_at IS NULL AND delivery_at_corrected IS TRUE) OR (delivery_at IS NOT NULL AND delivery_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_10'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(dispatch_at IS NULL AND dispatch_at_corrected IS NULL) OR (dispatch_at IS NULL AND dispatch_at_corrected IS TRUE) OR (dispatch_at IS NOT NULL AND dispatch_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_5'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(estimated_prep_duration IS NULL AND estimated_prep_duration_corrected IS NULL) OR (estimated_prep_duration IS NULL AND estimated_prep_duration_corrected IS TRUE) OR (estimated_prep_duration IS NOT NULL AND estimated_prep_duration_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_4'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(left_pickup_at IS NULL AND left_pickup_at_corrected IS NULL) OR (left_pickup_at IS NULL AND left_pickup_at_corrected IS TRUE) OR (left_pickup_at IS NOT NULL AND left_pickup_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_9'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(pickup_at IS NULL AND pickup_at_corrected IS NULL) OR (pickup_at IS NULL AND pickup_at_corrected IS TRUE) OR (pickup_at IS NOT NULL AND pickup_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_8'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(restaurant_confirmed_at IS NULL AND restaurant_confirmed_at_corrected IS NULL) OR (restaurant_confirmed_at IS NULL AND restaurant_confirmed_at_corrected IS TRUE) OR (restaurant_confirmed_at IS NOT NULL AND restaurant_confirmed_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_3'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(restaurant_notified_at IS NULL AND restaurant_notified_at_corrected IS NULL) OR (restaurant_notified_at IS NULL AND restaurant_notified_at_corrected IS TRUE) OR (restaurant_notified_at IS NOT NULL AND restaurant_notified_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_2'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(scheduled_delivery_at IS NULL AND scheduled_delivery_at_corrected IS NULL) OR (scheduled_delivery_at IS NULL AND scheduled_delivery_at_corrected IS TRUE) OR (scheduled_delivery_at IS NOT NULL AND scheduled_delivery_at_corrected IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_corrections_only_for_set_value_0'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(ad_hoc IS TRUE AND scheduled_delivery_at IS NULL) OR (ad_hoc IS FALSE AND scheduled_delivery_at IS NOT NULL)', # noqa:E501
|
||||
name=op.f('ck_orders_on_either_ad_hoc_or_scheduled_order'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'NOT (EXTRACT(EPOCH FROM scheduled_delivery_at - placed_at) < 1800)',
|
||||
name=op.f('ck_orders_on_scheduled_orders_not_within_30_minutes'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'NOT (ad_hoc IS FALSE AND ((EXTRACT(HOUR FROM scheduled_delivery_at) <= 11 AND NOT (EXTRACT(HOUR FROM scheduled_delivery_at) = 11 AND EXTRACT(MINUTE FROM scheduled_delivery_at) = 45)) OR EXTRACT(HOUR FROM scheduled_delivery_at) > 22))', # noqa:E501
|
||||
name=op.f('ck_orders_on_scheduled_orders_within_business_hours'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'NOT (ad_hoc IS TRUE AND (EXTRACT(HOUR FROM placed_at) < 11 OR EXTRACT(HOUR FROM placed_at) > 22))', # noqa:E501
|
||||
name=op.f('ck_orders_on_ad_hoc_orders_within_business_hours'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'NOT (cancelled IS FALSE AND cancelled_at IS NOT NULL)',
|
||||
name=op.f('ck_orders_on_only_cancelled_orders_may_have_cancelled_at'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'NOT (cancelled IS TRUE AND delivery_at IS NOT NULL)',
|
||||
name=op.f('ck_orders_on_cancelled_orders_must_not_be_delivered'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > courier_accepted_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_16'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > courier_notified_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_15'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_21'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > dispatch_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_14'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_19'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > pickup_at', name=op.f('ck_orders_on_ordered_timestamps_18'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_20'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > reached_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_17'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > restaurant_confirmed_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_13'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'cancelled_at > restaurant_notified_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_12'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_accepted_at < delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_42'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_accepted_at < left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_40'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_accepted_at < pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_39'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_accepted_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_41'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_accepted_at < reached_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_38'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_notified_at < courier_accepted_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_32'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_notified_at < delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_37'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_notified_at < left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_35'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_notified_at < pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_34'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_notified_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_36'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'courier_notified_at < reached_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_33'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < courier_accepted_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_26'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < courier_notified_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_25'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_31'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_29'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < pickup_at', name=op.f('ck_orders_on_ordered_timestamps_28'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_30'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'dispatch_at < reached_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_27'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'estimated_prep_buffer % 60 = 0',
|
||||
name=op.f('ck_orders_on_estimated_prep_buffer_must_be_whole_minutes'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'estimated_prep_duration % 60 = 0',
|
||||
name=op.f('ck_orders_on_estimated_prep_duration_must_be_whole_minutes'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'left_pickup_at < delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_51'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'left_pickup_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_50'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'pickup_at < delivery_at', name=op.f('ck_orders_on_ordered_timestamps_49'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'pickup_at < left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_47'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'pickup_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_48'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < cancelled_at', name=op.f('ck_orders_on_ordered_timestamps_2'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < courier_accepted_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_7'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < courier_notified_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_6'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < delivery_at', name=op.f('ck_orders_on_ordered_timestamps_11'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < dispatch_at', name=op.f('ck_orders_on_ordered_timestamps_5'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < first_estimated_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_1'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_9'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_10'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < reached_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_8'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < restaurant_confirmed_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_4'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < restaurant_notified_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_3'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'placed_at < scheduled_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_0'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'reached_delivery_at < delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_52'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'reached_pickup_at < delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_46'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'reached_pickup_at < left_pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_44'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'reached_pickup_at < pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_43'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'reached_pickup_at < reached_delivery_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_45'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'restaurant_confirmed_at < pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_24'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'restaurant_notified_at < pickup_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_23'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'restaurant_notified_at < restaurant_confirmed_at',
|
||||
name=op.f('ck_orders_on_ordered_timestamps_22'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(pickup_at IS NULL AND pickup_not_confirmed IS NULL) OR (pickup_at IS NOT NULL AND pickup_not_confirmed IS NOT NULL)', # noqa:E501
|
||||
name=op.f('pickup_not_confirmed_only_if_pickup'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(delivery_at IS NULL AND delivery_not_confirmed IS NULL) OR (delivery_at IS NOT NULL AND delivery_not_confirmed IS NOT NULL)', # noqa:E501
|
||||
name=op.f('delivery_not_confirmed_only_if_delivery'),
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'(delivery_at IS NULL AND courier_waited_at_delivery IS NULL) OR (delivery_at IS NOT NULL AND courier_waited_at_delivery IS NOT NULL)', # noqa:E501
|
||||
name=op.f('courier_waited_at_delivery_only_if_delivery'),
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['courier_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.couriers.id'],
|
||||
name=op.f('fk_orders_to_couriers_via_courier_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['customer_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.customers.id'],
|
||||
name=op.f('fk_orders_to_customers_via_customer_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['delivery_address_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.addresses.id'],
|
||||
name=op.f('fk_orders_to_addresses_via_delivery_address_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['pickup_address_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.addresses.id'],
|
||||
name=op.f('fk_orders_to_addresses_via_pickup_address_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['restaurant_id'],
|
||||
[f'{config.CLEAN_SCHEMA}.restaurants.id'],
|
||||
name=op.f('fk_orders_to_restaurants_via_restaurant_id'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.PrimaryKeyConstraint('id', name=op.f('pk_orders')),
|
||||
*(
|
||||
[ # noqa:WPS504
|
||||
sa.ForeignKeyConstraint(
|
||||
['id'],
|
||||
[f'{config.ORIGINAL_SCHEMA}.orders.id'],
|
||||
name=op.f('pk_orders_sanity'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['delivery_id'],
|
||||
[f'{config.ORIGINAL_SCHEMA}.deliveries.id'],
|
||||
name=op.f('pk_deliveries_sanity'),
|
||||
onupdate='RESTRICT',
|
||||
ondelete='RESTRICT',
|
||||
),
|
||||
]
|
||||
if not config.TESTING
|
||||
else []
|
||||
),
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_cancelled'),
|
||||
'orders',
|
||||
['cancelled'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_cancelled_at_corrected'),
|
||||
'orders',
|
||||
['cancelled_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_courier_accepted_at_corrected'),
|
||||
'orders',
|
||||
['courier_accepted_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_courier_id'),
|
||||
'orders',
|
||||
['courier_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_courier_notified_at_corrected'),
|
||||
'orders',
|
||||
['courier_notified_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_customer_id'),
|
||||
'orders',
|
||||
['customer_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_delivery_address_id'),
|
||||
'orders',
|
||||
['delivery_address_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_delivery_at_corrected'),
|
||||
'orders',
|
||||
['delivery_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_delivery_id'),
|
||||
'orders',
|
||||
['delivery_id'],
|
||||
unique=True,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_dispatch_at_corrected'),
|
||||
'orders',
|
||||
['dispatch_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_estimated_prep_buffer'),
|
||||
'orders',
|
||||
['estimated_prep_buffer'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_estimated_prep_duration'),
|
||||
'orders',
|
||||
['estimated_prep_duration'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_estimated_prep_duration_corrected'),
|
||||
'orders',
|
||||
['estimated_prep_duration_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_left_pickup_at_corrected'),
|
||||
'orders',
|
||||
['left_pickup_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_pickup_address_id'),
|
||||
'orders',
|
||||
['pickup_address_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_pickup_at_corrected'),
|
||||
'orders',
|
||||
['pickup_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_placed_at'),
|
||||
'orders',
|
||||
['placed_at'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_restaurant_confirmed_at_corrected'),
|
||||
'orders',
|
||||
['restaurant_confirmed_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_restaurant_id'),
|
||||
'orders',
|
||||
['restaurant_id'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_restaurant_notified_at_corrected'),
|
||||
'orders',
|
||||
['restaurant_notified_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_scheduled_delivery_at'),
|
||||
'orders',
|
||||
['scheduled_delivery_at'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
op.create_index(
|
||||
op.f('ix_orders_on_scheduled_delivery_at_corrected'),
|
||||
'orders',
|
||||
['scheduled_delivery_at_corrected'],
|
||||
unique=False,
|
||||
schema=config.CLEAN_SCHEMA,
|
||||
)
|
||||
|
||||
|
||||
def downgrade():
|
||||
"""Downgrade to revision None."""
|
||||
op.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;')
|
201
noxfile.py
201
noxfile.py
|
@ -27,7 +27,7 @@ as unified tasks to assure the quality of the source code:
|
|||
=> may be paths or options
|
||||
|
||||
|
||||
GitHub Actions implements a CI workflow:
|
||||
GitHub Actions implements the following CI workflow:
|
||||
|
||||
- "format", "lint", and "test" as above
|
||||
|
||||
|
@ -36,30 +36,31 @@ GitHub Actions implements a CI workflow:
|
|||
- "docs": build the documentation with sphinx
|
||||
|
||||
|
||||
The pre-commit framework invokes the "pre-commit" and "pre-merge" sessions:
|
||||
The pre-commit framework invokes the following tasks:
|
||||
|
||||
- "pre-commit" before all commits:
|
||||
- before any commit:
|
||||
|
||||
+ triggers "format" and "lint" on staged source files
|
||||
+ => test coverage may be < 100%
|
||||
+ "format" and "lint" as above
|
||||
+ "fix-branch-references": replace branch references with the current one
|
||||
|
||||
- "pre-merge" before all merges and pushes:
|
||||
|
||||
+ same as "pre-commit"
|
||||
+ plus: triggers "test", "safety", and "docs" (that ignore extra arguments)
|
||||
+ => test coverage is enforced to be 100%
|
||||
- before merges: run the entire "test-suite" independent of the file changes
|
||||
|
||||
"""
|
||||
|
||||
import contextlib
|
||||
import glob
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess # noqa:S404
|
||||
import tempfile
|
||||
from typing import Generator, IO, Tuple
|
||||
|
||||
import nox
|
||||
from nox.sessions import Session
|
||||
|
||||
|
||||
GITHUB_REPOSITORY = 'webartifex/urban-meal-delivery'
|
||||
PACKAGE_IMPORT_NAME = 'urban_meal_delivery'
|
||||
|
||||
# Docs/sphinx locations.
|
||||
|
@ -74,7 +75,9 @@ PYTEST_LOCATION = 'tests/'
|
|||
|
||||
# Paths with all *.py files.
|
||||
SRC_LOCATIONS = (
|
||||
f'{DOCS_SRC}/conf.py',
|
||||
f'{DOCS_SRC}conf.py',
|
||||
'migrations/env.py',
|
||||
'migrations/versions/',
|
||||
'noxfile.py',
|
||||
PACKAGE_SOURCE_LOCATION,
|
||||
PYTEST_LOCATION,
|
||||
|
@ -192,23 +195,6 @@ def lint(session):
|
|||
)
|
||||
|
||||
|
||||
@nox.session(name='pre-commit', python=PYTHON, venv_backend='none')
|
||||
def pre_commit(session):
|
||||
"""Run the format and lint sessions.
|
||||
|
||||
Source files must be well-formed before they enter git.
|
||||
|
||||
Intended to be run as a pre-commit hook.
|
||||
|
||||
Passed in extra arguments are forwarded. So, if it is run as a pre-commit
|
||||
hook, only the currently staged source files are formatted and linted.
|
||||
"""
|
||||
# "format" and "lint" are run in sessions on their own as
|
||||
# session.notify() creates new Session objects.
|
||||
session.notify('format')
|
||||
session.notify('lint')
|
||||
|
||||
|
||||
@nox.session(python=PYTHON)
|
||||
def test(session):
|
||||
"""Test the code base.
|
||||
|
@ -235,7 +221,12 @@ def test(session):
|
|||
# non-develop dependencies be installed in the virtual environment.
|
||||
session.run('poetry', 'install', '--no-dev', external=True)
|
||||
_install_packages(
|
||||
session, 'packaging', 'pytest', 'pytest-cov', 'xdoctest[optional]',
|
||||
session,
|
||||
'packaging',
|
||||
'pytest',
|
||||
'pytest-cov',
|
||||
'pytest-env',
|
||||
'xdoctest[optional]',
|
||||
)
|
||||
|
||||
# Interpret extra arguments as options for pytest.
|
||||
|
@ -249,6 +240,8 @@ def test(session):
|
|||
'--cov-branch',
|
||||
'--cov-fail-under=100',
|
||||
'--cov-report=term-missing:skip-covered',
|
||||
'-k',
|
||||
'not e2e',
|
||||
PYTEST_LOCATION,
|
||||
)
|
||||
session.run('pytest', '--version')
|
||||
|
@ -306,28 +299,22 @@ def docs(session):
|
|||
print(f'Docs are available at {os.getcwd()}/{DOCS_BUILD}index.html') # noqa:WPS421
|
||||
|
||||
|
||||
@nox.session(name='pre-merge', python=PYTHON)
|
||||
def pre_merge(session):
|
||||
"""Run the format, lint, test, safety, and docs sessions.
|
||||
@nox.session(name='test-suite', python=PYTHON)
|
||||
def test_suite(session):
|
||||
"""Run the entire test suite.
|
||||
|
||||
Intended to be run either as a pre-merge or pre-push hook.
|
||||
Intended to be run as a pre-commit hook.
|
||||
|
||||
Ignores the paths passed in by the pre-commit framework
|
||||
for the test, safety, and docs sessions so that the
|
||||
entire test suite is executed.
|
||||
and runs the entire test suite.
|
||||
"""
|
||||
# Re-using an old environment is not so easy here as the "test" session
|
||||
# runs `poetry install --no-dev`, which removes previously installed packages.
|
||||
if session.virtualenv.reuse_existing:
|
||||
raise RuntimeError(
|
||||
'The "pre-merge" session must be run without the "-r" option',
|
||||
'The "test-suite" session must be run without the "-r" option',
|
||||
)
|
||||
|
||||
session.notify('format')
|
||||
session.notify('lint')
|
||||
session.notify('safety')
|
||||
session.notify('docs')
|
||||
|
||||
# Little hack to not work with the extra arguments provided
|
||||
# by the pre-commit framework. Create a flag in the
|
||||
# env(ironment) that must contain only `str`-like objects.
|
||||
|
@ -340,6 +327,116 @@ def pre_merge(session):
|
|||
test(session)
|
||||
|
||||
|
||||
@nox.session(name='fix-branch-references', python=PYTHON, venv_backend='none')
|
||||
def fix_branch_references(session): # noqa:WPS210
|
||||
"""Replace branch references with the current branch.
|
||||
|
||||
Intended to be run as a pre-commit hook.
|
||||
|
||||
Many files in the project (e.g., README.md) contain links to resources
|
||||
on github.com or nbviewer.jupyter.org that contain branch labels.
|
||||
|
||||
This task rewrites these links such that they contain the branch reference
|
||||
of the current branch. If the branch is only a temporary one that is to be
|
||||
merged into the 'main' branch, all references are adjusted to 'main' as well.
|
||||
|
||||
This task may be called with one positional argument that is interpreted
|
||||
as the branch to which all references are changed into.
|
||||
The format must be "--branch=BRANCH_NAME".
|
||||
"""
|
||||
# Adjust this to add/remove glob patterns
|
||||
# whose links are re-written.
|
||||
paths = ['*.md', '**/*.md', '**/*.ipynb']
|
||||
|
||||
# Get the branch git is currently on.
|
||||
# This is the branch to which all references are changed into
|
||||
# if none of the two exceptions below apply.
|
||||
branch = (
|
||||
subprocess.check_output( # noqa:S603
|
||||
('git', 'rev-parse', '--abbrev-ref', 'HEAD'),
|
||||
)
|
||||
.decode()
|
||||
.strip()
|
||||
)
|
||||
# If the current branch is only a temporary one that is to be merged
|
||||
# into 'main', we adjust all branch references to 'main' as well.
|
||||
if branch.startswith('release') or branch.startswith('research'):
|
||||
branch = 'main'
|
||||
# If a "--branch=BRANCH_NAME" argument is passed in
|
||||
# as the only positional argument, we use BRANCH_NAME.
|
||||
# Note: The --branch is required as session.posargs contains
|
||||
# the staged files passed in by pre-commit in most cases.
|
||||
if session.posargs and len(session.posargs) == 1:
|
||||
match = re.match(
|
||||
pattern=r'^--branch=([\w\.-]+)$', string=session.posargs[0].strip(),
|
||||
)
|
||||
if match:
|
||||
branch = match.groups()[0]
|
||||
|
||||
rewrites = [
|
||||
{
|
||||
'name': 'github',
|
||||
'pattern': re.compile(
|
||||
fr'((((http)|(https))://github\.com/{GITHUB_REPOSITORY}/((blob)|(tree))/)([\w\.-]+)/)', # noqa:E501
|
||||
),
|
||||
'replacement': fr'\2{branch}/',
|
||||
},
|
||||
{
|
||||
'name': 'nbviewer',
|
||||
'pattern': re.compile(
|
||||
fr'((((http)|(https))://nbviewer\.jupyter\.org/github/{GITHUB_REPOSITORY}/((blob)|(tree))/)([\w\.-]+)/)', # noqa:E501
|
||||
),
|
||||
'replacement': fr'\2{branch}/',
|
||||
},
|
||||
]
|
||||
|
||||
for expanded in _expand(*paths):
|
||||
with _line_by_line_replace(expanded) as (old_file, new_file):
|
||||
for line in old_file:
|
||||
for rewrite in rewrites:
|
||||
line = re.sub(rewrite['pattern'], rewrite['replacement'], line)
|
||||
new_file.write(line)
|
||||
|
||||
|
||||
def _expand(*patterns: str) -> Generator[str, None, None]:
|
||||
"""Expand glob patterns into paths.
|
||||
|
||||
Args:
|
||||
*patterns: the patterns to be expanded
|
||||
|
||||
Yields:
|
||||
expanded: a single expanded path
|
||||
""" # noqa:RST213
|
||||
for pattern in patterns:
|
||||
yield from glob.glob(pattern.strip())
|
||||
|
||||
|
||||
@contextlib.contextmanager
|
||||
def _line_by_line_replace(path: str) -> Generator[Tuple[IO, IO], None, None]:
|
||||
"""Replace/change the lines in a file one by one.
|
||||
|
||||
This generator function yields two file handles, one to the current file
|
||||
(i.e., `old_file`) and one to its replacement (i.e., `new_file`).
|
||||
|
||||
Usage: loop over the lines in `old_file` and write the files to be kept
|
||||
to `new_file`. Files not written to `new_file` are removed!
|
||||
|
||||
Args:
|
||||
path: the file whose lines are to be replaced
|
||||
|
||||
Yields:
|
||||
old_file, new_file: handles to a file and its replacement
|
||||
"""
|
||||
file_handle, new_file_path = tempfile.mkstemp()
|
||||
with os.fdopen(file_handle, 'w') as new_file:
|
||||
with open(path) as old_file:
|
||||
yield old_file, new_file
|
||||
|
||||
shutil.copymode(path, new_file_path)
|
||||
os.remove(path)
|
||||
shutil.move(new_file_path, path)
|
||||
|
||||
|
||||
@nox.session(name='init-project', python=PYTHON, venv_backend='none')
|
||||
def init_project(session):
|
||||
"""Install the pre-commit hooks."""
|
||||
|
@ -348,25 +445,27 @@ def init_project(session):
|
|||
|
||||
|
||||
@nox.session(name='clean-pwd', python=PYTHON, venv_backend='none')
|
||||
def clean_pwd(session):
|
||||
def clean_pwd(session): # noqa:WPS210,WPS231
|
||||
"""Remove (almost) all glob patterns listed in .gitignore.
|
||||
|
||||
The difference compared to `git clean -X` is that this task
|
||||
does not remove pyenv's .python-version file and poetry's
|
||||
virtual environment.
|
||||
"""
|
||||
exclude = frozenset(('.python-version', '.venv', 'venv'))
|
||||
exclude = frozenset(('.env', '.python-version', '.venv/', 'venv/'))
|
||||
|
||||
with open('.gitignore') as file_handle:
|
||||
paths = file_handle.readlines()
|
||||
|
||||
for path in paths:
|
||||
path = path.strip()
|
||||
if path.startswith('#') or path in exclude:
|
||||
for path in _expand(*paths):
|
||||
if path.startswith('#'):
|
||||
continue
|
||||
|
||||
for expanded in glob.glob(path):
|
||||
session.run(f'rm -rf {expanded}')
|
||||
for excluded in exclude:
|
||||
if path.startswith(excluded):
|
||||
break
|
||||
else:
|
||||
session.run('rm', '-rf', path)
|
||||
|
||||
|
||||
def _begin(session):
|
||||
|
@ -428,11 +527,11 @@ def _install_packages(session: Session, *packages_or_pip_args: str, **kwargs) ->
|
|||
|
||||
|
||||
# TODO (isort): Remove this fix after
|
||||
# upgrading to isort ^5.3.0 in pyproject.toml.
|
||||
# upgrading to isort ^5.5.4 in pyproject.toml.
|
||||
@contextlib.contextmanager
|
||||
def _isort_fix(session):
|
||||
"""Temporarily upgrade to isort 5.3.0."""
|
||||
session.install('isort==5.3.0')
|
||||
"""Temporarily upgrade to isort 5.5.4."""
|
||||
session.install('isort==5.5.4')
|
||||
try:
|
||||
yield
|
||||
finally:
|
||||
|
|
1399
poetry.lock
generated
1399
poetry.lock
generated
File diff suppressed because it is too large
Load diff
|
@ -9,7 +9,7 @@ target-version = ["py38"]
|
|||
|
||||
[tool.poetry]
|
||||
name = "urban-meal-delivery"
|
||||
version = "0.1.0"
|
||||
version = "0.2.0"
|
||||
|
||||
authors = ["Alexander Hess <alexander@webartifex.biz>"]
|
||||
description = "Optimizing an urban meal delivery platform"
|
||||
|
@ -27,7 +27,29 @@ repository = "https://github.com/webartifex/urban-meal-delivery"
|
|||
[tool.poetry.dependencies]
|
||||
python = "^3.8"
|
||||
|
||||
# Package => code developed in *.py files and packaged under src/urban_meal_delivery
|
||||
alembic = "^1.4.2"
|
||||
click = "^7.1.2"
|
||||
psycopg2 = "^2.8.5" # adapter for PostgreSQL
|
||||
python-dotenv = "^0.14.0"
|
||||
sqlalchemy = "^1.3.18"
|
||||
|
||||
# Jupyter Lab => notebooks with analyses using the developed package
|
||||
# IMPORTANT: must be kept in sync with the "research" extra below
|
||||
jupyterlab = { version="^2.2.2", optional=true }
|
||||
nb_black = { version="^1.0.7", optional=true }
|
||||
numpy = { version="^1.19.1", optional=true }
|
||||
pandas = { version="^1.1.0", optional=true }
|
||||
pytz = { version="^2020.1", optional=true }
|
||||
|
||||
[tool.poetry.extras]
|
||||
research = [
|
||||
"jupyterlab",
|
||||
"nb_black",
|
||||
"numpy",
|
||||
"pandas",
|
||||
"pytz",
|
||||
]
|
||||
|
||||
[tool.poetry.dev-dependencies]
|
||||
# Task Runners
|
||||
|
@ -37,7 +59,7 @@ pre-commit = "^2.6.0"
|
|||
# Code Formatters
|
||||
autoflake = "^1.3.1"
|
||||
black = "^19.10b0"
|
||||
isort = "^4.3.21" # TODO (isort): not ^5.2.2 due to pylint and wemake-python-styleguide
|
||||
isort = "^4.3.21" # TODO (isort): not ^5.5.4 due to wemake-python-styleguide
|
||||
|
||||
# (Static) Code Analyzers
|
||||
flake8 = "^3.8.3"
|
||||
|
@ -53,6 +75,7 @@ wemake-python-styleguide = "^0.14.1" # flake8 plug-in
|
|||
packaging = "^20.4" # used to test the packaged version
|
||||
pytest = "^6.0.1"
|
||||
pytest-cov = "^2.10.0"
|
||||
pytest-env = "^0.6.2"
|
||||
xdoctest = { version="^0.13.0", extras=["optional"] }
|
||||
|
||||
# Documentation
|
||||
|
|
52
setup.cfg
52
setup.cfg
|
@ -84,9 +84,11 @@ ignore =
|
|||
# If --ignore is passed on the command
|
||||
# line, still ignore the following:
|
||||
extend-ignore =
|
||||
# Too long line => duplicate with E501.
|
||||
B950,
|
||||
# Comply with black's style.
|
||||
# Source: https://github.com/psf/black/blob/master/docs/compatible_configs.md#flake8
|
||||
E203, W503,
|
||||
E203, W503, WPS348,
|
||||
# f-strings are ok.
|
||||
WPS305,
|
||||
# Classes should not have to specify a base class.
|
||||
|
@ -100,6 +102,20 @@ per-file-ignores =
|
|||
docs/conf.py:
|
||||
# Allow shadowing built-ins and reading __*__ variables.
|
||||
WPS125,WPS609,
|
||||
migrations/env.py:
|
||||
# Type annotations are not strictly enforced.
|
||||
ANN0, ANN2,
|
||||
migrations/versions/*.py:
|
||||
# Type annotations are not strictly enforced.
|
||||
ANN0, ANN2,
|
||||
# File names of revisions are ok.
|
||||
WPS114,WPS118,
|
||||
# Revisions may have too many expressions.
|
||||
WPS204,WPS213,
|
||||
# No overuse of string constants (e.g., 'RESTRICT').
|
||||
WPS226,
|
||||
# Too many noqa's are ok.
|
||||
WPS402,
|
||||
noxfile.py:
|
||||
# Type annotations are not strictly enforced.
|
||||
ANN0, ANN2,
|
||||
|
@ -109,6 +125,17 @@ per-file-ignores =
|
|||
WPS213,
|
||||
# No overuse of string constants (e.g., '--version').
|
||||
WPS226,
|
||||
# The noxfile is rather long => allow many noqa's.
|
||||
WPS402,
|
||||
src/urban_meal_delivery/configuration.py:
|
||||
# Allow upper case class variables within classes.
|
||||
WPS115,
|
||||
# Numbers are normal in config files.
|
||||
WPS432,
|
||||
src/urban_meal_delivery/db/addresses.py:
|
||||
WPS226,
|
||||
src/urban_meal_delivery/db/orders.py:
|
||||
WPS226,
|
||||
tests/*.py:
|
||||
# Type annotations are not strictly enforced.
|
||||
ANN0, ANN2,
|
||||
|
@ -116,8 +143,12 @@ per-file-ignores =
|
|||
S101,
|
||||
# Shadowing outer scopes occurs naturally with mocks.
|
||||
WPS442,
|
||||
# Modules may have many test cases.
|
||||
WPS202,WPS204,WPS214,
|
||||
# No overuse of string constants (e.g., '__version__').
|
||||
WPS226,
|
||||
# Numbers are normal in test cases as expected results.
|
||||
WPS432,
|
||||
|
||||
# Explicitly set mccabe's maximum complexity to 10 as recommended by
|
||||
# Thomas McCabe, the inventor of the McCabe complexity, and the NIST.
|
||||
|
@ -135,10 +166,10 @@ show-source = true
|
|||
# wemake-python-styleguide's settings
|
||||
# ===================================
|
||||
allowed-domain-names =
|
||||
obj,
|
||||
param,
|
||||
result,
|
||||
value,
|
||||
min-name-length = 3
|
||||
max-name-length = 40
|
||||
# darglint
|
||||
strictness = long
|
||||
|
@ -186,7 +217,15 @@ single_line_exclusions = typing
|
|||
[mypy]
|
||||
cache_dir = .cache/mypy
|
||||
|
||||
[mypy-nox.*,packaging,pytest]
|
||||
[mypy-dotenv]
|
||||
ignore_missing_imports = true
|
||||
[mypy-nox.*]
|
||||
ignore_missing_imports = true
|
||||
[mypy-packaging]
|
||||
ignore_missing_imports = true
|
||||
[mypy-pytest]
|
||||
ignore_missing_imports = true
|
||||
[mypy-sqlalchemy.*]
|
||||
ignore_missing_imports = true
|
||||
|
||||
|
||||
|
@ -199,6 +238,9 @@ disable =
|
|||
# We use TODO's to indicate locations in the source base
|
||||
# that must be worked on in the near future.
|
||||
fixme,
|
||||
# Too many false positives and cannot be disabled within a file.
|
||||
# Source: https://github.com/PyCQA/pylint/issues/214
|
||||
duplicate-code,
|
||||
# Comply with black's style.
|
||||
bad-continuation, bad-whitespace,
|
||||
# =====================
|
||||
|
@ -229,3 +271,7 @@ addopts =
|
|||
--strict-markers
|
||||
cache_dir = .cache/pytest
|
||||
console_output_style = count
|
||||
env =
|
||||
TESTING=true
|
||||
markers =
|
||||
e2e: integration tests, inlc., for example, tests touching a database
|
||||
|
|
|
@ -6,8 +6,11 @@ Example:
|
|||
True
|
||||
"""
|
||||
|
||||
import os as _os
|
||||
from importlib import metadata as _metadata
|
||||
|
||||
from urban_meal_delivery import configuration as _configuration
|
||||
|
||||
|
||||
try:
|
||||
_pkg_info = _metadata.metadata(__name__)
|
||||
|
@ -21,3 +24,14 @@ else:
|
|||
__author__ = _pkg_info['author']
|
||||
__pkg_name__ = _pkg_info['name']
|
||||
__version__ = _pkg_info['version']
|
||||
|
||||
|
||||
# Global `config` object to be used in the package.
|
||||
config: _configuration.Config = _configuration.make_config(
|
||||
'testing' if _os.getenv('TESTING') else 'production',
|
||||
)
|
||||
|
||||
|
||||
# Import `db` down here as it depends on `config`.
|
||||
# pylint:disable=wrong-import-position
|
||||
from urban_meal_delivery import db # noqa:E402,F401 isort:skip
|
||||
|
|
101
src/urban_meal_delivery/configuration.py
Normal file
101
src/urban_meal_delivery/configuration.py
Normal file
|
@ -0,0 +1,101 @@
|
|||
"""Provide package-wide configuration.
|
||||
|
||||
This module provides utils to create new `Config` objects
|
||||
on the fly, mainly for testing and migrating!
|
||||
|
||||
Within this package, use the `config` proxy at the package's top level
|
||||
to access the current configuration!
|
||||
"""
|
||||
|
||||
import datetime
|
||||
import os
|
||||
import random
|
||||
import string
|
||||
import warnings
|
||||
|
||||
import dotenv
|
||||
|
||||
|
||||
dotenv.load_dotenv()
|
||||
|
||||
|
||||
def random_schema_name() -> str:
|
||||
"""Generate a random PostgreSQL schema name for testing."""
|
||||
return 'temp_{name}'.format(
|
||||
name=''.join(
|
||||
(random.choice(string.ascii_lowercase) for _ in range(10)), # noqa:S311
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
class Config:
|
||||
"""Configuration that applies in all situations."""
|
||||
|
||||
# pylint:disable=too-few-public-methods
|
||||
|
||||
CUTOFF_DAY = datetime.datetime(2017, 2, 1)
|
||||
|
||||
# If a scheduled pre-order is made within this
|
||||
# time horizon, we treat it as an ad-hoc order.
|
||||
QUASI_AD_HOC_LIMIT = datetime.timedelta(minutes=45)
|
||||
|
||||
DATABASE_URI = os.getenv('DATABASE_URI')
|
||||
|
||||
# The PostgreSQL schema that holds the tables with the original data.
|
||||
ORIGINAL_SCHEMA = os.getenv('ORIGINAL_SCHEMA') or 'public'
|
||||
|
||||
# The PostgreSQL schema that holds the tables with the cleaned data.
|
||||
CLEAN_SCHEMA = os.getenv('CLEAN_SCHEMA') or 'clean'
|
||||
|
||||
ALEMBIC_TABLE = 'alembic_version'
|
||||
ALEMBIC_TABLE_SCHEMA = 'public'
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Non-literal text representation."""
|
||||
return '<configuration>'
|
||||
|
||||
|
||||
class ProductionConfig(Config):
|
||||
"""Configuration for the real dataset."""
|
||||
|
||||
# pylint:disable=too-few-public-methods
|
||||
|
||||
TESTING = False
|
||||
|
||||
|
||||
class TestingConfig(Config):
|
||||
"""Configuration for the test suite."""
|
||||
|
||||
# pylint:disable=too-few-public-methods
|
||||
|
||||
TESTING = True
|
||||
|
||||
DATABASE_URI = os.getenv('DATABASE_URI_TESTING') or Config.DATABASE_URI
|
||||
CLEAN_SCHEMA = os.getenv('CLEAN_SCHEMA_TESTING') or random_schema_name()
|
||||
|
||||
|
||||
def make_config(env: str = 'production') -> Config:
|
||||
"""Create a new `Config` object.
|
||||
|
||||
Args:
|
||||
env: either 'production' or 'testing'; defaults to the first
|
||||
|
||||
Returns:
|
||||
config: a namespace with all configurations
|
||||
|
||||
Raises:
|
||||
ValueError: if `env` is not as specified
|
||||
""" # noqa:DAR203
|
||||
config: Config
|
||||
if env.strip().lower() == 'production':
|
||||
config = ProductionConfig()
|
||||
elif env.strip().lower() == 'testing':
|
||||
config = TestingConfig()
|
||||
else:
|
||||
raise ValueError("Must be either 'production' or 'testing'")
|
||||
|
||||
# Without a PostgreSQL database the package cannot work.
|
||||
if config.DATABASE_URI is None:
|
||||
warnings.warn('Bad configurartion: no DATABASE_URI set in the environment')
|
||||
|
||||
return config
|
11
src/urban_meal_delivery/db/__init__.py
Normal file
11
src/urban_meal_delivery/db/__init__.py
Normal file
|
@ -0,0 +1,11 @@
|
|||
"""Provide the ORM models and a connection to the database."""
|
||||
|
||||
from urban_meal_delivery.db.addresses import Address # noqa:F401
|
||||
from urban_meal_delivery.db.cities import City # noqa:F401
|
||||
from urban_meal_delivery.db.connection import make_engine # noqa:F401
|
||||
from urban_meal_delivery.db.connection import make_session_factory # noqa:F401
|
||||
from urban_meal_delivery.db.couriers import Courier # noqa:F401
|
||||
from urban_meal_delivery.db.customers import Customer # noqa:F401
|
||||
from urban_meal_delivery.db.meta import Base # noqa:F401
|
||||
from urban_meal_delivery.db.orders import Order # noqa:F401
|
||||
from urban_meal_delivery.db.restaurants import Restaurant # noqa:F401
|
82
src/urban_meal_delivery/db/addresses.py
Normal file
82
src/urban_meal_delivery/db/addresses.py
Normal file
|
@ -0,0 +1,82 @@
|
|||
"""Provide the ORM's Address model."""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy.dialects import postgresql
|
||||
from sqlalchemy.ext import hybrid
|
||||
|
||||
from urban_meal_delivery.db import meta
|
||||
|
||||
|
||||
class Address(meta.Base):
|
||||
"""An Address of a Customer or a Restaurant on the UDP."""
|
||||
|
||||
__tablename__ = 'addresses'
|
||||
|
||||
# Columns
|
||||
id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) # noqa:WPS125
|
||||
_primary_id = sa.Column('primary_id', sa.Integer, nullable=False, index=True)
|
||||
created_at = sa.Column(sa.DateTime, nullable=False)
|
||||
place_id = sa.Column(
|
||||
sa.Unicode(length=120), nullable=False, index=True, # noqa:WPS432
|
||||
)
|
||||
latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False)
|
||||
longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False)
|
||||
_city_id = sa.Column('city_id', sa.SmallInteger, nullable=False, index=True)
|
||||
city_name = sa.Column('city', sa.Unicode(length=25), nullable=False) # noqa:WPS432
|
||||
zip_code = sa.Column(sa.Integer, nullable=False, index=True)
|
||||
street = sa.Column(sa.Unicode(length=80), nullable=False) # noqa:WPS432
|
||||
floor = sa.Column(sa.SmallInteger)
|
||||
|
||||
# Constraints
|
||||
__table_args__ = (
|
||||
sa.ForeignKeyConstraint(
|
||||
['primary_id'], ['addresses.id'], onupdate='RESTRICT', ondelete='RESTRICT',
|
||||
),
|
||||
sa.ForeignKeyConstraint(
|
||||
['city_id'], ['cities.id'], onupdate='RESTRICT', ondelete='RESTRICT',
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'-90 <= latitude AND latitude <= 90', name='latitude_between_90_degrees',
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'-180 <= longitude AND longitude <= 180',
|
||||
name='longitude_between_180_degrees',
|
||||
),
|
||||
sa.CheckConstraint(
|
||||
'30000 <= zip_code AND zip_code <= 99999', name='valid_zip_code',
|
||||
),
|
||||
sa.CheckConstraint('0 <= floor AND floor <= 40', name='realistic_floor'),
|
||||
)
|
||||
|
||||
# Relationships
|
||||
city = orm.relationship('City', back_populates='addresses')
|
||||
restaurant = orm.relationship('Restaurant', back_populates='address', uselist=False)
|
||||
orders_picked_up = orm.relationship(
|
||||
'Order',
|
||||
back_populates='pickup_address',
|
||||
foreign_keys='[Order._pickup_address_id]',
|
||||
)
|
||||
|
||||
orders_delivered = orm.relationship(
|
||||
'Order',
|
||||
back_populates='delivery_address',
|
||||
foreign_keys='[Order._delivery_address_id]',
|
||||
)
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Non-literal text representation."""
|
||||
return '<{cls}({street} in {city})>'.format(
|
||||
cls=self.__class__.__name__, street=self.street, city=self.city_name,
|
||||
)
|
||||
|
||||
@hybrid.hybrid_property
def is_primary(self) -> bool:
    """If an Address object is the earliest one entered at its location.

    Street addresses may have been entered several times with different
    versions/spellings of the street name and/or different floors.

    `is_primary` indicates the first in a group of addresses.
    """
    # As a hybrid property, this comparison also works as a SQL
    # expression when used in queries, not only on loaded instances.
    return self.id == self._primary_id
|
83
src/urban_meal_delivery/db/cities.py
Normal file
83
src/urban_meal_delivery/db/cities.py
Normal file
|
@ -0,0 +1,83 @@
|
|||
"""Provide the ORM's City model."""
|
||||
|
||||
from typing import Dict
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
from urban_meal_delivery.db import meta
|
||||
|
||||
|
||||
class City(meta.Base):
    """A City where the UDP operates in."""

    __tablename__ = 'cities'

    # Generic columns
    # `autoincrement=False`: the IDs come from the original dataset.
    id = sa.Column(  # noqa:WPS125
        sa.SmallInteger, primary_key=True, autoincrement=False,
    )
    name = sa.Column(sa.Unicode(length=10), nullable=False)
    # Raw KML text describing the city's boundaries.
    kml = sa.Column(sa.UnicodeText, nullable=False)

    # Google Maps related columns
    # The underscore-prefixed coordinate columns are private; they are
    # exposed via the `location` and `viewport` properties below.
    _center_latitude = sa.Column(
        'center_latitude', postgresql.DOUBLE_PRECISION, nullable=False,
    )
    _center_longitude = sa.Column(
        'center_longitude', postgresql.DOUBLE_PRECISION, nullable=False,
    )
    _northeast_latitude = sa.Column(
        'northeast_latitude', postgresql.DOUBLE_PRECISION, nullable=False,
    )
    _northeast_longitude = sa.Column(
        'northeast_longitude', postgresql.DOUBLE_PRECISION, nullable=False,
    )
    _southwest_latitude = sa.Column(
        'southwest_latitude', postgresql.DOUBLE_PRECISION, nullable=False,
    )
    _southwest_longitude = sa.Column(
        'southwest_longitude', postgresql.DOUBLE_PRECISION, nullable=False,
    )
    initial_zoom = sa.Column(sa.SmallInteger, nullable=False)

    # Relationships
    addresses = orm.relationship('Address', back_populates='city')

    def __repr__(self) -> str:
        """Non-literal text representation."""
        return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name)

    @property
    def location(self) -> Dict[str, float]:
        """GPS location of the city's center.

        Example:
            {"latitude": 48.856614, "longitude": 2.3522219}
        """
        return {
            'latitude': self._center_latitude,
            'longitude': self._center_longitude,
        }

    @property
    def viewport(self) -> Dict[str, Dict[str, float]]:
        """Google Maps viewport of the city.

        Example:
            {
                'northeast': {'latitude': 48.9021449, 'longitude': 2.4699208},
                'southwest': {'latitude': 48.815573, 'longitude': 2.225193},
            }
        """  # noqa:RST203
        return {
            'northeast': {
                'latitude': self._northeast_latitude,
                'longitude': self._northeast_longitude,
            },
            'southwest': {
                'latitude': self._southwest_latitude,
                'longitude': self._southwest_longitude,
            },
        }
|
17
src/urban_meal_delivery/db/connection.py
Normal file
17
src/urban_meal_delivery/db/connection.py
Normal file
|
@ -0,0 +1,17 @@
|
|||
"""Provide connection utils for the ORM layer."""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import engine
|
||||
from sqlalchemy import orm
|
||||
|
||||
import urban_meal_delivery
|
||||
|
||||
|
||||
def make_engine() -> engine.Engine:  # pragma: no cover
    """Provide a configured Engine object.

    The connection string comes from the package-level configuration
    (`DATABASE_URI`); no database connection is opened until first use.
    """
    return sa.create_engine(urban_meal_delivery.config.DATABASE_URI)
|
||||
|
||||
|
||||
def make_session_factory() -> orm.sessionmaker:  # pragma: no cover
    """Provide a configured Session factory.

    Returns:
        A `sessionmaker` bound to a freshly created engine;
        call the returned factory to obtain `Session` instances.
    """
    # Fix: the annotation previously claimed `orm.Session`, but this
    # function returns the *factory*, not a session instance.
    return orm.sessionmaker(bind=make_engine())
|
51
src/urban_meal_delivery/db/couriers.py
Normal file
51
src/urban_meal_delivery/db/couriers.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
"""Provide the ORM's Courier model."""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
from urban_meal_delivery.db import meta
|
||||
|
||||
|
||||
class Courier(meta.Base):
    """A Courier working for the UDP."""

    # pylint:disable=too-few-public-methods

    __tablename__ = 'couriers'

    # Columns
    # `autoincrement=False`: the IDs come from the original dataset.
    id = sa.Column(sa.Integer, primary_key=True, autoincrement=False)  # noqa:WPS125
    created_at = sa.Column(sa.DateTime, nullable=False)
    # Restricted to 'bicycle' or 'motorcycle' (see check constraint below).
    vehicle = sa.Column(sa.Unicode(length=10), nullable=False)
    # Stored under the plain name 'speed' in the database.
    historic_speed = sa.Column('speed', postgresql.DOUBLE_PRECISION, nullable=False)
    capacity = sa.Column(sa.SmallInteger, nullable=False)
    # NOTE(review): pay rates look like integer cent amounts -- confirm units.
    pay_per_hour = sa.Column(sa.SmallInteger, nullable=False)
    pay_per_order = sa.Column(sa.SmallInteger, nullable=False)

    # Constraints
    __table_args__ = (
        sa.CheckConstraint(
            "vehicle IN ('bicycle', 'motorcycle')", name='available_vehicle_types',
        ),
        # NOTE(review): presumably km/h given the 0-30 range -- confirm.
        sa.CheckConstraint('0 <= speed AND speed <= 30', name='realistic_speed'),
        sa.CheckConstraint(
            '0 <= capacity AND capacity <= 200', name='capacity_under_200_liters',
        ),
        sa.CheckConstraint(
            '0 <= pay_per_hour AND pay_per_hour <= 1500', name='realistic_pay_per_hour',
        ),
        sa.CheckConstraint(
            '0 <= pay_per_order AND pay_per_order <= 650',
            name='realistic_pay_per_order',
        ),
    )

    # Relationships
    orders = orm.relationship('Order', back_populates='courier')

    def __repr__(self) -> str:
        """Non-literal text representation."""
        return '<{cls}(#{courier_id})>'.format(
            cls=self.__class__.__name__, courier_id=self.id,
        )
|
26
src/urban_meal_delivery/db/customers.py
Normal file
26
src/urban_meal_delivery/db/customers.py
Normal file
|
@ -0,0 +1,26 @@
|
|||
"""Provide the ORM's Customer model."""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
from urban_meal_delivery.db import meta
|
||||
|
||||
|
||||
class Customer(meta.Base):
    """A Customer of the UDP."""

    # pylint:disable=too-few-public-methods

    __tablename__ = 'customers'

    # Columns
    # Only the dataset-provided ID is stored; no personal data is kept.
    id = sa.Column(sa.Integer, primary_key=True, autoincrement=False)  # noqa:WPS125

    def __repr__(self) -> str:
        """Non-literal text representation."""
        return '<{cls}(#{customer_id})>'.format(
            cls=self.__class__.__name__, customer_id=self.id,
        )

    # Relationships
    orders = orm.relationship('Order', back_populates='customer')
|
22
src/urban_meal_delivery/db/meta.py
Normal file
22
src/urban_meal_delivery/db/meta.py
Normal file
|
@ -0,0 +1,22 @@
|
|||
"""Provide the ORM's declarative base."""
|
||||
|
||||
from typing import Any
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy.ext import declarative
|
||||
|
||||
import urban_meal_delivery
|
||||
|
||||
|
||||
# Declarative base shared by all ORM models in this package.
# Every table lives in the configured `CLEAN_SCHEMA`, and the naming
# convention gives all constraints/indexes deterministic names so that
# Alembic migrations stay reproducible.
Base: Any = declarative.declarative_base(
    metadata=sa.MetaData(
        schema=urban_meal_delivery.config.CLEAN_SCHEMA,
        naming_convention={
            'pk': 'pk_%(table_name)s',  # noqa:WPS323
            'fk': 'fk_%(table_name)s_to_%(referred_table_name)s_via_%(column_0_N_name)s',  # noqa:E501,WPS323
            'uq': 'uq_%(table_name)s_on_%(column_0_N_name)s',  # noqa:WPS323
            'ix': 'ix_%(table_name)s_on_%(column_0_N_name)s',  # noqa:WPS323
            'ck': 'ck_%(table_name)s_on_%(constraint_name)s',  # noqa:WPS323
        },
    ),
)
|
526
src/urban_meal_delivery/db/orders.py
Normal file
526
src/urban_meal_delivery/db/orders.py
Normal file
|
@ -0,0 +1,526 @@
|
|||
"""Provide the ORM's Order model."""
|
||||
|
||||
import datetime
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
from sqlalchemy.dialects import postgresql
|
||||
|
||||
from urban_meal_delivery.db import meta
|
||||
|
||||
|
||||
class Order(meta.Base):  # noqa:WPS214
    """An Order by a Customer of the UDP."""

    __tablename__ = 'orders'

    # Generic columns
    # Underscore-prefixed attributes are mapped under their plain database
    # name; they are accessed via relationships/properties further down.
    id = sa.Column(sa.Integer, primary_key=True, autoincrement=False)  # noqa:WPS125
    _delivery_id = sa.Column('delivery_id', sa.Integer, index=True, unique=True)
    _customer_id = sa.Column('customer_id', sa.Integer, nullable=False, index=True)
    placed_at = sa.Column(sa.DateTime, nullable=False, index=True)
    # Ad-hoc orders are delivered ASAP; otherwise the order targets
    # `scheduled_delivery_at` (enforced by the check constraints below).
    ad_hoc = sa.Column(sa.Boolean, nullable=False)
    scheduled_delivery_at = sa.Column(sa.DateTime, index=True)
    scheduled_delivery_at_corrected = sa.Column(sa.Boolean, index=True)
    first_estimated_delivery_at = sa.Column(sa.DateTime)
    cancelled = sa.Column(sa.Boolean, nullable=False, index=True)
    cancelled_at = sa.Column(sa.DateTime)
    cancelled_at_corrected = sa.Column(sa.Boolean, index=True)

    # Price-related columns
    # NOTE(review): amounts look like integer cent values -- confirm units.
    sub_total = sa.Column(sa.Integer, nullable=False)
    delivery_fee = sa.Column(sa.SmallInteger, nullable=False)
    total = sa.Column(sa.Integer, nullable=False)

    # Restaurant-related columns
    _restaurant_id = sa.Column(
        'restaurant_id', sa.SmallInteger, nullable=False, index=True,
    )
    restaurant_notified_at = sa.Column(sa.DateTime)
    restaurant_notified_at_corrected = sa.Column(sa.Boolean, index=True)
    restaurant_confirmed_at = sa.Column(sa.DateTime)
    restaurant_confirmed_at_corrected = sa.Column(sa.Boolean, index=True)
    # Durations are in seconds (constrained to whole minutes below).
    estimated_prep_duration = sa.Column(sa.Integer, index=True)
    estimated_prep_duration_corrected = sa.Column(sa.Boolean, index=True)
    estimated_prep_buffer = sa.Column(sa.Integer, nullable=False, index=True)

    # Dispatch-related columns
    _courier_id = sa.Column('courier_id', sa.Integer, index=True)
    dispatch_at = sa.Column(sa.DateTime)
    dispatch_at_corrected = sa.Column(sa.Boolean, index=True)
    courier_notified_at = sa.Column(sa.DateTime)
    courier_notified_at_corrected = sa.Column(sa.Boolean, index=True)
    courier_accepted_at = sa.Column(sa.DateTime)
    courier_accepted_at_corrected = sa.Column(sa.Boolean, index=True)
    # Percentage 0-100 (see check constraint below).
    utilization = sa.Column(sa.SmallInteger, nullable=False)

    # Pickup-related columns
    _pickup_address_id = sa.Column(
        'pickup_address_id', sa.Integer, nullable=False, index=True,
    )
    reached_pickup_at = sa.Column(sa.DateTime)
    pickup_at = sa.Column(sa.DateTime)
    pickup_at_corrected = sa.Column(sa.Boolean, index=True)
    pickup_not_confirmed = sa.Column(sa.Boolean)
    left_pickup_at = sa.Column(sa.DateTime)
    left_pickup_at_corrected = sa.Column(sa.Boolean, index=True)

    # Delivery-related columns
    _delivery_address_id = sa.Column(
        'delivery_address_id', sa.Integer, nullable=False, index=True,
    )
    reached_delivery_at = sa.Column(sa.DateTime)
    delivery_at = sa.Column(sa.DateTime)
    delivery_at_corrected = sa.Column(sa.Boolean, index=True)
    delivery_not_confirmed = sa.Column(sa.Boolean)
    _courier_waited_at_delivery = sa.Column('courier_waited_at_delivery', sa.Boolean)

    # Statistical columns
    logged_delivery_distance = sa.Column(sa.SmallInteger, nullable=True)
    logged_avg_speed = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True)
    logged_avg_speed_distance = sa.Column(sa.SmallInteger, nullable=True)

    # Constraints
    __table_args__ = (
        sa.ForeignKeyConstraint(
            ['customer_id'], ['customers.id'], onupdate='RESTRICT', ondelete='RESTRICT',
        ),
        sa.ForeignKeyConstraint(
            ['restaurant_id'],
            ['restaurants.id'],
            onupdate='RESTRICT',
            ondelete='RESTRICT',
        ),
        sa.ForeignKeyConstraint(
            ['courier_id'], ['couriers.id'], onupdate='RESTRICT', ondelete='RESTRICT',
        ),
        sa.ForeignKeyConstraint(
            ['pickup_address_id'],
            ['addresses.id'],
            onupdate='RESTRICT',
            ondelete='RESTRICT',
        ),
        sa.ForeignKeyConstraint(
            ['delivery_address_id'],
            ['addresses.id'],
            onupdate='RESTRICT',
            ondelete='RESTRICT',
        ),
        # An order is either ad-hoc or scheduled, never both.
        sa.CheckConstraint(
            """
                (ad_hoc IS TRUE AND scheduled_delivery_at IS NULL)
                OR
                (ad_hoc IS FALSE AND scheduled_delivery_at IS NOT NULL)
            """,
            name='either_ad_hoc_or_scheduled_order',
        ),
        # Ad-hoc orders may only be placed during business hours (11-22).
        sa.CheckConstraint(
            """
                NOT (
                    ad_hoc IS TRUE
                    AND (
                        EXTRACT(HOUR FROM placed_at) < 11
                        OR
                        EXTRACT(HOUR FROM placed_at) > 22
                    )
                )
            """,
            name='ad_hoc_orders_within_business_hours',
        ),
        # Scheduled deliveries must also fall into business hours;
        # 11:45 is the earliest allowed delivery slot.
        sa.CheckConstraint(
            """
                NOT (
                    ad_hoc IS FALSE
                    AND (
                        (
                            EXTRACT(HOUR FROM scheduled_delivery_at) <= 11
                            AND
                            NOT (
                                EXTRACT(HOUR FROM scheduled_delivery_at) = 11
                                AND
                                EXTRACT(MINUTE FROM scheduled_delivery_at) = 45
                            )
                        )
                        OR
                        EXTRACT(HOUR FROM scheduled_delivery_at) > 22
                    )
                )
            """,
            name='scheduled_orders_within_business_hours',
        ),
        # A scheduled delivery must be at least 30 minutes after placement.
        sa.CheckConstraint(
            """
                NOT (
                    EXTRACT(EPOCH FROM scheduled_delivery_at - placed_at) < 1800
                )
            """,
            name='scheduled_orders_not_within_30_minutes',
        ),
        sa.CheckConstraint(
            """
                NOT (
                    cancelled IS FALSE
                    AND
                    cancelled_at IS NOT NULL
                )
            """,
            name='only_cancelled_orders_may_have_cancelled_at',
        ),
        sa.CheckConstraint(
            """
                NOT (
                    cancelled IS TRUE
                    AND
                    delivery_at IS NOT NULL
                )
            """,
            name='cancelled_orders_must_not_be_delivered',
        ),
        # Duration columns hold seconds, capped and in whole minutes.
        sa.CheckConstraint(
            '0 <= estimated_prep_duration AND estimated_prep_duration <= 2700',
            name='estimated_prep_duration_between_0_and_2700',
        ),
        sa.CheckConstraint(
            'estimated_prep_duration % 60 = 0',
            name='estimated_prep_duration_must_be_whole_minutes',
        ),
        sa.CheckConstraint(
            '0 <= estimated_prep_buffer AND estimated_prep_buffer <= 900',
            name='estimated_prep_buffer_between_0_and_900',
        ),
        sa.CheckConstraint(
            'estimated_prep_buffer % 60 = 0',
            name='estimated_prep_buffer_must_be_whole_minutes',
        ),
        sa.CheckConstraint(
            '0 <= utilization AND utilization <= 100',
            name='utilization_between_0_and_100',
        ),
        # A `*_corrected` flag may only be TRUE/FALSE if the value itself
        # is set; TRUE alone is allowed (value was corrected to NULL).
        *(
            sa.CheckConstraint(
                f"""
                    ({column} IS NULL AND {column}_corrected IS NULL)
                    OR
                    ({column} IS NULL AND {column}_corrected IS TRUE)
                    OR
                    ({column} IS NOT NULL AND {column}_corrected IS NOT NULL)
                """,
                name=f'corrections_only_for_set_value_{index}',
            )
            for index, column in enumerate(
                (
                    'scheduled_delivery_at',
                    'cancelled_at',
                    'restaurant_notified_at',
                    'restaurant_confirmed_at',
                    'estimated_prep_duration',
                    'dispatch_at',
                    'courier_notified_at',
                    'courier_accepted_at',
                    'pickup_at',
                    'left_pickup_at',
                    'delivery_at',
                ),
            )
        ),
        # `*_not_confirmed` flags only make sense for events that happened.
        *(
            sa.CheckConstraint(
                f"""
                    ({event}_at IS NULL AND {event}_not_confirmed IS NULL)
                    OR
                    ({event}_at IS NOT NULL AND {event}_not_confirmed IS NOT NULL)
                """,
                name=f'{event}_not_confirmed_only_if_{event}',
            )
            for event in ('pickup', 'delivery')
        ),
        sa.CheckConstraint(
            """
                (delivery_at IS NULL AND courier_waited_at_delivery IS NULL)
                OR
                (delivery_at IS NOT NULL AND courier_waited_at_delivery IS NOT NULL)
            """,
            name='courier_waited_at_delivery_only_if_delivery',
        ),
        # Pairwise ordering of all timestamps along the order lifecycle.
        *(
            sa.CheckConstraint(
                constraint, name='ordered_timestamps_{index}'.format(index=index),
            )
            for index, constraint in enumerate(
                (
                    'placed_at < scheduled_delivery_at',
                    'placed_at < first_estimated_delivery_at',
                    'placed_at < cancelled_at',
                    'placed_at < restaurant_notified_at',
                    'placed_at < restaurant_confirmed_at',
                    'placed_at < dispatch_at',
                    'placed_at < courier_notified_at',
                    'placed_at < courier_accepted_at',
                    'placed_at < reached_pickup_at',
                    'placed_at < left_pickup_at',
                    'placed_at < reached_delivery_at',
                    'placed_at < delivery_at',
                    'cancelled_at > restaurant_notified_at',
                    'cancelled_at > restaurant_confirmed_at',
                    'cancelled_at > dispatch_at',
                    'cancelled_at > courier_notified_at',
                    'cancelled_at > courier_accepted_at',
                    'cancelled_at > reached_pickup_at',
                    'cancelled_at > pickup_at',
                    'cancelled_at > left_pickup_at',
                    'cancelled_at > reached_delivery_at',
                    'cancelled_at > delivery_at',
                    'restaurant_notified_at < restaurant_confirmed_at',
                    'restaurant_notified_at < pickup_at',
                    'restaurant_confirmed_at < pickup_at',
                    'dispatch_at < courier_notified_at',
                    'dispatch_at < courier_accepted_at',
                    'dispatch_at < reached_pickup_at',
                    'dispatch_at < pickup_at',
                    'dispatch_at < left_pickup_at',
                    'dispatch_at < reached_delivery_at',
                    'dispatch_at < delivery_at',
                    'courier_notified_at < courier_accepted_at',
                    'courier_notified_at < reached_pickup_at',
                    'courier_notified_at < pickup_at',
                    'courier_notified_at < left_pickup_at',
                    'courier_notified_at < reached_delivery_at',
                    'courier_notified_at < delivery_at',
                    'courier_accepted_at < reached_pickup_at',
                    'courier_accepted_at < pickup_at',
                    'courier_accepted_at < left_pickup_at',
                    'courier_accepted_at < reached_delivery_at',
                    'courier_accepted_at < delivery_at',
                    'reached_pickup_at < pickup_at',
                    'reached_pickup_at < left_pickup_at',
                    'reached_pickup_at < reached_delivery_at',
                    'reached_pickup_at < delivery_at',
                    'pickup_at < left_pickup_at',
                    'pickup_at < reached_delivery_at',
                    'pickup_at < delivery_at',
                    'left_pickup_at < reached_delivery_at',
                    'left_pickup_at < delivery_at',
                    'reached_delivery_at < delivery_at',
                ),
            )
        ),
    )

    # Relationships
    customer = orm.relationship('Customer', back_populates='orders')
    restaurant = orm.relationship('Restaurant', back_populates='orders')
    courier = orm.relationship('Courier', back_populates='orders')
    pickup_address = orm.relationship(
        'Address',
        back_populates='orders_picked_up',
        foreign_keys='[Order._pickup_address_id]',
    )
    delivery_address = orm.relationship(
        'Address',
        back_populates='orders_delivered',
        foreign_keys='[Order._delivery_address_id]',
    )

    # Convenience properties
||||
@property
def scheduled(self) -> bool:
    """Whether the order targets a future delivery slot.

    Logical inverse of `Order.ad_hoc`.
    """
    return not self.ad_hoc
|
||||
|
||||
@property
def completed(self) -> bool:
    """Whether the order was actually fulfilled.

    Logical inverse of `Order.cancelled`.
    """
    return not self.cancelled
|
||||
|
||||
@property
def corrected(self) -> bool:
    """If any timestamp was corrected as compared to the original data.

    Returns:
        True if at least one `*_corrected` flag is truthy.
    """
    # Fix: the previous `or`-chain could return `None` (when all flags
    # are unset/falsy) despite the `bool` annotation; `any()` always
    # yields a strict `bool` with the same truthiness.
    return any(
        (
            self.scheduled_delivery_at_corrected,
            self.cancelled_at_corrected,
            self.restaurant_notified_at_corrected,
            self.restaurant_confirmed_at_corrected,
            self.dispatch_at_corrected,
            self.courier_notified_at_corrected,
            self.courier_accepted_at_corrected,
            self.pickup_at_corrected,
            self.left_pickup_at_corrected,
            self.delivery_at_corrected,
        ),
    )
|
||||
|
||||
# Timing-related properties
|
||||
|
||||
@property
def time_to_accept(self) -> datetime.timedelta:
    """Duration from dispatch until the courier accepted the order.

    Includes the time the UDP needed to notify a courier, i.e. it is a
    superset of `Order.time_to_react`.

    Raises:
        RuntimeError: `dispatch_at` or `courier_accepted_at` not set.
    """
    if not self.dispatch_at:
        raise RuntimeError('dispatch_at is not set')
    if not self.courier_accepted_at:
        raise RuntimeError('courier_accepted_at is not set')
    acceptance_duration = self.courier_accepted_at - self.dispatch_at
    return acceptance_duration
|
||||
|
||||
@property
def time_to_react(self) -> datetime.timedelta:
    """Duration the courier took to accept after being notified.

    This time is a subset of `Order.time_to_accept`.

    Raises:
        RuntimeError: `courier_notified_at` or `courier_accepted_at` not set.
    """
    if not self.courier_notified_at:
        raise RuntimeError('courier_notified_at is not set')
    if not self.courier_accepted_at:
        raise RuntimeError('courier_accepted_at is not set')
    reaction_time = self.courier_accepted_at - self.courier_notified_at
    return reaction_time
|
||||
|
||||
@property
def time_to_pickup(self) -> datetime.timedelta:
    """Duration from the courier's acceptance to arrival at the pickup.

    Raises:
        RuntimeError: `courier_accepted_at` or `reached_pickup_at` not set.
    """
    if not self.courier_accepted_at:
        raise RuntimeError('courier_accepted_at is not set')
    if not self.reached_pickup_at:
        raise RuntimeError('reached_pickup_at is not set')
    travel_time = self.reached_pickup_at - self.courier_accepted_at
    return travel_time
|
||||
|
||||
@property
def time_at_pickup(self) -> datetime.timedelta:
    """Duration the courier stayed at the pickup location.

    Raises:
        RuntimeError: `reached_pickup_at` or `pickup_at` not set.
    """
    if not self.reached_pickup_at:
        raise RuntimeError('reached_pickup_at is not set')
    if not self.pickup_at:
        raise RuntimeError('pickup_at is not set')
    waiting_time = self.pickup_at - self.reached_pickup_at
    return waiting_time
|
||||
|
||||
@property
def scheduled_pickup_at(self) -> datetime.datetime:
    """Point in time at which the pickup was scheduled.

    Computed as the restaurant notification time plus the estimated
    preparation duration (in seconds).

    Raises:
        RuntimeError: `restaurant_notified_at` or
            `estimated_prep_duration` not set.
    """
    if not self.restaurant_notified_at:
        raise RuntimeError('restaurant_notified_at is not set')
    if not self.estimated_prep_duration:
        raise RuntimeError('estimated_prep_duration is not set')
    prep_time = datetime.timedelta(seconds=self.estimated_prep_duration)
    return self.restaurant_notified_at + prep_time
|
||||
|
||||
@property
def courier_early(self) -> datetime.timedelta:
    """Time by which a courier is early for pickup.

    Measured relative to Order.scheduled_pickup_at.

    0 if the courier is on time or late.

    Goes together with Order.courier_late.

    Raises:
        RuntimeError: `reached_pickup_at` is not set (in addition to the
            prerequisites of `Order.scheduled_pickup_at`).
    """
    # Fix: guard against an unset timestamp so the failure is the same
    # explicit RuntimeError as in the sibling timing properties instead
    # of a TypeError from subtracting `None`.
    if not self.reached_pickup_at:
        raise RuntimeError('reached_pickup_at is not set')
    return max(
        datetime.timedelta(), self.scheduled_pickup_at - self.reached_pickup_at,
    )
|
||||
|
||||
@property
def courier_late(self) -> datetime.timedelta:
    """Time by which a courier is late for pickup.

    Measured relative to Order.scheduled_pickup_at.

    0 if the courier is on time or early.

    Goes together with Order.courier_early.

    Raises:
        RuntimeError: `reached_pickup_at` is not set (in addition to the
            prerequisites of `Order.scheduled_pickup_at`).
    """
    # Fix: explicit guard instead of a TypeError from `None` arithmetic,
    # consistent with the sibling timing properties.
    if not self.reached_pickup_at:
        raise RuntimeError('reached_pickup_at is not set')
    return max(
        datetime.timedelta(), self.reached_pickup_at - self.scheduled_pickup_at,
    )
|
||||
|
||||
@property
def restaurant_early(self) -> datetime.timedelta:
    """Time by which a restaurant is early for pickup.

    Measured relative to Order.scheduled_pickup_at.

    0 if the restaurant is on time or late.

    Goes together with Order.restaurant_late.

    Raises:
        RuntimeError: `pickup_at` is not set (in addition to the
            prerequisites of `Order.scheduled_pickup_at`).
    """
    # Fix: explicit guard instead of a TypeError from `None` arithmetic,
    # consistent with the sibling timing properties.
    if not self.pickup_at:
        raise RuntimeError('pickup_at is not set')
    return max(datetime.timedelta(), self.scheduled_pickup_at - self.pickup_at)
|
||||
|
||||
@property
def restaurant_late(self) -> datetime.timedelta:
    """Time by which a restaurant is late for pickup.

    Measured relative to Order.scheduled_pickup_at.

    0 if the restaurant is on time or early.

    Goes together with Order.restaurant_early.

    Raises:
        RuntimeError: `pickup_at` is not set (in addition to the
            prerequisites of `Order.scheduled_pickup_at`).
    """
    # Fix: explicit guard instead of a TypeError from `None` arithmetic,
    # consistent with the sibling timing properties.
    if not self.pickup_at:
        raise RuntimeError('pickup_at is not set')
    return max(datetime.timedelta(), self.pickup_at - self.scheduled_pickup_at)
|
||||
|
||||
@property
def time_to_delivery(self) -> datetime.timedelta:
    """Duration the courier took from pickup to the delivery location.

    Raises:
        RuntimeError: `pickup_at` or `reached_delivery_at` not set.
    """
    if not self.pickup_at:
        raise RuntimeError('pickup_at is not set')
    if not self.reached_delivery_at:
        raise RuntimeError('reached_delivery_at is not set')
    travel_time = self.reached_delivery_at - self.pickup_at
    return travel_time
|
||||
|
||||
@property
def time_at_delivery(self) -> datetime.timedelta:
    """Duration the courier stayed at the delivery location.

    Raises:
        RuntimeError: `reached_delivery_at` or `delivery_at` not set.
    """
    if not self.reached_delivery_at:
        raise RuntimeError('reached_delivery_at is not set')
    if not self.delivery_at:
        raise RuntimeError('delivery_at is not set')
    handover_time = self.delivery_at - self.reached_delivery_at
    return handover_time
|
||||
|
||||
@property
def courier_waited_at_delivery(self) -> datetime.timedelta:
    """Time the courier spent waiting at the delivery location.

    Zero unless the underlying flag marks the courier as having waited.
    """
    waited = bool(self._courier_waited_at_delivery)
    return self.time_at_delivery if waited else datetime.timedelta()
|
||||
|
||||
@property
def delivery_early(self) -> datetime.timedelta:
    """Time by which a scheduled order arrived ahead of its slot.

    Measured relative to `Order.scheduled_delivery_at`;
    zero when the delivery is on time or late.
    Counterpart of `Order.delivery_late`.

    Raises:
        AttributeError: the order is ad-hoc, not scheduled.
    """
    if not self.scheduled:
        raise AttributeError('Makes sense only for scheduled orders')
    head_start = self.scheduled_delivery_at - self.delivery_at
    return max(datetime.timedelta(), head_start)
|
||||
|
||||
@property
def delivery_late(self) -> datetime.timedelta:
    """Time by which a scheduled order arrived after its slot.

    Measured relative to `Order.scheduled_delivery_at`;
    zero when the delivery is on time or early.
    Counterpart of `Order.delivery_early`.

    Raises:
        AttributeError: the order is ad-hoc, not scheduled.
    """
    if not self.scheduled:
        raise AttributeError('Makes sense only for scheduled orders')
    lateness = self.delivery_at - self.scheduled_delivery_at
    return max(datetime.timedelta(), lateness)
|
||||
|
||||
@property
def total_time(self) -> datetime.timedelta:
    """Time from order placement to delivery for an ad-hoc order.

    Raises:
        AttributeError: the order is scheduled, not ad-hoc.
        RuntimeError: the order was cancelled or `delivery_at` is not set.
    """
    if self.scheduled:
        raise AttributeError('Scheduled orders have no total_time')
    if self.cancelled:
        raise RuntimeError('Cancelled orders have no total_time')
    # Fix: an in-flight (not yet delivered) order previously raised a
    # bare TypeError from `None` arithmetic; fail explicitly instead,
    # consistent with the other timing properties.
    if not self.delivery_at:
        raise RuntimeError('delivery_at is not set')
    return self.delivery_at - self.placed_at
|
||||
|
||||
# Other Methods
|
||||
|
||||
def __repr__(self) -> str:
    """Non-literal text representation."""
    template = '<{cls}(#{order_id})>'
    return template.format(cls=self.__class__.__name__, order_id=self.id)
|
42
src/urban_meal_delivery/db/restaurants.py
Normal file
42
src/urban_meal_delivery/db/restaurants.py
Normal file
|
@ -0,0 +1,42 @@
|
|||
"""Provide the ORM's Restaurant model."""
|
||||
|
||||
import sqlalchemy as sa
|
||||
from sqlalchemy import orm
|
||||
|
||||
from urban_meal_delivery.db import meta
|
||||
|
||||
|
||||
class Restaurant(meta.Base):
    """A Restaurant selling meals on the UDP."""

    # pylint:disable=too-few-public-methods

    __tablename__ = 'restaurants'

    # Columns
    # `autoincrement=False`: the IDs come from the original dataset.
    id = sa.Column(  # noqa:WPS125
        sa.SmallInteger, primary_key=True, autoincrement=False,
    )
    created_at = sa.Column(sa.DateTime, nullable=False)
    name = sa.Column(sa.Unicode(length=45), nullable=False)  # noqa:WPS432
    # Private FK column; exposed via the `address` relationship below.
    _address_id = sa.Column('address_id', sa.Integer, nullable=False, index=True)
    # In seconds (0-2400 per the check constraint below).
    estimated_prep_duration = sa.Column(sa.SmallInteger, nullable=False)

    # Constraints
    __table_args__ = (
        sa.ForeignKeyConstraint(
            ['address_id'], ['addresses.id'], onupdate='RESTRICT', ondelete='RESTRICT',
        ),
        sa.CheckConstraint(
            '0 <= estimated_prep_duration AND estimated_prep_duration <= 2400',
            name='realistic_estimated_prep_duration',
        ),
    )

    # Relationships
    address = orm.relationship('Address', back_populates='restaurant')
    orders = orm.relationship('Order', back_populates='restaurant')

    def __repr__(self) -> str:
        """Non-literal text representation."""
        return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name)
|
12
tests/conftest.py
Normal file
12
tests/conftest.py
Normal file
|
@ -0,0 +1,12 @@
|
|||
"""Utils for testing the entire package."""
|
||||
|
||||
import os
|
||||
|
||||
from urban_meal_delivery import config
|
||||
|
||||
|
||||
# Guard: refuse to run the suite against a non-test configuration --
# the database fixtures create and drop entire schemas.
if not os.getenv('TESTING'):
    raise RuntimeError('Tests must be executed with TESTING set in the environment')

# Double-check that the package actually picked up the testing config.
if not config.TESTING:
    raise RuntimeError('The testing configuration was not loaded')
|
1
tests/db/__init__.py
Normal file
1
tests/db/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
"""Test the ORM layer."""
|
263
tests/db/conftest.py
Normal file
263
tests/db/conftest.py
Normal file
|
@ -0,0 +1,263 @@
|
|||
"""Utils for testing the ORM layer."""
|
||||
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
from alembic import command as migrations_cmd
|
||||
from alembic import config as migrations_config
|
||||
|
||||
from urban_meal_delivery import config
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
@pytest.fixture(scope='session', params=['all_at_once', 'sequentially'])
def db_engine(request):
    """Create all tables given the ORM models.

    The tables are put into a distinct PostgreSQL schema
    that is removed after all tests are over.

    The engine used to do that is yielded.

    There are two modes for this fixture:

    - "all_at_once": build up the tables all at once with MetaData.create_all()
    - "sequentially": build up the tables sequentially with `alembic upgrade head`

    This ensures that Alembic's migration files are consistent.
    """
    engine = db.make_engine()

    if request.param == 'all_at_once':
        engine.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};')
        db.Base.metadata.create_all(engine)
    else:
        # Replay every migration file against a fresh database.
        cfg = migrations_config.Config('alembic.ini')
        migrations_cmd.upgrade(cfg, 'head')

    try:
        yield engine

    finally:
        # Always remove the temporary schema, even on test failure.
        engine.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;')

        if request.param == 'sequentially':
            # Alembic's version table lives outside the dropped schema
            # and must be removed separately.
            tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}'
            engine.execute(
                f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};',
            )
|
||||
|
||||
|
||||
@pytest.fixture
def db_session(db_engine):
    """A SQLAlchemy session that rolls back everything after a test case."""
    connection = db_engine.connect()
    # Begin the outer most transaction
    # that is rolled back at the end of the test.
    transaction = connection.begin()
    # Create a session bound on the same connection as the transaction.
    # Using any other session would not work.
    Session = db.make_session_factory()  # noqa:N806
    session = Session(bind=connection)

    try:
        yield session

    finally:
        # Discard all changes made during the test, then free the connection.
        session.close()
        transaction.rollback()
        connection.close()
|
||||
|
||||
|
||||
@pytest.fixture
def address_data():
    """Raw attributes to build an `Address` located in Paris."""
    data = {
        'id': 1,
        '_primary_id': 1,  # => "itself"
        'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5),
        'place_id': 'ChIJxSr71vZt5kcRoFHY4caCCxw',
        'latitude': 48.85313,
        'longitude': 2.37461,
        '_city_id': 1,
        'city_name': 'St. German',
        # NOTE(review): passed as `str` although the column is an Integer;
        # the database coerces it -- confirm this is intended.
        'zip_code': '75011',
        'street': '42 Rue De Charonne',
        'floor': None,
    }
    return data
|
||||
|
||||
|
||||
@pytest.fixture
def address(address_data, city):
    """An `Address` object wired to the `city` fixture."""
    obj = db.Address(**address_data)
    obj.city = city
    return obj
|
||||
|
||||
|
||||
@pytest.fixture
def address2_data():
    """Raw attributes for a second `Address` in Paris."""
    data = {
        'id': 2,
        '_primary_id': 2,  # => "itself"
        'created_at': datetime.datetime(2020, 1, 2, 4, 5, 6),
        'place_id': 'ChIJs-9a6QZy5kcRY8Wwk9Ywzl8',
        'latitude': 48.852196,
        'longitude': 2.373937,
        '_city_id': 1,
        'city_name': 'Paris',
        # NOTE(review): passed as `str` although the column is an Integer;
        # the database coerces it -- confirm this is intended.
        'zip_code': '75011',
        'street': 'Rue De Charonne 3',
        'floor': 2,
    }
    return data
|
||||
|
||||
|
||||
@pytest.fixture
def address2(address2_data, city):
    """An Address object."""
    obj = db.Address(**address2_data)
    obj.city = city
    return obj
|
||||
|
||||
|
||||
@pytest.fixture
def city_data():
    """The data for the City object modeling Paris."""
    return dict(
        id=1,
        name='Paris',
        kml="<?xml version='1.0' encoding='UTF-8'?> ...",
        # Center point and viewport corners of the city on a map.
        _center_latitude=48.856614,
        _center_longitude=2.3522219,
        _northeast_latitude=48.9021449,
        _northeast_longitude=2.4699208,
        _southwest_latitude=48.815573,
        _southwest_longitude=2.225193,
        initial_zoom=12,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def city(city_data):
    """A City object."""
    obj = db.City(**city_data)
    return obj
|
||||
|
||||
|
||||
@pytest.fixture
def courier_data():
    """The data for a Courier object."""
    return dict(
        id=1,
        created_at=datetime.datetime(2020, 1, 2, 3, 4, 5),
        vehicle='bicycle',
        historic_speed=7.89,
        capacity=100,
        pay_per_hour=750,
        pay_per_order=200,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def courier(courier_data):
    """A Courier object."""
    obj = db.Courier(**courier_data)
    return obj
|
||||
|
||||
|
||||
@pytest.fixture
def customer_data():
    """The data for the Customer object."""
    return dict(id=1)
|
||||
|
||||
|
||||
@pytest.fixture
def customer(customer_data):
    """A Customer object."""
    obj = db.Customer(**customer_data)
    return obj
|
||||
|
||||
|
||||
@pytest.fixture
def order_data():
    """The data for an ad-hoc Order object."""
    return dict(
        id=1,
        _delivery_id=1,
        _customer_id=1,
        # Ad-hoc orders are delivered as soon as possible
        # and therefore have no scheduled delivery time.
        placed_at=datetime.datetime(2020, 1, 2, 11, 55, 11),
        ad_hoc=True,
        scheduled_delivery_at=None,
        scheduled_delivery_at_corrected=None,
        first_estimated_delivery_at=datetime.datetime(2020, 1, 2, 12, 35, 0),
        cancelled=False,
        cancelled_at=None,
        cancelled_at_corrected=None,
        # Monetary amounts are stored in cents.
        sub_total=2000,
        delivery_fee=250,
        total=2250,
        # Restaurant-side timestamps.
        _restaurant_id=1,
        restaurant_notified_at=datetime.datetime(2020, 1, 2, 12, 5, 5),
        restaurant_notified_at_corrected=False,
        restaurant_confirmed_at=datetime.datetime(2020, 1, 2, 12, 5, 25),
        restaurant_confirmed_at_corrected=False,
        estimated_prep_duration=900,
        estimated_prep_duration_corrected=False,
        estimated_prep_buffer=480,
        # Courier-side timestamps.
        _courier_id=1,
        dispatch_at=datetime.datetime(2020, 1, 2, 12, 5, 1),
        dispatch_at_corrected=False,
        courier_notified_at=datetime.datetime(2020, 1, 2, 12, 6, 2),
        courier_notified_at_corrected=False,
        courier_accepted_at=datetime.datetime(2020, 1, 2, 12, 6, 17),
        courier_accepted_at_corrected=False,
        utilization=50,
        # Pickup leg.
        _pickup_address_id=1,
        reached_pickup_at=datetime.datetime(2020, 1, 2, 12, 16, 21),
        pickup_at=datetime.datetime(2020, 1, 2, 12, 18, 1),
        pickup_at_corrected=False,
        pickup_not_confirmed=False,
        left_pickup_at=datetime.datetime(2020, 1, 2, 12, 19, 45),
        left_pickup_at_corrected=False,
        # Delivery leg.
        _delivery_address_id=2,
        reached_delivery_at=datetime.datetime(2020, 1, 2, 12, 27, 33),
        delivery_at=datetime.datetime(2020, 1, 2, 12, 29, 55),
        delivery_at_corrected=False,
        delivery_not_confirmed=False,
        _courier_waited_at_delivery=False,
        # Logged statistics.
        logged_delivery_distance=500,
        logged_avg_speed=7.89,
        logged_avg_speed_distance=490,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def order(  # noqa:WPS211 pylint:disable=too-many-arguments
    order_data, customer, restaurant, courier, address, address2,
):
    """An Order object."""
    obj = db.Order(**order_data)
    # Wire up all the relationships the order participates in.
    obj.customer = customer
    obj.restaurant = restaurant
    obj.courier = courier
    obj.pickup_address = address
    obj.delivery_address = address2
    return obj
|
||||
|
||||
|
||||
@pytest.fixture
def restaurant_data():
    """The data for the Restaurant object."""
    return dict(
        id=1,
        created_at=datetime.datetime(2020, 1, 2, 3, 4, 5),
        name='Vevay',
        _address_id=1,
        estimated_prep_duration=1000,
    )
|
||||
|
||||
|
||||
@pytest.fixture
def restaurant(restaurant_data, address):
    """A Restaurant object."""
    obj = db.Restaurant(**restaurant_data)
    obj.address = address
    return obj
|
141
tests/db/test_addresses.py
Normal file
141
tests/db/test_addresses.py
Normal file
|
@ -0,0 +1,141 @@
|
|||
"""Test the ORM's Address model."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import exc as sa_exc
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
class TestSpecialMethods:
    """Test special methods in Address."""

    # pylint:disable=no-self-use

    def test_create_address(self, address_data):
        """Test instantiation of a new Address object."""
        address = db.Address(**address_data)

        assert address is not None

    def test_text_representation(self, address_data):
        """Address has a non-literal text representation."""
        address = db.Address(**address_data)

        result = repr(address)

        expected = '<Address({street} in {city_name})>'.format(**address_data)
        assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.no_cover
class TestConstraints:
    """Test the database constraints defined in Address."""

    # pylint:disable=no-self-use

    def test_insert_into_database(self, address, db_session):
        """Insert an instance into the database."""
        db_session.add(address)
        db_session.commit()

    def test_duplicate_primary_key(self, address, address_data, city, db_session):
        """Can only add a record once."""
        db_session.add(address)
        db_session.commit()

        another_address = db.Address(**address_data)
        another_address.city = city
        db_session.add(another_address)

        with pytest.raises(orm_exc.FlushError):
            db_session.commit()

    def test_delete_a_referenced_address(self, address, address_data, db_session):
        """Remove a record that is referenced with a FK."""
        db_session.add(address)
        db_session.commit()

        # Fake a second address that belongs to the same primary address.
        address_data['id'] += 1
        another_address = db.Address(**address_data)
        db_session.add(another_address)
        db_session.commit()

        with pytest.raises(sa_exc.IntegrityError):
            db_session.execute(
                db.Address.__table__.delete().where(  # noqa:WPS609
                    db.Address.id == address.id,
                ),
            )

    def test_delete_a_referenced_city(self, address, city, db_session):
        """Remove a record that is referenced with a FK."""
        db_session.add(address)
        db_session.commit()

        with pytest.raises(sa_exc.IntegrityError):
            db_session.execute(
                db.City.__table__.delete().where(db.City.id == city.id),  # noqa:WPS609
            )

    @pytest.mark.parametrize('latitude', [-91, 91])
    def test_invalid_latitude(self, address, db_session, latitude):
        """Insert an instance with invalid data."""
        address.latitude = latitude
        db_session.add(address)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    @pytest.mark.parametrize('longitude', [-181, 181])
    def test_invalid_longitude(self, address, db_session, longitude):
        """Insert an instance with invalid data."""
        address.longitude = longitude
        db_session.add(address)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    @pytest.mark.parametrize('zip_code', [-1, 0, 9999, 100000])
    def test_invalid_zip_code(self, address, db_session, zip_code):
        """Insert an instance with invalid data."""
        address.zip_code = zip_code
        db_session.add(address)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    @pytest.mark.parametrize('floor', [-1, 41])
    def test_invalid_floor(self, address, db_session, floor):
        """Insert an instance with invalid data."""
        address.floor = floor
        db_session.add(address)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()
|
||||
|
||||
|
||||
class TestProperties:
    """Test properties in Address."""

    # pylint:disable=no-self-use

    def test_is_primary(self, address_data):
        """Test Address.is_primary property."""
        # The fixture points the address at itself as its primary address.
        address = db.Address(**address_data)

        assert address.is_primary is True

    def test_is_not_primary(self, address_data):
        """Test Address.is_primary property."""
        # Point the address at a different primary address.
        address_data['_primary_id'] = 999
        address = db.Address(**address_data)

        assert address.is_primary is False
|
99
tests/db/test_cities.py
Normal file
99
tests/db/test_cities.py
Normal file
|
@ -0,0 +1,99 @@
|
|||
"""Test the ORM's City model."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
class TestSpecialMethods:
    """Test special methods in City."""

    # pylint:disable=no-self-use

    def test_create_city(self, city_data):
        """Test instantiation of a new City object."""
        city = db.City(**city_data)

        assert city is not None

    def test_text_representation(self, city_data):
        """City has a non-literal text representation."""
        city = db.City(**city_data)

        result = repr(city)

        expected = '<City({name})>'.format(**city_data)
        assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.no_cover
class TestConstraints:
    """Test the database constraints defined in City."""

    # pylint:disable=no-self-use

    def test_insert_into_database(self, city, db_session):
        """Insert an instance into the database."""
        db_session.add(city)
        db_session.commit()

    def test_duplicate_primary_key(self, city, city_data, db_session):
        """Can only add a record once."""
        db_session.add(city)
        db_session.commit()

        another_city = db.City(**city_data)
        db_session.add(another_city)

        with pytest.raises(orm_exc.FlushError):
            db_session.commit()
|
||||
|
||||
|
||||
class TestProperties:
    """Test properties in City."""

    # pylint:disable=no-self-use

    def test_location_data(self, city_data):
        """Test City.location property."""
        city = db.City(**city_data)

        location = city.location

        assert isinstance(location, dict)
        assert len(location) == 2
        assert location['latitude'] == pytest.approx(city_data['_center_latitude'])
        assert location['longitude'] == pytest.approx(city_data['_center_longitude'])

    def test_viewport_data_overall(self, city_data):
        """Test City.viewport property."""
        city = db.City(**city_data)

        viewport = city.viewport

        assert isinstance(viewport, dict)
        assert len(viewport) == 2

    def test_viewport_data_northeast(self, city_data):
        """Test City.viewport property."""
        city = db.City(**city_data)

        corner = city.viewport['northeast']

        assert isinstance(corner, dict)
        assert len(corner) == 2
        assert corner['latitude'] == pytest.approx(city_data['_northeast_latitude'])
        assert corner['longitude'] == pytest.approx(city_data['_northeast_longitude'])

    def test_viewport_data_southwest(self, city_data):
        """Test City.viewport property."""
        city = db.City(**city_data)

        corner = city.viewport['southwest']

        assert isinstance(corner, dict)
        assert len(corner) == 2
        assert corner['latitude'] == pytest.approx(city_data['_southwest_latitude'])
        assert corner['longitude'] == pytest.approx(city_data['_southwest_longitude'])
|
125
tests/db/test_couriers.py
Normal file
125
tests/db/test_couriers.py
Normal file
|
@ -0,0 +1,125 @@
|
|||
"""Test the ORM's Courier model."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import exc as sa_exc
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
class TestSpecialMethods:
    """Test special methods in Courier."""

    # pylint:disable=no-self-use

    def test_create_courier(self, courier_data):
        """Test instantiation of a new Courier object."""
        courier = db.Courier(**courier_data)

        assert courier is not None

    def test_text_representation(self, courier_data):
        """Courier has a non-literal text representation."""
        # Ensure the id is set as the repr includes it.
        courier_data['id'] = 1
        courier = db.Courier(**courier_data)

        result = repr(courier)

        expected = '<Courier(#{0})>'.format(courier_data['id'])
        assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.no_cover
class TestConstraints:
    """Test the database constraints defined in Courier."""

    # pylint:disable=no-self-use

    def test_insert_into_database(self, courier, db_session):
        """Insert an instance into the database."""
        db_session.add(courier)
        db_session.commit()

    def test_duplicate_primary_key(self, courier, courier_data, db_session):
        """Can only add a record once."""
        db_session.add(courier)
        db_session.commit()

        another_courier = db.Courier(**courier_data)
        db_session.add(another_courier)

        with pytest.raises(orm_exc.FlushError):
            db_session.commit()

    def test_invalid_vehicle(self, courier, db_session):
        """Insert an instance with invalid data."""
        courier.vehicle = 'invalid'
        db_session.add(courier)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    # The check constraints below bound each numeric column from both sides;
    # parametrizing over one too-small and one too-large value covers both.

    @pytest.mark.parametrize('speed', [-1, 999])
    def test_invalid_speed(self, courier, db_session, speed):
        """Insert an instance with invalid data."""
        courier.historic_speed = speed
        db_session.add(courier)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    @pytest.mark.parametrize('capacity', [-1, 999])
    def test_invalid_capacity(self, courier, db_session, capacity):
        """Insert an instance with invalid data."""
        courier.capacity = capacity
        db_session.add(courier)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    @pytest.mark.parametrize('pay', [-1, 9999])
    def test_invalid_pay_per_hour(self, courier, db_session, pay):
        """Insert an instance with invalid data."""
        courier.pay_per_hour = pay
        db_session.add(courier)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    @pytest.mark.parametrize('pay', [-1, 999])
    def test_invalid_pay_per_order(self, courier, db_session, pay):
        """Insert an instance with invalid data."""
        courier.pay_per_order = pay
        db_session.add(courier)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()
|
51
tests/db/test_customer.py
Normal file
51
tests/db/test_customer.py
Normal file
|
@ -0,0 +1,51 @@
|
|||
"""Test the ORM's Customer model."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
class TestSpecialMethods:
    """Test special methods in Customer."""

    # pylint:disable=no-self-use

    def test_create_customer(self, customer_data):
        """Test instantiation of a new Customer object."""
        customer = db.Customer(**customer_data)

        assert customer is not None

    def test_text_representation(self, customer_data):
        """Customer has a non-literal text representation."""
        customer = db.Customer(**customer_data)

        result = repr(customer)

        expected = '<Customer(#{0})>'.format(customer_data['id'])
        assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.no_cover
class TestConstraints:
    """Test the database constraints defined in Customer."""

    # pylint:disable=no-self-use

    def test_insert_into_database(self, customer, db_session):
        """Insert an instance into the database."""
        db_session.add(customer)
        db_session.commit()

    def test_duplicate_primary_key(self, customer, customer_data, db_session):
        """Can only add a record once."""
        db_session.add(customer)
        db_session.commit()

        another_customer = db.Customer(**customer_data)
        db_session.add(another_customer)

        with pytest.raises(orm_exc.FlushError):
            db_session.commit()
|
397
tests/db/test_orders.py
Normal file
397
tests/db/test_orders.py
Normal file
|
@ -0,0 +1,397 @@
|
|||
"""Test the ORM's Order model."""
|
||||
|
||||
import datetime
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
class TestSpecialMethods:
    """Test special methods in Order."""

    # pylint:disable=no-self-use

    def test_create_order(self, order_data):
        """Test instantiation of a new Order object."""
        order = db.Order(**order_data)

        assert order is not None

    def test_text_representation(self, order_data):
        """Order has a non-literal text representation."""
        order = db.Order(**order_data)

        result = repr(order)

        expected = '<Order(#{0})>'.format(order_data['id'])
        assert result == expected
|
||||
|
||||
|
||||
@pytest.mark.e2e
@pytest.mark.no_cover
class TestConstraints:
    """Test the database constraints defined in Order."""

    # pylint:disable=no-self-use

    def test_insert_into_database(self, order, db_session):
        """Insert an instance into the database."""
        db_session.add(order)
        db_session.commit()

    def test_duplicate_primary_key(self, order, order_data, city, db_session):
        """Can only add a record once."""
        db_session.add(order)
        db_session.commit()

        another_order = db.Order(**order_data)
        another_order.city = city
        db_session.add(another_order)

        with pytest.raises(orm_exc.FlushError):
            db_session.commit()

    # TODO (order-constraints): the various Foreign Key and Check Constraints
    # should be tested eventually. This is not of highest importance as
    # we have a lot of confidence from the data cleaning notebook.
|
||||
|
||||
|
||||
class TestProperties:
|
||||
"""Test properties in Order."""
|
||||
|
||||
# pylint:disable=no-self-use,too-many-public-methods
|
||||
|
||||
def test_is_not_scheduled(self, order_data):
|
||||
"""Test Order.scheduled property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.scheduled
|
||||
|
||||
assert result is False
|
||||
|
||||
def test_is_scheduled(self, order_data):
|
||||
"""Test Order.scheduled property."""
|
||||
order_data['ad_hoc'] = False
|
||||
order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0)
|
||||
order_data['scheduled_delivery_at_corrected'] = False
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.scheduled
|
||||
|
||||
assert result is True
|
||||
|
||||
def test_is_completed(self, order_data):
|
||||
"""Test Order.completed property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.completed
|
||||
|
||||
assert result is True
|
||||
|
||||
def test_is_not_completed(self, order_data):
|
||||
"""Test Order.completed property."""
|
||||
order_data['cancelled'] = True
|
||||
order_data['cancelled_at'] = datetime.datetime(2020, 1, 2, 12, 15, 0)
|
||||
order_data['cancelled_at_corrected'] = False
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.completed
|
||||
|
||||
assert result is False
|
||||
|
||||
def test_is_corrected(self, order_data):
|
||||
"""Test Order.corrected property."""
|
||||
order_data['dispatch_at_corrected'] = True
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.corrected
|
||||
|
||||
assert result is True
|
||||
|
||||
def test_time_to_accept_no_dispatch_at(self, order_data):
|
||||
"""Test Order.time_to_accept property."""
|
||||
order_data['dispatch_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_accept)
|
||||
|
||||
def test_time_to_accept_no_courier_accepted(self, order_data):
|
||||
"""Test Order.time_to_accept property."""
|
||||
order_data['courier_accepted_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_accept)
|
||||
|
||||
def test_time_to_accept_success(self, order_data):
|
||||
"""Test Order.time_to_accept property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.time_to_accept
|
||||
|
||||
assert isinstance(result, datetime.timedelta)
|
||||
|
||||
def test_time_to_react_no_courier_notified(self, order_data):
|
||||
"""Test Order.time_to_react property."""
|
||||
order_data['courier_notified_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_react)
|
||||
|
||||
def test_time_to_react_no_courier_accepted(self, order_data):
|
||||
"""Test Order.time_to_react property."""
|
||||
order_data['courier_accepted_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_react)
|
||||
|
||||
def test_time_to_react_success(self, order_data):
|
||||
"""Test Order.time_to_react property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.time_to_react
|
||||
|
||||
assert isinstance(result, datetime.timedelta)
|
||||
|
||||
def test_time_to_pickup_no_reached_pickup_at(self, order_data):
|
||||
"""Test Order.time_to_pickup property."""
|
||||
order_data['reached_pickup_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_pickup)
|
||||
|
||||
def test_time_to_pickup_no_courier_accepted(self, order_data):
|
||||
"""Test Order.time_to_pickup property."""
|
||||
order_data['courier_accepted_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_pickup)
|
||||
|
||||
def test_time_to_pickup_success(self, order_data):
|
||||
"""Test Order.time_to_pickup property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.time_to_pickup
|
||||
|
||||
assert isinstance(result, datetime.timedelta)
|
||||
|
||||
def test_time_at_pickup_no_reached_pickup_at(self, order_data):
|
||||
"""Test Order.time_at_pickup property."""
|
||||
order_data['reached_pickup_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_at_pickup)
|
||||
|
||||
def test_time_at_pickup_no_pickup_at(self, order_data):
|
||||
"""Test Order.time_at_pickup property."""
|
||||
order_data['pickup_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_at_pickup)
|
||||
|
||||
def test_time_at_pickup_success(self, order_data):
|
||||
"""Test Order.time_at_pickup property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.time_at_pickup
|
||||
|
||||
assert isinstance(result, datetime.timedelta)
|
||||
|
||||
def test_scheduled_pickup_at_no_restaurant_notified( # noqa:WPS118
|
||||
self, order_data,
|
||||
):
|
||||
"""Test Order.scheduled_pickup_at property."""
|
||||
order_data['restaurant_notified_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.scheduled_pickup_at)
|
||||
|
||||
def test_scheduled_pickup_at_no_est_prep_duration(self, order_data): # noqa:WPS118
|
||||
"""Test Order.scheduled_pickup_at property."""
|
||||
order_data['estimated_prep_duration'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.scheduled_pickup_at)
|
||||
|
||||
def test_scheduled_pickup_at_success(self, order_data):
|
||||
"""Test Order.scheduled_pickup_at property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.scheduled_pickup_at
|
||||
|
||||
assert isinstance(result, datetime.datetime)
|
||||
|
||||
def test_if_courier_early_at_pickup(self, order_data):
|
||||
"""Test Order.courier_early property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.courier_early
|
||||
|
||||
assert bool(result) is True
|
||||
|
||||
def test_if_courier_late_at_pickup(self, order_data):
|
||||
"""Test Order.courier_late property."""
|
||||
# Opposite of test case before.
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.courier_late
|
||||
|
||||
assert bool(result) is False
|
||||
|
||||
def test_if_restaurant_early_at_pickup(self, order_data):
|
||||
"""Test Order.restaurant_early property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.restaurant_early
|
||||
|
||||
assert bool(result) is True
|
||||
|
||||
def test_if_restaurant_late_at_pickup(self, order_data):
|
||||
"""Test Order.restaurant_late property."""
|
||||
# Opposite of test case before.
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.restaurant_late
|
||||
|
||||
assert bool(result) is False
|
||||
|
||||
def test_time_to_delivery_no_reached_delivery_at(self, order_data): # noqa:WPS118
|
||||
"""Test Order.time_to_delivery property."""
|
||||
order_data['reached_delivery_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_delivery)
|
||||
|
||||
def test_time_to_delivery_no_pickup_at(self, order_data):
|
||||
"""Test Order.time_to_delivery property."""
|
||||
order_data['pickup_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_to_delivery)
|
||||
|
||||
def test_time_to_delivery_success(self, order_data):
|
||||
"""Test Order.time_to_delivery property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.time_to_delivery
|
||||
|
||||
assert isinstance(result, datetime.timedelta)
|
||||
|
||||
def test_time_at_delivery_no_reached_delivery_at(self, order_data): # noqa:WPS118
|
||||
"""Test Order.time_at_delivery property."""
|
||||
order_data['reached_delivery_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_at_delivery)
|
||||
|
||||
def test_time_at_delivery_no_delivery_at(self, order_data):
|
||||
"""Test Order.time_at_delivery property."""
|
||||
order_data['delivery_at'] = None
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(RuntimeError, match='not set'):
|
||||
int(order.time_at_delivery)
|
||||
|
||||
def test_time_at_delivery_success(self, order_data):
|
||||
"""Test Order.time_at_delivery property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.time_at_delivery
|
||||
|
||||
assert isinstance(result, datetime.timedelta)
|
||||
|
||||
def test_courier_waited_at_delviery(self, order_data):
|
||||
"""Test Order.courier_waited_at_delivery property."""
|
||||
order_data['_courier_waited_at_delivery'] = True
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = int(order.courier_waited_at_delivery.total_seconds())
|
||||
|
||||
assert result > 0
|
||||
|
||||
def test_courier_did_not_wait_at_delivery(self, order_data):
|
||||
"""Test Order.courier_waited_at_delivery property."""
|
||||
order_data['_courier_waited_at_delivery'] = False
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = int(order.courier_waited_at_delivery.total_seconds())
|
||||
|
||||
assert result == 0
|
||||
|
||||
def test_if_delivery_early_success(self, order_data):
|
||||
"""Test Order.delivery_early property."""
|
||||
order_data['ad_hoc'] = False
|
||||
order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0)
|
||||
order_data['scheduled_delivery_at_corrected'] = False
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.delivery_early
|
||||
|
||||
assert bool(result) is True
|
||||
|
||||
def test_if_delivery_early_failure(self, order_data):
|
||||
"""Test Order.delivery_early property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(AttributeError, match='scheduled'):
|
||||
int(order.delivery_early)
|
||||
|
||||
def test_if_delivery_late_success(self, order_data):
|
||||
"""Test Order.delivery_late property."""
|
||||
order_data['ad_hoc'] = False
|
||||
order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0)
|
||||
order_data['scheduled_delivery_at_corrected'] = False
|
||||
order = db.Order(**order_data)
|
||||
|
||||
result = order.delivery_late
|
||||
|
||||
assert bool(result) is False
|
||||
|
||||
def test_if_delivery_late_failure(self, order_data):
|
||||
"""Test Order.delivery_late property."""
|
||||
order = db.Order(**order_data)
|
||||
|
||||
with pytest.raises(AttributeError, match='scheduled'):
|
||||
int(order.delivery_late)
|
||||
|
||||
def test_no_total_time_for_pre_order(self, order_data):
    """Test Order.total_time property."""
    # Turn the ad-hoc test order into a scheduled pre-order.
    order_data.update(
        ad_hoc=False,
        scheduled_delivery_at=datetime.datetime(2020, 1, 2, 12, 30, 0),
        scheduled_delivery_at_corrected=False,
    )
    pre_order = db.Order(**order_data)

    # Pre-orders have no meaningful total time.
    with pytest.raises(AttributeError, match='Scheduled'):
        int(pre_order.total_time)
def test_no_total_time_for_cancelled_order(self, order_data):
    """Test Order.total_time property."""
    # Mark the test order as cancelled before delivery.
    order_data.update(
        cancelled=True,
        cancelled_at=datetime.datetime(2020, 1, 2, 12, 15, 0),
        cancelled_at_corrected=False,
    )
    cancelled_order = db.Order(**order_data)

    # Cancelled orders have no meaningful total time.
    with pytest.raises(RuntimeError, match='Cancelled'):
        int(cancelled_order.total_time)
def test_total_time_success(self, order_data):
    """Test Order.total_time property."""
    order = db.Order(**order_data)

    total_time = order.total_time

    # A delivered ad-hoc order reports its total time as a timedelta.
    assert isinstance(total_time, datetime.timedelta)
80
tests/db/test_restaurants.py
Normal file
80
tests/db/test_restaurants.py
Normal file
|
@ -0,0 +1,80 @@
|
|||
"""Test the ORM's Restaurant model."""
|
||||
|
||||
import pytest
|
||||
from sqlalchemy import exc as sa_exc
|
||||
from sqlalchemy.orm import exc as orm_exc
|
||||
|
||||
from urban_meal_delivery import db
|
||||
|
||||
|
||||
class TestSpecialMethods:
    """Test special methods in Restaurant."""

    # pylint:disable=no-self-use

    def test_create_restaurant(self, restaurant_data):
        """Test instantiation of a new Restaurant object."""
        restaurant = db.Restaurant(**restaurant_data)

        assert restaurant is not None

    def test_text_representation(self, restaurant_data):
        """Restaurant has a non-literal text representation."""
        restaurant = db.Restaurant(**restaurant_data)

        result = repr(restaurant)

        # The repr embeds the restaurant's name.
        assert result == '<Restaurant({name})>'.format(name=restaurant_data['name'])
@pytest.mark.e2e
@pytest.mark.no_cover
class TestConstraints:
    """Test the database constraints defined in Restaurant.

    Fixed a typo in a test name: "dublicate" -> "duplicate".
    """

    # pylint:disable=no-self-use

    def test_insert_into_database(self, restaurant, db_session):
        """Insert an instance into the database."""
        db_session.add(restaurant)
        db_session.commit()

    def test_duplicate_primary_key(self, restaurant, restaurant_data, db_session):
        """Can only add a record once."""
        db_session.add(restaurant)
        db_session.commit()

        another_restaurant = db.Restaurant(**restaurant_data)
        db_session.add(another_restaurant)

        # A second record with the same primary key must be rejected.
        with pytest.raises(orm_exc.FlushError):
            db_session.commit()

    def test_delete_a_referenced_address(self, restaurant, address, db_session):
        """Remove a record that is referenced with a FK."""
        db_session.add(restaurant)
        db_session.commit()

        # The restaurant's FK constraint must block deleting the address.
        with pytest.raises(sa_exc.IntegrityError):
            db_session.execute(
                db.Address.__table__.delete().where(  # noqa:WPS609
                    db.Address.id == address.id,
                ),
            )

    def test_negative_prep_duration(self, restaurant, db_session):
        """Insert an instance with invalid data."""
        restaurant.estimated_prep_duration = -1
        db_session.add(restaurant)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()

    def test_too_high_prep_duration(self, restaurant, db_session):
        """Insert an instance with invalid data."""
        restaurant.estimated_prep_duration = 2500
        db_session.add(restaurant)

        with pytest.raises(sa_exc.IntegrityError):
            db_session.commit()
54
tests/test_config.py
Normal file
54
tests/test_config.py
Normal file
|
@ -0,0 +1,54 @@
|
|||
"""Test the package's configuration module."""
|
||||
|
||||
import pytest
|
||||
|
||||
from urban_meal_delivery import configuration
|
||||
|
||||
|
||||
# All environments for which a configuration object can be created;
# shared by the parametrized tests below.
envs = ['production', 'testing']
@pytest.mark.parametrize('env', envs)
def test_config_repr(env):
    """Config objects have the text representation '<configuration>'."""
    config = configuration.make_config(env)

    result = str(config)

    assert result == '<configuration>'
def test_invalid_config():
    """There are only 'production' and 'testing' configurations."""
    # Any unknown environment name must be rejected.
    with pytest.raises(ValueError, match="'production' or 'testing'"):
        configuration.make_config('invalid')
@pytest.mark.parametrize('env', envs)
def test_database_uri_set(env, monkeypatch):
    """Package does NOT emit warning if DATABASE_URI is set."""
    uri = 'postgresql://user:password@localhost/db'
    # Patch both config classes so the check holds for either environment.
    for config_cls in (configuration.ProductionConfig, configuration.TestingConfig):
        monkeypatch.setattr(config_cls, 'DATABASE_URI', uri)

    with pytest.warns(None) as record:
        configuration.make_config(env)

    assert len(record) == 0  # noqa:WPS441,WPS507
@pytest.mark.parametrize('env', envs)
def test_no_database_uri_set(env, monkeypatch):
    """Package does not work without DATABASE_URI set in the environment."""
    # Patch both config classes so the check holds for either environment.
    for config_cls in (configuration.ProductionConfig, configuration.TestingConfig):
        monkeypatch.setattr(config_cls, 'DATABASE_URI', None)

    with pytest.warns(UserWarning, match='no DATABASE_URI'):
        configuration.make_config(env)
def test_random_testing_schema():
    """CLEAN_SCHEMA is randomized if not set explicitly."""
    schema_name = configuration.random_schema_name()

    # Generated names are strings of the form "temp_<10 chars>".
    assert isinstance(schema_name, str)
    assert schema_name.startswith('temp_')
    assert len(schema_name) == 15
Loading…
Reference in a new issue