Add database migrations

- use Alembic to migrate the PostgreSQL database
  + create an initial migration script that sets up the database,
    as an alternative to db.Base.metadata.create_all()
  + integrate Alembic into the test suite; the db_engine fixture
    now has two modes:
    * create the latest version of all tables at once
    * invoke `alembic upgrade head` to build them up revision by revision
    => the "e2e" tests are all run twice, once in each mode; this
       ensures that the migration scripts re-create exactly the same
       database schema as db.Base.metadata.create_all() does
    * in both modes, the tables are created in a temporary PostgreSQL
      schema
    => the "e2e" tests could now run against the production database
       and still be isolated from it
  (see the workflow sketch below)
- make the configuration module public (so Alembic can import it)
- adjust the linting rules for Alembic
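
For orientation, here is a minimal sketch of the day-to-day workflow this commit enables, via Alembic's Python API (the `alembic` CLI is equivalent); the revision message is illustrative only:

    from alembic import command
    from alembic import config as alembic_config

    cfg = alembic_config.Config('alembic.ini')

    # Auto-generate a revision script by diffing the ORM models
    # against the current database schema (uses migrations/env.py).
    command.revision(cfg, message='an illustrative message', autogenerate=True)

    # Apply all migrations up to the latest revision.
    command.upgrade(cfg, 'head')

    # Roll back the most recent revision.
    command.downgrade(cfg, '-1')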
Commit a16c260543 (parent fdcc93a1ea)
Alexander Hess, 2020-08-09 17:14:23 +02:00
Signed by: alexander (GPG key ID: 344EA5AB10D868E0)
14 changed files with 1104 additions and 35 deletions

alembic.ini (new file)

@@ -0,0 +1,44 @@
[alembic]
file_template = rev_%%(year)d%%(month).2d%%(day).2d_%%(hour).2d_%%(rev)s_%%(slug)s
script_location = %(here)s/migrations

[post_write_hooks]
hooks=black
black.type=console_scripts
black.entrypoint=black

# The following is taken from the default alembic.ini file.

[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
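
For illustration: with the `file_template` above, the initial revision shown further down (f11cd76d2f45, created 2020-08-06 23:24) renders to a file name like the following, which the black post-write hook then re-formats:

    rev_20200806_23_f11cd76d2f45_create_the_database_from_scratch.py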

migrations/README.md (new file)

@@ -0,0 +1,4 @@
# Database Migrations
This project uses [alembic](https://alembic.sqlalchemy.org/en/latest)
to run the database migrations.

migrations/env.py (new file)

@@ -0,0 +1,45 @@
"""Configure Alembic's migration environment."""
import os
from logging import config as log_config
import sqlalchemy as sa
from alembic import context
from urban_meal_delivery import config as umd_config
from urban_meal_delivery import db
# Disable the --sql option, a.k.a, the "offline mode".
if context.is_offline_mode():
raise NotImplementedError('The --sql option is not implemented in this project')
# Set up the default Python logger from the alembic.ini file.
log_config.fileConfig(context.config.config_file_name)
def include_object(obj, _name, type_, _reflected, _compare_to):
"""Only include the clean schema into --autogenerate migrations."""
if type_ in {'table', 'column'} and obj.schema != umd_config.DATABASE_SCHEMA:
return False
return True
engine = sa.create_engine(umd_config.DATABASE_URI)
with engine.connect() as connection:
context.configure(
connection=connection,
include_object=include_object,
target_metadata=db.Base.metadata,
version_table='{alembic_table}{test_schema}'.format(
alembic_table=umd_config.ALEMBIC_TABLE,
test_schema=(f'_{umd_config.CLEAN_SCHEMA}' if os.getenv('TESTING') else ''),
),
version_table_schema=umd_config.ALEMBIC_TABLE_SCHEMA,
)
with context.begin_transaction():
context.run_migrations()
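
One detail worth spelling out: under TESTING, every test session gets its own `version_table`, so parallel test runs cannot clash over Alembic's bookkeeping table. An illustration with an assumed random schema name:

    # Illustration only -- assumed values for a testing session.
    ALEMBIC_TABLE = 'alembic_version'
    CLEAN_SCHEMA = 'temp_abcdefghij'  # randomly generated, see configuration.py below

    version_table = f'{ALEMBIC_TABLE}_{CLEAN_SCHEMA}'
    print(version_table)  # -> alembic_version_temp_abcdefghij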

migrations/script.py.mako (new file)

@@ -0,0 +1,31 @@
"""${message}.
Revision: # ${up_revision} at ${create_date}
Revises: # ${down_revision | comma,n}
"""
import os
import sqlalchemy as sa
from alembic import op
${imports if imports else ""}
from urban_meal_delivery import configuration
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
config = configuration.make_config('testing' if os.getenv('TESTING') else 'production')
def upgrade():
"""Upgrade to revision ${up_revision}."""
${upgrades if upgrades else "pass"}
def downgrade():
"""Downgrade to revision ${down_revision}."""
${downgrades if downgrades else "pass"}

migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database_from_scratch.py (new file)

@@ -0,0 +1,802 @@
"""Create the database from scratch.
Revision: #f11cd76d2f45 at 2020-08-06 23:24:32
"""
import os
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from urban_meal_delivery import configuration
revision = 'f11cd76d2f45'
down_revision = None
branch_labels = None
depends_on = None
config = configuration.make_config('testing' if os.getenv('TESTING') else 'production')
def upgrade():
"""Upgrade to revision f11cd76d2f45."""
op.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};')
op.create_table( # noqa:ECE001
'cities',
sa.Column('id', sa.SmallInteger(), autoincrement=False, nullable=False),
sa.Column('name', sa.Unicode(length=10), nullable=False),
sa.Column('kml', sa.UnicodeText(), nullable=False),
sa.Column('center_latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('center_longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('northeast_latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('northeast_longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('southwest_latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('southwest_longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('initial_zoom', sa.SmallInteger(), nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_cities')),
*(
[ # noqa:WPS504
sa.ForeignKeyConstraint(
['id'],
[f'{config.ORIGINAL_SCHEMA}.cities.id'],
name=op.f('pk_cities_sanity'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
]
if not config.TESTING
else []
),
schema=config.CLEAN_SCHEMA,
)
op.create_table( # noqa:ECE001
'couriers',
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('vehicle', sa.Unicode(length=10), nullable=False),
sa.Column('speed', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('capacity', sa.SmallInteger(), nullable=False),
sa.Column('pay_per_hour', sa.SmallInteger(), nullable=False),
sa.Column('pay_per_order', sa.SmallInteger(), nullable=False),
sa.CheckConstraint(
"vehicle IN ('bicycle', 'motorcycle')",
name=op.f('ck_couriers_on_available_vehicle_types'),
),
sa.CheckConstraint(
'0 <= capacity AND capacity <= 200',
name=op.f('ck_couriers_on_capacity_under_200_liters'),
),
sa.CheckConstraint(
'0 <= pay_per_hour AND pay_per_hour <= 1500',
name=op.f('ck_couriers_on_realistic_pay_per_hour'),
),
sa.CheckConstraint(
'0 <= pay_per_order AND pay_per_order <= 650',
name=op.f('ck_couriers_on_realistic_pay_per_order'),
),
sa.CheckConstraint(
'0 <= speed AND speed <= 30', name=op.f('ck_couriers_on_realistic_speed'),
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_couriers')),
*(
[ # noqa:WPS504
sa.ForeignKeyConstraint(
['id'],
[f'{config.ORIGINAL_SCHEMA}.couriers.id'],
name=op.f('pk_couriers_sanity'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
]
if not config.TESTING
else []
),
schema=config.CLEAN_SCHEMA,
)
op.create_table(
'customers',
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
sa.PrimaryKeyConstraint('id', name=op.f('pk_customers')),
schema=config.CLEAN_SCHEMA,
)
op.create_table( # noqa:ECE001
'addresses',
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('primary_id', sa.Integer(), nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('place_id', sa.Unicode(length=120), nullable=False), # noqa:WPS432
sa.Column('latitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('longitude', postgresql.DOUBLE_PRECISION(), nullable=False),
sa.Column('city_id', sa.SmallInteger(), nullable=False),
sa.Column('city', sa.Unicode(length=25), nullable=False), # noqa:WPS432
sa.Column('zip_code', sa.Integer(), nullable=False),
sa.Column('street', sa.Unicode(length=80), nullable=False), # noqa:WPS432
sa.Column('floor', sa.SmallInteger(), nullable=True),
sa.CheckConstraint(
'-180 <= longitude AND longitude <= 180',
name=op.f('ck_addresses_on_longitude_between_180_degrees'),
),
sa.CheckConstraint(
'-90 <= latitude AND latitude <= 90',
name=op.f('ck_addresses_on_latitude_between_90_degrees'),
),
sa.CheckConstraint(
'0 <= floor AND floor <= 40', name=op.f('ck_addresses_on_realistic_floor'),
),
sa.CheckConstraint(
'30000 <= zip_code AND zip_code <= 99999',
name=op.f('ck_addresses_on_valid_zip_code'),
),
sa.ForeignKeyConstraint(
['city_id'],
[f'{config.CLEAN_SCHEMA}.cities.id'],
name=op.f('fk_addresses_to_cities_via_city_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['primary_id'],
[f'{config.CLEAN_SCHEMA}.addresses.id'],
name=op.f('fk_addresses_to_addresses_via_primary_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_addresses')),
*(
[ # noqa:WPS504
sa.ForeignKeyConstraint(
['id'],
[f'{config.ORIGINAL_SCHEMA}.addresses.id'],
name=op.f('pk_addresses_sanity'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
]
if not config.TESTING
else []
),
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_addresses_on_city_id'),
'addresses',
['city_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_addresses_on_place_id'),
'addresses',
['place_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_addresses_on_primary_id'),
'addresses',
['primary_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_addresses_on_zip_code'),
'addresses',
['zip_code'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_table( # noqa:ECE001
'restaurants',
sa.Column('id', sa.SmallInteger(), autoincrement=False, nullable=False),
sa.Column('created_at', sa.DateTime(), nullable=False),
sa.Column('name', sa.Unicode(length=45), nullable=False), # noqa:WPS432
sa.Column('address_id', sa.Integer(), nullable=False),
sa.Column('estimated_prep_duration', sa.SmallInteger(), nullable=False),
sa.CheckConstraint(
'0 <= estimated_prep_duration AND estimated_prep_duration <= 2400',
name=op.f('ck_restaurants_on_realistic_estimated_prep_duration'),
),
sa.ForeignKeyConstraint(
['address_id'],
[f'{config.CLEAN_SCHEMA}.addresses.id'],
name=op.f('fk_restaurants_to_addresses_via_address_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_restaurants')),
*(
[ # noqa:WPS504
sa.ForeignKeyConstraint(
['id'],
[f'{config.ORIGINAL_SCHEMA}.businesses.id'],
name=op.f('pk_restaurants_sanity'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
]
if not config.TESTING
else []
),
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_restaurants_on_address_id'),
'restaurants',
['address_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_table( # noqa:ECE001
'orders',
sa.Column('id', sa.Integer(), autoincrement=False, nullable=False),
sa.Column('delivery_id', sa.Integer(), nullable=True),
sa.Column('customer_id', sa.Integer(), nullable=False),
sa.Column('placed_at', sa.DateTime(), nullable=False),
sa.Column('ad_hoc', sa.Boolean(), nullable=False),
sa.Column('scheduled_delivery_at', sa.DateTime(), nullable=True),
sa.Column('scheduled_delivery_at_corrected', sa.Boolean(), nullable=True),
sa.Column('first_estimated_delivery_at', sa.DateTime(), nullable=True),
sa.Column('cancelled', sa.Boolean(), nullable=False),
sa.Column('cancelled_at', sa.DateTime(), nullable=True),
sa.Column('cancelled_at_corrected', sa.Boolean(), nullable=True),
sa.Column('sub_total', sa.Integer(), nullable=False),
sa.Column('delivery_fee', sa.SmallInteger(), nullable=False),
sa.Column('total', sa.Integer(), nullable=False),
sa.Column('restaurant_id', sa.SmallInteger(), nullable=False),
sa.Column('restaurant_notified_at', sa.DateTime(), nullable=True),
sa.Column('restaurant_notified_at_corrected', sa.Boolean(), nullable=True),
sa.Column('restaurant_confirmed_at', sa.DateTime(), nullable=True),
sa.Column('restaurant_confirmed_at_corrected', sa.Boolean(), nullable=True),
sa.Column('estimated_prep_duration', sa.Integer(), nullable=True),
sa.Column('estimated_prep_duration_corrected', sa.Boolean(), nullable=True),
sa.Column('estimated_prep_buffer', sa.Integer(), nullable=False),
sa.Column('courier_id', sa.Integer(), nullable=True),
sa.Column('dispatch_at', sa.DateTime(), nullable=True),
sa.Column('dispatch_at_corrected', sa.Boolean(), nullable=True),
sa.Column('courier_notified_at', sa.DateTime(), nullable=True),
sa.Column('courier_notified_at_corrected', sa.Boolean(), nullable=True),
sa.Column('courier_accepted_at', sa.DateTime(), nullable=True),
sa.Column('courier_accepted_at_corrected', sa.Boolean(), nullable=True),
sa.Column('utilization', sa.SmallInteger(), nullable=False),
sa.Column('pickup_address_id', sa.Integer(), nullable=False),
sa.Column('reached_pickup_at', sa.DateTime(), nullable=True),
sa.Column('pickup_at', sa.DateTime(), nullable=True),
sa.Column('pickup_at_corrected', sa.Boolean(), nullable=True),
sa.Column('pickup_not_confirmed', sa.Boolean(), nullable=True),
sa.Column('left_pickup_at', sa.DateTime(), nullable=True),
sa.Column('left_pickup_at_corrected', sa.Boolean(), nullable=True),
sa.Column('delivery_address_id', sa.Integer(), nullable=False),
sa.Column('reached_delivery_at', sa.DateTime(), nullable=True),
sa.Column('delivery_at', sa.DateTime(), nullable=True),
sa.Column('delivery_at_corrected', sa.Boolean(), nullable=True),
sa.Column('delivery_not_confirmed', sa.Boolean(), nullable=True),
sa.Column('courier_waited_at_delivery', sa.Boolean(), nullable=True),
sa.Column('logged_delivery_distance', sa.SmallInteger(), nullable=True),
sa.Column('logged_avg_speed', postgresql.DOUBLE_PRECISION(), nullable=True),
sa.Column('logged_avg_speed_distance', sa.SmallInteger(), nullable=True),
sa.CheckConstraint(
'0 <= estimated_prep_buffer AND estimated_prep_buffer <= 900',
name=op.f('ck_orders_on_estimated_prep_buffer_between_0_and_900'),
),
sa.CheckConstraint(
'0 <= estimated_prep_duration AND estimated_prep_duration <= 2700',
name=op.f('ck_orders_on_estimated_prep_duration_between_0_and_2700'),
),
sa.CheckConstraint(
'0 <= utilization AND utilization <= 100',
name=op.f('ck_orders_on_utilization_between_0_and_100'),
),
sa.CheckConstraint(
'(cancelled_at IS NULL AND cancelled_at_corrected IS NULL) OR (cancelled_at IS NULL AND cancelled_at_corrected IS TRUE) OR (cancelled_at IS NOT NULL AND cancelled_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_1'),
),
sa.CheckConstraint(
'(courier_accepted_at IS NULL AND courier_accepted_at_corrected IS NULL) OR (courier_accepted_at IS NULL AND courier_accepted_at_corrected IS TRUE) OR (courier_accepted_at IS NOT NULL AND courier_accepted_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_7'),
),
sa.CheckConstraint(
'(courier_notified_at IS NULL AND courier_notified_at_corrected IS NULL) OR (courier_notified_at IS NULL AND courier_notified_at_corrected IS TRUE) OR (courier_notified_at IS NOT NULL AND courier_notified_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_6'),
),
sa.CheckConstraint(
'(delivery_at IS NULL AND delivery_at_corrected IS NULL) OR (delivery_at IS NULL AND delivery_at_corrected IS TRUE) OR (delivery_at IS NOT NULL AND delivery_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_10'),
),
sa.CheckConstraint(
'(dispatch_at IS NULL AND dispatch_at_corrected IS NULL) OR (dispatch_at IS NULL AND dispatch_at_corrected IS TRUE) OR (dispatch_at IS NOT NULL AND dispatch_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_5'),
),
sa.CheckConstraint(
'(estimated_prep_duration IS NULL AND estimated_prep_duration_corrected IS NULL) OR (estimated_prep_duration IS NULL AND estimated_prep_duration_corrected IS TRUE) OR (estimated_prep_duration IS NOT NULL AND estimated_prep_duration_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_4'),
),
sa.CheckConstraint(
'(left_pickup_at IS NULL AND left_pickup_at_corrected IS NULL) OR (left_pickup_at IS NULL AND left_pickup_at_corrected IS TRUE) OR (left_pickup_at IS NOT NULL AND left_pickup_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_9'),
),
sa.CheckConstraint(
'(pickup_at IS NULL AND pickup_at_corrected IS NULL) OR (pickup_at IS NULL AND pickup_at_corrected IS TRUE) OR (pickup_at IS NOT NULL AND pickup_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_8'),
),
sa.CheckConstraint(
'(restaurant_confirmed_at IS NULL AND restaurant_confirmed_at_corrected IS NULL) OR (restaurant_confirmed_at IS NULL AND restaurant_confirmed_at_corrected IS TRUE) OR (restaurant_confirmed_at IS NOT NULL AND restaurant_confirmed_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_3'),
),
sa.CheckConstraint(
'(restaurant_notified_at IS NULL AND restaurant_notified_at_corrected IS NULL) OR (restaurant_notified_at IS NULL AND restaurant_notified_at_corrected IS TRUE) OR (restaurant_notified_at IS NOT NULL AND restaurant_notified_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_2'),
),
sa.CheckConstraint(
'(scheduled_delivery_at IS NULL AND scheduled_delivery_at_corrected IS NULL) OR (scheduled_delivery_at IS NULL AND scheduled_delivery_at_corrected IS TRUE) OR (scheduled_delivery_at IS NOT NULL AND scheduled_delivery_at_corrected IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_corrections_only_for_set_value_0'),
),
sa.CheckConstraint(
'(ad_hoc IS TRUE AND scheduled_delivery_at IS NULL) OR (ad_hoc IS FALSE AND scheduled_delivery_at IS NOT NULL)', # noqa:E501
name=op.f('ck_orders_on_either_ad_hoc_or_scheduled_order'),
),
sa.CheckConstraint(
'NOT (EXTRACT(EPOCH FROM scheduled_delivery_at - placed_at) < 1800)',
name=op.f('ck_orders_on_scheduled_orders_not_within_30_minutes'),
),
sa.CheckConstraint(
'NOT (ad_hoc IS FALSE AND ((EXTRACT(HOUR FROM scheduled_delivery_at) <= 11 AND NOT (EXTRACT(HOUR FROM scheduled_delivery_at) = 11 AND EXTRACT(MINUTE FROM scheduled_delivery_at) = 45)) OR EXTRACT(HOUR FROM scheduled_delivery_at) > 22))', # noqa:E501
name=op.f('ck_orders_on_scheduled_orders_within_business_hours'),
),
sa.CheckConstraint(
'NOT (ad_hoc IS TRUE AND (EXTRACT(HOUR FROM placed_at) < 11 OR EXTRACT(HOUR FROM placed_at) > 22))', # noqa:E501
name=op.f('ck_orders_on_ad_hoc_orders_within_business_hours'),
),
sa.CheckConstraint(
'NOT (cancelled IS FALSE AND cancelled_at IS NOT NULL)',
name=op.f('ck_orders_on_only_cancelled_orders_may_have_cancelled_at'),
),
sa.CheckConstraint(
'NOT (cancelled IS TRUE AND delivery_at IS NOT NULL)',
name=op.f('ck_orders_on_cancelled_orders_must_not_be_delivered'),
),
sa.CheckConstraint(
'cancelled_at > courier_accepted_at',
name=op.f('ck_orders_on_ordered_timestamps_16'),
),
sa.CheckConstraint(
'cancelled_at > courier_notified_at',
name=op.f('ck_orders_on_ordered_timestamps_15'),
),
sa.CheckConstraint(
'cancelled_at > delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_21'),
),
sa.CheckConstraint(
'cancelled_at > dispatch_at',
name=op.f('ck_orders_on_ordered_timestamps_14'),
),
sa.CheckConstraint(
'cancelled_at > left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_19'),
),
sa.CheckConstraint(
'cancelled_at > pickup_at', name=op.f('ck_orders_on_ordered_timestamps_18'),
),
sa.CheckConstraint(
'cancelled_at > reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_20'),
),
sa.CheckConstraint(
'cancelled_at > reached_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_17'),
),
sa.CheckConstraint(
'cancelled_at > restaurant_confirmed_at',
name=op.f('ck_orders_on_ordered_timestamps_13'),
),
sa.CheckConstraint(
'cancelled_at > restaurant_notified_at',
name=op.f('ck_orders_on_ordered_timestamps_12'),
),
sa.CheckConstraint(
'courier_accepted_at < delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_42'),
),
sa.CheckConstraint(
'courier_accepted_at < left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_40'),
),
sa.CheckConstraint(
'courier_accepted_at < pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_39'),
),
sa.CheckConstraint(
'courier_accepted_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_41'),
),
sa.CheckConstraint(
'courier_accepted_at < reached_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_38'),
),
sa.CheckConstraint(
'courier_notified_at < courier_accepted_at',
name=op.f('ck_orders_on_ordered_timestamps_32'),
),
sa.CheckConstraint(
'courier_notified_at < delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_37'),
),
sa.CheckConstraint(
'courier_notified_at < left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_35'),
),
sa.CheckConstraint(
'courier_notified_at < pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_34'),
),
sa.CheckConstraint(
'courier_notified_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_36'),
),
sa.CheckConstraint(
'courier_notified_at < reached_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_33'),
),
sa.CheckConstraint(
'dispatch_at < courier_accepted_at',
name=op.f('ck_orders_on_ordered_timestamps_26'),
),
sa.CheckConstraint(
'dispatch_at < courier_notified_at',
name=op.f('ck_orders_on_ordered_timestamps_25'),
),
sa.CheckConstraint(
'dispatch_at < delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_31'),
),
sa.CheckConstraint(
'dispatch_at < left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_29'),
),
sa.CheckConstraint(
'dispatch_at < pickup_at', name=op.f('ck_orders_on_ordered_timestamps_28'),
),
sa.CheckConstraint(
'dispatch_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_30'),
),
sa.CheckConstraint(
'dispatch_at < reached_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_27'),
),
sa.CheckConstraint(
'estimated_prep_buffer % 60 = 0',
name=op.f('ck_orders_on_estimated_prep_buffer_must_be_whole_minutes'),
),
sa.CheckConstraint(
'estimated_prep_duration % 60 = 0',
name=op.f('ck_orders_on_estimated_prep_duration_must_be_whole_minutes'),
),
sa.CheckConstraint(
'left_pickup_at < delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_51'),
),
sa.CheckConstraint(
'left_pickup_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_50'),
),
sa.CheckConstraint(
'pickup_at < delivery_at', name=op.f('ck_orders_on_ordered_timestamps_49'),
),
sa.CheckConstraint(
'pickup_at < left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_47'),
),
sa.CheckConstraint(
'pickup_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_48'),
),
sa.CheckConstraint(
'placed_at < cancelled_at', name=op.f('ck_orders_on_ordered_timestamps_2'),
),
sa.CheckConstraint(
'placed_at < courier_accepted_at',
name=op.f('ck_orders_on_ordered_timestamps_7'),
),
sa.CheckConstraint(
'placed_at < courier_notified_at',
name=op.f('ck_orders_on_ordered_timestamps_6'),
),
sa.CheckConstraint(
'placed_at < delivery_at', name=op.f('ck_orders_on_ordered_timestamps_11'),
),
sa.CheckConstraint(
'placed_at < dispatch_at', name=op.f('ck_orders_on_ordered_timestamps_5'),
),
sa.CheckConstraint(
'placed_at < first_estimated_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_1'),
),
sa.CheckConstraint(
'placed_at < left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_9'),
),
sa.CheckConstraint(
'placed_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_10'),
),
sa.CheckConstraint(
'placed_at < reached_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_8'),
),
sa.CheckConstraint(
'placed_at < restaurant_confirmed_at',
name=op.f('ck_orders_on_ordered_timestamps_4'),
),
sa.CheckConstraint(
'placed_at < restaurant_notified_at',
name=op.f('ck_orders_on_ordered_timestamps_3'),
),
sa.CheckConstraint(
'placed_at < scheduled_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_0'),
),
sa.CheckConstraint(
'reached_delivery_at < delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_52'),
),
sa.CheckConstraint(
'reached_pickup_at < delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_46'),
),
sa.CheckConstraint(
'reached_pickup_at < left_pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_44'),
),
sa.CheckConstraint(
'reached_pickup_at < pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_43'),
),
sa.CheckConstraint(
'reached_pickup_at < reached_delivery_at',
name=op.f('ck_orders_on_ordered_timestamps_45'),
),
sa.CheckConstraint(
'restaurant_confirmed_at < pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_24'),
),
sa.CheckConstraint(
'restaurant_notified_at < pickup_at',
name=op.f('ck_orders_on_ordered_timestamps_23'),
),
sa.CheckConstraint(
'restaurant_notified_at < restaurant_confirmed_at',
name=op.f('ck_orders_on_ordered_timestamps_22'),
),
sa.CheckConstraint(
'(pickup_at IS NULL AND pickup_not_confirmed IS NULL) OR (pickup_at IS NOT NULL AND pickup_not_confirmed IS NOT NULL)', # noqa:E501
name=op.f('pickup_not_confirmed_only_if_pickup'),
),
sa.CheckConstraint(
'(delivery_at IS NULL AND delivery_not_confirmed IS NULL) OR (delivery_at IS NOT NULL AND delivery_not_confirmed IS NOT NULL)', # noqa:E501
name=op.f('delivery_not_confirmed_only_if_delivery'),
),
sa.CheckConstraint(
'(delivery_at IS NULL AND courier_waited_at_delivery IS NULL) OR (delivery_at IS NOT NULL AND courier_waited_at_delivery IS NOT NULL)', # noqa:E501
name=op.f('courier_waited_at_delivery_only_if_delivery'),
),
sa.ForeignKeyConstraint(
['courier_id'],
[f'{config.CLEAN_SCHEMA}.couriers.id'],
name=op.f('fk_orders_to_couriers_via_courier_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['customer_id'],
[f'{config.CLEAN_SCHEMA}.customers.id'],
name=op.f('fk_orders_to_customers_via_customer_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['delivery_address_id'],
[f'{config.CLEAN_SCHEMA}.addresses.id'],
name=op.f('fk_orders_to_addresses_via_delivery_address_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['pickup_address_id'],
[f'{config.CLEAN_SCHEMA}.addresses.id'],
name=op.f('fk_orders_to_addresses_via_pickup_address_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['restaurant_id'],
[f'{config.CLEAN_SCHEMA}.restaurants.id'],
name=op.f('fk_orders_to_restaurants_via_restaurant_id'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.PrimaryKeyConstraint('id', name=op.f('pk_orders')),
*(
[ # noqa:WPS504
sa.ForeignKeyConstraint(
['id'],
[f'{config.ORIGINAL_SCHEMA}.orders.id'],
name=op.f('pk_orders_sanity'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['delivery_id'],
[f'{config.ORIGINAL_SCHEMA}.deliveries.id'],
name=op.f('pk_deliveries_sanity'),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
]
if not config.TESTING
else []
),
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_cancelled'),
'orders',
['cancelled'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_cancelled_at_corrected'),
'orders',
['cancelled_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_courier_accepted_at_corrected'),
'orders',
['courier_accepted_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_courier_id'),
'orders',
['courier_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_courier_notified_at_corrected'),
'orders',
['courier_notified_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_customer_id'),
'orders',
['customer_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_delivery_address_id'),
'orders',
['delivery_address_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_delivery_at_corrected'),
'orders',
['delivery_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_delivery_id'),
'orders',
['delivery_id'],
unique=True,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_dispatch_at_corrected'),
'orders',
['dispatch_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_estimated_prep_buffer'),
'orders',
['estimated_prep_buffer'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_estimated_prep_duration'),
'orders',
['estimated_prep_duration'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_estimated_prep_duration_corrected'),
'orders',
['estimated_prep_duration_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_left_pickup_at_corrected'),
'orders',
['left_pickup_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_pickup_address_id'),
'orders',
['pickup_address_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_pickup_at_corrected'),
'orders',
['pickup_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_placed_at'),
'orders',
['placed_at'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_restaurant_confirmed_at_corrected'),
'orders',
['restaurant_confirmed_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_restaurant_id'),
'orders',
['restaurant_id'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_restaurant_notified_at_corrected'),
'orders',
['restaurant_notified_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_scheduled_delivery_at'),
'orders',
['scheduled_delivery_at'],
unique=False,
schema=config.CLEAN_SCHEMA,
)
op.create_index(
op.f('ix_orders_on_scheduled_delivery_at_corrected'),
'orders',
['scheduled_delivery_at_corrected'],
unique=False,
schema=config.CLEAN_SCHEMA,
)


def downgrade():
    """Downgrade to revision None."""
    op.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;')
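
To exercise this revision in isolation, one could run it up and down via the same Python API; a sketch, assuming a configured alembic.ini (the CLI equivalents are `alembic upgrade f11cd76d2f45` and `alembic downgrade base`):

    from alembic import command
    from alembic import config as alembic_config

    cfg = alembic_config.Config('alembic.ini')

    command.upgrade(cfg, 'f11cd76d2f45')  # runs upgrade(): creates CLEAN_SCHEMA and all tables
    command.downgrade(cfg, 'base')  # runs downgrade(): drops CLEAN_SCHEMA with CASCADE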

noxfile.py

@@ -74,7 +74,9 @@ PYTEST_LOCATION = 'tests/'
 # Paths with all *.py files.
 SRC_LOCATIONS = (
-    f'{DOCS_SRC}/conf.py',
+    f'{DOCS_SRC}conf.py',
+    'migrations/env.py',
+    'migrations/versions/',
     'noxfile.py',
     PACKAGE_SOURCE_LOCATION,
     PYTEST_LOCATION,
@@ -235,7 +237,12 @@ def test(session):
     # non-develop dependencies be installed in the virtual environment.
     session.run('poetry', 'install', '--no-dev', external=True)
     _install_packages(
-        session, 'packaging', 'pytest', 'pytest-cov', 'xdoctest[optional]',
+        session,
+        'packaging',
+        'pytest',
+        'pytest-cov',
+        'pytest-env',
+        'xdoctest[optional]',
     )
     # Interpret extra arguments as options for pytest.

poetry.lock (generated)

@@ -6,6 +6,20 @@ optional = false
 python-versions = "*"
 version = "0.7.12"
 
+[[package]]
+category = "main"
+description = "A database migration tool for SQLAlchemy."
+name = "alembic"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "1.4.2"
+
+[package.dependencies]
+Mako = "*"
+SQLAlchemy = ">=1.1.0"
+python-dateutil = "*"
+python-editor = ">=0.3"
+
 [[package]]
 category = "dev"
 description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"."
@@ -562,7 +576,22 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
 version = "1.4.3"
 
 [[package]]
-category = "dev"
+category = "main"
+description = "A super-fast templating language that borrows the best ideas from the existing templating languages."
+name = "mako"
+optional = false
+python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
+version = "1.1.3"
+
+[package.dependencies]
+MarkupSafe = ">=0.9.2"
+
+[package.extras]
+babel = ["babel"]
+lingua = ["lingua"]
+
+[[package]]
+category = "main"
 description = "Safely add untrusted strings to HTML/XML markup."
 name = "markupsafe"
 optional = false
@@ -812,6 +841,28 @@ pytest = ">=4.6"
 [package.extras]
 testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"]
 
+[[package]]
+category = "dev"
+description = "py.test plugin that allows you to add environment variables."
+name = "pytest-env"
+optional = false
+python-versions = "*"
+version = "0.6.2"
+
+[package.dependencies]
+pytest = ">=2.6.0"
+
+[[package]]
+category = "main"
+description = "Extensions to the standard Python datetime module"
+name = "python-dateutil"
+optional = false
+python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7"
+version = "2.8.1"
+
+[package.dependencies]
+six = ">=1.5"
+
 [[package]]
 category = "main"
 description = "Add .env support to your django/flask apps in development and deployments"
@@ -823,6 +874,14 @@ version = "0.14.0"
 [package.extras]
 cli = ["click (>=5.0)"]
 
+[[package]]
+category = "main"
+description = "Programmatically open an editor, capture the result."
+name = "python-editor"
+optional = false
+python-versions = "*"
+version = "1.0.4"
+
 [[package]]
 category = "dev"
 description = "World timezone definitions, modern and historical"
@@ -877,7 +936,7 @@ version = "1.3.1"
 docutils = ">=0.11,<1.0"
 
 [[package]]
-category = "dev"
+category = "main"
 description = "Python 2 and 3 compatibility utilities"
 name = "six"
 optional = false
@@ -1179,7 +1238,7 @@ optional = ["pygments", "colorama"]
 tests = ["pytest", "pytest-cov", "codecov", "scikit-build", "cmake", "ninja", "pybind11"]
 
 [metadata]
-content-hash = "508cbaa3105e47cac64c68663ed8d4178ee752bf267cb24cf68264e73325e10b"
+content-hash = "3227fd9a5706b1483adc9b6cb7350515ffda05c38ab9c9a83d63594b3f4f6673"
 lock-version = "1.0"
 python-versions = "^3.8"
@@ -1188,6 +1247,9 @@ alabaster = [
     {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"},
     {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"},
 ]
+alembic = [
+    {file = "alembic-1.4.2.tar.gz", hash = "sha256:035ab00497217628bf5d0be82d664d8713ab13d37b630084da8e1f98facf4dbf"},
+]
 appdirs = [
     {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"},
     {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"},
@@ -1435,6 +1497,10 @@ lazy-object-proxy = [
     {file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"},
     {file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"},
 ]
+mako = [
+    {file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"},
+    {file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"},
+]
 markupsafe = [
     {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"},
     {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"},
@@ -1580,10 +1646,24 @@ pytest-cov = [
     {file = "pytest-cov-2.10.0.tar.gz", hash = "sha256:1a629dc9f48e53512fcbfda6b07de490c374b0c83c55ff7a1720b3fccff0ac87"},
     {file = "pytest_cov-2.10.0-py2.py3-none-any.whl", hash = "sha256:6e6d18092dce6fad667cd7020deed816f858ad3b49d5b5e2b1cc1c97a4dba65c"},
 ]
+pytest-env = [
+    {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"},
+]
+python-dateutil = [
+    {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"},
+    {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"},
+]
 python-dotenv = [
     {file = "python-dotenv-0.14.0.tar.gz", hash = "sha256:8c10c99a1b25d9a68058a1ad6f90381a62ba68230ca93966882a4dbc3bc9c33d"},
     {file = "python_dotenv-0.14.0-py2.py3-none-any.whl", hash = "sha256:c10863aee750ad720f4f43436565e4c1698798d763b63234fb5021b6c616e423"},
 ]
+python-editor = [
+    {file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"},
+    {file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"},
+    {file = "python_editor-1.0.4-py2.7.egg", hash = "sha256:ea87e17f6ec459e780e4221f295411462e0d0810858e055fc514684350a2f522"},
+    {file = "python_editor-1.0.4-py3-none-any.whl", hash = "sha256:1bf6e860a8ad52a14c3ee1252d5dc25b2030618ed80c022598f00176adc8367d"},
+    {file = "python_editor-1.0.4-py3.5.egg", hash = "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77"},
+]
 pytz = [
     {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"},
     {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"},

pyproject.toml

@@ -27,6 +27,7 @@ repository = "https://github.com/webartifex/urban-meal-delivery"
 [tool.poetry.dependencies]
 python = "^3.8"
+alembic = "^1.4.2"
 click = "^7.1.2"
 psycopg2 = "^2.8.5"  # adapter for PostgreSQL
 python-dotenv = "^0.14.0"
@@ -56,6 +57,7 @@ wemake-python-styleguide = "^0.14.1"  # flake8 plug-in
 packaging = "^20.4"  # used to test the packaged version
 pytest = "^6.0.1"
 pytest-cov = "^2.10.0"
+pytest-env = "^0.6.2"
 xdoctest = { version="^0.13.0", extras=["optional"] }
 
 # Documentation

setup.cfg

@@ -102,6 +102,20 @@ per-file-ignores =
     docs/conf.py:
         # Allow shadowing built-ins and reading __*__ variables.
         WPS125,WPS609,
+    migrations/env.py:
+        # Type annotations are not strictly enforced.
+        ANN0, ANN2,
+    migrations/versions/*.py:
+        # Type annotations are not strictly enforced.
+        ANN0, ANN2,
+        # File names of revisions are ok.
+        WPS114,WPS118,
+        # Revisions may have too many expressions.
+        WPS204,WPS213,
+        # No overuse of string constants (e.g., 'RESTRICT').
+        WPS226,
+        # Too many noqa's are ok.
+        WPS402,
     noxfile.py:
         # Type annotations are not strictly enforced.
         ANN0, ANN2,
@@ -111,7 +125,7 @@ per-file-ignores =
         WPS213,
         # No overuse of string constants (e.g., '--version').
         WPS226,
-    src/urban_meal_delivery/_config.py:
+    src/urban_meal_delivery/configuration.py:
         # Allow upper case class variables within classes.
         WPS115,
         # Numbers are normal in config files.
@@ -255,5 +269,7 @@ addopts =
     --strict-markers
 cache_dir = .cache/pytest
 console_output_style = count
+env =
+    TESTING=true
 markers =
     e2e: integration tests, incl., for example, tests touching a database
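
The new `env` option comes from the pytest-env plug-in; conceptually, it amounts to this (hypothetical) setup code running before the test session:

    # What `env = TESTING=true` boils down to, conceptually:
    import os

    os.environ['TESTING'] = 'true'  # must happen before urban_meal_delivery is imported

    import urban_meal_delivery  # the package now loads its TestingConfig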

src/urban_meal_delivery/__init__.py

@@ -9,7 +9,7 @@ Example:
 import os as _os
 from importlib import metadata as _metadata
 
-from urban_meal_delivery import _config  # noqa:WPS450
+from urban_meal_delivery import configuration as _configuration
 
 try:
@@ -26,8 +26,8 @@ else:
     __version__ = _pkg_info['version']
 
-# Little Hack: "Overwrites" the config module so that the environment is already set.
-config: _config.Config = _config.get_config(
+# Global `config` object to be used in the package.
+config: _configuration.Config = _configuration.make_config(
     'testing' if _os.getenv('TESTING') else 'production',
 )

src/urban_meal_delivery/configuration.py (renamed from _config.py)

@@ -1,11 +1,12 @@
"""Provide package-wide configuration. """Provide package-wide configuration.
This module is "protected" so that it is only used This module provides utils to create new `Config` objects
via the `config` proxy at the package's top level. on the fly, mainly for testing and migrating!
That already loads the correct configuration Within this package, use the `config` proxy at the package's top level
depending on the current environment. to access the current configuration!
""" """
import datetime import datetime
import os import os
import random import random
@ -20,8 +21,10 @@ dotenv.load_dotenv()
def random_schema_name() -> str: def random_schema_name() -> str:
"""Generate a random PostgreSQL schema name for testing.""" """Generate a random PostgreSQL schema name for testing."""
return ''.join( return 'temp_{name}'.format(
random.choice(string.ascii_lowercase) for _ in range(10) # noqa:S311 name=''.join(
(random.choice(string.ascii_lowercase) for _ in range(10)), # noqa:S311
),
) )
@ -44,6 +47,9 @@ class Config:
# The PostgreSQL schema that holds the tables with the cleaned data. # The PostgreSQL schema that holds the tables with the cleaned data.
CLEAN_SCHEMA = os.getenv('CLEAN_SCHEMA') or 'clean' CLEAN_SCHEMA = os.getenv('CLEAN_SCHEMA') or 'clean'
ALEMBIC_TABLE = 'alembic_version'
ALEMBIC_TABLE_SCHEMA = 'public'
def __repr__(self) -> str: def __repr__(self) -> str:
"""Non-literal text representation.""" """Non-literal text representation."""
return '<configuration>' return '<configuration>'
@ -68,8 +74,8 @@ class TestingConfig(Config):
CLEAN_SCHEMA = os.getenv('CLEAN_SCHEMA_TESTING') or random_schema_name() CLEAN_SCHEMA = os.getenv('CLEAN_SCHEMA_TESTING') or random_schema_name()
def get_config(env: str = 'production') -> Config: def make_config(env: str = 'production') -> Config:
"""Get the configuration for the package. """Create a new `Config` object.
Args: Args:
env: either 'production' or 'testing'; defaults to the first env: either 'production' or 'testing'; defaults to the first
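
A quick sketch of the now-public API; the values echo what the adjusted tests below assert (the letters are random per call):

    from urban_meal_delivery import configuration

    config = configuration.make_config('testing')  # or 'production'

    name = configuration.random_schema_name()
    print(name)  # e.g. 'temp_abcdefghij'
    print(len(name))  # 15 == len('temp_') + 10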

tests/conftest.py (new file)

@@ -0,0 +1,12 @@
"""Utils for testing the entire package."""
import os
from urban_meal_delivery import config
if not os.getenv('TESTING'):
raise RuntimeError('Tests must be executed with TESTING set in the environment')
if not config.TESTING:
raise RuntimeError('The testing configuration was not loaded')

tests/db/conftest.py

@@ -3,30 +3,49 @@
 import datetime
 
 import pytest
-from sqlalchemy import schema
+from alembic import command as migrations_cmd
+from alembic import config as migrations_config
 
 from urban_meal_delivery import config
 from urban_meal_delivery import db
 
 
-@pytest.fixture(scope='session')
-def db_engine():
+@pytest.fixture(scope='session', params=['all_at_once', 'sequentially'])
+def db_engine(request):
     """Create all tables given the ORM models.
 
     The tables are put into a distinct PostgreSQL schema
     that is removed after all tests are over.
 
     The engine used to do that is yielded.
+
+    There are two modes for this fixture:
+
+    - "all_at_once": build up the tables all at once with MetaData.create_all()
+    - "sequentially": build up the tables sequentially with `alembic upgrade head`
+
+    This ensures that Alembic's migration files are consistent.
     """
     engine = db.make_engine()
-    engine.execute(schema.CreateSchema(config.CLEAN_SCHEMA))
-    db.Base.metadata.create_all(engine)
+
+    if request.param == 'all_at_once':
+        engine.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};')
+        db.Base.metadata.create_all(engine)
+    else:
+        cfg = migrations_config.Config('alembic.ini')
+        migrations_cmd.upgrade(cfg, 'head')
 
     try:
         yield engine
     finally:
-        engine.execute(schema.DropSchema(config.CLEAN_SCHEMA, cascade=True))
+        engine.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;')
+
+        if request.param == 'sequentially':
+            tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}'
+            engine.execute(
+                f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};',
+            )
 
 
 @pytest.fixture
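
Any test requesting `db_engine` is now collected once per mode, e.g., as `...[all_at_once]` and `...[sequentially]`. A hypothetical example of such an "e2e" test (not part of this commit):

    import pytest
    import sqlalchemy as sa

    from urban_meal_delivery import config


    @pytest.mark.e2e
    def test_orders_table_exists(db_engine):
        """Both fixture modes must produce the same tables."""
        inspector = sa.inspect(db_engine)

        assert 'orders' in inspector.get_table_names(schema=config.CLEAN_SCHEMA)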

tests/test_config.py

@@ -2,7 +2,7 @@
 import pytest
 
-from urban_meal_delivery import _config as config_mod  # noqa:WPS450
+from urban_meal_delivery import configuration
 
 
 envs = ['production', 'testing']
@@ -11,7 +11,7 @@ envs = ['production', 'testing']
 @pytest.mark.parametrize('env', envs)
 def test_config_repr(env):
     """Config objects have the text representation '<configuration>'."""
-    config = config_mod.get_config(env)
+    config = configuration.make_config(env)
 
     assert str(config) == '<configuration>'
@@ -19,18 +19,18 @@ def test_config_repr(env):
 def test_invalid_config():
     """There are only 'production' and 'testing' configurations."""
     with pytest.raises(ValueError, match="'production' or 'testing'"):
-        config_mod.get_config('invalid')
+        configuration.make_config('invalid')
 
 
 @pytest.mark.parametrize('env', envs)
 def test_database_uri_set(env, monkeypatch):
     """Package does NOT emit warning if DATABASE_URI is set."""
     uri = 'postgresql://user:password@localhost/db'
-    monkeypatch.setattr(config_mod.ProductionConfig, 'DATABASE_URI', uri)
-    monkeypatch.setattr(config_mod.TestingConfig, 'DATABASE_URI', uri)
+    monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri)
+    monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', uri)
 
     with pytest.warns(None) as record:
-        config_mod.get_config(env)
+        configuration.make_config(env)
 
     assert len(record) == 0  # noqa:WPS441,WPS507
@@ -38,16 +38,17 @@ def test_database_uri_set(env, monkeypatch):
 @pytest.mark.parametrize('env', envs)
 def test_no_database_uri_set(env, monkeypatch):
     """Package does not work without DATABASE_URI set in the environment."""
-    monkeypatch.setattr(config_mod.ProductionConfig, 'DATABASE_URI', None)
-    monkeypatch.setattr(config_mod.TestingConfig, 'DATABASE_URI', None)
+    monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', None)
+    monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', None)
 
     with pytest.warns(UserWarning, match='no DATABASE_URI'):
-        config_mod.get_config(env)
+        configuration.make_config(env)
 
 
 def test_random_testing_schema():
-    """CLEAN_SCHEMA is randomized if not seti explicitly."""
-    result = config_mod.random_schema_name()
+    """CLEAN_SCHEMA is randomized if not set explicitly."""
+    result = configuration.random_schema_name()
 
     assert isinstance(result, str)
-    assert len(result) <= 10
+    assert result.startswith('temp_')
+    assert len(result) == 15