Merge branch 'release-0.4.0' into main
Some checks failed
CI / fast (without R) (push) Has been cancelled
CI / slow (with R) (push) Has been cancelled

This commit is contained in:
Alexander Hess 2021-09-13 11:22:42 +02:00
commit f6b331883e
Signed by: alexander
GPG key ID: 344EA5AB10D868E0
36 changed files with 2494 additions and 1032 deletions

.gitignore vendored (1 line changed)
View file

@ -1,6 +1,7 @@
.cache/
**/*.egg-info/
.env
.idea/
**/.ipynb_checkpoints/
.python-version
.venv/

View file

@ -0,0 +1,96 @@
"""Add distance matrix.
Revision: #b4dd0b8903a5 at 2021-03-01 16:14:06
Revises: #8bfb928a31f8
"""
import os
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from urban_meal_delivery import configuration
revision = 'b4dd0b8903a5'
down_revision = '8bfb928a31f8'
branch_labels = None
depends_on = None
config = configuration.make_config('testing' if os.getenv('TESTING') else 'production')
def upgrade():
"""Upgrade to revision b4dd0b8903a5."""
op.create_table(
'addresses_addresses',
sa.Column('first_address_id', sa.Integer(), nullable=False),
sa.Column('second_address_id', sa.Integer(), nullable=False),
sa.Column('city_id', sa.SmallInteger(), nullable=False),
sa.Column('air_distance', sa.Integer(), nullable=False),
sa.Column('bicycle_distance', sa.Integer(), nullable=True),
sa.Column('bicycle_duration', sa.Integer(), nullable=True),
sa.Column('directions', postgresql.JSON(), nullable=True),
sa.PrimaryKeyConstraint(
'first_address_id',
'second_address_id',
name=op.f('pk_addresses_addresses'),
),
sa.ForeignKeyConstraint(
['first_address_id', 'city_id'],
[
f'{config.CLEAN_SCHEMA}.addresses.id',
f'{config.CLEAN_SCHEMA}.addresses.city_id',
],
name=op.f(
'fk_addresses_addresses_to_addresses_via_first_address_id_city_id',
),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['second_address_id', 'city_id'],
[
f'{config.CLEAN_SCHEMA}.addresses.id',
f'{config.CLEAN_SCHEMA}.addresses.city_id',
],
name=op.f(
'fk_addresses_addresses_to_addresses_via_second_address_id_city_id',
),
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.UniqueConstraint(
'first_address_id',
'second_address_id',
name=op.f('uq_addresses_addresses_on_first_address_id_second_address_id'),
),
sa.CheckConstraint(
'first_address_id < second_address_id',
name=op.f('ck_addresses_addresses_on_distances_are_symmetric_for_bicycles'),
),
sa.CheckConstraint(
'0 <= air_distance AND air_distance < 20000',
name=op.f('ck_addresses_addresses_on_realistic_air_distance'),
),
sa.CheckConstraint(
'bicycle_distance < 25000',
name=op.f('ck_addresses_addresses_on_realistic_bicycle_distance'),
),
sa.CheckConstraint(
'air_distance <= bicycle_distance',
name=op.f('ck_addresses_addresses_on_air_distance_is_shortest'),
),
sa.CheckConstraint(
'0 <= bicycle_duration AND bicycle_duration <= 3600',
name=op.f('ck_addresses_addresses_on_realistic_bicycle_travel_time'),
),
schema=config.CLEAN_SCHEMA,
)
def downgrade():
"""Downgrade to revision 8bfb928a31f8."""
op.drop_table('addresses_addresses', schema=config.CLEAN_SCHEMA)
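For reference, the new revision can also be applied and rolled back programmatically via Alembic's command API. A minimal sketch, assuming an alembic.ini at the project root (the path is an assumption, not part of this commit):

from alembic import command
from alembic.config import Config

alembic_cfg = Config('alembic.ini')

# Create the `addresses_addresses` table (revision b4dd0b8903a5) ...
command.upgrade(alembic_cfg, 'b4dd0b8903a5')

# ... and drop it again, going back to revision 8bfb928a31f8.
command.downgrade(alembic_cfg, '8bfb928a31f8')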

View file

@ -135,7 +135,6 @@ def lint(session):
'flake8',
'flake8-annotations',
'flake8-black',
'flake8-expression-complexity',
'flake8-pytest-style',
'mypy',
'wemake-python-styleguide',
@ -197,7 +196,6 @@ def test(session):
'pytest-cov',
'pytest-env',
'pytest-mock',
'pytest-randomly',
'xdoctest[optional]',
)
@ -207,7 +205,7 @@ def test(session):
# test cases that require the slow installation of R and some packages.
if session.env.get('_slow_ci_tests'):
session.run(
'pytest', '--randomly-seed=4287', '-m', 'r and not db', PYTEST_LOCATION,
'pytest', '-m', 'r and not db', PYTEST_LOCATION,
)
# In the "ci-tests-slow" session, we do not run any test tool
@ -219,7 +217,6 @@ def test(session):
# Therefore, the CI server does not measure coverage.
elif session.env.get('_fast_ci_tests'):
pytest_args = (
'--randomly-seed=4287',
'-m',
'not (db or r)',
PYTEST_LOCATION,
@ -235,7 +232,6 @@ def test(session):
'--cov-branch',
'--cov-fail-under=100',
'--cov-report=term-missing:skip-covered',
'--randomly-seed=4287',
PYTEST_LOCATION,
)

poetry.lock generated (2,023 lines changed)

File diff suppressed because it is too large.

View file

@ -9,7 +9,7 @@ target-version = ["py38"]
[tool.poetry]
name = "urban-meal-delivery"
version = "0.3.0"
version = "0.4.0"
authors = ["Alexander Hess <alexander@webartifex.biz>"]
description = "Optimizing an urban meal delivery platform"
@ -32,7 +32,10 @@ Shapely = "^1.7.1"
alembic = "^1.4.2"
click = "^7.1.2"
folium = "^0.12.1"
geopy = "^2.1.0"
googlemaps = "^4.4.2"
matplotlib = "^3.3.3"
ordered-set = "^4.0.2"
pandas = "^1.1.0"
psycopg2 = "^2.8.5" # adapter for PostgreSQL
rpy2 = "^3.4.1"
@ -69,7 +72,6 @@ isort = "^4.3.21" # TODO (isort): not ^5.5.4 due to wemake-python-styleguide
flake8 = "^3.8.3"
flake8-annotations = "^2.3.0"
flake8-black = "^0.2.1"
flake8-expression-complexity = "^0.0.8"
flake8-pytest-style = "^1.2.2"
mypy = "^0.782"
wemake-python-styleguide = "^0.14.1" # flake8 plug-in
@ -83,7 +85,6 @@ pytest = "^6.0.1"
pytest-cov = "^2.10.0"
pytest-env = "^0.6.2"
pytest-mock = "^3.5.1"
pytest-randomly = "^3.5.0"
xdoctest = { version="^0.13.0", extras=["optional"] }
# Documentation

View file

@ -72,8 +72,6 @@ select =
ANN0, ANN2, ANN3,
# flake8-black => complain if black would make changes
BLK1, BLK9,
# flake8-expression-complexity => not too many expressions at once
ECE001,
# flake8-pytest-style => enforce a consistent style with pytest
PT0,
@ -89,6 +87,8 @@ extend-ignore =
# Comply with black's style.
# Source: https://github.com/psf/black/blob/master/docs/compatible_configs.md#flake8
E203, W503, WPS348,
# Let's not do `@pytest.mark.no_cover()` instead of `@pytest.mark.no_cover`.
PT023,
# Google's Python Style Guide is not reStructuredText
# until after being processed by Sphinx Napoleon.
# Source: https://github.com/peterjc/flake8-rst-docstrings/issues/17
@ -144,6 +144,9 @@ per-file-ignores =
src/urban_meal_delivery/console/forecasts.py:
# The module is not too complex.
WPS232,
src/urban_meal_delivery/db/addresses_addresses.py:
# The module does not have too many imports.
WPS201,
src/urban_meal_delivery/db/customers.py:
# The module is not too complex.
WPS232,
@ -200,7 +203,7 @@ max-complexity = 10
max-local-variables = 8
# Allow more than wemake-python-styleguide's 7 methods per class.
max-methods = 12
max-methods = 15
# Comply with black's style.
# Source: https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length
@ -217,6 +220,7 @@ allowed-domain-names =
obj,
param,
result,
results,
value,
max-name-length = 40
# darglint
@ -265,14 +269,35 @@ single_line_exclusions = typing
[mypy]
cache_dir = .cache/mypy
# Check the interior of functions without type annotations.
check_untyped_defs = true
# Disallow generic types without explicit type parameters.
disallow_any_generics = true
# Disallow functions with incomplete type annotations.
disallow_incomplete_defs = true
# Disallow calling functions without type annotations.
disallow_untyped_calls = true
# Disallow functions without type annotations (or incomplete annotations).
disallow_untyped_defs = true
[mypy-folium.*]
ignore_missing_imports = true
[mypy-geopy.*]
ignore_missing_imports = true
[mypy-googlemaps.*]
ignore_missing_imports = true
[mypy-matplotlib.*]
ignore_missing_imports = true
[mypy-nox.*]
ignore_missing_imports = true
[mypy-numpy.*]
ignore_missing_imports = true
[mypy-ordered_set.*]
ignore_missing_imports = true
[mypy-packaging]
ignore_missing_imports = true
[mypy-pandas]
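The stricter mypy settings added above reject functions without complete type annotations. A minimal illustration, not project code, of what disallow_untyped_defs and disallow_incomplete_defs flag:

# Illustrative only; these functions are not part of the code base.
def scale(value, factor=2):  # error: function is missing type annotations
    return value * factor

def scale_annotated(value: float, factor: float = 2.0) -> float:  # accepted
    return value * factor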

View file

@ -59,11 +59,13 @@ class Config:
# Colors for the visualizations ins `folium`.
RESTAURANT_COLOR = 'red'
CUSTOMER_COLOR = 'blue'
NEUTRAL_COLOR = 'black'
# Implementation-specific settings
# --------------------------------
DATABASE_URI = os.getenv('DATABASE_URI')
GOOGLE_MAPS_API_KEY = os.getenv('GOOGLE_MAPS_API_KEY')
# The PostgreSQL schema that holds the tables with the original data.
ORIGINAL_SCHEMA = os.getenv('ORIGINAL_SCHEMA') or 'public'
@ -122,7 +124,7 @@ def make_config(env: str = 'production') -> Config:
# the warning is only emitted if the code is not run by pytest.
# We see the bad configuration immediately as all "db" tests fail.
if config.DATABASE_URI is None and not os.getenv('TESTING'):
warnings.warn('Bad configurartion: no DATABASE_URI set in the environment')
warnings.warn('Bad configuration: no DATABASE_URI set in the environment')
# Some functionalities require R and some packages installed.
# To ensure isolation and reproducibility, the projects keeps the R dependencies

View file

@ -9,10 +9,12 @@ from typing import Any, Callable
import click
def db_revision(rev: str) -> Callable: # pragma: no cover -> easy to check visually
def db_revision(
rev: str,
) -> Callable[..., Callable[..., Any]]: # pragma: no cover -> easy to check visually
"""A decorator ensuring the database is at a given revision."""
def decorator(func: Callable) -> Callable:
def decorator(func: Callable[..., Any]) -> Callable[..., Any]:
@functools.wraps(func)
def ensure(*args: Any, **kwargs: Any) -> Any: # noqa:WPS430
"""Do not execute the `func` if the revision does not match."""

View file

@ -103,8 +103,8 @@ def tactical_heuristic( # noqa:C901,WPS213,WPS216,WPS231
# Important: this check may need to be adapted once further
# commands are added that make `Forecast`s without the heuristic!
# Continue with forecasting on the day the last prediction was made ...
last_predict_at = ( # noqa:ECE001
db.session.query(func.max(db.Forecast.start_at))
last_predict_at = (
db.session.query(func.max(db.Forecast.start_at)) # noqa:WPS221
.join(db.Pixel, db.Forecast.pixel_id == db.Pixel.id)
.join(db.Grid, db.Pixel.grid_id == db.Grid.id)
.filter(db.Forecast.pixel == pixel)

View file

@ -34,8 +34,9 @@ def gridify() -> None: # pragma: no cover note:b1f68d24
click.echo(f' -> created {len(grid.pixels)} pixels')
# The number of assigned addresses is the same across different `side_length`s.
db.session.flush() # necessary for the query to work
# Because the number of assigned addresses is the same across
# different `side_length`s, we can take any `grid` from the `city`.
grid = db.session.query(db.Grid).filter_by(city=city).first()
n_assigned = (
db.session.query(db.AddressPixelAssociation)
.filter(db.AddressPixelAssociation.grid_id == grid.id)

View file

@ -1,6 +1,7 @@
"""Provide the ORM models and a connection to the database."""
from urban_meal_delivery.db.addresses import Address
from urban_meal_delivery.db.addresses_addresses import Path
from urban_meal_delivery.db.addresses_pixels import AddressPixelAssociation
from urban_meal_delivery.db.cities import City
from urban_meal_delivery.db.connection import connection

View file

@ -2,6 +2,7 @@
from __future__ import annotations
import functools
from typing import Any
import folium
@ -10,6 +11,7 @@ from sqlalchemy import orm
from sqlalchemy.dialects import postgresql
from sqlalchemy.ext import hybrid
from urban_meal_delivery import config
from urban_meal_delivery.db import meta
from urban_meal_delivery.db import utils
@ -70,9 +72,6 @@ class Address(meta.Base):
)
pixels = orm.relationship('AddressPixelAssociation', back_populates='address')
# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
# Instead, we use `hasattr()` to check for uninitialized attributes. grep:b1f68d24
def __repr__(self) -> str:
"""Non-literal text representation."""
return '<{cls}({street} in {city})>'.format(
@ -90,7 +89,7 @@ class Address(meta.Base):
"""
return self.id == self.primary_id
@property
@functools.cached_property
def location(self) -> utils.Location:
"""The location of the address.
@ -102,10 +101,9 @@ class Address(meta.Base):
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_location'): # noqa:WPS421 note:b1f68d24
self._location = utils.Location(self.latitude, self.longitude)
self._location.relate_to(self.city.southwest)
return self._location
location = utils.Location(self.latitude, self.longitude)
location.relate_to(self.city.southwest)
return location
@property
def x(self) -> int: # noqa=WPS111
@ -154,7 +152,7 @@ class Address(meta.Base):
`.city.map` for convenience in interactive usage
"""
defaults = {
'color': 'black',
'color': f'{config.NEUTRAL_COLOR}',
'popup': f'{self.street}, {self.zip_code} {self.city_name}',
}
defaults.update(kwargs)
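The switch above from the hasattr()-based caching to functools.cached_property (also applied to City and Pixel below) relies on the standard-library behavior that the value is computed once per instance and then stored on it. A minimal stdlib illustration, unrelated to the project's models:

import functools

class Example:
    @functools.cached_property
    def expensive(self) -> int:
        print('computing ...')
        return 42

obj = Example()
obj.expensive  # prints 'computing ...' and returns 42
obj.expensive  # returns the cached 42 without recomputing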

View file

@ -0,0 +1,316 @@
"""Model for the `Path` relationship between two `Address` objects."""
from __future__ import annotations
import functools
import itertools
import json
from typing import List
import folium
import googlemaps as gm
import ordered_set
import sqlalchemy as sa
from geopy import distance as geo_distance
from sqlalchemy import orm
from sqlalchemy.dialects import postgresql
from urban_meal_delivery import config
from urban_meal_delivery import db
from urban_meal_delivery.db import meta
from urban_meal_delivery.db import utils
class Path(meta.Base):
"""Path between two `Address` objects.
Models the path between two `Address` objects, including directions
for a `Courier` to get from one `Address` to another.
As the couriers are on bicycles, we model the paths as
a symmetric graph (i.e., same distance in both directions).
Implements an association pattern between `Address` and `Address`.
Further info:
https://docs.sqlalchemy.org/en/stable/orm/basic_relationships.html#association-object # noqa:E501
"""
__tablename__ = 'addresses_addresses'
# Columns
first_address_id = sa.Column(sa.Integer, primary_key=True)
second_address_id = sa.Column(sa.Integer, primary_key=True)
city_id = sa.Column(sa.SmallInteger, nullable=False)
# Distances are measured in meters.
air_distance = sa.Column(sa.Integer, nullable=False)
bicycle_distance = sa.Column(sa.Integer, nullable=True)
# The duration is measured in seconds.
bicycle_duration = sa.Column(sa.Integer, nullable=True)
# An array of latitude-longitude pairs approximating a courier's way.
_directions = sa.Column('directions', postgresql.JSON, nullable=True)
# Constraints
__table_args__ = (
# The two `Address` objects must be in the same `.city`.
sa.ForeignKeyConstraint(
['first_address_id', 'city_id'],
['addresses.id', 'addresses.city_id'],
onupdate='RESTRICT',
ondelete='RESTRICT',
),
sa.ForeignKeyConstraint(
['second_address_id', 'city_id'],
['addresses.id', 'addresses.city_id'],
onupdate='RESTRICT',
ondelete='RESTRICT',
),
# Each `Address`-`Address` pair only has one distance.
sa.UniqueConstraint('first_address_id', 'second_address_id'),
sa.CheckConstraint(
'first_address_id < second_address_id',
name='distances_are_symmetric_for_bicycles',
),
sa.CheckConstraint(
'0 <= air_distance AND air_distance < 20000', name='realistic_air_distance',
),
sa.CheckConstraint(
'bicycle_distance < 25000', # `.bicycle_distance` may not be negative
name='realistic_bicycle_distance', # due to the constraint below.
),
sa.CheckConstraint(
'air_distance <= bicycle_distance', name='air_distance_is_shortest',
),
sa.CheckConstraint(
'0 <= bicycle_duration AND bicycle_duration <= 3600',
name='realistic_bicycle_travel_time',
),
)
# Relationships
first_address = orm.relationship(
'Address', foreign_keys='[Path.first_address_id, Path.city_id]',
)
second_address = orm.relationship(
'Address',
foreign_keys='[Path.second_address_id, Path.city_id]',
overlaps='first_address',
)
@classmethod
def from_addresses(
cls, *addresses: db.Address, google_maps: bool = False,
) -> List[Path]:
"""Calculate pair-wise paths for `Address` objects.
This is the main constructor method for the class.
It handles the "sorting" of the `Address` objects by `.id`, which is
the logic that enforces the symmetric graph behind the paths.
Args:
*addresses: to calculate the pair-wise paths for;
must contain at least two `Address` objects
google_maps: if `.bicycle_distance` and `._directions` should be
populated with a query to the Google Maps Directions API;
by default, only the `.air_distance` is calculated with `geopy`
Returns:
paths
"""
paths = []
# We consider all 2-tuples of `Address`es. The symmetric graph is ...
for first, second in itertools.combinations(addresses, 2):
# ... implicitly enforced by a precedence constraint for the `.id`s.
first, second = ( # noqa:WPS211
(first, second) if first.id < second.id else (second, first)
)
# If there is no `Path` object in the database ...
path = (
db.session.query(db.Path)
.filter(db.Path.first_address == first)
.filter(db.Path.second_address == second)
.first()
)
# ... create a new one.
if path is None:
air_distance = geo_distance.great_circle(
first.location.lat_lng, second.location.lat_lng,
)
path = cls(
first_address=first,
second_address=second,
air_distance=round(air_distance.meters),
)
db.session.add(path)
db.session.commit()
paths.append(path)
if google_maps:
for path in paths: # noqa:WPS440
path.sync_with_google_maps()
return paths
@classmethod
def from_order(cls, order: db.Order, google_maps: bool = False) -> Path:
"""Calculate the path for an `Order` object.
The path goes from the `Order.pickup_address` to the `Order.delivery_address`.
Args:
order: to calculate the path for
google_maps: if `.bicycle_distance` and `._directions` should be
populated with a query to the Google Maps Directions API;
by default, only the `.air_distance` is calculated with `geopy`
Returns:
path
"""
return cls.from_addresses(
order.pickup_address, order.delivery_address, google_maps=google_maps,
)[0]
def sync_with_google_maps(self) -> None:
"""Fill in `.bicycle_distance` and `._directions` with Google Maps.
`._directions` will NOT contain the coordinates
of `.first_address` and `.second_address`.
This uses the Google Maps Directions API.
Further info:
https://developers.google.com/maps/documentation/directions
"""
# To save costs, we do not make an API call
# if we already have data from Google Maps.
if self.bicycle_distance is not None:
return
client = gm.Client(config.GOOGLE_MAPS_API_KEY)
response = client.directions(
origin=self.first_address.location.lat_lng,
destination=self.second_address.location.lat_lng,
mode='bicycling',
alternatives=False,
)
# Without "alternatives" and "waypoints", the `response` contains
# exactly one "route" that consists of exactly one "leg".
# Source: https://developers.google.com/maps/documentation/directions/get-directions#Legs # noqa:E501
route = response[0]['legs'][0]
self.bicycle_distance = route['distance']['value'] # noqa:WPS601
self.bicycle_duration = route['duration']['value'] # noqa:WPS601
# Each route consists of many "steps" that are instructions as to how to
# get from A to B. As a step's "start_location" may equal the previous step's
# "end_location", we use an `OrderedSet` to find the unique latitude-longitude
# pairs that make up the path from `.first_address` to `.second_address`.
steps = ordered_set.OrderedSet()
for step in route['steps']:
steps.add( # noqa:WPS221
(step['start_location']['lat'], step['start_location']['lng']),
)
steps.add( # noqa:WPS221
(step['end_location']['lat'], step['end_location']['lng']),
)
steps.discard(self.first_address.location.lat_lng)
steps.discard(self.second_address.location.lat_lng)
self._directions = json.dumps(list(steps)) # noqa:WPS601
db.session.add(self)
db.session.commit()
@property # pragma: no cover
def map(self) -> folium.Map: # noqa:WPS125
"""Convenience property to obtain the underlying `City.map`."""
return self.first_address.city.map
@functools.cached_property
def waypoints(self) -> List[utils.Location]:
"""The couriers' route from `.first_address` to `.second_address`.
The returned `Location`s all relate to `.first_address.city.southwest`.
Implementation detail: This property is cached as none of the
underlying attributes (i.e., `._directions`) are to be changed.
"""
points = [utils.Location(*point) for point in json.loads(self._directions)]
for point in points:
point.relate_to(self.first_address.city.southwest)
return points
def draw( # noqa:WPS211
self,
*,
reverse: bool = False,
start_tooltip: str = 'Start',
end_tooltip: str = 'End',
start_color: str = 'green',
end_color: str = 'red',
path_color: str = 'black',
) -> folium.Map: # pragma: no cover
"""Draw the `.waypoints` from `.first_address` to `.second_address`.
Args:
reverse: by default, `.first_address` is used as the start;
set to `True` to make `.second_address` the start
start_tooltip: text shown on marker at the path's start
end_tooltip: text shown on marker at the path's end
start_color: `folium` color for the path's start
end_color: `folium` color for the path's end
path_color: `folium` color along the path, which
is the line between the `.waypoints`
Returns:
`.map` for convenience in interactive usage
"""
# Without `self._directions` synced from Google Maps,
# the `.waypoints` are not available.
self.sync_with_google_maps()
# First, plot the couriers' path between the start and
# end locations, so that it is below the `folium.Circle`s.
line = folium.PolyLine(
locations=(
self.first_address.location.lat_lng,
*(point.lat_lng for point in self.waypoints),
self.second_address.location.lat_lng,
),
color=path_color,
weight=2,
)
line.add_to(self.map)
# Draw the path's start and end locations, possibly reversed,
# on top of the couriers' path.
if reverse:
start, end = self.second_address, self.first_address
else:
start, end = self.first_address, self.second_address
start.draw(
radius=5,
color=start_color,
fill_color=start_color,
fill_opacity=1,
tooltip=start_tooltip,
)
end.draw(
radius=5,
color=end_color,
fill_color=end_color,
fill_opacity=1,
tooltip=end_tooltip,
)
return self.map
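Taken together, interactive usage of the new model might look as follows. This is a sketch: it assumes a configured database session and existing Address and Order rows, the variable names are placeholders, and google_maps=True issues a billable Google Maps Directions request:

from urban_meal_delivery import db

# Pair-wise paths between several addresses; only `.air_distance` is filled in.
paths = db.Path.from_addresses(address_1, address_2, address_3)

# One path for an order, including the Google Maps query that populates
# `.bicycle_distance`, `.bicycle_duration`, and `._directions`.
path = db.Path.from_order(order, google_maps=True)

# Draw the path onto the city's `folium.Map`.
path.draw(start_tooltip='Restaurant', end_tooltip='Customer')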

View file

@ -10,7 +10,7 @@ class AddressPixelAssociation(meta.Base):
"""Association pattern between `Address` and `Pixel`.
This approach is needed here mainly because it implicitly
updates the `_city_id` and `_grid_id` columns.
updates the `city_id` and `grid_id` columns.
Further info:
https://docs.sqlalchemy.org/en/stable/orm/basic_relationships.html#association-object # noqa:E501

View file

@ -2,6 +2,8 @@
from __future__ import annotations
import functools
import folium
import sqlalchemy as sa
from sqlalchemy import orm
@ -38,51 +40,39 @@ class City(meta.Base):
addresses = orm.relationship('Address', back_populates='city')
grids = orm.relationship('Grid', back_populates='city')
# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
# Instead, we use `hasattr()` to check for uninitialized attributes. grep:d334120e
# We do not implement a `.__init__()` method and use SQLAlchemy's default.
# The uninitialized attribute `._map` is computed on the fly. note:d334120e
def __repr__(self) -> str:
"""Non-literal text representation."""
return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name)
@property
@functools.cached_property
def center(self) -> utils.Location:
"""Location of the city's center.
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_center'): # noqa:WPS421 note:d334120e
self._center = utils.Location(self.center_latitude, self.center_longitude)
return self._center
return utils.Location(self.center_latitude, self.center_longitude)
@property
@functools.cached_property
def northeast(self) -> utils.Location:
"""The city's northeast corner of the Google Maps viewport.
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e
self._northeast = utils.Location(
self.northeast_latitude, self.northeast_longitude,
)
return utils.Location(self.northeast_latitude, self.northeast_longitude)
return self._northeast
@property
@functools.cached_property
def southwest(self) -> utils.Location:
"""The city's southwest corner of the Google Maps viewport.
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e
self._southwest = utils.Location(
self.southwest_latitude, self.southwest_longitude,
)
return self._southwest
return utils.Location(self.southwest_latitude, self.southwest_longitude)
@property
def total_x(self) -> int:
@ -103,16 +93,17 @@ class City(meta.Base):
def clear_map(self) -> City: # pragma: no cover
"""Create a new `folium.Map` object aligned with the city's viewport.
The map is available via the `.map` property. Note that it is a
mutable objects that is changed from various locations in the code base.
The map is available via the `.map` property. Note that it is mutable
and changed from various locations in the code base.
Returns:
self: enabling method chaining
""" # noqa:DAR203
""" # noqa:DAR203 note:d334120e
self._map = folium.Map(
location=[self.center_latitude, self.center_longitude],
zoom_start=self.initial_zoom,
)
return self
@property # pragma: no cover
@ -138,7 +129,7 @@ class City(meta.Base):
`.map` for convenience in interactive usage
"""
# Obtain all primary `Address`es in the city that host `Restaurant`s.
addresses = ( # noqa:ECE001
addresses = (
db.session.query(db.Address)
.filter(
db.Address.id.in_(
@ -155,7 +146,7 @@ class City(meta.Base):
for address in addresses:
# Show the restaurant's name if there is only one.
# Otherwise, list all the restaurants' ID's.
restaurants = ( # noqa:ECE001
restaurants = (
db.session.query(db.Restaurant)
.join(db.Address, db.Restaurant.address_id == db.Address.id)
.filter(db.Address.primary_id == address.id)
@ -170,7 +161,7 @@ class City(meta.Base):
if order_counts:
# Calculate the number of orders for ALL restaurants ...
n_orders = ( # noqa:ECE001
n_orders = (
db.session.query(db.Order.id)
.join(db.Address, db.Order.pickup_address_id == db.Address.id)
.filter(db.Address.primary_id == address.id)
@ -221,11 +212,11 @@ class City(meta.Base):
sa.text(
f""" -- # noqa:S608
SELECT DISTINCT
zip_code
{config.CLEAN_SCHEMA}.addresses.zip_code
FROM
{config.CLEAN_SCHEMA}.addresses
{config.CLEAN_SCHEMA}.addresses AS addresses
WHERE
city_id = {self.id};
{config.CLEAN_SCHEMA}.addresses.city_id = {self.id};
""",
),
)

View file

@ -63,15 +63,18 @@ class Customer(meta.Base):
# Obtain all primary `Address`es where
# at least one delivery was made to `self`.
delivery_addresses = ( # noqa:ECE001
delivery_addresses = (
db.session.query(db.Address)
.filter(
db.Address.id.in_(
row.primary_id
for row in (
db.session.query(db.Address.primary_id) # noqa:WPS221
.join(db.Order, db.Address.id == db.Order.delivery_address_id)
.filter(db.Order.customer_id == self.id)
.distinct()
.all(),
.all()
)
),
)
.all()
@ -79,7 +82,7 @@ class Customer(meta.Base):
for address in delivery_addresses:
if order_counts:
n_orders = ( # noqa:ECE001
n_orders = (
db.session.query(db.Order)
.join(db.Address, db.Order.delivery_address_id == db.Address.id)
.filter(db.Order.customer_id == self.id)
@ -111,7 +114,7 @@ class Customer(meta.Base):
)
if restaurants:
pickup_addresses = ( # noqa:ECE001
pickup_addresses = (
db.session.query(db.Address)
.filter(
db.Address.id.in_(
@ -129,7 +132,7 @@ class Customer(meta.Base):
# Show the restaurant's name if there is only one.
# Otherwise, list all the restaurants' ID's.
# We cannot show the `Order.restaurant.name` due to the aggregation.
restaurants = ( # noqa:ECE001
restaurants = (
db.session.query(db.Restaurant)
.join(db.Address, db.Restaurant.address_id == db.Address.id)
.filter(db.Address.primary_id == address.id) # noqa:WPS441
@ -145,7 +148,7 @@ class Customer(meta.Base):
)
if order_counts:
n_orders = ( # noqa:ECE001
n_orders = (
db.session.query(db.Order)
.join(db.Address, db.Order.pickup_address_id == db.Address.id)
.filter(db.Order.customer_id == self.id)

View file

@ -31,7 +31,7 @@ class Forecast(meta.Base):
model = sa.Column(sa.Unicode(length=20), nullable=False)
# We also store the actual order counts for convenient retrieval.
# A `UniqueConstraint` below ensures that redundant values that
# are to be expected are consistend across rows.
# are to be expected are consistent across rows.
actual = sa.Column(sa.SmallInteger, nullable=False)
# Raw `.prediction`s are stored as `float`s (possibly negative).
# The rounding is then done on the fly if required.
@ -157,7 +157,7 @@ class Forecast(meta.Base):
Background: The functions in `urban_meal_delivery.forecasts.methods`
return `pd.Dataframe`s with "start_at" (i.e., `pd.Timestamp` objects)
values in the index and five columns "prediction", "low80", "high80",
"low95", and "high95" with `np.float` values. The `*Model.predic()`
"low95", and "high95" with `np.float` values. The `*Model.predict()`
methods in `urban_meal_delivery.forecasts.models` then add an "actual"
column. This constructor converts these results into ORM models.
Also, the `np.float` values are cast as plain `float` ones as

View file

@ -76,7 +76,7 @@ class Grid(meta.Base):
# `Pixel`s grouped by `.n_x`-`.n_y` coordinates.
pixels = {}
pickup_addresses = ( # noqa:ECE:001
pickup_addresses = (
db.session.query(db.Address)
.join(db.Order, db.Address.id == db.Order.pickup_address_id)
.filter(db.Address.city == city)

View file

@ -2,10 +2,13 @@
import datetime
import folium
import sqlalchemy as sa
from sqlalchemy import orm
from sqlalchemy.dialects import postgresql
from urban_meal_delivery import config
from urban_meal_delivery import db
from urban_meal_delivery.db import meta
@ -524,3 +527,36 @@ class Order(meta.Base): # noqa:WPS214
return '<{cls}(#{order_id})>'.format(
cls=self.__class__.__name__, order_id=self.id,
)
def draw(self) -> folium.Map: # pragma: no cover
"""Draw the `.waypoints` from `.pickup_address` to `.delivery_address`.
Important: Do not put this in an automated script as a method call
triggers an API call to the Google Maps API and may result in costs.
Returns:
`...city.map` for convenience in interactive usage
"""
path = db.Path.from_order(self)
restaurant_tooltip = f'{self.restaurant.name} (#{self.restaurant.id})'
customer_tooltip = f'Customer #{self.customer.id}'
# Because the underlying distance matrix is symmetric (i.e., a DB constraint),
# we must check if the `.pickup_address` is the couriers' `Path`'s start.
if path.first_address is self.pickup_address:
reverse = False
start_tooltip, end_tooltip = restaurant_tooltip, customer_tooltip
else:
reverse = True
start_tooltip, end_tooltip = customer_tooltip, restaurant_tooltip
# This triggers `Path.sync_with_google_maps()` behind the scenes.
return path.draw(
reverse=reverse,
start_tooltip=start_tooltip,
end_tooltip=end_tooltip,
start_color=config.RESTAURANT_COLOR,
end_color=config.CUSTOMER_COLOR,
path_color=config.NEUTRAL_COLOR,
)

View file

@ -2,6 +2,7 @@
from __future__ import annotations
import functools
from typing import List
import folium
@ -68,56 +69,50 @@ class Pixel(meta.Base):
"""The area of a pixel in square kilometers."""
return self.grid.pixel_area
@property
@functools.cached_property
def northeast(self) -> utils.Location:
"""The pixel's northeast corner, relative to `.grid.city.southwest`.
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e
# The origin is the southwest corner of the `.grid.city`'s viewport.
easting_origin = self.grid.city.southwest.easting
northing_origin = self.grid.city.southwest.northing
easting, northing = (
self.grid.city.southwest.easting + ((self.n_x + 1) * self.side_length),
self.grid.city.southwest.northing + ((self.n_y + 1) * self.side_length),
)
latitude, longitude = utm.to_latlon(
easting, northing, *self.grid.city.southwest.zone_details,
)
# `+1` as otherwise we get the pixel's `.southwest` corner.
easting = easting_origin + ((self.n_x + 1) * self.side_length)
northing = northing_origin + ((self.n_y + 1) * self.side_length)
zone, band = self.grid.city.southwest.zone_details
latitude, longitude = utm.to_latlon(easting, northing, zone, band)
location = utils.Location(latitude, longitude)
location.relate_to(self.grid.city.southwest)
self._northeast = utils.Location(latitude, longitude)
self._northeast.relate_to(self.grid.city.southwest)
return location
return self._northeast
@property
@functools.cached_property
def southwest(self) -> utils.Location:
"""The pixel's northeast corner, relative to `.grid.city.southwest`.
"""The pixel's southwest corner, relative to `.grid.city.southwest`.
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e
# The origin is the southwest corner of the `.grid.city`'s viewport.
easting_origin = self.grid.city.southwest.easting
northing_origin = self.grid.city.southwest.northing
easting, northing = (
self.grid.city.southwest.easting + (self.n_x * self.side_length),
self.grid.city.southwest.northing + (self.n_y * self.side_length),
)
latitude, longitude = utm.to_latlon(
easting, northing, *self.grid.city.southwest.zone_details,
)
easting = easting_origin + (self.n_x * self.side_length)
northing = northing_origin + (self.n_y * self.side_length)
zone, band = self.grid.city.southwest.zone_details
latitude, longitude = utm.to_latlon(easting, northing, zone, band)
location = utils.Location(latitude, longitude)
location.relate_to(self.grid.city.southwest)
self._southwest = utils.Location(latitude, longitude)
self._southwest.relate_to(self.grid.city.southwest)
return location
return self._southwest
@property
@functools.cached_property
def restaurants(self) -> List[db.Restaurant]: # pragma: no cover
"""Obtain all `Restaurant`s in `self`."""
if not hasattr(self, '_restaurants'): # noqa:WPS421 note:d334120e
self._restaurants = ( # noqa:ECE001
return (
db.session.query(db.Restaurant)
.join(
db.AddressPixelAssociation,
@ -127,8 +122,6 @@ class Pixel(meta.Base):
.all()
)
return self._restaurants
def clear_map(self) -> Pixel: # pragma: no cover
"""Shortcut to the `.city.clear_map()` method.
@ -182,7 +175,7 @@ class Pixel(meta.Base):
if restaurants:
# Obtain all primary `Address`es in the city that host `Restaurant`s
# and are in the `self` `Pixel`.
addresses = ( # noqa:ECE001
addresses = (
db.session.query(db.Address)
.filter(
db.Address.id.in_(
@ -208,7 +201,7 @@ class Pixel(meta.Base):
for address in addresses:
# Show the restaurant's name if there is only one.
# Otherwise, list all the restaurants' ID's.
restaurants = ( # noqa:ECE001
restaurants = (
db.session.query(db.Restaurant)
.join(db.Address, db.Restaurant.address_id == db.Address.id)
.filter(db.Address.primary_id == address.id)
@ -225,7 +218,7 @@ class Pixel(meta.Base):
if order_counts:
# Calculate the number of orders for ALL restaurants ...
n_orders = ( # noqa:ECE001
n_orders = (
db.session.query(db.Order.id)
.join(db.Address, db.Order.pickup_address_id == db.Address.id)
.filter(db.Address.primary_id == address.id)
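The corner computations above boil down to offsetting the city's southwest UTM origin by whole pixel side lengths and converting back to latitude-longitude. A minimal sketch with the utm package; the origin, side length, and pixel coordinates are made-up values:

import utm

side_length = 1000  # meters
n_x, n_y = 2, 3     # the pixel's coordinates within the grid

# The city's southwest corner serves as the UTM origin.
easting_origin, northing_origin, zone, band = utm.from_latlon(44.83, -0.57)

# The pixel's northeast corner: offset by `(n + 1) * side_length` and convert back.
latitude, longitude = utm.to_latlon(
    easting_origin + ((n_x + 1) * side_length),
    northing_origin + ((n_y + 1) * side_length),
    zone,
    band,
)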

View file

@ -45,7 +45,11 @@ class Restaurant(meta.Base):
# Relationships
address = orm.relationship('Address', back_populates='restaurants')
orders = orm.relationship('Order', back_populates='restaurant')
orders = orm.relationship(
'Order',
back_populates='restaurant',
overlaps='orders_picked_up,pickup_address',
)
def __repr__(self) -> str:
"""Non-literal text representation."""
@ -83,15 +87,20 @@ class Restaurant(meta.Base):
if customers:
# Obtain all primary `Address`es in the city that
# received at least one delivery from `self`.
delivery_addresses = ( # noqa:ECE001
delivery_addresses = (
db.session.query(db.Address)
.filter(
db.Address.id.in_(
row.primary_id
for row in (
db.session.query(db.Address.primary_id) # noqa:WPS221
.join(db.Order, db.Address.id == db.Order.delivery_address_id)
.join(
db.Order, db.Address.id == db.Order.delivery_address_id,
)
.filter(db.Order.restaurant_id == self.id)
.distinct()
.all(),
.all()
)
),
)
.all()
@ -99,7 +108,7 @@ class Restaurant(meta.Base):
for address in delivery_addresses:
if order_counts:
n_orders = ( # noqa:ECE001
n_orders = (
db.session.query(db.Order)
.join(db.Address, db.Order.delivery_address_id == db.Address.id)
.filter(db.Order.restaurant_id == self.id)

View file

@ -7,7 +7,7 @@ from typing import Optional, Tuple
import utm
class Location:
class Location: # noqa:WPS214
"""A location represented in WGS84 and UTM coordinates.
WGS84:
@ -15,7 +15,7 @@ class Location:
- assumes earth is a sphere and models the location in 3D
UTM:
- the Universal Transverse Mercator sytem
- the Universal Transverse Mercator system
- projects WGS84 coordinates onto a 2D map
- can be used for visualizations and calculations directly
- distances are in meters
@ -67,6 +67,11 @@ class Location:
"""
return self._longitude
@property
def lat_lng(self) -> Tuple[float, float]:
"""The `.latitude` and `.longitude` as a 2-`tuple`."""
return self._latitude, self._longitude
@property
def easting(self) -> int:
"""The easting of the location in meters (UTM)."""
@ -85,7 +90,7 @@ class Location:
@property
def zone_details(self) -> Tuple[int, str]:
"""The UTM zone of the location as the zone number and the band."""
return (self._zone, self._band)
return self._zone, self._band
def __eq__(self, other: object) -> bool:
"""Check if two `Location` objects are the same location."""

View file

@ -31,8 +31,8 @@ def predict(
Raises:
ValueError: if `training_ts` contains `NaN` values
"""
# Initialize R only if necessary as it is tested only in nox's
# "ci-tests-slow" session and "ci-tests-fast" should not fail.
# Initialize R only if it is actually used.
# For example, the nox session "ci-tests-fast" does not use it.
from urban_meal_delivery import init_r # noqa:F401,WPS433
# Re-seed R every time it is used to ensure reproducibility.

View file

@ -154,8 +154,8 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231
else:
robust = False
# Initialize R only if necessary as it is tested only in nox's
# "ci-tests-slow" session and "ci-tests-fast" should not fail.
# Initialize R only if it is actually used.
# For example, the nox session "ci-tests-fast" does not use it.
from urban_meal_delivery import init_r # noqa:F401,WPS433
# Re-seed R every time it is used to ensure reproducibility.

View file

@ -32,8 +32,8 @@ def predict(
Raises:
ValueError: if `training_ts` contains `NaN` values
"""
# Initialize R only if necessary as it is tested only in nox's
# "ci-tests-slow" session and "ci-tests-fast" should not fail.
# Initialize R only if it is actually used.
# For example, the nox session "ci-tests-fast" does not use it.
from urban_meal_delivery import init_r # noqa:F401,WPS433
# Re-seed R every time it is used to ensure reproducibility.

View file

@ -15,7 +15,7 @@ For the paper check:
This sub-package is organized as follows. The `base` module defines an abstract
`ForecastingModelABC` class that unifies how the concrete `*Model`s work.
While the abstact `.predict()` method returns a `pd.DataFrame` (= basically,
While the abstract `.predict()` method returns a `pd.DataFrame` (= basically,
the result of one of the forecasting `methods`, the concrete `.make_forecast()`
method converts the results into `Forecast` (=ORM) objects.
Also, `.make_forecast()` implements a caching strategy where already made
@ -23,7 +23,7 @@ Also, `.make_forecast()` implements a caching strategy where already made
which could be a heavier computation.
The `tactical` sub-package contains all the `*Model`s used to implement the
UDP's predictive routing strategy.
predictive routing strategy employed by the UDP.
A future `planning` sub-package will contain the `*Model`s used to plan the
`Courier`'s shifts a week ahead.

View file

@ -75,7 +75,7 @@ class ForecastingModelABC(abc.ABC):
# noqa:DAR401 RuntimeError
"""
if ( # noqa:WPS337
cached_forecast := db.session.query(db.Forecast) # noqa:ECE001,WPS221
cached_forecast := db.session.query(db.Forecast) # noqa:WPS221
.filter_by(pixel=pixel)
.filter_by(start_at=predict_at)
.filter_by(time_step=self._order_history.time_step)

View file

@ -1,8 +1,8 @@
"""Forecasting `*Model`s to predict demand for tactical purposes.
The `*Model`s in this module predict only a small number (e.g., one)
of time steps into the near future and are used to implement the UDP's
predictive routing strategies.
of time steps into the near future and are used to implement the
predictive routing strategies employed by the UDP.
They are classified into "horizontal", "vertical", and "real-time" models
with respect to what historic data they are trained on and how often they

View file

@ -51,7 +51,7 @@ class HorizontalETSModel(base.ForecastingModelABC):
# Make `predictions` with the seasonal ETS method ("ZZZ" model).
predictions = methods.ets.predict(
training_ts=training_ts,
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
frequency=frequency, # `== 7`, the number of weekdays
seasonal_fit=True, # because there was no decomposition before
)
@ -59,7 +59,7 @@ class HorizontalETSModel(base.ForecastingModelABC):
predictions.insert(loc=0, column='actual', value=actuals_ts)
# Sanity checks.
if predictions.isnull().any().any(): # pragma: no cover
if predictions.isnull().sum().any(): # pragma: no cover
raise RuntimeError('missing predictions in hets model')
if predict_at not in predictions.index: # pragma: no cover
raise RuntimeError('missing prediction for `predict_at`')

View file

@ -59,7 +59,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
# Make predictions for the seasonal component by linear extrapolation.
seasonal_predictions = methods.extrapolate_season.predict(
training_ts=decomposed_training_ts['seasonal'],
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
frequency=frequency,
)
@ -68,7 +68,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
training_ts=(
decomposed_training_ts['trend'] + decomposed_training_ts['residual']
),
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
# Because the seasonality was taken out before,
# the `training_ts` has, by definition, a `frequency` of `1`.
frequency=1,
@ -109,7 +109,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
# Sanity checks.
if len(predictions) != 1: # pragma: no cover
raise RuntimeError('real-time models should predict exactly one time step')
if predictions.isnull().any().any(): # pragma: no cover
if predictions.isnull().sum().any(): # pragma: no cover
raise RuntimeError('missing predictions in rtarima model')
if predict_at not in predictions.index: # pragma: no cover
raise RuntimeError('missing prediction for `predict_at`')

View file

@ -61,7 +61,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
# Make predictions for the seasonal component by linear extrapolation.
seasonal_predictions = methods.extrapolate_season.predict(
training_ts=decomposed_training_ts['seasonal'],
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
frequency=frequency,
)
@ -70,7 +70,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
training_ts=(
decomposed_training_ts['trend'] + decomposed_training_ts['residual']
),
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
# Because the seasonality was taken out before,
# the `training_ts` has, by definition, a `frequency` of `1`.
frequency=1,
@ -111,7 +111,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
# Sanity checks.
if len(predictions) <= 1: # pragma: no cover
raise RuntimeError('vertical models should predict several time steps')
if predictions.isnull().any().any(): # pragma: no cover
if predictions.isnull().sum().any(): # pragma: no cover
raise RuntimeError('missing predictions in varima model')
if predict_at not in predictions.index: # pragma: no cover
raise RuntimeError('missing prediction for `predict_at`')

View file

@ -84,41 +84,50 @@ class OrderHistory:
pixels.pixel_id,
DATE_TRUNC('MINUTE', orders.placed_at)
AS placed_at_without_seconds,
((
(
(
(
EXTRACT(MINUTES FROM orders.placed_at)::INTEGER
% {self._time_step}
)::TEXT || ' MINUTES')::INTERVAL
AS minutes_to_be_cut
)::TEXT
||
' MINUTES'
)::INTERVAL
) AS minutes_to_be_cut
FROM (
SELECT
id,
placed_at,
pickup_address_id
{config.CLEAN_SCHEMA}.orders.id,
{config.CLEAN_SCHEMA}.orders.placed_at,
{config.CLEAN_SCHEMA}.orders.pickup_address_id
FROM
{config.CLEAN_SCHEMA}.orders
INNER JOIN (
SELECT
id AS address_id
{config.CLEAN_SCHEMA}.addresses.id AS address_id
FROM
{config.CLEAN_SCHEMA}.addresses
WHERE
city_id = {self._grid.city.id}
{config.CLEAN_SCHEMA}.addresses.city_id
= {self._grid.city.id}
) AS in_city
ON orders.pickup_address_id = in_city.address_id
ON {config.CLEAN_SCHEMA}.orders.pickup_address_id
= in_city.address_id
WHERE
ad_hoc IS TRUE
{config.CLEAN_SCHEMA}.orders.ad_hoc IS TRUE
) AS
orders
INNER JOIN (
SELECT
address_id,
pixel_id
{config.CLEAN_SCHEMA}.addresses_pixels.address_id,
{config.CLEAN_SCHEMA}.addresses_pixels.pixel_id
FROM
{config.CLEAN_SCHEMA}.addresses_pixels
WHERE
grid_id = {self._grid.id}
{config.CLEAN_SCHEMA}.addresses_pixels.grid_id
= {self._grid.id}
AND
city_id = {self._grid.city.id} -- -> sanity check
{config.CLEAN_SCHEMA}.addresses_pixels.city_id
= {self._grid.city.id} -- -> sanity check
) AS pixels
ON orders.pickup_address_id = pixels.address_id
) AS placed_at_aggregated_into_start_at
@ -544,7 +553,7 @@ class OrderHistory:
# For now, we only make forecasts with 7 and 8 weeks
# as the training horizon (note:4f79e8fa).
if train_horizon == 7 or train_horizon == 8:
if train_horizon in {7, 8}:
if add >= 25: # = "high demand"
return models.HorizontalETSModel(order_history=self)
elif add >= 10: # = "medium demand"

View file

@ -5,6 +5,7 @@ in the CLI layer need access to the database.
"""
import os
import warnings
import pytest
import sqlalchemy as sa
@ -94,6 +95,8 @@ def db_session(db_connection):
finally:
session.close()
with warnings.catch_warnings(record=True):
transaction.rollback()
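For context, warnings.catch_warnings(record=True) collects warnings raised inside the block instead of emitting them, which keeps the teardown's rollback quiet. A minimal stdlib illustration, unrelated to the fixture:

import warnings

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter('always')
    warnings.warn('example warning')

assert len(caught) == 1  # the warning was recorded instead of being shown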

View file

@ -0,0 +1,682 @@
"""Test the ORM's `Path` model."""
import json
import googlemaps
import pytest
import sqlalchemy as sqla
from geopy import distance
from sqlalchemy import exc as sa_exc
from urban_meal_delivery import db
from urban_meal_delivery.db import utils
@pytest.fixture
def another_address(make_address):
"""Another `Address` object in the `city`."""
return make_address()
@pytest.fixture
def path(address, another_address, make_address):
"""A `Path` from `address` to `another_address`."""
air_distance = distance.great_circle( # noqa:WPS317
address.location.lat_lng, another_address.location.lat_lng,
).meters
# We put 5 latitude-longitude pairs as the "path" from
# `.first_address` to `.second_address`.
directions = json.dumps(
[
(float(add.latitude), float(add.longitude))
for add in (make_address() for _ in range(5)) # noqa:WPS335
],
)
return db.Path(
first_address=address,
second_address=another_address,
air_distance=round(air_distance),
bicycle_distance=round(1.25 * air_distance),
bicycle_duration=300,
_directions=directions,
)
class TestSpecialMethods:
"""Test special methods in `Path`."""
def test_create_an_address_address_association(self, path):
"""Test instantiation of a new `Path` object."""
assert path is not None
@pytest.mark.db
@pytest.mark.no_cover
class TestConstraints:
"""Test the database constraints defined in `Path`."""
def test_insert_into_database(self, db_session, path):
"""Insert an instance into the (empty) database."""
assert db_session.query(db.Path).count() == 0
db_session.add(path)
db_session.commit()
assert db_session.query(db.Path).count() == 1
def test_delete_a_referenced_first_address(self, db_session, path):
"""Remove a record that is referenced with a FK."""
db_session.add(path)
db_session.commit()
# Must delete without ORM as otherwise an UPDATE statement is emitted.
stmt = sqla.delete(db.Address).where(db.Address.id == path.first_address.id)
with pytest.raises(
sa_exc.IntegrityError,
match='fk_addresses_addresses_to_addresses_via_first_address', # shortened
):
db_session.execute(stmt)
def test_delete_a_referenced_second_address(self, db_session, path):
"""Remove a record that is referenced with a FK."""
db_session.add(path)
db_session.commit()
# Must delete without ORM as otherwise an UPDATE statement is emitted.
stmt = sqla.delete(db.Address).where(db.Address.id == path.second_address.id)
with pytest.raises(
sa_exc.IntegrityError,
match='fk_addresses_addresses_to_addresses_via_second_address', # shortened
):
db_session.execute(stmt)
def test_reference_an_invalid_city(self, db_session, address, another_address):
"""Insert a record with an invalid foreign key."""
db_session.add(address)
db_session.add(another_address)
db_session.commit()
# Must insert without ORM as otherwise SQLAlchemy figures out
# that something is wrong before any query is sent to the database.
stmt = sqla.insert(db.Path).values(
first_address_id=address.id,
second_address_id=another_address.id,
city_id=999,
air_distance=123,
)
with pytest.raises(
sa_exc.IntegrityError,
match='fk_addresses_addresses_to_addresses_via_first_address', # shortened
):
db_session.execute(stmt)
def test_redundant_addresses(self, db_session, path):
"""Insert a record that violates a unique constraint."""
db_session.add(path)
db_session.commit()
# Must insert without ORM as otherwise SQLAlchemy figures out
# that something is wrong before any query is sent to the database.
stmt = sqla.insert(db.Path).values(
first_address_id=path.first_address.id,
second_address_id=path.second_address.id,
city_id=path.city_id,
air_distance=path.air_distance,
)
with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'):
db_session.execute(stmt)
def test_symmetric_addresses(self, db_session, path):
"""Insert a record that violates a check constraint."""
db_session.add(path)
db_session.commit()
another_path = db.Path(
first_address=path.second_address,
second_address=path.first_address,
air_distance=path.air_distance,
)
db_session.add(another_path)
with pytest.raises(
sa_exc.IntegrityError,
match='ck_addresses_addresses_on_distances_are_symmetric_for_bicycles',
):
db_session.commit()
def test_negative_air_distance(self, db_session, path):
"""Insert an instance with invalid data."""
path.air_distance = -1
db_session.add(path)
with pytest.raises(sa_exc.IntegrityError, match='realistic_air_distance'):
db_session.commit()
def test_air_distance_too_large(self, db_session, path):
"""Insert an instance with invalid data."""
path.air_distance = 20_000
path.bicycle_distance = 21_000
db_session.add(path)
with pytest.raises(sa_exc.IntegrityError, match='realistic_air_distance'):
db_session.commit()
def test_bicycle_distance_too_large(self, db_session, path):
"""Insert an instance with invalid data."""
path.bicycle_distance = 25_000
db_session.add(path)
with pytest.raises(sa_exc.IntegrityError, match='realistic_bicycle_distance'):
db_session.commit()
def test_air_distance_shorter_than_bicycle_distance(self, db_session, path):
"""Insert an instance with invalid data."""
path.bicycle_distance = round(0.75 * path.air_distance)
db_session.add(path)
with pytest.raises(sa_exc.IntegrityError, match='air_distance_is_shortest'):
db_session.commit()
@pytest.mark.parametrize('duration', [-1, 3601])
def test_unrealistic_bicycle_travel_time(self, db_session, path, duration):
"""Insert an instance with invalid data."""
path.bicycle_duration = duration
db_session.add(path)
with pytest.raises(
sa_exc.IntegrityError, match='realistic_bicycle_travel_time',
):
db_session.commit()
@pytest.mark.db
class TestFromAddresses:
"""Test the alternative constructor `Path.from_addresses()`.
Includes tests for the convenience method `Path.from_order()`,
which redirects to `Path.from_addresses()`.
"""
@pytest.fixture
def _prepare_db(self, db_session, address):
"""Put the `address` into the database.
`Address`es must be in the database as otherwise the `.city_id` column
cannot be resolved in `Path.from_addresses()`.
"""
db_session.add(address)
@pytest.mark.usefixtures('_prepare_db')
def test_make_path_instance(
self, db_session, address, another_address,
):
"""Test instantiation of a new `Path` instance."""
assert db_session.query(db.Path).count() == 0
db.Path.from_addresses(address, another_address)
assert db_session.query(db.Path).count() == 1
@pytest.mark.usefixtures('_prepare_db')
def test_make_the_same_path_instance_twice(
self, db_session, address, another_address,
):
"""Test instantiation of a new `Path` instance."""
assert db_session.query(db.Path).count() == 0
db.Path.from_addresses(address, another_address)
assert db_session.query(db.Path).count() == 1
db.Path.from_addresses(another_address, address)
assert db_session.query(db.Path).count() == 1
@pytest.mark.usefixtures('_prepare_db')
def test_structure_of_return_value(self, db_session, address, another_address):
"""Test instantiation of a new `Path` instance."""
results = db.Path.from_addresses(address, another_address)
assert isinstance(results, list)
@pytest.mark.usefixtures('_prepare_db')
def test_instances_must_have_air_distance(
self, db_session, address, another_address,
):
"""Test instantiation of a new `Path` instance."""
paths = db.Path.from_addresses(address, another_address)
result = paths[0]
assert result.air_distance is not None
@pytest.mark.usefixtures('_prepare_db')
def test_do_not_sync_instances_with_google_maps(
self, db_session, address, another_address,
):
"""Test instantiation of a new `Path` instance."""
paths = db.Path.from_addresses(address, another_address)
result = paths[0]
assert result.bicycle_distance is None
assert result.bicycle_duration is None
@pytest.mark.usefixtures('_prepare_db')
def test_sync_instances_with_google_maps(
self, db_session, address, another_address, monkeypatch,
):
"""Test instantiation of a new `Path` instance."""
def sync(self):
self.bicycle_distance = 1.25 * self.air_distance
self.bicycle_duration = 300
monkeypatch.setattr(db.Path, 'sync_with_google_maps', sync)
paths = db.Path.from_addresses(address, another_address, google_maps=True)
result = paths[0]
assert result.bicycle_distance is not None
assert result.bicycle_duration is not None
@pytest.mark.usefixtures('_prepare_db')
def test_one_path_for_two_addresses(self, db_session, address, another_address):
"""Test instantiation of a new `Path` instance."""
result = len(db.Path.from_addresses(address, another_address))
assert result == 1
@pytest.mark.usefixtures('_prepare_db')
def test_three_paths_for_three_addresses(self, db_session, make_address):
"""Test instantiation of a new `Path` instance."""
result = len(db.Path.from_addresses(*[make_address() for _ in range(3)]))
assert result == 3
@pytest.mark.usefixtures('_prepare_db')
def test_six_paths_for_four_addresses(self, db_session, make_address):
"""Test instantiation of a new `Path` instance."""
result = len(db.Path.from_addresses(*[make_address() for _ in range(4)]))
assert result == 6
# Tests for the `Path.from_order()` convenience method.
@pytest.mark.usefixtures('_prepare_db')
def test_make_path_instance_from_order(
self, db_session, order,
):
"""Test instantiation of a new `Path` instance."""
assert db_session.query(db.Path).count() == 0
db.Path.from_order(order)
assert db_session.query(db.Path).count() == 1
@pytest.mark.usefixtures('_prepare_db')
def test_make_the_same_path_instance_from_order_twice(
self, db_session, order,
):
"""Test instantiation of a new `Path` instance."""
assert db_session.query(db.Path).count() == 0
db.Path.from_order(order)
assert db_session.query(db.Path).count() == 1
db.Path.from_order(order)
assert db_session.query(db.Path).count() == 1
@pytest.mark.usefixtures('_prepare_db')
def test_structure_of_return_value_from_order(self, db_session, order):
"""Test instantiation of a new `Path` instance."""
result = db.Path.from_order(order)
assert isinstance(result, db.Path)
@pytest.mark.usefixtures('_prepare_db')
def test_sync_instance_from_order_with_google_maps(
self, db_session, order, monkeypatch,
):
"""Test instantiation of a new `Path` instance."""
def sync(self):
self.bicycle_distance = 1.25 * self.air_distance
self.bicycle_duration = 300
monkeypatch.setattr(db.Path, 'sync_with_google_maps', sync)
result = db.Path.from_order(order, google_maps=True)
assert result.bicycle_distance is not None
assert result.bicycle_duration is not None
@pytest.mark.db
class TestSyncWithGoogleMaps:
"""Test the `Path.sync_with_google_maps()` method."""
@pytest.fixture
def api_response(self):
"""A typical (shortened) response by the Google Maps Directions API."""
return [ # noqa:ECE001
{
'bounds': {
'northeast': {'lat': 44.8554284, 'lng': -0.5652398},
'southwest': {'lat': 44.8342256, 'lng': -0.5708206},
},
'copyrights': 'Map data ©2021',
'legs': [
{
'distance': {'text': '3.0 km', 'value': 2999},
'duration': {'text': '10 mins', 'value': 596},
'end_address': '13 Place Paul et Jean Paul Avisseau, ...',
'end_location': {'lat': 44.85540839999999, 'lng': -0.5672105},
'start_address': '59 Rue Saint-François, 33000 Bordeaux, ...',
'start_location': {'lat': 44.8342256, 'lng': -0.570372},
'steps': [
{
'distance': {'text': '0.1 km', 'value': 138},
'duration': {'text': '1 min', 'value': 43},
'end_location': {
'lat': 44.83434380000001,
'lng': -0.5690105999999999,
},
'html_instructions': 'Head <b>east</b> on <b> ...',
'polyline': {'points': '}tspGxknBKcDIkB'},
'start_location': {'lat': 44.8342256, 'lng': -0.57032},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.1 km', 'value': 115},
'duration': {'text': '1 min', 'value': 22},
'end_location': {'lat': 44.8353651, 'lng': -0.569199},
'html_instructions': 'Turn <b>left</b> onto <b> ...',
'maneuver': 'turn-left',
'polyline': {'points': 'suspGhcnBc@JE@_@DiAHA?w@F'},
'start_location': {
'lat': 44.83434380000001,
'lng': -0.5690105999999999,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.3 km', 'value': 268},
'duration': {'text': '1 min', 'value': 59},
'end_location': {'lat': 44.8362675, 'lng': -0.5660914},
'html_instructions': 'Turn <b>right</b> onto <b> ...',
'maneuver': 'turn-right',
'polyline': {
'points': 'a|spGndnBEYEQKi@Mi@Is@CYCOE]CQIq@ ...',
},
'start_location': {'lat': 44.8353651, 'lng': -0.56919},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.1 km', 'value': 95},
'duration': {'text': '1 min', 'value': 29},
'end_location': {'lat': 44.8368458, 'lng': -0.5652398},
'html_instructions': 'Slight <b>left</b> onto <b> ...',
'maneuver': 'turn-slight-left',
'polyline': {
'points': 'uatpG`qmBg@aAGM?ACE[k@CICGACEGCCAAEAG?',
},
'start_location': {
'lat': 44.8362675,
'lng': -0.5660914,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '23 m', 'value': 23},
'duration': {'text': '1 min', 'value': 4},
'end_location': {'lat': 44.83697, 'lng': -0.5654425},
'html_instructions': 'Slight <b>left</b> to stay ...',
'maneuver': 'turn-slight-left',
'polyline': {
'points': 'ietpGvkmBA@C?CBCBEHA@AB?B?B?B?@',
},
'start_location': {
'lat': 44.8368458,
'lng': -0.5652398,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.2 km', 'value': 185},
'duration': {'text': '1 min', 'value': 23},
'end_location': {'lat': 44.8382126, 'lng': -0.5669969},
'html_instructions': 'Take the ramp to <b>Le Lac ...',
'polyline': {
'points': 'aftpG~lmBY^[^sAdB]`@CDKLQRa@h@A@IZ',
},
'start_location': {'lat': 44.83697, 'lng': -0.5654425},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.3 km', 'value': 253},
'duration': {'text': '1 min', 'value': 43},
'end_location': {'lat': 44.840163, 'lng': -0.5686525},
'html_instructions': 'Merge onto <b>Quai Richelieu</b>',
'maneuver': 'merge',
'polyline': {
'points': 'ymtpGvvmBeAbAe@b@_@ZUN[To@f@e@^A?g ...',
},
'start_location': {
'lat': 44.8382126,
'lng': -0.5669969,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.1 km', 'value': 110},
'duration': {'text': '1 min', 'value': 21},
'end_location': {'lat': 44.841079, 'lng': -0.5691835},
'html_instructions': 'Continue onto <b>Quai de la ...',
'polyline': {'points': '_ztpG`anBUNQLULUJOHMFKDWN'},
'start_location': {'lat': 44.840163, 'lng': -0.56865},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.3 km', 'value': 262},
'duration': {'text': '1 min', 'value': 44},
'end_location': {'lat': 44.8433375, 'lng': -0.5701161},
'html_instructions': 'Continue onto <b>Quai du ...',
'polyline': {
'points': 'w_upGjdnBeBl@sBn@gA^[JIBc@Nk@Nk@L',
},
'start_location': {'lat': 44.841079, 'lng': -0.56915},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.6 km', 'value': 550},
'duration': {'text': '2 mins', 'value': 97},
'end_location': {
'lat': 44.84822339999999,
'lng': -0.5705307,
},
'html_instructions': 'Continue onto <b>Quai ...',
'polyline': {
'points': '{mupGfjnBYFI@IBaAPUD{AX}@NK@]Fe@H ...',
},
'start_location': {
'lat': 44.8433375,
'lng': -0.5701161,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.5 km', 'value': 508},
'duration': {'text': '1 min', 'value': 87},
'end_location': {'lat': 44.8523224, 'lng': -0.5678223},
'html_instructions': 'Continue onto ...',
'polyline': {
'points': 'klvpGxlnBWEUGWGSGMEOEOE[KMEQGIA] ...',
},
'start_location': {
'lat': 44.84822339999999,
'lng': -0.5705307,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '28 m', 'value': 28},
'duration': {'text': '1 min', 'value': 45},
'end_location': {
'lat': 44.85245620000001,
'lng': -0.5681259,
},
'html_instructions': 'Turn <b>left</b> onto <b> ...',
'maneuver': 'turn-left',
'polyline': {'points': '_fwpGz{mBGLADGPCFEN'},
'start_location': {
'lat': 44.8523224,
'lng': -0.5678223,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.2 km', 'value': 176},
'duration': {'text': '1 min', 'value': 31},
'end_location': {'lat': 44.8536857, 'lng': -0.5667282},
'html_instructions': 'Turn <b>right</b> onto <b> ...',
'maneuver': 'turn-right',
'polyline': {
'points': '{fwpGx}mB_@c@mAuAOQi@m@m@y@_@c@',
},
'start_location': {
'lat': 44.85245620000001,
'lng': -0.5681259,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.2 km', 'value': 172},
'duration': {'text': '1 min', 'value': 28},
'end_location': {'lat': 44.8547766, 'lng': -0.5682825},
'html_instructions': 'Turn <b>left</b> onto <b> ... ',
'maneuver': 'turn-left',
'polyline': {'points': 'qnwpG`umBW`@UkDtF'},
'start_location': {
'lat': 44.8536857,
'lng': -0.5667282,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '0.1 km', 'value': 101},
'duration': {'text': '1 min', 'value': 17},
'end_location': {'lat': 44.8554284, 'lng': -0.5673822},
'html_instructions': 'Turn <b>right</b> onto ...',
'maneuver': 'turn-right',
'polyline': {'points': 'kuwpGv~mBa@q@cA_B[a@'},
'start_location': {
'lat': 44.8547766,
'lng': -0.5682825,
},
'travel_mode': 'BICYCLING',
},
{
'distance': {'text': '15 m', 'value': 15},
'duration': {'text': '1 min', 'value': 3},
'end_location': {
'lat': 44.85540839999999,
'lng': -0.5672105,
},
'html_instructions': 'Turn <b>right</b> onto <b> ...',
'maneuver': 'turn-right',
'polyline': {'points': 'mywpGbymBBC@C?E?C?E?EAC'},
'start_location': {
'lat': 44.8554284,
'lng': -0.5673822,
},
'travel_mode': 'BICYCLING',
},
],
'traffic_speed_entry': [],
'via_waypoint': [],
},
],
'overview_polyline': {
'points': '}tspGxknBUoGi@LcDVe@_CW{Ba@sC[eA_@} ...',
},
'summary': 'Quai des Chartrons',
'warnings': ['Bicycling directions are in beta ...'],
'waypoint_order': [],
},
]
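# The assertions below expect `sync_with_google_maps()` to read the first leg
# of the first route, roughly like this (illustrative assumption, not the
# actual implementation):
#
#     route = api_response[0]
#     leg = route['legs'][0]
#     bicycle_distance = leg['distance']['value']  # 2999 metres
#     bicycle_duration = leg['duration']['value']  # 596 seconds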
@pytest.fixture
def _fake_google_api(self, api_response, monkeypatch):
"""Patch out the call to the Google Maps Directions API."""
def directions(_self, *_args, **_kwargs):
return api_response
monkeypatch.setattr(googlemaps.Client, 'directions', directions)
@pytest.mark.usefixtures('_fake_google_api')
def test_sync_instances_with_google_maps(self, db_session, path):
"""Call the method for a `Path` object without Google data."""
path.bicycle_distance = None
path.bicycle_duration = None
path._directions = None
path.sync_with_google_maps()
assert path.bicycle_distance == 2_999
assert path.bicycle_duration == 596
assert path._directions is not None
@pytest.mark.usefixtures('_fake_google_api')
def test_repeated_sync_instances_with_google_maps(self, db_session, path):
"""Call the method for a `Path` object with Google data.
That call should immediately return without changing any data.
The `path` fixture already carries the Google data set in the test above.
"""
old_distance = path.bicycle_distance
old_duration = path.bicycle_duration
old_directions = path._directions
path.sync_with_google_maps()
assert path.bicycle_distance is old_distance
assert path.bicycle_duration is old_duration
assert path._directions is old_directions
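# The early return implied above is presumably a simple guard, e.g.
# (illustrative assumption, not the actual implementation):
#
#     def sync_with_google_maps(self):
#         if self.bicycle_distance is not None:
#             return
#         ...  # otherwise, query the Directions API and store the result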
class TestProperties:
"""Test properties in `Path`."""
def test_waypoints_structure(self, path):
"""Test `Path.waypoints` property."""
result = path.waypoints
assert isinstance(result, list)
assert isinstance(result[0], utils.Location)
def test_waypoints_content(self, path):
"""Test `Path.waypoints` property."""
result = path.waypoints
# There are 5 inner points, excluding the start and end points,
# i.e., the locations of `first_address` and `second_address`.
assert len(result) == 5
def test_waypoints_is_cached(self, path):
"""Test `Path.waypoints` property."""
result1 = path.waypoints
result2 = path.waypoints
assert result1 is result2
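# Identity (`is`) rather than equality implies the property caches its result,
# e.g. via `functools.cached_property` or a memoizing attribute (an assumption;
# the test only requires that repeated access returns the same object).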

View file

@@ -24,7 +24,7 @@ def assoc(address, pixel):
@pytest.mark.no_cover
class TestSpecialMethods:
"""Test special methods in `Pixel`."""
"""Test special methods in `AddressPixelAssociation`."""
def test_create_an_address_pixel_association(self, assoc):
"""Test instantiation of a new `AddressPixelAssociation` object."""

View file

@@ -122,6 +122,15 @@ class TestProperties:
assert result == pytest.approx(float(address.longitude))
def test_lat_lng(self, location, address):
"""Test `Location.lat_lng` property."""
result = location.lat_lng
assert result == (
pytest.approx(float(address.latitude)),
pytest.approx(float(address.longitude)),
)
def test_easting(self, location):
"""Test `Location.easting` property."""
result = location.easting