Solve all issues detected by PyCharm
As of September 2021, PyCharm is used to write some of the code. Its built-in code styler, linter, and type checker issued several warnings that are resolved in this commit:

+ fix spelling mistakes
+ all instance attributes must be specified explicitly in a class's __init__() method => use `functools.cached_property` for caching instead of the `hasattr()` pattern (see the sketch below)
+ make `tuple`s explicit with `(...)`
+ one test failed randomly even though the code is correct => adjust the fixture's return value (the stub for the Google Directions API)
+ reformulate the SQL statements so that PyCharm can resolve the symbols
parent 1268aba017
commit 1c19da2f70
19 changed files with 136 additions and 151 deletions
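The central refactoring replaces the manual `hasattr()` caching that PyCharm flags (because attributes such as `self._location` are never assigned in `__init__()`) with `functools.cached_property`. A minimal sketch of the before/after pattern, with simplified stand-in names rather than the repository's actual models:

    import functools


    class Address:
        """Simplified stand-in for the ORM models touched below."""

        def __init__(self, latitude: float, longitude: float) -> None:
            self.latitude = latitude
            self.longitude = longitude

        # Before: PyCharm warned that `self._location` is not defined in `__init__()`.
        #
        #     @property
        #     def location(self):
        #         if not hasattr(self, '_location'):  # noqa:WPS421
        #             self._location = (self.latitude, self.longitude)
        #         return self._location

        # After: the value is computed once on first access and cached in the
        # instance's `__dict__`, so no "hidden" instance attribute is needed.
        @functools.cached_property
        def location(self) -> tuple:
            return (self.latitude, self.longitude)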
@@ -205,7 +205,7 @@ max-complexity = 10
 max-local-variables = 8

 # Allow more than wemake-python-styleguide's 7 methods per class.
-max-methods = 12
+max-methods = 15

 # Comply with black's style.
 # Source: https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length
@@ -123,7 +123,7 @@ def make_config(env: str = 'production') -> Config:
 # the warning is only emitted if the code is not run by pytest.
 # We see the bad configuration immediately as all "db" tests fail.
 if config.DATABASE_URI is None and not os.getenv('TESTING'):
-warnings.warn('Bad configurartion: no DATABASE_URI set in the environment')
+warnings.warn('Bad configuration: no DATABASE_URI set in the environment')

 # Some functionalities require R and some packages installed.
 # To ensure isolation and reproducibility, the projects keeps the R dependencies
@@ -34,8 +34,9 @@ def gridify() -> None: # pragma: no cover note:b1f68d24

 click.echo(f' -> created {len(grid.pixels)} pixels')

-# The number of assigned addresses is the same across different `side_length`s.
-db.session.flush() # necessary for the query to work
+# Because the number of assigned addresses is the same across
+# different `side_length`s, we can take any `grid` from the `city`.
+grid = db.session.query(db.Grid).filter_by(city=city).first()
 n_assigned = (
 db.session.query(db.AddressPixelAssociation)
 .filter(db.AddressPixelAssociation.grid_id == grid.id)
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import functools
 from typing import Any

 import folium
@@ -80,9 +81,6 @@ class Address(meta.Base):
 )
 pixels = orm.relationship('AddressPixelAssociation', back_populates='address')

-# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
-# Instead, we use `hasattr()` to check for uninitialized attributes. grep:b1f68d24
-
 def __repr__(self) -> str:
 """Non-literal text representation."""
 return '<{cls}({street} in {city})>'.format(
@@ -100,7 +98,7 @@ class Address(meta.Base):
 """
 return self.id == self.primary_id

-@property
+@functools.cached_property
 def location(self) -> utils.Location:
 """The location of the address.

@@ -112,10 +110,9 @@ class Address(meta.Base):
 Implementation detail: This property is cached as none of the
 underlying attributes to calculate the value are to be changed.
 """
-if not hasattr(self, '_location'): # noqa:WPS421 note:b1f68d24
-self._location = utils.Location(self.latitude, self.longitude)
-self._location.relate_to(self.city.southwest)
-return self._location
+location = utils.Location(self.latitude, self.longitude)
+location.relate_to(self.city.southwest)
+return location

 @property
 def x(self) -> int: # noqa=WPS111
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import functools
 import itertools
 import json
 from typing import List
@@ -46,7 +47,7 @@ class DistanceMatrix(meta.Base):
 # The duration is measured in seconds.
 bicycle_duration = sa.Column(sa.Integer, nullable=True)
 # An array of latitude-longitude pairs approximating a courier's way.
-directions = sa.Column(postgresql.JSON, nullable=True)
+_directions = sa.Column('directions', postgresql.JSON, nullable=True)

 # Constraints
 __table_args__ = (
@@ -73,7 +74,7 @@ class DistanceMatrix(meta.Base):
 '0 <= air_distance AND air_distance < 20000', name='realistic_air_distance',
 ),
 sa.CheckConstraint(
-'bicycle_distance < 25000', # `.bicycle_distance` may not be negatative
+'bicycle_distance < 25000', # `.bicycle_distance` may not be negative
 name='realistic_bicycle_distance', # due to the constraint below.
 ),
 sa.CheckConstraint(
@@ -97,9 +98,6 @@ class DistanceMatrix(meta.Base):
 foreign_keys='[DistanceMatrix.second_address_id, DistanceMatrix.city_id]',
 )

-# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
-# Instead, we use `hasattr()` to check for uninitialized attributes. grep:86ffc14e
-
 @classmethod
 def from_addresses(
 cls, *addresses: db.Address, google_maps: bool = False,
@@ -114,7 +112,7 @@ class DistanceMatrix(meta.Base):
 Args:
 *addresses: to calculate the pair-wise distances for;
 must contain at least two `Address` objects
-google_maps: if `.bicylce_distance` and `.directions` should be
+google_maps: if `.bicycle_distance` and `._directions` should be
 populated with a query to the Google Maps Directions API;
 by default, only the `.air_distance` is calculated with `geopy`

@@ -130,7 +128,7 @@ class DistanceMatrix(meta.Base):
 (first, second) if first.id < second.id else (second, first)
 )

-# If there is no `DistaneMatrix` object in the database ...
+# If there is no `DistanceMatrix` object in the database ...
 distance = ( # noqa:ECE001
 db.session.query(db.DistanceMatrix)
 .filter(db.DistanceMatrix.first_address == first)
@@ -161,10 +159,10 @@ class DistanceMatrix(meta.Base):
 return distances

 def sync_with_google_maps(self) -> None:
-"""Fill in `.bicycle_distance` and `.directions` with Google Maps.
+"""Fill in `.bicycle_distance` and `._directions` with Google Maps.

-`.directions` will not contain the coordinates of `.first_address` and
-`.second_address`.
+`._directions` will NOT contain the coordinates
+of `.first_address` and `.second_address`.

 This uses the Google Maps Directions API.

@@ -207,28 +205,24 @@ class DistanceMatrix(meta.Base):
 steps.discard(self.first_address.location.lat_lng)
 steps.discard(self.second_address.location.lat_lng)

-self.directions = json.dumps(list(steps)) # noqa:WPS601
+self._directions = json.dumps(list(steps)) # noqa:WPS601

 db.session.add(self)
 db.session.commit()

-@property
+@functools.cached_property
 def path(self) -> List[utils.Location]:
 """The couriers' path from `.first_address` to `.second_address`.

 The returned `Location`s all relates to `.first_address.city.southwest`.

 Implementation detail: This property is cached as none of the
-underlying attributes (i.e., `.directions`) are to be changed.
+underlying attributes (i.e., `._directions`) are to be changed.
 """
-if not hasattr(self, '_path'): # noqa:WPS421 note:86ffc14e
-inner_points = [
-utils.Location(point[0], point[1])
-for point in json.loads(self.directions)
-]
-for point in inner_points:
-point.relate_to(self.first_address.city.southwest)
-
-self._path = inner_points
-
-return self._path
+inner_points = [
+utils.Location(*point) for point in json.loads(self._directions)
+]
+for point in inner_points:
+point.relate_to(self.first_address.city.southwest)
+
+return inner_points
@@ -2,6 +2,8 @@

 from __future__ import annotations

+import functools
+
 import folium
 import sqlalchemy as sa
 from sqlalchemy import orm
@@ -38,51 +40,39 @@ class City(meta.Base):
 addresses = orm.relationship('Address', back_populates='city')
 grids = orm.relationship('Grid', back_populates='city')

-# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
-# Instead, we use `hasattr()` to check for uninitialized attributes. grep:d334120e
+# We do not implement a `.__init__()` method and use SQLAlchemy's default.
+# The uninitialized attribute `._map` is computed on the fly. note:d334120ei

 def __repr__(self) -> str:
 """Non-literal text representation."""
 return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name)

-@property
+@functools.cached_property
 def center(self) -> utils.Location:
 """Location of the city's center.

 Implementation detail: This property is cached as none of the
 underlying attributes to calculate the value are to be changed.
 """
-if not hasattr(self, '_center'): # noqa:WPS421 note:d334120e
-self._center = utils.Location(self.center_latitude, self.center_longitude)
-return self._center
+return utils.Location(self.center_latitude, self.center_longitude)

-@property
+@functools.cached_property
 def northeast(self) -> utils.Location:
 """The city's northeast corner of the Google Maps viewport.

 Implementation detail: This property is cached as none of the
 underlying attributes to calculate the value are to be changed.
 """
-if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e
-self._northeast = utils.Location(
-self.northeast_latitude, self.northeast_longitude,
-)
+return utils.Location(self.northeast_latitude, self.northeast_longitude)

-return self._northeast
-
-@property
+@functools.cached_property
 def southwest(self) -> utils.Location:
 """The city's southwest corner of the Google Maps viewport.

 Implementation detail: This property is cached as none of the
 underlying attributes to calculate the value are to be changed.
 """
-if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e
-self._southwest = utils.Location(
-self.southwest_latitude, self.southwest_longitude,
-)
+return utils.Location(self.southwest_latitude, self.southwest_longitude)

-return self._southwest

 @property
 def total_x(self) -> int:
@@ -103,16 +93,17 @@ class City(meta.Base):
 def clear_map(self) -> City: # pragma: no cover
 """Create a new `folium.Map` object aligned with the city's viewport.

-The map is available via the `.map` property. Note that it is a
-mutable objects that is changed from various locations in the code base.
+The map is available via the `.map` property. Note that it is mutable
+and changed from various locations in the code base.

 Returns:
 self: enabling method chaining
-""" # noqa:DAR203
+""" # noqa:DAR203 note:d334120e
 self._map = folium.Map(
 location=[self.center_latitude, self.center_longitude],
 zoom_start=self.initial_zoom,
 )

 return self

 @property # pragma: no cover
@@ -221,11 +212,11 @@ class City(meta.Base):
 sa.text(
 f""" -- # noqa:S608
 SELECT DISTINCT
-zip_code
+{config.CLEAN_SCHEMA}.addresses.zip_code
 FROM
-{config.CLEAN_SCHEMA}.addresses
+{config.CLEAN_SCHEMA}.addresses AS addresses
 WHERE
-city_id = {self.id};
+{config.CLEAN_SCHEMA}.addresses.city_id = {self.id};
 """,
 ),
 )
@@ -31,7 +31,7 @@ class Forecast(meta.Base):
 model = sa.Column(sa.Unicode(length=20), nullable=False)
 # We also store the actual order counts for convenient retrieval.
 # A `UniqueConstraint` below ensures that redundant values that
-# are to be expected are consistend across rows.
+# are to be expected are consistent across rows.
 actual = sa.Column(sa.SmallInteger, nullable=False)
 # Raw `.prediction`s are stored as `float`s (possibly negative).
 # The rounding is then done on the fly if required.
@@ -157,7 +157,7 @@ class Forecast(meta.Base):
 Background: The functions in `urban_meal_delivery.forecasts.methods`
 return `pd.Dataframe`s with "start_at" (i.e., `pd.Timestamp` objects)
 values in the index and five columns "prediction", "low80", "high80",
-"low95", and "high95" with `np.float` values. The `*Model.predic()`
+"low95", and "high95" with `np.float` values. The `*Model.predict()`
 methods in `urban_meal_delivery.forecasts.models` then add an "actual"
 column. This constructor converts these results into ORM models.
 Also, the `np.float` values are cast as plain `float` ones as
@@ -2,6 +2,7 @@

 from __future__ import annotations

+import functools
 from typing import List

 import folium
@@ -68,66 +69,58 @@ class Pixel(meta.Base):
 """The area of a pixel in square kilometers."""
 return self.grid.pixel_area

-@property
+@functools.cached_property
 def northeast(self) -> utils.Location:
 """The pixel's northeast corner, relative to `.grid.city.southwest`.

 Implementation detail: This property is cached as none of the
 underlying attributes to calculate the value are to be changed.
 """
-if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e
-# The origin is the southwest corner of the `.grid.city`'s viewport.
-easting_origin = self.grid.city.southwest.easting
-northing_origin = self.grid.city.southwest.northing
+easting, northing = (
+self.grid.city.southwest.easting + ((self.n_x + 1) * self.side_length),
+self.grid.city.southwest.northing + ((self.n_y + 1) * self.side_length),
+)
+latitude, longitude = utm.to_latlon(
+easting, northing, *self.grid.city.southwest.zone_details,
+)

-# `+1` as otherwise we get the pixel's `.southwest` corner.
-easting = easting_origin + ((self.n_x + 1) * self.side_length)
-northing = northing_origin + ((self.n_y + 1) * self.side_length)
-zone, band = self.grid.city.southwest.zone_details
-latitude, longitude = utm.to_latlon(easting, northing, zone, band)
+location = utils.Location(latitude, longitude)
+location.relate_to(self.grid.city.southwest)

-self._northeast = utils.Location(latitude, longitude)
-self._northeast.relate_to(self.grid.city.southwest)
+return location

-return self._northeast
-
-@property
+@functools.cached_property
 def southwest(self) -> utils.Location:
-"""The pixel's northeast corner, relative to `.grid.city.southwest`.
+"""The pixel's southwest corner, relative to `.grid.city.southwest`.

 Implementation detail: This property is cached as none of the
 underlying attributes to calculate the value are to be changed.
 """
-if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e
-# The origin is the southwest corner of the `.grid.city`'s viewport.
-easting_origin = self.grid.city.southwest.easting
-northing_origin = self.grid.city.southwest.northing
+easting, northing = (
+self.grid.city.southwest.easting + (self.n_x * self.side_length),
+self.grid.city.southwest.northing + (self.n_y * self.side_length),
+)
+latitude, longitude = utm.to_latlon(
+easting, northing, *self.grid.city.southwest.zone_details,
+)

-easting = easting_origin + (self.n_x * self.side_length)
-northing = northing_origin + (self.n_y * self.side_length)
-zone, band = self.grid.city.southwest.zone_details
-latitude, longitude = utm.to_latlon(easting, northing, zone, band)
+location = utils.Location(latitude, longitude)
+location.relate_to(self.grid.city.southwest)

-self._southwest = utils.Location(latitude, longitude)
-self._southwest.relate_to(self.grid.city.southwest)
+return location

-return self._southwest
-
-@property
+@functools.cached_property
 def restaurants(self) -> List[db.Restaurant]: # pragma: no cover
 """Obtain all `Restaurant`s in `self`."""
-if not hasattr(self, '_restaurants'): # noqa:WPS421 note:d334120e
-self._restaurants = ( # noqa:ECE001
-db.session.query(db.Restaurant)
-.join(
-db.AddressPixelAssociation,
-db.Restaurant.address_id == db.AddressPixelAssociation.address_id,
-)
-.filter(db.AddressPixelAssociation.pixel_id == self.id)
-.all()
-)
-
-return self._restaurants
+return ( # noqa:ECE001
+db.session.query(db.Restaurant)
+.join(
+db.AddressPixelAssociation,
+db.Restaurant.address_id == db.AddressPixelAssociation.address_id,
+)
+.filter(db.AddressPixelAssociation.pixel_id == self.id)
+.all()
+)

 def clear_map(self) -> Pixel: # pragma: no cover
 """Shortcut to the `.city.clear_map()` method.

@@ -15,7 +15,7 @@ class Location: # noqa:WPS214
 - assumes earth is a sphere and models the location in 3D

 UTM:
-- the Universal Transverse Mercator sytem
+- the Universal Transverse Mercator system
 - projects WGS84 coordinates onto a 2D map
 - can be used for visualizations and calculations directly
 - distances are in meters
@@ -70,7 +70,7 @@ class Location: # noqa:WPS214
 @property
 def lat_lng(self) -> Tuple[float, float]:
 """The `.latitude` and `.longitude` as a 2-`tuple`."""
-return (self._latitude, self._longitude)
+return self._latitude, self._longitude

 @property
 def easting(self) -> int:
@@ -90,7 +90,7 @@ class Location: # noqa:WPS214
 @property
 def zone_details(self) -> Tuple[int, str]:
 """The UTM zone of the location as the zone number and the band."""
-return (self._zone, self._band)
+return self._zone, self._band

 def __eq__(self, other: object) -> bool:
 """Check if two `Location` objects are the same location."""
@@ -31,8 +31,8 @@ def predict(
 Raises:
 ValueError: if `training_ts` contains `NaN` values
 """
-# Initialize R only if necessary as it is tested only in nox's
-# "ci-tests-slow" session and "ci-tests-fast" should not fail.
+# Initialize R only if it is actually used.
+# For example, the nox session "ci-tests-fast" does not use it.
 from urban_meal_delivery import init_r # noqa:F401,WPS433

 # Re-seed R every time it is used to ensure reproducibility.
@@ -154,8 +154,8 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231
 else:
 robust = False

-# Initialize R only if necessary as it is tested only in nox's
-# "ci-tests-slow" session and "ci-tests-fast" should not fail.
+# Initialize R only if it is actually used.
+# For example, the nox session "ci-tests-fast" does not use it.
 from urban_meal_delivery import init_r # noqa:F401,WPS433

 # Re-seed R every time it is used to ensure reproducibility.
@@ -32,8 +32,8 @@ def predict(
 Raises:
 ValueError: if `training_ts` contains `NaN` values
 """
-# Initialize R only if necessary as it is tested only in nox's
-# "ci-tests-slow" session and "ci-tests-fast" should not fail.
+# Initialize R only if it is actually used.
+# For example, the nox session "ci-tests-fast" does not use it.
 from urban_meal_delivery import init_r # noqa:F401,WPS433

 # Re-seed R every time it is used to ensure reproducibility.
@@ -15,7 +15,7 @@ For the paper check:

 This sub-package is organized as follows. The `base` module defines an abstract
 `ForecastingModelABC` class that unifies how the concrete `*Model`s work.
-While the abstact `.predict()` method returns a `pd.DataFrame` (= basically,
+While the abstract `.predict()` method returns a `pd.DataFrame` (= basically,
 the result of one of the forecasting `methods`, the concrete `.make_forecast()`
 method converts the results into `Forecast` (=ORM) objects.
 Also, `.make_forecast()` implements a caching strategy where already made
@@ -23,7 +23,7 @@ Also, `.make_forecast()` implements a caching strategy where already made
 which could be a heavier computation.

 The `tactical` sub-package contains all the `*Model`s used to implement the
-UDP's predictive routing strategy.
+predictive routing strategy employed by the UDP.

 A future `planning` sub-package will contain the `*Model`s used to plan the
 `Courier`'s shifts a week ahead.
@@ -1,8 +1,8 @@
 """Forecasting `*Model`s to predict demand for tactical purposes.

 The `*Model`s in this module predict only a small number (e.g., one)
-of time steps into the near future and are used to implement the UDP's
-predictive routing strategies.
+of time steps into the near future and are used to implement the
+predictive routing strategies employed by the UDP.

 They are classified into "horizontal", "vertical", and "real-time" models
 with respect to what historic data they are trained on and how often they
@@ -51,7 +51,7 @@ class HorizontalETSModel(base.ForecastingModelABC):
 # Make `predictions` with the seasonal ETS method ("ZZZ" model).
 predictions = methods.ets.predict(
 training_ts=training_ts,
-forecast_interval=actuals_ts.index,
+forecast_interval=pd.DatetimeIndex(actuals_ts.index),
 frequency=frequency, # `== 7`, the number of weekdays
 seasonal_fit=True, # because there was no decomposition before
 )
@@ -59,7 +59,7 @@ class HorizontalETSModel(base.ForecastingModelABC):
 predictions.insert(loc=0, column='actual', value=actuals_ts)

 # Sanity checks.
-if predictions.isnull().any().any(): # pragma: no cover
+if predictions.isnull().sum().any(): # pragma: no cover
 raise RuntimeError('missing predictions in hets model')
 if predict_at not in predictions.index: # pragma: no cover
 raise RuntimeError('missing prediction for `predict_at`')
@@ -59,7 +59,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
 # Make predictions for the seasonal component by linear extrapolation.
 seasonal_predictions = methods.extrapolate_season.predict(
 training_ts=decomposed_training_ts['seasonal'],
-forecast_interval=actuals_ts.index,
+forecast_interval=pd.DatetimeIndex(actuals_ts.index),
 frequency=frequency,
 )

@@ -68,7 +68,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
 training_ts=(
 decomposed_training_ts['trend'] + decomposed_training_ts['residual']
 ),
-forecast_interval=actuals_ts.index,
+forecast_interval=pd.DatetimeIndex(actuals_ts.index),
 # Because the seasonality was taken out before,
 # the `training_ts` has, by definition, a `frequency` of `1`.
 frequency=1,
@@ -109,7 +109,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
 # Sanity checks.
 if len(predictions) != 1: # pragma: no cover
 raise RuntimeError('real-time models should predict exactly one time step')
-if predictions.isnull().any().any(): # pragma: no cover
+if predictions.isnull().sum().any(): # pragma: no cover
 raise RuntimeError('missing predictions in rtarima model')
 if predict_at not in predictions.index: # pragma: no cover
 raise RuntimeError('missing prediction for `predict_at`')
@@ -61,7 +61,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
 # Make predictions for the seasonal component by linear extrapolation.
 seasonal_predictions = methods.extrapolate_season.predict(
 training_ts=decomposed_training_ts['seasonal'],
-forecast_interval=actuals_ts.index,
+forecast_interval=pd.DatetimeIndex(actuals_ts.index),
 frequency=frequency,
 )

@@ -70,7 +70,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
 training_ts=(
 decomposed_training_ts['trend'] + decomposed_training_ts['residual']
 ),
-forecast_interval=actuals_ts.index,
+forecast_interval=pd.DatetimeIndex(actuals_ts.index),
 # Because the seasonality was taken out before,
 # the `training_ts` has, by definition, a `frequency` of `1`.
 frequency=1,
@@ -111,7 +111,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
 # Sanity checks.
 if len(predictions) <= 1: # pragma: no cover
 raise RuntimeError('vertical models should predict several time steps')
-if predictions.isnull().any().any(): # pragma: no cover
+if predictions.isnull().sum().any(): # pragma: no cover
 raise RuntimeError('missing predictions in varima model')
 if predict_at not in predictions.index: # pragma: no cover
 raise RuntimeError('missing prediction for `predict_at`')
@@ -84,41 +84,50 @@ class OrderHistory:
 pixels.pixel_id,
 DATE_TRUNC('MINUTE', orders.placed_at)
 AS placed_at_without_seconds,
-((
-EXTRACT(MINUTES FROM orders.placed_at)::INTEGER
-% {self._time_step}
-)::TEXT || ' MINUTES')::INTERVAL
-AS minutes_to_be_cut
+(
+(
+(
+EXTRACT(MINUTES FROM orders.placed_at)::INTEGER
+% {self._time_step}
+)::TEXT
+||
+' MINUTES'
+)::INTERVAL
+) AS minutes_to_be_cut
 FROM (
 SELECT
-id,
-placed_at,
-pickup_address_id
+{config.CLEAN_SCHEMA}.orders.id,
+{config.CLEAN_SCHEMA}.orders.placed_at,
+{config.CLEAN_SCHEMA}.orders.pickup_address_id
 FROM
 {config.CLEAN_SCHEMA}.orders
 INNER JOIN (
 SELECT
-id AS address_id
+{config.CLEAN_SCHEMA}.addresses.id AS address_id
 FROM
 {config.CLEAN_SCHEMA}.addresses
 WHERE
-city_id = {self._grid.city.id}
+{config.CLEAN_SCHEMA}.addresses.city_id
+= {self._grid.city.id}
 ) AS in_city
-ON orders.pickup_address_id = in_city.address_id
+ON {config.CLEAN_SCHEMA}.orders.pickup_address_id
+= in_city.address_id
 WHERE
-ad_hoc IS TRUE
+{config.CLEAN_SCHEMA}.orders.ad_hoc IS TRUE
 ) AS
 orders
 INNER JOIN (
 SELECT
-address_id,
-pixel_id
+{config.CLEAN_SCHEMA}.addresses_pixels.address_id,
+{config.CLEAN_SCHEMA}.addresses_pixels.pixel_id
 FROM
 {config.CLEAN_SCHEMA}.addresses_pixels
 WHERE
-grid_id = {self._grid.id}
+{config.CLEAN_SCHEMA}.addresses_pixels.grid_id
+= {self._grid.id}
 AND
-city_id = {self._grid.city.id} -- -> sanity check
+{config.CLEAN_SCHEMA}.addresses_pixels.city_id
+= {self._grid.city.id} -- -> sanity check
 ) AS pixels
 ON orders.pickup_address_id = pixels.address_id
 ) AS placed_at_aggregated_into_start_at
@@ -29,8 +29,8 @@ def assoc(address, another_address, make_address):
 # `.first_address` to `.second_address`.
 directions = json.dumps(
 [
-(float(addr.latitude), float(addr.longitude))
-for addr in (make_address() for _ in range(5)) # noqa:WPS335
+(float(add.latitude), float(add.longitude))
+for add in (make_address() for _ in range(5)) # noqa:WPS335
 ],
 )

@@ -40,7 +40,7 @@ def assoc(address, another_address, make_address):
 air_distance=round(air_distance),
 bicycle_distance=round(1.25 * air_distance),
 bicycle_duration=300,
-directions=directions,
+_directions=directions,
 )

@@ -327,7 +327,7 @@ class TestSyncWithGoogleMaps:
 'copyrights': 'Map data ©2021',
 'legs': [
 {
-'distance': {'text': '3.0 km', 'value': 2969},
+'distance': {'text': '3.0 km', 'value': 2999},
 'duration': {'text': '10 mins', 'value': 596},
 'end_address': '13 Place Paul et Jean Paul Avisseau, ...',
 'end_location': {'lat': 44.85540839999999, 'lng': -0.5672105},
@@ -335,7 +335,7 @@ class TestSyncWithGoogleMaps:
 'start_location': {'lat': 44.8342256, 'lng': -0.570372},
 'steps': [
 {
-'distance': {'text': '0.1 km', 'value': 108},
+'distance': {'text': '0.1 km', 'value': 138},
 'duration': {'text': '1 min', 'value': 43},
 'end_location': {
 'lat': 44.83434380000001,
@@ -569,7 +569,7 @@ class TestSyncWithGoogleMaps:
 def _fake_google_api(self, api_response, monkeypatch):
 """Patch out the call to the Google Maps Directions API."""

-def directions(self, *args, **kwargs):
+def directions(_self, *_args, **_kwargs):
 return api_response

 monkeypatch.setattr(googlemaps.Client, 'directions', directions)
@@ -579,13 +579,13 @@ class TestSyncWithGoogleMaps:
 """Call the method for a `DistanceMatrix` object without Google data."""
 assoc.bicycle_distance = None
 assoc.bicycle_duration = None
-assoc.directions = None
+assoc._directions = None

 assoc.sync_with_google_maps()

-assert assoc.bicycle_distance == 2_969
+assert assoc.bicycle_distance == 2_999
 assert assoc.bicycle_duration == 596
-assert assoc.directions is not None
+assert assoc._directions is not None

 @pytest.mark.usefixtures('_fake_google_api')
 def test_repeated_sync_instances_with_google_maps(self, db_session, assoc):
@@ -597,13 +597,13 @@ class TestSyncWithGoogleMaps:
 """
 old_distance = assoc.bicycle_distance
 old_duration = assoc.bicycle_duration
-old_directions = assoc.directions
+old_directions = assoc._directions

 assoc.sync_with_google_maps()

 assert assoc.bicycle_distance is old_distance
 assert assoc.bicycle_duration is old_duration
-assert assoc.directions is old_directions
+assert assoc._directions is old_directions

 class TestProperties: