Solve all issues detected by PyCharm
- as of September 2021, PyCharm is used to write some of the code
- PyCharm's built-in code styler, linter, and type checker issued some
  warnings that are resolved in this commit:
  + spelling mistakes
  + all instance attributes must be specified explicitly in a class's
    __init__() method
    => use `functools.cached_property` for caching
  + make `tuple`s explicit with `(...)`
  + one test failed randomly although everything is ok
    => adjust the fixture's return value (stub for Google Directions API)
  + reformulate SQL so that PyCharm can understand the symbols
This commit is contained in:
parent 1268aba017
commit 1c19da2f70
19 changed files with 136 additions and 151 deletions
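The recurring change in this commit is the caching refactoring: plain `@property` methods that stashed their result in an undeclared attribute guarded by `hasattr()` are switched to `functools.cached_property`. The following is a minimal sketch of that pattern on a simplified stand-in class (the `City` name and the coordinate values here are illustrative only, not the project's actual ORM model):

import functools


class City:
    """Simplified stand-in for the ORM models touched in this commit."""

    def __init__(self, center_latitude: float, center_longitude: float) -> None:
        self.center_latitude = center_latitude
        self.center_longitude = center_longitude

    # Before: a plain `@property` stored its result in an undeclared attribute
    # (e.g., `self._center`) guarded by `hasattr()`, which PyCharm flags because
    # `_center` is never assigned in `__init__()`.

    @functools.cached_property
    def center(self) -> tuple:
        # After: `functools.cached_property` runs this body once per instance and
        # stores the result in the instance's `__dict__` under "center", so no
        # extra private attribute is needed.
        return (self.center_latitude, self.center_longitude)


city = City(44.84, -0.57)
assert city.center is city.center  # the second access hits the cache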
@@ -205,7 +205,7 @@ max-complexity = 10
max-local-variables = 8

# Allow more than wemake-python-styleguide's 7 methods per class.
max-methods = 12
max-methods = 15

# Comply with black's style.
# Source: https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length
@@ -123,7 +123,7 @@ def make_config(env: str = 'production') -> Config:
# the warning is only emitted if the code is not run by pytest.
# We see the bad configuration immediately as all "db" tests fail.
if config.DATABASE_URI is None and not os.getenv('TESTING'):
warnings.warn('Bad configurartion: no DATABASE_URI set in the environment')
warnings.warn('Bad configuration: no DATABASE_URI set in the environment')

# Some functionalities require R and some packages installed.
# To ensure isolation and reproducibility, the projects keeps the R dependencies
@@ -34,8 +34,9 @@ def gridify() -> None: # pragma: no cover note:b1f68d24

click.echo(f' -> created {len(grid.pixels)} pixels')

# The number of assigned addresses is the same across different `side_length`s.
db.session.flush() # necessary for the query to work
# Because the number of assigned addresses is the same across
# different `side_length`s, we can take any `grid` from the `city`.
grid = db.session.query(db.Grid).filter_by(city=city).first()
n_assigned = (
db.session.query(db.AddressPixelAssociation)
.filter(db.AddressPixelAssociation.grid_id == grid.id)
@@ -2,6 +2,7 @@

from __future__ import annotations

import functools
from typing import Any

import folium

@@ -80,9 +81,6 @@ class Address(meta.Base):
)
pixels = orm.relationship('AddressPixelAssociation', back_populates='address')

# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
# Instead, we use `hasattr()` to check for uninitialized attributes. grep:b1f68d24

def __repr__(self) -> str:
"""Non-literal text representation."""
return '<{cls}({street} in {city})>'.format(

@@ -100,7 +98,7 @@ class Address(meta.Base):
"""
return self.id == self.primary_id

@property
@functools.cached_property
def location(self) -> utils.Location:
"""The location of the address.

@@ -112,10 +110,9 @@ class Address(meta.Base):
Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_location'): # noqa:WPS421 note:b1f68d24
self._location = utils.Location(self.latitude, self.longitude)
self._location.relate_to(self.city.southwest)
return self._location
location = utils.Location(self.latitude, self.longitude)
location.relate_to(self.city.southwest)
return location

@property
def x(self) -> int: # noqa=WPS111
@@ -2,6 +2,7 @@

from __future__ import annotations

import functools
import itertools
import json
from typing import List

@@ -46,7 +47,7 @@ class DistanceMatrix(meta.Base):
# The duration is measured in seconds.
bicycle_duration = sa.Column(sa.Integer, nullable=True)
# An array of latitude-longitude pairs approximating a courier's way.
directions = sa.Column(postgresql.JSON, nullable=True)
_directions = sa.Column('directions', postgresql.JSON, nullable=True)

# Constraints
__table_args__ = (

@@ -73,7 +74,7 @@ class DistanceMatrix(meta.Base):
'0 <= air_distance AND air_distance < 20000', name='realistic_air_distance',
),
sa.CheckConstraint(
'bicycle_distance < 25000', # `.bicycle_distance` may not be negatative
'bicycle_distance < 25000', # `.bicycle_distance` may not be negative
name='realistic_bicycle_distance', # due to the constraint below.
),
sa.CheckConstraint(

@@ -97,9 +98,6 @@ class DistanceMatrix(meta.Base):
foreign_keys='[DistanceMatrix.second_address_id, DistanceMatrix.city_id]',
)

# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
# Instead, we use `hasattr()` to check for uninitialized attributes. grep:86ffc14e

@classmethod
def from_addresses(
cls, *addresses: db.Address, google_maps: bool = False,

@@ -114,7 +112,7 @@ class DistanceMatrix(meta.Base):
Args:
*addresses: to calculate the pair-wise distances for;
must contain at least two `Address` objects
google_maps: if `.bicylce_distance` and `.directions` should be
google_maps: if `.bicycle_distance` and `._directions` should be
populated with a query to the Google Maps Directions API;
by default, only the `.air_distance` is calculated with `geopy`

@@ -130,7 +128,7 @@ class DistanceMatrix(meta.Base):
(first, second) if first.id < second.id else (second, first)
)

# If there is no `DistaneMatrix` object in the database ...
# If there is no `DistanceMatrix` object in the database ...
distance = ( # noqa:ECE001
db.session.query(db.DistanceMatrix)
.filter(db.DistanceMatrix.first_address == first)

@@ -161,10 +159,10 @@ class DistanceMatrix(meta.Base):
return distances

def sync_with_google_maps(self) -> None:
"""Fill in `.bicycle_distance` and `.directions` with Google Maps.
"""Fill in `.bicycle_distance` and `._directions` with Google Maps.

`.directions` will not contain the coordinates of `.first_address` and
`.second_address`.
`._directions` will NOT contain the coordinates
of `.first_address` and `.second_address`.

This uses the Google Maps Directions API.

@@ -207,28 +205,24 @@ class DistanceMatrix(meta.Base):
steps.discard(self.first_address.location.lat_lng)
steps.discard(self.second_address.location.lat_lng)

self.directions = json.dumps(list(steps)) # noqa:WPS601
self._directions = json.dumps(list(steps)) # noqa:WPS601

db.session.add(self)
db.session.commit()

@property
@functools.cached_property
def path(self) -> List[utils.Location]:
"""The couriers' path from `.first_address` to `.second_address`.

The returned `Location`s all relates to `.first_address.city.southwest`.

Implementation detail: This property is cached as none of the
underlying attributes (i.e., `.directions`) are to be changed.
underlying attributes (i.e., `._directions`) are to be changed.
"""
if not hasattr(self, '_path'): # noqa:WPS421 note:86ffc14e
inner_points = [
utils.Location(point[0], point[1])
for point in json.loads(self.directions)
utils.Location(*point) for point in json.loads(self._directions)
]
for point in inner_points:
point.relate_to(self.first_address.city.southwest)

self._path = inner_points

return self._path
return inner_points
@@ -2,6 +2,8 @@

from __future__ import annotations

import functools

import folium
import sqlalchemy as sa
from sqlalchemy import orm

@@ -38,51 +40,39 @@ class City(meta.Base):
addresses = orm.relationship('Address', back_populates='city')
grids = orm.relationship('Grid', back_populates='city')

# We do not implement a `.__init__()` method and leave that to SQLAlchemy.
# Instead, we use `hasattr()` to check for uninitialized attributes. grep:d334120e
# We do not implement a `.__init__()` method and use SQLAlchemy's default.
# The uninitialized attribute `._map` is computed on the fly. note:d334120e

def __repr__(self) -> str:
"""Non-literal text representation."""
return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name)

@property
@functools.cached_property
def center(self) -> utils.Location:
"""Location of the city's center.

Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_center'): # noqa:WPS421 note:d334120e
self._center = utils.Location(self.center_latitude, self.center_longitude)
return self._center
return utils.Location(self.center_latitude, self.center_longitude)

@property
@functools.cached_property
def northeast(self) -> utils.Location:
"""The city's northeast corner of the Google Maps viewport.

Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e
self._northeast = utils.Location(
self.northeast_latitude, self.northeast_longitude,
)
return utils.Location(self.northeast_latitude, self.northeast_longitude)

return self._northeast

@property
@functools.cached_property
def southwest(self) -> utils.Location:
"""The city's southwest corner of the Google Maps viewport.

Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e
self._southwest = utils.Location(
self.southwest_latitude, self.southwest_longitude,
)

return self._southwest
return utils.Location(self.southwest_latitude, self.southwest_longitude)

@property
def total_x(self) -> int:

@@ -103,16 +93,17 @@ class City(meta.Base):
def clear_map(self) -> City: # pragma: no cover
"""Create a new `folium.Map` object aligned with the city's viewport.

The map is available via the `.map` property. Note that it is a
mutable objects that is changed from various locations in the code base.
The map is available via the `.map` property. Note that it is mutable
and changed from various locations in the code base.

Returns:
self: enabling method chaining
""" # noqa:DAR203
""" # noqa:DAR203 note:d334120e
self._map = folium.Map(
location=[self.center_latitude, self.center_longitude],
zoom_start=self.initial_zoom,
)

return self

@property # pragma: no cover

@@ -221,11 +212,11 @@ class City(meta.Base):
sa.text(
f""" -- # noqa:S608
SELECT DISTINCT
zip_code
{config.CLEAN_SCHEMA}.addresses.zip_code
FROM
{config.CLEAN_SCHEMA}.addresses
{config.CLEAN_SCHEMA}.addresses AS addresses
WHERE
city_id = {self.id};
{config.CLEAN_SCHEMA}.addresses.city_id = {self.id};
""",
),
)
@@ -31,7 +31,7 @@ class Forecast(meta.Base):
model = sa.Column(sa.Unicode(length=20), nullable=False)
# We also store the actual order counts for convenient retrieval.
# A `UniqueConstraint` below ensures that redundant values that
# are to be expected are consistend across rows.
# are to be expected are consistent across rows.
actual = sa.Column(sa.SmallInteger, nullable=False)
# Raw `.prediction`s are stored as `float`s (possibly negative).
# The rounding is then done on the fly if required.

@@ -157,7 +157,7 @@ class Forecast(meta.Base):
Background: The functions in `urban_meal_delivery.forecasts.methods`
return `pd.Dataframe`s with "start_at" (i.e., `pd.Timestamp` objects)
values in the index and five columns "prediction", "low80", "high80",
"low95", and "high95" with `np.float` values. The `*Model.predic()`
"low95", and "high95" with `np.float` values. The `*Model.predict()`
methods in `urban_meal_delivery.forecasts.models` then add an "actual"
column. This constructor converts these results into ORM models.
Also, the `np.float` values are cast as plain `float` ones as
@@ -2,6 +2,7 @@

from __future__ import annotations

import functools
from typing import List

import folium

@@ -68,56 +69,50 @@ class Pixel(meta.Base):
"""The area of a pixel in square kilometers."""
return self.grid.pixel_area

@property
@functools.cached_property
def northeast(self) -> utils.Location:
"""The pixel's northeast corner, relative to `.grid.city.southwest`.

Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e
# The origin is the southwest corner of the `.grid.city`'s viewport.
easting_origin = self.grid.city.southwest.easting
northing_origin = self.grid.city.southwest.northing
easting, northing = (
self.grid.city.southwest.easting + ((self.n_x + 1) * self.side_length),
self.grid.city.southwest.northing + ((self.n_y + 1) * self.side_length),
)
latitude, longitude = utm.to_latlon(
easting, northing, *self.grid.city.southwest.zone_details,
)

# `+1` as otherwise we get the pixel's `.southwest` corner.
easting = easting_origin + ((self.n_x + 1) * self.side_length)
northing = northing_origin + ((self.n_y + 1) * self.side_length)
zone, band = self.grid.city.southwest.zone_details
latitude, longitude = utm.to_latlon(easting, northing, zone, band)
location = utils.Location(latitude, longitude)
location.relate_to(self.grid.city.southwest)

self._northeast = utils.Location(latitude, longitude)
self._northeast.relate_to(self.grid.city.southwest)
return location

return self._northeast

@property
@functools.cached_property
def southwest(self) -> utils.Location:
"""The pixel's northeast corner, relative to `.grid.city.southwest`.
"""The pixel's southwest corner, relative to `.grid.city.southwest`.

Implementation detail: This property is cached as none of the
underlying attributes to calculate the value are to be changed.
"""
if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e
# The origin is the southwest corner of the `.grid.city`'s viewport.
easting_origin = self.grid.city.southwest.easting
northing_origin = self.grid.city.southwest.northing
easting, northing = (
self.grid.city.southwest.easting + (self.n_x * self.side_length),
self.grid.city.southwest.northing + (self.n_y * self.side_length),
)
latitude, longitude = utm.to_latlon(
easting, northing, *self.grid.city.southwest.zone_details,
)

easting = easting_origin + (self.n_x * self.side_length)
northing = northing_origin + (self.n_y * self.side_length)
zone, band = self.grid.city.southwest.zone_details
latitude, longitude = utm.to_latlon(easting, northing, zone, band)
location = utils.Location(latitude, longitude)
location.relate_to(self.grid.city.southwest)

self._southwest = utils.Location(latitude, longitude)
self._southwest.relate_to(self.grid.city.southwest)
return location

return self._southwest

@property
@functools.cached_property
def restaurants(self) -> List[db.Restaurant]: # pragma: no cover
"""Obtain all `Restaurant`s in `self`."""
if not hasattr(self, '_restaurants'): # noqa:WPS421 note:d334120e
self._restaurants = ( # noqa:ECE001
return ( # noqa:ECE001
db.session.query(db.Restaurant)
.join(
db.AddressPixelAssociation,

@@ -127,8 +122,6 @@ class Pixel(meta.Base):
.all()
)

return self._restaurants

def clear_map(self) -> Pixel: # pragma: no cover
"""Shortcut to the `.city.clear_map()` method.
@@ -15,7 +15,7 @@ class Location: # noqa:WPS214
- assumes earth is a sphere and models the location in 3D

UTM:
- the Universal Transverse Mercator sytem
- the Universal Transverse Mercator system
- projects WGS84 coordinates onto a 2D map
- can be used for visualizations and calculations directly
- distances are in meters

@@ -70,7 +70,7 @@ class Location: # noqa:WPS214
@property
def lat_lng(self) -> Tuple[float, float]:
"""The `.latitude` and `.longitude` as a 2-`tuple`."""
return (self._latitude, self._longitude)
return self._latitude, self._longitude

@property
def easting(self) -> int:

@@ -90,7 +90,7 @@ class Location: # noqa:WPS214
@property
def zone_details(self) -> Tuple[int, str]:
"""The UTM zone of the location as the zone number and the band."""
return (self._zone, self._band)
return self._zone, self._band

def __eq__(self, other: object) -> bool:
"""Check if two `Location` objects are the same location."""
@@ -31,8 +31,8 @@ def predict(
Raises:
ValueError: if `training_ts` contains `NaN` values
"""
# Initialize R only if necessary as it is tested only in nox's
# "ci-tests-slow" session and "ci-tests-fast" should not fail.
# Initialize R only if it is actually used.
# For example, the nox session "ci-tests-fast" does not use it.
from urban_meal_delivery import init_r # noqa:F401,WPS433

# Re-seed R every time it is used to ensure reproducibility.
@@ -154,8 +154,8 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231
else:
robust = False

# Initialize R only if necessary as it is tested only in nox's
# "ci-tests-slow" session and "ci-tests-fast" should not fail.
# Initialize R only if it is actually used.
# For example, the nox session "ci-tests-fast" does not use it.
from urban_meal_delivery import init_r # noqa:F401,WPS433

# Re-seed R every time it is used to ensure reproducibility.
@@ -32,8 +32,8 @@ def predict(
Raises:
ValueError: if `training_ts` contains `NaN` values
"""
# Initialize R only if necessary as it is tested only in nox's
# "ci-tests-slow" session and "ci-tests-fast" should not fail.
# Initialize R only if it is actually used.
# For example, the nox session "ci-tests-fast" does not use it.
from urban_meal_delivery import init_r # noqa:F401,WPS433

# Re-seed R every time it is used to ensure reproducibility.
@@ -15,7 +15,7 @@ For the paper check:

This sub-package is organized as follows. The `base` module defines an abstract
`ForecastingModelABC` class that unifies how the concrete `*Model`s work.
While the abstact `.predict()` method returns a `pd.DataFrame` (= basically,
While the abstract `.predict()` method returns a `pd.DataFrame` (= basically,
the result of one of the forecasting `methods`, the concrete `.make_forecast()`
method converts the results into `Forecast` (=ORM) objects.
Also, `.make_forecast()` implements a caching strategy where already made

@@ -23,7 +23,7 @@ Also, `.make_forecast()` implements a caching strategy where already made
which could be a heavier computation.

The `tactical` sub-package contains all the `*Model`s used to implement the
UDP's predictive routing strategy.
predictive routing strategy employed by the UDP.

A future `planning` sub-package will contain the `*Model`s used to plan the
`Courier`'s shifts a week ahead.
@@ -1,8 +1,8 @@
"""Forecasting `*Model`s to predict demand for tactical purposes.

The `*Model`s in this module predict only a small number (e.g., one)
of time steps into the near future and are used to implement the UDP's
predictive routing strategies.
of time steps into the near future and are used to implement the
predictive routing strategies employed by the UDP.

They are classified into "horizontal", "vertical", and "real-time" models
with respect to what historic data they are trained on and how often they
@@ -51,7 +51,7 @@ class HorizontalETSModel(base.ForecastingModelABC):
# Make `predictions` with the seasonal ETS method ("ZZZ" model).
predictions = methods.ets.predict(
training_ts=training_ts,
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
frequency=frequency, # `== 7`, the number of weekdays
seasonal_fit=True, # because there was no decomposition before
)

@@ -59,7 +59,7 @@ class HorizontalETSModel(base.ForecastingModelABC):
predictions.insert(loc=0, column='actual', value=actuals_ts)

# Sanity checks.
if predictions.isnull().any().any(): # pragma: no cover
if predictions.isnull().sum().any(): # pragma: no cover
raise RuntimeError('missing predictions in hets model')
if predict_at not in predictions.index: # pragma: no cover
raise RuntimeError('missing prediction for `predict_at`')
@@ -59,7 +59,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
# Make predictions for the seasonal component by linear extrapolation.
seasonal_predictions = methods.extrapolate_season.predict(
training_ts=decomposed_training_ts['seasonal'],
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
frequency=frequency,
)

@@ -68,7 +68,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
training_ts=(
decomposed_training_ts['trend'] + decomposed_training_ts['residual']
),
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
# Because the seasonality was taken out before,
# the `training_ts` has, by definition, a `frequency` of `1`.
frequency=1,

@@ -109,7 +109,7 @@ class RealtimeARIMAModel(base.ForecastingModelABC):
# Sanity checks.
if len(predictions) != 1: # pragma: no cover
raise RuntimeError('real-time models should predict exactly one time step')
if predictions.isnull().any().any(): # pragma: no cover
if predictions.isnull().sum().any(): # pragma: no cover
raise RuntimeError('missing predictions in rtarima model')
if predict_at not in predictions.index: # pragma: no cover
raise RuntimeError('missing prediction for `predict_at`')
@@ -61,7 +61,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
# Make predictions for the seasonal component by linear extrapolation.
seasonal_predictions = methods.extrapolate_season.predict(
training_ts=decomposed_training_ts['seasonal'],
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
frequency=frequency,
)

@@ -70,7 +70,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
training_ts=(
decomposed_training_ts['trend'] + decomposed_training_ts['residual']
),
forecast_interval=actuals_ts.index,
forecast_interval=pd.DatetimeIndex(actuals_ts.index),
# Because the seasonality was taken out before,
# the `training_ts` has, by definition, a `frequency` of `1`.
frequency=1,

@@ -111,7 +111,7 @@ class VerticalARIMAModel(base.ForecastingModelABC):
# Sanity checks.
if len(predictions) <= 1: # pragma: no cover
raise RuntimeError('vertical models should predict several time steps')
if predictions.isnull().any().any(): # pragma: no cover
if predictions.isnull().sum().any(): # pragma: no cover
raise RuntimeError('missing predictions in varima model')
if predict_at not in predictions.index: # pragma: no cover
raise RuntimeError('missing prediction for `predict_at`')
@@ -84,41 +84,50 @@ class OrderHistory:
pixels.pixel_id,
DATE_TRUNC('MINUTE', orders.placed_at)
AS placed_at_without_seconds,
((
(
(
(
EXTRACT(MINUTES FROM orders.placed_at)::INTEGER
% {self._time_step}
)::TEXT || ' MINUTES')::INTERVAL
AS minutes_to_be_cut
)::TEXT
||
' MINUTES'
)::INTERVAL
) AS minutes_to_be_cut
FROM (
SELECT
id,
placed_at,
pickup_address_id
{config.CLEAN_SCHEMA}.orders.id,
{config.CLEAN_SCHEMA}.orders.placed_at,
{config.CLEAN_SCHEMA}.orders.pickup_address_id
FROM
{config.CLEAN_SCHEMA}.orders
INNER JOIN (
SELECT
id AS address_id
{config.CLEAN_SCHEMA}.addresses.id AS address_id
FROM
{config.CLEAN_SCHEMA}.addresses
WHERE
city_id = {self._grid.city.id}
{config.CLEAN_SCHEMA}.addresses.city_id
= {self._grid.city.id}
) AS in_city
ON orders.pickup_address_id = in_city.address_id
ON {config.CLEAN_SCHEMA}.orders.pickup_address_id
= in_city.address_id
WHERE
ad_hoc IS TRUE
{config.CLEAN_SCHEMA}.orders.ad_hoc IS TRUE
) AS
orders
INNER JOIN (
SELECT
address_id,
pixel_id
{config.CLEAN_SCHEMA}.addresses_pixels.address_id,
{config.CLEAN_SCHEMA}.addresses_pixels.pixel_id
FROM
{config.CLEAN_SCHEMA}.addresses_pixels
WHERE
grid_id = {self._grid.id}
{config.CLEAN_SCHEMA}.addresses_pixels.grid_id
= {self._grid.id}
AND
city_id = {self._grid.city.id} -- -> sanity check
{config.CLEAN_SCHEMA}.addresses_pixels.city_id
= {self._grid.city.id} -- -> sanity check
) AS pixels
ON orders.pickup_address_id = pixels.address_id
) AS placed_at_aggregated_into_start_at
@@ -29,8 +29,8 @@ def assoc(address, another_address, make_address):
# `.first_address` to `.second_address`.
directions = json.dumps(
[
(float(addr.latitude), float(addr.longitude))
for addr in (make_address() for _ in range(5)) # noqa:WPS335
(float(add.latitude), float(add.longitude))
for add in (make_address() for _ in range(5)) # noqa:WPS335
],
)

@@ -40,7 +40,7 @@ def assoc(address, another_address, make_address):
air_distance=round(air_distance),
bicycle_distance=round(1.25 * air_distance),
bicycle_duration=300,
directions=directions,
_directions=directions,
)

@@ -327,7 +327,7 @@ class TestSyncWithGoogleMaps:
'copyrights': 'Map data ©2021',
'legs': [
{
'distance': {'text': '3.0 km', 'value': 2969},
'distance': {'text': '3.0 km', 'value': 2999},
'duration': {'text': '10 mins', 'value': 596},
'end_address': '13 Place Paul et Jean Paul Avisseau, ...',
'end_location': {'lat': 44.85540839999999, 'lng': -0.5672105},

@@ -335,7 +335,7 @@ class TestSyncWithGoogleMaps:
'start_location': {'lat': 44.8342256, 'lng': -0.570372},
'steps': [
{
'distance': {'text': '0.1 km', 'value': 108},
'distance': {'text': '0.1 km', 'value': 138},
'duration': {'text': '1 min', 'value': 43},
'end_location': {
'lat': 44.83434380000001,

@@ -569,7 +569,7 @@ class TestSyncWithGoogleMaps:
def _fake_google_api(self, api_response, monkeypatch):
"""Patch out the call to the Google Maps Directions API."""

def directions(self, *args, **kwargs):
def directions(_self, *_args, **_kwargs):
return api_response

monkeypatch.setattr(googlemaps.Client, 'directions', directions)

@@ -579,13 +579,13 @@ class TestSyncWithGoogleMaps:
"""Call the method for a `DistanceMatrix` object without Google data."""
assoc.bicycle_distance = None
assoc.bicycle_duration = None
assoc.directions = None
assoc._directions = None

assoc.sync_with_google_maps()

assert assoc.bicycle_distance == 2_969
assert assoc.bicycle_distance == 2_999
assert assoc.bicycle_duration == 596
assert assoc.directions is not None
assert assoc._directions is not None

@pytest.mark.usefixtures('_fake_google_api')
def test_repeated_sync_instances_with_google_maps(self, db_session, assoc):
@@ -597,13 +597,13 @@ class TestSyncWithGoogleMaps:
"""
old_distance = assoc.bicycle_distance
old_duration = assoc.bicycle_duration
old_directions = assoc.directions
old_directions = assoc._directions

assoc.sync_with_google_maps()

assert assoc.bicycle_distance is old_distance
assert assoc.bicycle_duration is old_duration
assert assoc.directions is old_directions
assert assoc._directions is old_directions


class TestProperties: