From f8fd9c83bdabbf18406615a2ef51aebadca0aa53 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 7 Nov 2020 12:51:09 +0100 Subject: [PATCH 01/72] Add submodule for demand forecasting paper --- .gitmodules | 3 +++ papers/demand-forecasting | 1 + 2 files changed, 4 insertions(+) create mode 100644 .gitmodules create mode 160000 papers/demand-forecasting diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..e01b84e --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "papers/demand-forecasting"] + path = papers/demand-forecasting + url = git@github.com:webartifex/urban-meal-delivery-demand-forecasting.git diff --git a/papers/demand-forecasting b/papers/demand-forecasting new file mode 160000 index 0000000..1184c54 --- /dev/null +++ b/papers/demand-forecasting @@ -0,0 +1 @@ +Subproject commit 1184c54377f45f6cabbf6d977b84ff85ec4da9eb From af5d54f15982656cffc16b4f1a4f191c50239917 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 7 Nov 2020 16:23:27 +0100 Subject: [PATCH 02/72] Upgrade poetry to v1.1.0 The order of keys in the poetry.lock file is changed. --- poetry.lock | 1076 +++++++++++++++++++++++++-------------------------- 1 file changed, 530 insertions(+), 546 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9fa86ac..65247f0 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,60 +1,59 @@ [[package]] -category = "dev" -description = "A configurable sidebar-enabled Sphinx theme" name = "alabaster" +version = "0.7.12" +description = "A configurable sidebar-enabled Sphinx theme" +category = "dev" optional = false python-versions = "*" -version = "0.7.12" [[package]] -category = "main" -description = "A database migration tool for SQLAlchemy." name = "alembic" +version = "1.4.3" +description = "A database migration tool for SQLAlchemy." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.3" [package.dependencies] Mako = "*" -SQLAlchemy = ">=1.1.0" python-dateutil = "*" python-editor = ">=0.3" +SQLAlchemy = ">=1.1.0" [[package]] -category = "dev" -description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." name = "appdirs" +version = "1.4.4" +description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." +category = "dev" optional = false python-versions = "*" -version = "1.4.4" [[package]] -category = "main" -description = "Disable App Nap on OS X 10.9" -marker = "sys_platform == \"darwin\" or platform_system == \"Darwin\"" name = "appnope" +version = "0.1.0" +description = "Disable App Nap on OS X 10.9" +category = "main" optional = true python-versions = "*" -version = "0.1.0" [[package]] -category = "dev" -description = "Bash tab completion for argparse" name = "argcomplete" +version = "1.12.1" +description = "Bash tab completion for argparse" +category = "dev" optional = false python-versions = "*" -version = "1.12.1" [package.extras] test = ["coverage", "flake8", "pexpect", "wheel"] [[package]] -category = "main" -description = "The secure Argon2 password hashing algorithm." name = "argon2-cffi" +version = "20.1.0" +description = "The secure Argon2 password hashing algorithm." 
+category = "main" optional = true python-versions = "*" -version = "20.1.0" [package.dependencies] cffi = ">=1.0.0" @@ -66,31 +65,31 @@ docs = ["sphinx"] tests = ["coverage (>=5.0.2)", "hypothesis", "pytest"] [[package]] -category = "dev" -description = "Read/rewrite/write Python ASTs" name = "astor" +version = "0.8.1" +description = "Read/rewrite/write Python ASTs" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "0.8.1" [[package]] -category = "dev" -description = "Pretty print the output of python stdlib `ast.parse`." name = "astpretty" +version = "2.0.0" +description = "Pretty print the output of python stdlib `ast.parse`." +category = "dev" optional = false python-versions = ">=3.6.1" -version = "2.0.0" [package.extras] typed = ["typed-ast"] [[package]] -category = "dev" -description = "An abstract syntax tree for Python with inference support." name = "astroid" +version = "2.4.2" +description = "An abstract syntax tree for Python with inference support." +category = "dev" optional = false python-versions = ">=3.5" -version = "2.4.2" [package.dependencies] lazy-object-proxy = ">=1.4.0,<1.5.0" @@ -98,29 +97,28 @@ six = ">=1.12,<2.0" wrapt = ">=1.11,<2.0" [[package]] -category = "main" -description = "Async generators and context managers for Python 3.5+" name = "async-generator" +version = "1.10" +description = "Async generators and context managers for Python 3.5+" +category = "main" optional = true python-versions = ">=3.5" -version = "1.10" [[package]] -category = "dev" -description = "Atomic file writes." -marker = "sys_platform == \"win32\"" name = "atomicwrites" +version = "1.4.0" +description = "Atomic file writes." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.0" [[package]] -category = "main" -description = "Classes Without Boilerplate" name = "attrs" +version = "20.2.0" +description = "Classes Without Boilerplate" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.2.0" [package.extras] dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] @@ -129,57 +127,57 @@ tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] -category = "dev" -description = "Removes unused imports and unused variables" name = "autoflake" +version = "1.4" +description = "Removes unused imports and unused variables" +category = "dev" optional = false python-versions = "*" -version = "1.4" [package.dependencies] pyflakes = ">=1.1.0" [[package]] -category = "dev" -description = "Internationalization utilities" name = "babel" +version = "2.8.0" +description = "Internationalization utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.8.0" [package.dependencies] pytz = ">=2015.7" [[package]] -category = "main" -description = "Specifications for callback functions passed in to an API" name = "backcall" +version = "0.2.0" +description = "Specifications for callback functions passed in to an API" +category = "main" optional = true python-versions = "*" -version = "0.2.0" [[package]] -category = "dev" -description = "Security oriented static analyser for python code." name = "bandit" +version = "1.6.2" +description = "Security oriented static analyser for python code." 
+category = "dev" optional = false python-versions = "*" -version = "1.6.2" [package.dependencies] +colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" PyYAML = ">=3.13" -colorama = ">=0.3.9" six = ">=1.10.0" stevedore = ">=1.20.0" [[package]] -category = "dev" -description = "The uncompromising code formatter." name = "black" +version = "19.10b0" +description = "The uncompromising code formatter." +category = "dev" optional = false python-versions = ">=3.6" -version = "19.10b0" [package.dependencies] appdirs = "*" @@ -194,12 +192,12 @@ typed-ast = ">=1.4.0" d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] -category = "main" -description = "An easy safelist-based HTML-sanitizing tool." name = "bleach" +version = "3.2.1" +description = "An easy safelist-based HTML-sanitizing tool." +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "3.2.1" [package.dependencies] packaging = "*" @@ -207,150 +205,149 @@ six = ">=1.9.0" webencodings = "*" [[package]] -category = "main" -description = "Python package for providing Mozilla's CA Bundle." name = "certifi" +version = "2020.6.20" +description = "Python package for providing Mozilla's CA Bundle." +category = "main" optional = false python-versions = "*" -version = "2020.6.20" [[package]] -category = "main" -description = "Foreign Function Interface for Python calling C code." name = "cffi" +version = "1.14.3" +description = "Foreign Function Interface for Python calling C code." +category = "main" optional = true python-versions = "*" -version = "1.14.3" [package.dependencies] pycparser = "*" [[package]] -category = "dev" -description = "Validate configuration and produce human readable error messages." name = "cfgv" +version = "3.2.0" +description = "Validate configuration and produce human readable error messages." 
+category = "dev" optional = false python-versions = ">=3.6.1" -version = "3.2.0" [[package]] -category = "main" -description = "Universal encoding detector for Python 2 and 3" name = "chardet" -optional = false -python-versions = "*" version = "3.0.4" - -[[package]] +description = "Universal encoding detector for Python 2 and 3" category = "main" -description = "Composable command line interface toolkit" -name = "click" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "7.1.2" - -[[package]] -category = "main" -description = "Cross-platform colored terminal text." -marker = "sys_platform == \"win32\" or platform_system == \"Windows\" or platform_system == \"Windows\"" -name = "colorama" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.4.3" - -[[package]] -category = "dev" -description = "Log formatting with colors!" -name = "colorlog" optional = false python-versions = "*" -version = "4.2.1" + +[[package]] +name = "click" +version = "7.1.2" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorama" +version = "0.4.3" +description = "Cross-platform colored terminal text." +category = "main" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" + +[[package]] +name = "colorlog" +version = "4.5.0" +description = "Log formatting with colors!" 
+category = "dev" +optional = false +python-versions = "*" [package.dependencies] -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} [[package]] -category = "dev" -description = "Code coverage measurement for Python" name = "coverage" +version = "5.3" +description = "Code coverage measurement for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "5.3" [package.extras] toml = ["toml"] [[package]] -category = "dev" -description = "A utility for ensuring Google-style docstrings stay up to date with the source code." name = "darglint" +version = "1.5.4" +description = "A utility for ensuring Google-style docstrings stay up to date with the source code." +category = "dev" optional = false python-versions = ">=3.5,<4.0" -version = "1.5.4" [[package]] -category = "main" -description = "Decorators for Humans" name = "decorator" +version = "4.4.2" +description = "Decorators for Humans" +category = "main" optional = true python-versions = ">=2.6, !=3.0.*, !=3.1.*" -version = "4.4.2" [[package]] -category = "main" -description = "XML bomb protection for Python stdlib modules" name = "defusedxml" +version = "0.6.0" +description = "XML bomb protection for Python stdlib modules" +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.6.0" [[package]] -category = "dev" -description = "Distribution utilities" name = "distlib" +version = "0.3.1" +description = "Distribution utilities" +category = "dev" optional = false python-versions = "*" -version = "0.3.1" [[package]] -category = "dev" -description = "Docutils -- Python Documentation Utilities" name = "docutils" +version = "0.16" +description = "Docutils -- Python Documentation Utilities" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.16" [[package]] -category = "main" -description = 
"Discover and load entry points from installed packages." name = "entrypoints" +version = "0.3" +description = "Discover and load entry points from installed packages." +category = "main" optional = true python-versions = ">=2.7" -version = "0.3" [[package]] -category = "dev" -description = "Removes commented-out code." name = "eradicate" -optional = false -python-versions = "*" version = "1.0" - -[[package]] +description = "Removes commented-out code." category = "dev" -description = "A platform independent file lock." -name = "filelock" optional = false python-versions = "*" -version = "3.0.12" [[package]] +name = "filelock" +version = "3.0.12" +description = "A platform independent file lock." category = "dev" -description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = "*" + +[[package]] name = "flake8" +version = "3.8.3" +description = "the modular source code checker: pep8 pyflakes and co" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "3.8.3" [package.dependencies] mccabe = ">=0.6.0,<0.7.0" @@ -358,23 +355,23 @@ pycodestyle = ">=2.6.0a1,<2.7.0" pyflakes = ">=2.2.0,<2.3.0" [[package]] -category = "dev" -description = "Flake8 Type Annotation Checks" name = "flake8-annotations" +version = "2.4.1" +description = "Flake8 Type Annotation Checks" +category = "dev" optional = false python-versions = ">=3.6.1,<4.0.0" -version = "2.4.1" [package.dependencies] flake8 = ">=3.7,<3.9" [[package]] -category = "dev" -description = "Automated security testing with bandit and flake8." name = "flake8-bandit" +version = "2.1.2" +description = "Automated security testing with bandit and flake8." 
+category = "dev" optional = false python-versions = "*" -version = "2.1.2" [package.dependencies] bandit = "*" @@ -383,93 +380,93 @@ flake8-polyfill = "*" pycodestyle = "*" [[package]] -category = "dev" -description = "flake8 plugin to call black as a code style validator" name = "flake8-black" +version = "0.2.1" +description = "flake8 plugin to call black as a code style validator" +category = "dev" optional = false python-versions = "*" -version = "0.2.1" [package.dependencies] black = "*" flake8 = ">=3.0.0" [[package]] -category = "dev" -description = "Flake8 plugin to forbid backslashes for line breaks" name = "flake8-broken-line" +version = "0.2.1" +description = "Flake8 plugin to forbid backslashes for line breaks" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.2.1" [package.dependencies] flake8 = ">=3.5,<4.0" [[package]] -category = "dev" -description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." name = "flake8-bugbear" +version = "19.8.0" +description = "A plugin for flake8 finding likely bugs and design problems in your program. Contains warnings that don't belong in pyflakes and pycodestyle." +category = "dev" optional = false python-versions = ">=3.5" -version = "19.8.0" [package.dependencies] attrs = "*" flake8 = ">=3.0.0" [[package]] -category = "dev" -description = "Flake8 lint for trailing commas." name = "flake8-commas" +version = "2.0.0" +description = "Flake8 lint for trailing commas." +category = "dev" optional = false python-versions = "*" -version = "2.0.0" [package.dependencies] flake8 = ">=2,<4.0.0" [[package]] -category = "dev" -description = "A flake8 plugin to help you write better list/set/dict comprehensions." name = "flake8-comprehensions" +version = "3.2.3" +description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
+category = "dev" optional = false python-versions = ">=3.5" -version = "3.2.3" [package.dependencies] flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" [[package]] -category = "dev" -description = "ipdb/pdb statement checker plugin for flake8" name = "flake8-debugger" +version = "3.2.1" +description = "ipdb/pdb statement checker plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "3.2.1" [package.dependencies] flake8 = ">=1.5" pycodestyle = "*" [[package]] -category = "dev" -description = "Extension for flake8 which uses pydocstyle to check docstrings" name = "flake8-docstrings" +version = "1.5.0" +description = "Extension for flake8 which uses pydocstyle to check docstrings" +category = "dev" optional = false python-versions = "*" -version = "1.5.0" [package.dependencies] flake8 = ">=3" pydocstyle = ">=2.1" [[package]] -category = "dev" -description = "Flake8 plugin to find commented out code" name = "flake8-eradicate" +version = "0.3.0" +description = "Flake8 plugin to find commented out code" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "0.3.0" [package.dependencies] attrs = "*" @@ -477,12 +474,12 @@ eradicate = ">=1.0,<2.0" flake8 = ">=3.5,<4.0" [[package]] -category = "dev" -description = "A flake8 extension that checks expressions complexity" name = "flake8-expression-complexity" +version = "0.0.8" +description = "A flake8 extension that checks expressions complexity" +category = "dev" optional = false python-versions = ">=3.6" -version = "0.0.8" [package.dependencies] astpretty = "*" @@ -490,155 +487,152 @@ flake8 = "*" setuptools = "*" [[package]] -category = "dev" -description = "flake8 plugin that integrates isort ." name = "flake8-isort" +version = "3.0.1" +description = "flake8 plugin that integrates isort ." 
+category = "dev" optional = false python-versions = "*" -version = "3.0.1" [package.dependencies] flake8 = ">=3.2.1,<4" +isort = {version = ">=4.3.5,<5", extras = ["pyproject"]} testfixtures = ">=6.8.0,<7" -[package.dependencies.isort] -extras = ["pyproject"] -version = ">=4.3.5,<5" - [package.extras] test = ["pytest (>=4.0.2,<6)"] [[package]] -category = "dev" -description = "The package provides base classes and utils for flake8 plugin writing" name = "flake8-plugin-utils" +version = "1.3.1" +description = "The package provides base classes and utils for flake8 plugin writing" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "1.3.1" [[package]] -category = "dev" -description = "Polyfill package for Flake8 plugins" name = "flake8-polyfill" +version = "1.0.2" +description = "Polyfill package for Flake8 plugins" +category = "dev" optional = false python-versions = "*" -version = "1.0.2" [package.dependencies] flake8 = "*" [[package]] -category = "dev" -description = "A flake8 plugin checking common style issues or inconsistencies with pytest-based tests." name = "flake8-pytest-style" +version = "1.3.0" +description = "A flake8 plugin checking common style issues or inconsistencies with pytest-based tests." +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = "1.3.0" [package.dependencies] flake8-plugin-utils = ">=1.3.1,<2.0.0" [[package]] -category = "dev" -description = "Flake8 lint for quotes." name = "flake8-quotes" +version = "2.1.2" +description = "Flake8 lint for quotes." 
+category = "dev" optional = false python-versions = "*" -version = "2.1.2" [package.dependencies] flake8 = "*" [[package]] -category = "dev" -description = "Python docstring reStructuredText (RST) validator" name = "flake8-rst-docstrings" +version = "0.0.12" +description = "Python docstring reStructuredText (RST) validator" +category = "dev" optional = false python-versions = "*" -version = "0.0.12" [package.dependencies] flake8 = ">=3.0.0" restructuredtext_lint = "*" [[package]] -category = "dev" -description = "string format checker, plugin for flake8" name = "flake8-string-format" +version = "0.2.3" +description = "string format checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.2.3" [package.dependencies] flake8 = "*" [[package]] -category = "dev" -description = "Git Object Database" name = "gitdb" +version = "4.0.5" +description = "Git Object Database" +category = "dev" optional = false python-versions = ">=3.4" -version = "4.0.5" [package.dependencies] smmap = ">=3.0.1,<4" [[package]] -category = "dev" -description = "Python Git Library" name = "gitpython" +version = "3.1.8" +description = "Python Git Library" +category = "dev" optional = false python-versions = ">=3.4" -version = "3.1.8" [package.dependencies] gitdb = ">=4.0.1,<5" [[package]] -category = "dev" -description = "File identification library for Python" name = "identify" +version = "1.5.5" +description = "File identification library for Python" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "1.5.5" [package.extras] license = ["editdistance"] [[package]] -category = "main" -description = "Internationalized Domain Names in Applications (IDNA)" name = "idna" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "2.10" - -[[package]] -category = "dev" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -name = "imagesize" +description = 
"Internationalized Domain Names in Applications (IDNA)" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.2.0" [[package]] +name = "imagesize" +version = "1.2.0" +description = "Getting image size from png/jpeg/jpeg2000/gif file" category = "dev" -description = "iniconfig: brain-dead simple config-ini parsing" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] name = "iniconfig" +version = "1.0.1" +description = "iniconfig: brain-dead simple config-ini parsing" +category = "dev" optional = false python-versions = "*" -version = "1.0.1" [[package]] -category = "main" -description = "IPython Kernel for Jupyter" name = "ipykernel" +version = "5.3.4" +description = "IPython Kernel for Jupyter" +category = "main" optional = true python-versions = ">=3.5" -version = "5.3.4" [package.dependencies] -appnope = "*" +appnope = {version = "*", markers = "platform_system == \"Darwin\""} ipython = ">=5.0.0" jupyter-client = "*" tornado = ">=4.2" @@ -648,20 +642,20 @@ traitlets = ">=4.1.0" test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose"] [[package]] -category = "main" -description = "IPython: Productive Interactive Computing" name = "ipython" +version = "7.18.1" +description = "IPython: Productive Interactive Computing" +category = "main" optional = true python-versions = ">=3.7" -version = "7.18.1" [package.dependencies] -appnope = "*" +appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" jedi = ">=0.10" -pexpect = ">4.3" +pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" @@ -680,20 +674,20 @@ qtconsole = ["qtconsole"] test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.14)"] 
[[package]] -category = "main" -description = "Vestigial utilities from IPython" name = "ipython-genutils" +version = "0.2.0" +description = "Vestigial utilities from IPython" +category = "main" optional = true python-versions = "*" -version = "0.2.0" [[package]] -category = "dev" -description = "A Python utility / library to sort Python imports." name = "isort" +version = "4.3.21" +description = "A Python utility / library to sort Python imports." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "4.3.21" [package.extras] pipfile = ["pipreqs", "requirementslib"] @@ -702,27 +696,27 @@ requirements = ["pipreqs", "pip-api"] xdg_home = ["appdirs (>=1.4.0)"] [[package]] -category = "main" -description = "An autocompletion tool for Python that can be used for text editors." name = "jedi" +version = "0.17.2" +description = "An autocompletion tool for Python that can be used for text editors." +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.17.2" [package.dependencies] parso = ">=0.7.0,<0.8.0" [package.extras] -qa = ["flake8 (3.7.9)"] +qa = ["flake8 (==3.7.9)"] testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] [[package]] -category = "main" -description = "A very fast and expressive template engine." name = "jinja2" +version = "2.11.2" +description = "A very fast and expressive template engine." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.11.2" [package.dependencies] MarkupSafe = ">=0.23" @@ -731,23 +725,23 @@ MarkupSafe = ">=0.23" i18n = ["Babel (>=0.8)"] [[package]] -category = "main" -description = "A Python implementation of the JSON5 data format." name = "json5" +version = "0.9.5" +description = "A Python implementation of the JSON5 data format." 
+category = "main" optional = true python-versions = "*" -version = "0.9.5" [package.extras] dev = ["hypothesis"] [[package]] -category = "main" -description = "An implementation of JSON Schema validation for Python" name = "jsonschema" +version = "3.2.0" +description = "An implementation of JSON Schema validation for Python" +category = "main" optional = true python-versions = "*" -version = "3.2.0" [package.dependencies] attrs = ">=17.4.0" @@ -760,12 +754,12 @@ format = ["idna", "jsonpointer (>1.13)", "rfc3987", "strict-rfc3339", "webcolors format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator (>0.1.0)", "rfc3339-validator"] [[package]] -category = "main" -description = "Jupyter protocol implementation and client libraries" name = "jupyter-client" +version = "6.1.7" +description = "Jupyter protocol implementation and client libraries" +category = "main" optional = true python-versions = ">=3.5" -version = "6.1.7" [package.dependencies] jupyter-core = ">=4.6.0" @@ -778,24 +772,24 @@ traitlets = "*" test = ["ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] [[package]] -category = "main" -description = "Jupyter core package. A base package on which Jupyter projects rely." name = "jupyter-core" +version = "4.6.3" +description = "Jupyter core package. A base package on which Jupyter projects rely." +category = "main" optional = true python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,!=3.4,>=2.7" -version = "4.6.3" [package.dependencies] -pywin32 = ">=1.0" +pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} traitlets = "*" [[package]] -category = "main" -description = "The JupyterLab notebook server extension." name = "jupyterlab" +version = "2.2.8" +description = "The JupyterLab notebook server extension." 
+category = "main" optional = true python-versions = ">=3.5" -version = "2.2.8" [package.dependencies] jinja2 = ">=2.10" @@ -808,23 +802,23 @@ docs = ["jsx-lexer", "recommonmark", "sphinx", "sphinx-rtd-theme", "sphinx-copyb test = ["pytest", "pytest-check-links", "requests", "wheel", "virtualenv"] [[package]] -category = "main" -description = "Pygments theme using JupyterLab CSS variables" name = "jupyterlab-pygments" +version = "0.1.2" +description = "Pygments theme using JupyterLab CSS variables" +category = "main" optional = true python-versions = "*" -version = "0.1.2" [package.dependencies] pygments = ">=2.4.1,<3" [[package]] -category = "main" -description = "JupyterLab Server" name = "jupyterlab-server" +version = "1.2.0" +description = "JupyterLab Server" +category = "main" optional = true python-versions = ">=3.5" -version = "1.2.0" [package.dependencies] jinja2 = ">=2.10" @@ -837,20 +831,20 @@ requests = "*" test = ["pytest", "requests"] [[package]] -category = "dev" -description = "A fast and thorough lazy object proxy." name = "lazy-object-proxy" +version = "1.4.3" +description = "A fast and thorough lazy object proxy." +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.4.3" [[package]] -category = "main" -description = "A super-fast templating language that borrows the best ideas from the existing templating languages." name = "mako" +version = "1.1.3" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.1.3" [package.dependencies] MarkupSafe = ">=0.9.2" @@ -860,36 +854,36 @@ babel = ["babel"] lingua = ["lingua"] [[package]] -category = "main" -description = "Safely add untrusted strings to HTML/XML markup." name = "markupsafe" +version = "1.1.1" +description = "Safely add untrusted strings to HTML/XML markup." 
+category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "1.1.1" [[package]] -category = "dev" -description = "McCabe checker, plugin for flake8" name = "mccabe" +version = "0.6.1" +description = "McCabe checker, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.6.1" [[package]] -category = "main" -description = "The fastest markdown parser in pure Python" name = "mistune" +version = "0.8.4" +description = "The fastest markdown parser in pure Python" +category = "main" optional = true python-versions = "*" -version = "0.8.4" [[package]] -category = "dev" -description = "Optional static typing for Python" name = "mypy" +version = "0.782" +description = "Optional static typing for Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "0.782" [package.dependencies] mypy-extensions = ">=0.4.3,<0.5.0" @@ -900,31 +894,31 @@ typing-extensions = ">=3.7.4" dmypy = ["psutil (>=4.0)"] [[package]] -category = "dev" -description = "Experimental type system extensions for programs checked with the mypy typechecker." name = "mypy-extensions" +version = "0.4.3" +description = "Experimental type system extensions for programs checked with the mypy typechecker." +category = "dev" optional = false python-versions = "*" -version = "0.4.3" [[package]] -category = "main" -description = "A simple extension for Jupyter Notebook and Jupyter Lab to beautify Python code automatically using Black." name = "nb-black" +version = "1.0.7" +description = "A simple extension for Jupyter Notebook and Jupyter Lab to beautify Python code automatically using Black." +category = "main" optional = true python-versions = "*" -version = "1.0.7" [package.dependencies] ipython = "*" [[package]] -category = "main" -description = "A client library for executing notebooks. Formally nbconvert's ExecutePreprocessor." 
name = "nbclient" +version = "0.5.0" +description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." +category = "main" optional = true python-versions = ">=3.6" -version = "0.5.0" [package.dependencies] async-generator = "*" @@ -939,12 +933,12 @@ sphinx = ["Sphinx (>=1.7)", "sphinx-book-theme", "mock", "moto", "myst-parser"] test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>=4.1)", "pytest-cov (>=2.6.1)", "check-manifest", "flake8", "mypy", "tox", "bumpversion", "xmltodict", "pip (>=18.1)", "wheel (>=0.31.0)", "setuptools (>=38.6.0)", "twine (>=1.11.0)", "black"] [[package]] -category = "main" -description = "Converting Jupyter Notebooks" name = "nbconvert" +version = "6.0.6" +description = "Converting Jupyter Notebooks" +category = "main" optional = true python-versions = ">=3.6" -version = "6.0.6" [package.dependencies] bleach = "*" @@ -962,19 +956,19 @@ testpath = "*" traitlets = ">=4.2" [package.extras] -all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] +all = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)", "tornado (>=4.0)", "sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] docs = ["sphinx (>=1.5.1)", "sphinx-rtd-theme", "nbsphinx (>=0.2.12)", "ipython"] serve = ["tornado (>=4.0)"] -test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (0.2.2)"] -webpdf = ["pyppeteer (0.2.2)"] +test = ["pytest", "pytest-cov", "pytest-dependency", "ipykernel", "ipywidgets (>=7)", "pyppeteer (==0.2.2)"] +webpdf = ["pyppeteer (==0.2.2)"] [[package]] -category = "main" -description = "The Jupyter Notebook format" name = "nbformat" +version = "5.0.7" +description = "The Jupyter Notebook format" +category = "main" optional = true python-versions = 
">=3.5" -version = "5.0.7" [package.dependencies] ipython-genutils = "*" @@ -986,31 +980,30 @@ traitlets = ">=4.1" test = ["pytest", "pytest-cov", "testpath"] [[package]] -category = "main" -description = "Patch asyncio to allow nested event loops" name = "nest-asyncio" +version = "1.4.1" +description = "Patch asyncio to allow nested event loops" +category = "main" optional = true python-versions = ">=3.5" -version = "1.4.1" [[package]] -category = "dev" -description = "Node.js virtual environment builder" name = "nodeenv" +version = "1.5.0" +description = "Node.js virtual environment builder" +category = "dev" optional = false python-versions = "*" -version = "1.5.0" [[package]] -category = "main" -description = "A web-based notebook environment for interactive computing" name = "notebook" +version = "6.1.4" +description = "A web-based notebook environment for interactive computing" +category = "main" optional = true python-versions = ">=3.5" -version = "6.1.4" [package.dependencies] -Send2Trash = "*" argon2-cffi = "*" ipykernel = "*" ipython-genutils = "*" @@ -1021,6 +1014,7 @@ nbconvert = "*" nbformat = "*" prometheus-client = "*" pyzmq = ">=17" +Send2Trash = "*" terminado = ">=0.8.3" tornado = ">=5.0" traitlets = ">=4.2.1" @@ -1030,12 +1024,12 @@ docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt"] test = ["nose", "coverage", "requests", "nose-warnings-filters", "nbval", "nose-exclude", "selenium", "pytest", "pytest-cov", "requests-unixsocket"] [[package]] -category = "dev" -description = "Flexible test automation." name = "nox" +version = "2020.8.22" +description = "Flexible test automation." +category = "dev" optional = false python-versions = ">=3.5" -version = "2020.8.22" [package.dependencies] argcomplete = ">=1.9.4,<2.0" @@ -1047,32 +1041,32 @@ virtualenv = ">=14.0.0" tox_to_nox = ["jinja2", "tox"] [[package]] -category = "main" -description = "NumPy is the fundamental package for array computing with Python." 
name = "numpy" +version = "1.19.2" +description = "NumPy is the fundamental package for array computing with Python." +category = "main" optional = true python-versions = ">=3.6" -version = "1.19.2" [[package]] -category = "main" -description = "Core utilities for Python packages" name = "packaging" +version = "20.4" +description = "Core utilities for Python packages" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "20.4" [package.dependencies] pyparsing = ">=2.0.2" six = "*" [[package]] -category = "main" -description = "Powerful data structures for data analysis, time series, and statistics" name = "pandas" +version = "1.1.2" +description = "Powerful data structures for data analysis, time series, and statistics" +category = "main" optional = true python-versions = ">=3.6.1" -version = "1.1.2" [package.dependencies] numpy = ">=1.15.4" @@ -1083,89 +1077,88 @@ pytz = ">=2017.2" test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] [[package]] -category = "main" -description = "Utilities for writing pandoc filters in python" name = "pandocfilters" +version = "1.4.2" +description = "Utilities for writing pandoc filters in python" +category = "main" optional = true python-versions = "*" -version = "1.4.2" [[package]] -category = "main" -description = "A Python Parser" name = "parso" +version = "0.7.1" +description = "A Python Parser" +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.7.1" [package.extras] testing = ["docopt", "pytest (>=3.0.7)"] [[package]] -category = "dev" -description = "Utility library for gitignore style pattern matching of file paths." name = "pathspec" +version = "0.8.0" +description = "Utility library for gitignore style pattern matching of file paths." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "0.8.0" [[package]] -category = "dev" -description = "Python Build Reasonableness" name = "pbr" +version = "5.5.0" +description = "Python Build Reasonableness" +category = "dev" optional = false python-versions = ">=2.6" -version = "5.5.0" [[package]] -category = "dev" -description = "Check PEP-8 naming conventions, plugin for flake8" name = "pep8-naming" +version = "0.9.1" +description = "Check PEP-8 naming conventions, plugin for flake8" +category = "dev" optional = false python-versions = "*" -version = "0.9.1" [package.dependencies] flake8-polyfill = ">=1.0.2,<2" [[package]] -category = "main" -description = "Pexpect allows easy control of interactive console applications." -marker = "sys_platform != \"win32\"" name = "pexpect" +version = "4.8.0" +description = "Pexpect allows easy control of interactive console applications." +category = "main" optional = true python-versions = "*" -version = "4.8.0" [package.dependencies] ptyprocess = ">=0.5" [[package]] -category = "main" -description = "Tiny 'shelve'-like database with concurrency support" name = "pickleshare" +version = "0.7.5" +description = "Tiny 'shelve'-like database with concurrency support" +category = "main" optional = true python-versions = "*" -version = "0.7.5" [[package]] -category = "dev" -description = "plugin and hook calling mechanisms for python" name = "pluggy" +version = "0.13.1" +description = "plugin and hook calling mechanisms for python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "0.13.1" [package.extras] dev = ["pre-commit", "tox"] [[package]] -category = "dev" -description = "A framework for managing and maintaining multi-language pre-commit hooks." name = "pre-commit" +version = "2.7.1" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+category = "dev" optional = false python-versions = ">=3.6.1" -version = "2.7.1" [package.dependencies] cfgv = ">=2.0.0" @@ -1176,138 +1169,137 @@ toml = "*" virtualenv = ">=20.0.8" [[package]] -category = "main" -description = "Python client for the Prometheus monitoring system." name = "prometheus-client" +version = "0.8.0" +description = "Python client for the Prometheus monitoring system." +category = "main" optional = true python-versions = "*" -version = "0.8.0" [package.extras] twisted = ["twisted"] [[package]] -category = "main" -description = "Library for building powerful interactive command lines in Python" name = "prompt-toolkit" +version = "3.0.7" +description = "Library for building powerful interactive command lines in Python" +category = "main" optional = true python-versions = ">=3.6.1" -version = "3.0.7" [package.dependencies] wcwidth = "*" [[package]] -category = "main" -description = "psycopg2 - Python-PostgreSQL Database Adapter" name = "psycopg2" +version = "2.8.6" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" -version = "2.8.6" [[package]] -category = "main" -description = "Run a subprocess in a pseudo terminal" -marker = "sys_platform != \"win32\" or os_name != \"nt\"" name = "ptyprocess" +version = "0.6.0" +description = "Run a subprocess in a pseudo terminal" +category = "main" optional = true python-versions = "*" -version = "0.6.0" [[package]] -category = "dev" -description = "library with cross-python path, ini-parsing, io, code, log facilities" name = "py" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" version = "1.9.0" - -[[package]] +description = "library with cross-python path, ini-parsing, io, code, log facilities" +category = "dev" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" + +[[package]] +name = "pycodestyle" +version = "2.6.0" +description = 
"Python style guide checker" category = "dev" -description = "Python style guide checker" -name = "pycodestyle" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.6.0" [[package]] -category = "main" -description = "C parser in Python" name = "pycparser" +version = "2.20" +description = "C parser in Python" +category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.20" [[package]] -category = "dev" -description = "Python docstring style checker" name = "pydocstyle" +version = "5.1.1" +description = "Python docstring style checker" +category = "dev" optional = false python-versions = ">=3.5" -version = "5.1.1" [package.dependencies] snowballstemmer = "*" [[package]] -category = "dev" -description = "passive checker of Python programs" name = "pyflakes" +version = "2.2.0" +description = "passive checker of Python programs" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "2.2.0" [[package]] -category = "main" -description = "Pygments is a syntax highlighting package written in Python." name = "pygments" +version = "2.7.1" +description = "Pygments is a syntax highlighting package written in Python." 
+category = "main" optional = false python-versions = ">=3.5" -version = "2.7.1" [[package]] -category = "dev" -description = "python code static checker" name = "pylint" +version = "2.6.0" +description = "python code static checker" +category = "dev" optional = false python-versions = ">=3.5.*" -version = "2.6.0" [package.dependencies] astroid = ">=2.4.0,<=2.5" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} isort = ">=4.2.5,<6" mccabe = ">=0.6,<0.7" toml = ">=0.7.1" [[package]] -category = "main" -description = "Python parsing module" name = "pyparsing" +version = "2.4.7" +description = "Python parsing module" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -version = "2.4.7" [[package]] -category = "main" -description = "Persistent/Functional/Immutable data structures" name = "pyrsistent" +version = "0.17.3" +description = "Persistent/Functional/Immutable data structures" +category = "main" optional = true python-versions = ">=3.5" -version = "0.17.3" [[package]] -category = "dev" -description = "pytest: simple powerful testing with Python" name = "pytest" +version = "6.1.0" +description = "pytest: simple powerful testing with Python" +category = "dev" optional = false python-versions = ">=3.5" -version = "6.1.0" [package.dependencies] -atomicwrites = ">=1.0" +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=17.4.0" -colorama = "*" +colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<1.0" @@ -1315,122 +1307,120 @@ py = ">=1.8.2" toml = "*" [package.extras] -checkqa_mypy = ["mypy (0.780)"] +checkqa_mypy = ["mypy (==0.780)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] -category = "dev" -description = "Pytest plugin for measuring coverage." name = "pytest-cov" +version = "2.10.1" +description = "Pytest plugin for measuring coverage." 
+category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.10.1" [package.dependencies] coverage = ">=4.4" pytest = ">=4.6" [package.extras] -testing = ["fields", "hunter", "process-tests (2.0.2)", "six", "pytest-xdist", "virtualenv"] +testing = ["fields", "hunter", "process-tests (==2.0.2)", "six", "pytest-xdist", "virtualenv"] [[package]] -category = "dev" -description = "py.test plugin that allows you to add environment variables." name = "pytest-env" +version = "0.6.2" +description = "py.test plugin that allows you to add environment variables." +category = "dev" optional = false python-versions = "*" -version = "0.6.2" [package.dependencies] pytest = ">=2.6.0" [[package]] -category = "main" -description = "Extensions to the standard Python datetime module" name = "python-dateutil" +version = "2.8.1" +description = "Extensions to the standard Python datetime module" +category = "main" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -version = "2.8.1" [package.dependencies] six = ">=1.5" [[package]] -category = "main" -description = "Add .env support to your django/flask apps in development and deployments" name = "python-dotenv" +version = "0.14.0" +description = "Add .env support to your django/flask apps in development and deployments" +category = "main" optional = false python-versions = "*" -version = "0.14.0" [package.extras] cli = ["click (>=5.0)"] [[package]] -category = "main" -description = "Programmatically open an editor, capture the result." name = "python-editor" -optional = false -python-versions = "*" version = "1.0.4" - -[[package]] +description = "Programmatically open an editor, capture the result." 
category = "main" -description = "World timezone definitions, modern and historical" -name = "pytz" optional = false python-versions = "*" + +[[package]] +name = "pytz" version = "2020.1" +description = "World timezone definitions, modern and historical" +category = "main" +optional = false +python-versions = "*" [[package]] -category = "main" -description = "Python for Window Extensions" -marker = "sys_platform == \"win32\"" name = "pywin32" -optional = true -python-versions = "*" version = "228" - -[[package]] +description = "Python for Window Extensions" +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "pywinpty" +version = "0.5.7" +description = "Python bindings for the winpty library" category = "main" -description = "Python bindings for the winpty library" -marker = "os_name == \"nt\"" -name = "pywinpty" optional = true python-versions = "*" -version = "0.5.7" [[package]] -category = "dev" -description = "YAML parser and emitter for Python" name = "pyyaml" +version = "5.3.1" +description = "YAML parser and emitter for Python" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "5.3.1" [[package]] -category = "main" -description = "Python bindings for 0MQ" name = "pyzmq" +version = "19.0.2" +description = "Python bindings for 0MQ" +category = "main" optional = true python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" -version = "19.0.2" [[package]] -category = "dev" -description = "Alternative regular expression module, to replace re." name = "regex" +version = "2020.9.27" +description = "Alternative regular expression module, to replace re." +category = "dev" optional = false python-versions = "*" -version = "2020.9.27" [[package]] -category = "main" -description = "Python HTTP for Humans." name = "requests" +version = "2.24.0" +description = "Python HTTP for Humans." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -version = "2.24.0" [package.dependencies] certifi = ">=2017.4.17" @@ -1443,65 +1433,65 @@ security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] [[package]] -category = "dev" -description = "reStructuredText linter" name = "restructuredtext-lint" +version = "1.3.1" +description = "reStructuredText linter" +category = "dev" optional = false python-versions = "*" -version = "1.3.1" [package.dependencies] docutils = ">=0.11,<1.0" [[package]] -category = "main" -description = "Send file to trash natively under Mac OS X, Windows and Linux." name = "send2trash" +version = "1.5.0" +description = "Send file to trash natively under Mac OS X, Windows and Linux." +category = "main" optional = true python-versions = "*" -version = "1.5.0" [[package]] -category = "main" -description = "Python 2 and 3 compatibility utilities" name = "six" +version = "1.15.0" +description = "Python 2 and 3 compatibility utilities" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -version = "1.15.0" [[package]] -category = "dev" -description = "A pure Python implementation of a sliding window memory map manager" name = "smmap" +version = "3.0.4" +description = "A pure Python implementation of a sliding window memory map manager" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "3.0.4" [[package]] -category = "dev" -description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." name = "snowballstemmer" +version = "2.0.0" +description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." 
+category = "dev" optional = false python-versions = "*" -version = "2.0.0" [[package]] -category = "dev" -description = "Python documentation generator" name = "sphinx" +version = "3.2.1" +description = "Python documentation generator" +category = "dev" optional = false python-versions = ">=3.5" -version = "3.2.1" [package.dependencies] -Jinja2 = ">=2.3" -Pygments = ">=2.0" alabaster = ">=0.7,<0.8" babel = ">=1.3" -colorama = ">=0.3.5" +colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} docutils = ">=0.12" imagesize = "*" +Jinja2 = ">=2.3" packaging = "*" +Pygments = ">=2.0" requests = ">=2.5.0" setuptools = "*" snowballstemmer = ">=1.1" @@ -1518,12 +1508,12 @@ lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-s test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] [[package]] -category = "dev" -description = "Type hints (PEP 484) support for the Sphinx autodoc extension" name = "sphinx-autodoc-typehints" +version = "1.11.0" +description = "Type hints (PEP 484) support for the Sphinx autodoc extension" +category = "dev" optional = false python-versions = ">=3.5.2" -version = "1.11.0" [package.dependencies] Sphinx = ">=3.0" @@ -1533,83 +1523,83 @@ test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "d type_comments = ["typed-ast (>=1.4.0)"] [[package]] -category = "dev" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" name = "sphinxcontrib-applehelp" +version = "1.0.2" +description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." 
name = "sphinxcontrib-devhelp" +version = "1.0.2" +description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.2" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "dev" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" name = "sphinxcontrib-htmlhelp" +version = "1.0.3" +description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.3" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest", "html5lib"] [[package]] -category = "dev" -description = "A sphinx extension which renders display math in HTML via JavaScript" name = "sphinxcontrib-jsmath" +version = "1.0.1" +description = "A sphinx extension which renders display math in HTML via JavaScript" +category = "dev" optional = false python-versions = ">=3.5" -version = "1.0.1" [package.extras] test = ["pytest", "flake8", "mypy"] [[package]] -category = "dev" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." name = "sphinxcontrib-qthelp" -optional = false -python-versions = ">=3.5" version = "1.0.3" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] - -[[package]] +description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.extras] +lint = ["flake8", "mypy", "docutils-stubs"] +test = ["pytest"] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "1.1.4" +description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
category = "dev" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." -name = "sphinxcontrib-serializinghtml" optional = false python-versions = ">=3.5" -version = "1.1.4" [package.extras] lint = ["flake8", "mypy", "docutils-stubs"] test = ["pytest"] [[package]] -category = "main" -description = "Database Abstraction Library" name = "sqlalchemy" +version = "1.3.19" +description = "Database Abstraction Library" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -version = "1.3.19" [package.extras] mssql = ["pyodbc"] @@ -1624,36 +1614,36 @@ postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql"] [[package]] -category = "dev" -description = "Manage dynamic plugins for Python applications" name = "stevedore" +version = "3.2.2" +description = "Manage dynamic plugins for Python applications" +category = "dev" optional = false python-versions = ">=3.6" -version = "3.2.2" [package.dependencies] pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] -category = "main" -description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." name = "terminado" +version = "0.9.1" +description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." +category = "main" optional = true python-versions = ">=3.6" -version = "0.9.1" [package.dependencies] -ptyprocess = "*" -pywinpty = ">=0.5" +ptyprocess = {version = "*", markers = "os_name != \"nt\""} +pywinpty = {version = ">=0.5", markers = "os_name == \"nt\""} tornado = ">=4" [[package]] -category = "dev" -description = "A collection of helpers and mock objects for unit tests and doc tests." name = "testfixtures" +version = "6.14.2" +description = "A collection of helpers and mock objects for unit tests and doc tests." 
+category = "dev" optional = false python-versions = "*" -version = "6.14.2" [package.extras] build = ["setuptools-git", "wheel", "twine"] @@ -1661,39 +1651,39 @@ docs = ["sphinx", "zope.component", "sybil", "twisted", "mock", "django (<2)", " test = ["pytest (>=3.6)", "pytest-cov", "pytest-django", "zope.component", "sybil", "twisted", "mock", "django (<2)", "django"] [[package]] -category = "main" -description = "Test utilities for code working with files and commands" name = "testpath" +version = "0.4.4" +description = "Test utilities for code working with files and commands" +category = "main" optional = true python-versions = "*" -version = "0.4.4" [package.extras] test = ["pathlib2"] [[package]] -category = "dev" -description = "Python Library for Tom's Obvious, Minimal Language" name = "toml" +version = "0.10.1" +description = "Python Library for Tom's Obvious, Minimal Language" +category = "dev" optional = false python-versions = "*" -version = "0.10.1" [[package]] -category = "main" -description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." name = "tornado" +version = "6.0.4" +description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." 
+category = "main" optional = true python-versions = ">= 3.5" -version = "6.0.4" [[package]] -category = "main" -description = "Traitlets Python configuration system" name = "traitlets" +version = "5.0.4" +description = "Traitlets Python configuration system" +category = "main" optional = true python-versions = ">=3.7" -version = "5.0.4" [package.dependencies] ipython-genutils = "*" @@ -1702,28 +1692,28 @@ ipython-genutils = "*" test = ["pytest"] [[package]] -category = "dev" -description = "a fork of Python 2 and 3 ast modules with type comment support" name = "typed-ast" -optional = false -python-versions = "*" version = "1.4.1" - -[[package]] +description = "a fork of Python 2 and 3 ast modules with type comment support" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "typing-extensions" +version = "3.7.4.3" +description = "Backported and Experimental Type Hints for Python 3.5+" category = "dev" -description = "Backported and Experimental Type Hints for Python 3.5+" -name = "typing-extensions" optional = false python-versions = "*" -version = "3.7.4.3" [[package]] -category = "main" -description = "HTTP library with thread-safe connection pooling, file post, and more." name = "urllib3" +version = "1.25.10" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" -version = "1.25.10" [package.extras] brotli = ["brotlipy (>=0.6.0)"] @@ -1731,12 +1721,12 @@ secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0 socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] [[package]] -category = "dev" -description = "Virtual Python Environment builder" name = "virtualenv" +version = "20.0.31" +description = "Virtual Python Environment builder" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" -version = "20.0.31" [package.dependencies] appdirs = ">=1.4.3,<2" @@ -1749,28 +1739,28 @@ docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sp testing = ["coverage (>=5)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] [[package]] -category = "main" -description = "Measures the displayed width of unicode strings in a terminal" name = "wcwidth" -optional = true -python-versions = "*" version = "0.2.5" - -[[package]] +description = "Measures the displayed width of unicode strings in a terminal" +category = "main" +optional = true +python-versions = "*" + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" category = "main" -description = "Character encoding aliases for legacy web content" -name = "webencodings" optional = true python-versions = "*" -version = "0.5.1" [[package]] -category = "dev" -description = "The strictest and most opinionated python linter ever" name = "wemake-python-styleguide" +version = "0.14.1" +description = "The strictest and most opinionated python linter ever" +category = "dev" optional = false python-versions = ">=3.6,<4.0" -version = 
"0.14.1" [package.dependencies] astor = ">=0.8,<0.9" @@ -1794,32 +1784,26 @@ pygments = ">=2.4,<3.0" typing_extensions = ">=3.6,<4.0" [[package]] -category = "dev" -description = "Module for decorators, wrappers and monkey patching." name = "wrapt" +version = "1.12.1" +description = "Module for decorators, wrappers and monkey patching." +category = "dev" optional = false python-versions = "*" -version = "1.12.1" [[package]] -category = "dev" -description = "A rewrite of the builtin doctest module" name = "xdoctest" +version = "0.13.0" +description = "A rewrite of the builtin doctest module" +category = "dev" optional = false python-versions = "*" -version = "0.13.0" [package.dependencies] +colorama = {version = "*", optional = true, markers = "platform_system == \"Windows\" and extra == \"optional\""} +Pygments = {version = "*", optional = true, markers = "extra == \"optional\""} six = "*" -[package.dependencies.Pygments] -optional = true -version = "*" - -[package.dependencies.colorama] -optional = true -version = "*" - [package.extras] all = ["six", "pytest", "pytest-cov", "codecov", "scikit-build", "cmake", "ninja", "pybind11", "pygments", "colorama"] optional = ["pygments", "colorama"] From 03e498cab914551cde127923529fa11487796ee6 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 7 Nov 2020 16:25:18 +0100 Subject: [PATCH 03/72] Pin the dependencies ... ... 
after upgrading: - sqlalchemy - dev dependencies + darglint + flake8(-comprehensions) + pre-commit + pytest + sphinx(-autodoc-typehints) - reseach dependencies + jupyterlab + numpy + pandas + pytz - transient dependencies + attrs + colorama + gitpython + identify + iniconfig + ipython + nbclient + nbconvert + nbformat + nest-asyncio + notebook + pandocfilters + pbr + prompt-toolkit + pygments + regex + testfixtures + toml + tornado + traitlets + urllib3 + virtualenv --- poetry.lock | 511 ++++++++++++++++++++++++++++++---------------------- 1 file changed, 293 insertions(+), 218 deletions(-) diff --git a/poetry.lock b/poetry.lock index 65247f0..c142320 100644 --- a/poetry.lock +++ b/poetry.lock @@ -60,9 +60,9 @@ cffi = ">=1.0.0" six = "*" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest", "sphinx", "wheel", "pre-commit"] docs = ["sphinx"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pytest"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pytest"] [[package]] name = "astor" @@ -114,17 +114,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "20.2.0" +version = "20.3.0" description = "Classes Without Boilerplate" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.extras] -dev = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "sphinx", "sphinx-rtd-theme", "pre-commit"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] +docs = ["furo", "sphinx", "zope.interface"] 
+tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] [[package]] name = "autoflake" @@ -249,7 +249,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "colorama" -version = "0.4.3" +version = "0.4.4" description = "Cross-platform colored terminal text." category = "main" optional = false @@ -279,7 +279,7 @@ toml = ["toml"] [[package]] name = "darglint" -version = "1.5.4" +version = "1.5.5" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." category = "dev" optional = false @@ -343,7 +343,7 @@ python-versions = "*" [[package]] name = "flake8" -version = "3.8.3" +version = "3.8.4" description = "the modular source code checker: pep8 pyflakes and co" category = "dev" optional = false @@ -427,7 +427,7 @@ flake8 = ">=2,<4.0.0" [[package]] name = "flake8-comprehensions" -version = "3.2.3" +version = "3.3.0" description = "A flake8 plugin to help you write better list/set/dict comprehensions." 
category = "dev" optional = false @@ -484,7 +484,6 @@ python-versions = ">=3.6" [package.dependencies] astpretty = "*" flake8 = "*" -setuptools = "*" [[package]] name = "flake8-isort" @@ -579,7 +578,7 @@ smmap = ">=3.0.1,<4" [[package]] name = "gitpython" -version = "3.1.8" +version = "3.1.11" description = "Python Git Library" category = "dev" optional = false @@ -590,7 +589,7 @@ gitdb = ">=4.0.1,<5" [[package]] name = "identify" -version = "1.5.5" +version = "1.5.9" description = "File identification library for Python" category = "dev" optional = false @@ -617,7 +616,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "iniconfig" -version = "1.0.1" +version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" category = "dev" optional = false @@ -643,7 +642,7 @@ test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose"] [[package]] name = "ipython" -version = "7.18.1" +version = "7.19.0" description = "IPython: Productive Interactive Computing" category = "main" optional = true @@ -659,7 +658,6 @@ pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" pygments = "*" -setuptools = ">=18.5" traitlets = ">=4.2" [package.extras] @@ -689,6 +687,9 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +[package.dependencies] +toml = {version = "*", optional = true, markers = "extra == \"pyproject\""} + [package.extras] pipfile = ["pipreqs", "requirementslib"] pyproject = ["toml"] @@ -746,7 +747,6 @@ python-versions = "*" [package.dependencies] attrs = ">=17.4.0" pyrsistent = ">=0.14.0" -setuptools = "*" six = ">=1.11.0" [package.extras] @@ -785,7 +785,7 @@ traitlets = "*" [[package]] name = "jupyterlab" -version = "2.2.8" +version = "2.2.9" description = "The JupyterLab notebook server extension." 
category = "main" optional = true @@ -914,7 +914,7 @@ ipython = "*" [[package]] name = "nbclient" -version = "0.5.0" +version = "0.5.1" description = "A client library for executing notebooks. Formerly nbconvert's ExecutePreprocessor." category = "main" optional = true @@ -934,7 +934,7 @@ test = ["codecov", "coverage", "ipython", "ipykernel", "ipywidgets", "pytest (>= [[package]] name = "nbconvert" -version = "6.0.6" +version = "6.0.7" description = "Converting Jupyter Notebooks" category = "main" optional = true @@ -964,7 +964,7 @@ webpdf = ["pyppeteer (==0.2.2)"] [[package]] name = "nbformat" -version = "5.0.7" +version = "5.0.8" description = "The Jupyter Notebook format" category = "main" optional = true @@ -977,11 +977,12 @@ jupyter-core = "*" traitlets = ">=4.1" [package.extras] -test = ["pytest", "pytest-cov", "testpath"] +fast = ["fastjsonschema"] +test = ["fastjsonschema", "testpath", "pytest", "pytest-cov"] [[package]] name = "nest-asyncio" -version = "1.4.1" +version = "1.4.2" description = "Patch asyncio to allow nested event loops" category = "main" optional = true @@ -997,7 +998,7 @@ python-versions = "*" [[package]] name = "notebook" -version = "6.1.4" +version = "6.1.5" description = "A web-based notebook environment for interactive computing" category = "main" optional = true @@ -1042,7 +1043,7 @@ tox_to_nox = ["jinja2", "tox"] [[package]] name = "numpy" -version = "1.19.2" +version = "1.19.4" description = "NumPy is the fundamental package for array computing with Python." 
category = "main" optional = true @@ -1062,7 +1063,7 @@ six = "*" [[package]] name = "pandas" -version = "1.1.2" +version = "1.1.4" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = true @@ -1078,11 +1079,11 @@ test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] [[package]] name = "pandocfilters" -version = "1.4.2" +version = "1.4.3" description = "Utilities for writing pandoc filters in python" category = "main" optional = true -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "parso" @@ -1105,7 +1106,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pbr" -version = "5.5.0" +version = "5.5.1" description = "Python Build Reasonableness" category = "dev" optional = false @@ -1154,7 +1155,7 @@ dev = ["pre-commit", "tox"] [[package]] name = "pre-commit" -version = "2.7.1" +version = "2.8.2" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1181,7 +1182,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.7" +version = "3.0.8" description = "Library for building powerful interactive command lines in Python" category = "main" optional = true @@ -1251,7 +1252,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.7.1" +version = "2.7.2" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false @@ -1290,7 +1291,7 @@ python-versions = ">=3.5" [[package]] name = "pytest" -version = "6.1.0" +version = "6.1.2" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -1368,7 +1369,7 @@ python-versions = "*" [[package]] name = "pytz" -version = "2020.1" +version = "2020.4" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -1408,7 +1409,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" [[package]] name = "regex" -version = "2020.9.27" +version = "2020.10.28" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -1430,7 +1431,7 @@ urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" [package.extras] security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7)", "win-inet-pton"] +socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] name = "restructuredtext-lint" @@ -1477,7 +1478,7 @@ python-versions = "*" [[package]] name = "sphinx" -version = "3.2.1" +version = "3.3.0" description = "Python documentation generator" category = "dev" optional = false @@ -1493,7 +1494,6 @@ Jinja2 = ">=2.3" packaging = "*" Pygments = ">=2.0" requests = ">=2.5.0" -setuptools = "*" snowballstemmer = ">=1.1" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -1504,12 +1504,12 @@ sphinxcontrib-serializinghtml = "*" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.780)", "docutils-stubs"] +lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.790)", "docutils-stubs"] test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] [[package]] name = "sphinx-autodoc-typehints" -version = "1.11.0" +version = "1.11.1" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" category = "dev" optional = false @@ -1519,7 +1519,7 @@ 
python-versions = ">=3.5.2" Sphinx = ">=3.0" [package.extras] -test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "dataclasses"] +test = ["pytest (>=3.1.0)", "typing-extensions (>=3.5)", "sphobjinv (>=2.0)", "Sphinx (>=3.2.0)", "dataclasses"] type_comments = ["typed-ast (>=1.4.0)"] [[package]] @@ -1595,7 +1595,7 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.3.19" +version = "1.3.20" description = "Database Abstraction Library" category = "main" optional = false @@ -1639,7 +1639,7 @@ tornado = ">=4" [[package]] name = "testfixtures" -version = "6.14.2" +version = "6.15.0" description = "A collection of helpers and mock objects for unit tests and doc tests." category = "dev" optional = false @@ -1663,15 +1663,15 @@ test = ["pathlib2"] [[package]] name = "toml" -version = "0.10.1" +version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" optional = false -python-versions = "*" +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tornado" -version = "6.0.4" +version = "6.1" description = "Tornado is a Python web framework and asynchronous networking library, originally developed at FriendFeed." category = "main" optional = true @@ -1679,7 +1679,7 @@ python-versions = ">= 3.5" [[package]] name = "traitlets" -version = "5.0.4" +version = "5.0.5" description = "Traitlets Python configuration system" category = "main" optional = true @@ -1709,7 +1709,7 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.25.10" +version = "1.25.11" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1717,12 +1717,12 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] brotli = ["brotlipy (>=0.6.0)"] -secure = ["certifi", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "pyOpenSSL (>=0.14)", "ipaddress"] -socks = ["PySocks (>=1.5.6,<1.5.7 || >1.5.7,<2.0)"] +secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] +socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.0.31" +version = "20.1.0" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -1736,7 +1736,7 @@ six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=5)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] [[package]] name = "wcwidth" @@ -1813,9 +1813,9 @@ tests = ["pytest", "pytest-cov", "codecov", "scikit-build", "cmake", "ninja", "p research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] -content-hash = "eba980d4335eef2012a1e7ce27941731149eb224cdfad856aa0bcd7701e9e557" -lock-version = "1.0" +lock-version = "1.1" python-versions = "^3.8" +content-hash = "eba980d4335eef2012a1e7ce27941731149eb224cdfad856aa0bcd7701e9e557" [metadata.files] alabaster = [ @@ -1877,8 +1877,8 @@ atomicwrites = [ {file = "atomicwrites-1.4.0.tar.gz", hash = 
"sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.2.0-py2.py3-none-any.whl", hash = "sha256:fce7fc47dfc976152e82d53ff92fa0407700c21acd20886a13777a0d20e655dc"}, - {file = "attrs-20.2.0.tar.gz", hash = "sha256:26b54ddbbb9ee1d34d5d3668dd37d6cf74990ab23c828c2888dccdceee395594"}, + {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, + {file = "attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, ] autoflake = [ {file = "autoflake-1.4.tar.gz", hash = "sha256:61a353012cff6ab94ca062823d1fb2f692c4acda51c76ff83a8d77915fba51ea"}, @@ -1958,12 +1958,11 @@ click = [ {file = "click-7.1.2.tar.gz", hash = "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a"}, ] colorama = [ - {file = "colorama-0.4.3-py2.py3-none-any.whl", hash = "sha256:7d73d2a99753107a36ac6b455ee49046802e59d9d076ef8e47b61499fa29afff"}, - {file = "colorama-0.4.3.tar.gz", hash = "sha256:e96da0d330793e2cb9485e9ddfd918d456036c7149416295932478192f4436a1"}, + {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, ] colorlog = [ - {file = "colorlog-4.2.1-py2.py3-none-any.whl", hash = "sha256:43597fd822ce705190fc997519342fdaaf44b9b47f896ece7aa153ed4b909c74"}, - {file = "colorlog-4.2.1.tar.gz", hash = "sha256:75e55822c3a3387d721579241e776de2cf089c9ef9528b1f09e8b04d403ad118"}, + {file = "colorlog-4.5.0-py2.py3-none-any.whl", hash = "sha256:677d39a84c4ca37c87532729c143c6a35cb558a1225b9544713fe42a04102ad2"}, + {file = "colorlog-4.5.0.tar.gz", hash = "sha256:72d25293d6d191e5adc851879240ad33a1ce131a7b36b37f940e505681007e65"}, ] coverage = [ {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, @@ -2002,8 +2001,8 @@ coverage = [ {file = "coverage-5.3.tar.gz", hash = 
"sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, ] darglint = [ - {file = "darglint-1.5.4-py3-none-any.whl", hash = "sha256:e58ff63f0f29a4dc8f9c1e102c7d00539290567d72feb74b7b9d5f8302992b8d"}, - {file = "darglint-1.5.4.tar.gz", hash = "sha256:7ebaafc8559d0db7735b6e15904ee5cca4be56fa85eac21c025c328278c6317a"}, + {file = "darglint-1.5.5-py3-none-any.whl", hash = "sha256:cd882c812f28ee3b5577259bfd8d6d25962386dd87fc1f3756eac24370aaa060"}, + {file = "darglint-1.5.5.tar.gz", hash = "sha256:2f12ce2ef3d8189279a8f2eb4c53fd215dbacae50e37765542a91310400a9cd6"}, ] decorator = [ {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, @@ -2033,8 +2032,8 @@ filelock = [ {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, ] flake8 = [ - {file = "flake8-3.8.3-py2.py3-none-any.whl", hash = "sha256:15e351d19611c887e482fb960eae4d44845013cc142d42896e9862f775d8cf5c"}, - {file = "flake8-3.8.3.tar.gz", hash = "sha256:f04b9fcbac03b0a3e58c0ab3a0ecc462e023a9faf046d57794184028123aa208"}, + {file = "flake8-3.8.4-py2.py3-none-any.whl", hash = "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839"}, + {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, ] flake8-annotations = [ {file = "flake8-annotations-2.4.1.tar.gz", hash = "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1"}, @@ -2059,8 +2058,8 @@ flake8-commas = [ {file = "flake8_commas-2.0.0-py2.py3-none-any.whl", hash = "sha256:ee2141a3495ef9789a3894ed8802d03eff1eaaf98ce6d8653a7c573ef101935e"}, ] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.2.3.tar.gz", hash = "sha256:d5751acc0f7364794c71d06f113f4686d6e2e26146a50fa93130b9f200fe160d"}, - {file = "flake8_comprehensions-3.2.3-py3-none-any.whl", hash = 
"sha256:44eaae9894aa15f86e0c86df1e218e7917494fab6f96d28f96a029c460f17d92"}, + {file = "flake8-comprehensions-3.3.0.tar.gz", hash = "sha256:355ef47288523cad7977cb9c1bc81b71c82b7091e425cd9fbcd7e5c19a613677"}, + {file = "flake8_comprehensions-3.3.0-py3-none-any.whl", hash = "sha256:c1dd6d8a00e9722619a5c5e0e6c5747f5cf23c089032c86eaf614c14a2e40adb"}, ] flake8-debugger = [ {file = "flake8-debugger-3.2.1.tar.gz", hash = "sha256:712d7c1ff69ddf3f0130e94cc88c2519e720760bce45e8c330bfdcb61ab4090d"}, @@ -2107,12 +2106,12 @@ gitdb = [ {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.8-py3-none-any.whl", hash = "sha256:1858f4fd089abe92ae465f01d5aaaf55e937eca565fb2c1fce35a51b5f85c910"}, - {file = "GitPython-3.1.8.tar.gz", hash = "sha256:080bf8e2cf1a2b907634761c2eaefbe83b69930c94c66ad11b65a8252959f912"}, + {file = "GitPython-3.1.11-py3-none-any.whl", hash = "sha256:6eea89b655917b500437e9668e4a12eabdcf00229a0df1762aabd692ef9b746b"}, + {file = "GitPython-3.1.11.tar.gz", hash = "sha256:befa4d101f91bad1b632df4308ec64555db684c360bd7d2130b4807d49ce86b8"}, ] identify = [ - {file = "identify-1.5.5-py2.py3-none-any.whl", hash = "sha256:da683bfb7669fa749fc7731f378229e2dbf29a1d1337cbde04106f02236eb29d"}, - {file = "identify-1.5.5.tar.gz", hash = "sha256:7c22c384a2c9b32c5cc891d13f923f6b2653aa83e2d75d8f79be240d6c86c4f4"}, + {file = "identify-1.5.9-py2.py3-none-any.whl", hash = "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12"}, + {file = "identify-1.5.9.tar.gz", hash = "sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -2123,16 +2122,16 @@ imagesize = [ {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, ] iniconfig = [ - {file = 
"iniconfig-1.0.1-py3-none-any.whl", hash = "sha256:80cf40c597eb564e86346103f609d74efce0f6b4d4f30ec8ce9e2c26411ba437"}, - {file = "iniconfig-1.0.1.tar.gz", hash = "sha256:e5f92f89355a67de0595932a6c6c02ab4afddc6fcdc0bfc5becd0d60884d3f69"}, + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipykernel = [ {file = "ipykernel-5.3.4-py3-none-any.whl", hash = "sha256:d6fbba26dba3cebd411382bc484f7bc2caa98427ae0ddb4ab37fe8bfeb5c7dd3"}, {file = "ipykernel-5.3.4.tar.gz", hash = "sha256:9b2652af1607986a1b231c62302d070bc0534f564c393a5d9d130db9abbbe89d"}, ] ipython = [ - {file = "ipython-7.18.1-py3-none-any.whl", hash = "sha256:2e22c1f74477b5106a6fb301c342ab8c64bb75d702e350f05a649e8cb40a0fb8"}, - {file = "ipython-7.18.1.tar.gz", hash = "sha256:a331e78086001931de9424940699691ad49dfb457cea31f5471eae7b78222d5e"}, + {file = "ipython-7.19.0-py3-none-any.whl", hash = "sha256:c987e8178ced651532b3b1ff9965925bfd445c279239697052561a9ab806d28f"}, + {file = "ipython-7.19.0.tar.gz", hash = "sha256:cbb2ef3d5961d44e6a963b9817d4ea4e1fa2eb589c371a470fed14d8d40cbd6a"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, @@ -2167,8 +2166,8 @@ jupyter-core = [ {file = "jupyter_core-4.6.3.tar.gz", hash = "sha256:394fd5dd787e7c8861741880bdf8a00ce39f95de5d18e579c74b882522219e7e"}, ] jupyterlab = [ - {file = "jupyterlab-2.2.8-py3-none-any.whl", hash = "sha256:95d0509557881cfa8a5fcdf225f2fca46faf1bc52fc56a28e0b72fcc594c90ab"}, - {file = "jupyterlab-2.2.8.tar.gz", hash = "sha256:c8377bee30504919c1e79949f9fe35443ab7f5c4be622c95307e8108410c8b8c"}, + {file = "jupyterlab-2.2.9-py3-none-any.whl", hash = "sha256:59af02c26a15ec2d2862a15bc72e41ae304b406a0b0d3f4f705eeb7caf91902b"}, + {file = 
"jupyterlab-2.2.9.tar.gz", hash = "sha256:3be8f8edea173753dd838c1b6d3bbcb6f5c801121f824a477025c1b6a1d33dc6"}, ] jupyterlab-pygments = [ {file = "jupyterlab_pygments-0.1.2-py2.py3-none-any.whl", hash = "sha256:abfb880fd1561987efaefcb2d2ac75145d2a5d0139b1876d5be806e32f630008"}, @@ -2272,85 +2271,101 @@ nb-black = [ {file = "nb_black-1.0.7.tar.gz", hash = "sha256:1ca52e3a46675f6a0a6d79ac73a1f8f951bef60f919eced56173e76ab1b6d62b"}, ] nbclient = [ - {file = "nbclient-0.5.0-py3-none-any.whl", hash = "sha256:8a6e27ff581cee50895f44c41936ce02369674e85e2ad58643d8d4a6c36771b0"}, - {file = "nbclient-0.5.0.tar.gz", hash = "sha256:8ad52d27ba144fca1402db014857e53c5a864a2f407be66ca9d74c3a56d6591d"}, + {file = "nbclient-0.5.1-py3-none-any.whl", hash = "sha256:4d6b116187c795c99b9dba13d46e764d596574b14c296d60670c8dfe454db364"}, + {file = "nbclient-0.5.1.tar.gz", hash = "sha256:01e2d726d16eaf2cde6db74a87e2451453547e8832d142f73f72fddcd4fe0250"}, ] nbconvert = [ - {file = "nbconvert-6.0.6-py3-none-any.whl", hash = "sha256:d8549f62e739a4d51f275c2932b1783ee5039dde07a2b71de70c0296a42c8394"}, - {file = "nbconvert-6.0.6.tar.gz", hash = "sha256:68335477288aab8a9b9ec03002dce59b4eb1ca967116741ec218a4e78c129efd"}, + {file = "nbconvert-6.0.7-py3-none-any.whl", hash = "sha256:39e9f977920b203baea0be67eea59f7b37a761caa542abe80f5897ce3cf6311d"}, + {file = "nbconvert-6.0.7.tar.gz", hash = "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"}, ] nbformat = [ - {file = "nbformat-5.0.7-py3-none-any.whl", hash = "sha256:ea55c9b817855e2dfcd3f66d74857342612a60b1f09653440f4a5845e6e3523f"}, - {file = "nbformat-5.0.7.tar.gz", hash = "sha256:54d4d6354835a936bad7e8182dcd003ca3dc0cedfee5a306090e04854343b340"}, + {file = "nbformat-5.0.8-py3-none-any.whl", hash = "sha256:aa9450c16d29286dc69b92ea4913c1bffe86488f90184445996ccc03a2f60382"}, + {file = "nbformat-5.0.8.tar.gz", hash = "sha256:f545b22138865bfbcc6b1ffe89ed5a2b8e2dc5d4fe876f2ca60d8e6f702a30f8"}, ] nest-asyncio = [ - {file = 
"nest_asyncio-1.4.1-py3-none-any.whl", hash = "sha256:a4487c4f49f2d11a7bb89a512a6886b6a5045f47097f49815b2851aaa8599cf0"}, - {file = "nest_asyncio-1.4.1.tar.gz", hash = "sha256:b86c3193abda5b2eeccf8c79894bc71c680369a178f4b068514ac00720b14e01"}, + {file = "nest_asyncio-1.4.2-py3-none-any.whl", hash = "sha256:c2d3bdc76ba235a7ad215128afe31d74a320d25790c50cd94685ec5ea221b94d"}, + {file = "nest_asyncio-1.4.2.tar.gz", hash = "sha256:c614fcfaca72b1f04778bc0e73f49c84500b3d045c49d149fc46f1566643c175"}, ] nodeenv = [ {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, {file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"}, ] notebook = [ - {file = "notebook-6.1.4-py3-none-any.whl", hash = "sha256:07b6e8b8a61aa2f780fe9a97430470485bc71262bc5cae8521f1441b910d2c88"}, - {file = "notebook-6.1.4.tar.gz", hash = "sha256:687d01f963ea20360c0b904ee7a37c3d8cda553858c8d6e33fd0afd13e89de32"}, + {file = "notebook-6.1.5-py3-none-any.whl", hash = "sha256:508cf9dad7cdb3188f1aa27017dc78179029dfe83814fc505329f689bc2ab50f"}, + {file = "notebook-6.1.5.tar.gz", hash = "sha256:3db37ae834c5f3b6378381229d0e5dfcbfb558d08c8ce646b1ad355147f5e91d"}, ] nox = [ {file = "nox-2020.8.22-py3-none-any.whl", hash = "sha256:55f8cab16bcfaaea08b141c83bf2b7c779e943518d0de6cd9c38cd8da95d11ea"}, {file = "nox-2020.8.22.tar.gz", hash = "sha256:efa5adcf1134012f96bcd0a496ccebd4c9e9da53a831888a2a779462440eebcf"}, ] numpy = [ - {file = "numpy-1.19.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b594f76771bc7fc8a044c5ba303427ee67c17a09b36e1fa32bde82f5c419d17a"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:e6ddbdc5113628f15de7e4911c02aed74a4ccff531842c583e5032f6e5a179bd"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:3733640466733441295b0d6d3dcbf8e1ffa7e897d4d82903169529fd3386919a"}, - {file = 
"numpy-1.19.2-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:4339741994c775396e1a274dba3609c69ab0f16056c1077f18979bec2a2c2e6e"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:7c6646314291d8f5ea900a7ea9c4261f834b5b62159ba2abe3836f4fa6705526"}, - {file = "numpy-1.19.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:7118f0a9f2f617f921ec7d278d981244ba83c85eea197be7c5a4f84af80a9c3c"}, - {file = "numpy-1.19.2-cp36-cp36m-win32.whl", hash = "sha256:9a3001248b9231ed73894c773142658bab914645261275f675d86c290c37f66d"}, - {file = "numpy-1.19.2-cp36-cp36m-win_amd64.whl", hash = "sha256:967c92435f0b3ba37a4257c48b8715b76741410467e2bdb1097e8391fccfae15"}, - {file = "numpy-1.19.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:d526fa58ae4aead839161535d59ea9565863bb0b0bdb3cc63214613fb16aced4"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:eb25c381d168daf351147713f49c626030dcff7a393d5caa62515d415a6071d8"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:62139af94728d22350a571b7c82795b9d59be77fc162414ada6c8b6a10ef5d02"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:0c66da1d202c52051625e55a249da35b31f65a81cb56e4c69af0dfb8fb0125bf"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:2117536e968abb7357d34d754e3733b0d7113d4c9f1d921f21a3d96dec5ff716"}, - {file = "numpy-1.19.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:54045b198aebf41bf6bf4088012777c1d11703bf74461d70cd350c0af2182e45"}, - {file = "numpy-1.19.2-cp37-cp37m-win32.whl", hash = "sha256:aba1d5daf1144b956bc87ffb87966791f5e9f3e1f6fab3d7f581db1f5b598f7a"}, - {file = "numpy-1.19.2-cp37-cp37m-win_amd64.whl", hash = "sha256:addaa551b298052c16885fc70408d3848d4e2e7352de4e7a1e13e691abc734c1"}, - {file = "numpy-1.19.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:58d66a6b3b55178a1f8a5fe98df26ace76260a70de694d99577ddeab7eaa9a9d"}, - {file = 
"numpy-1.19.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:59f3d687faea7a4f7f93bd9665e5b102f32f3fa28514f15b126f099b7997203d"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cebd4f4e64cfe87f2039e4725781f6326a61f095bc77b3716502bed812b385a9"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:c35a01777f81e7333bcf276b605f39c872e28295441c265cd0c860f4b40148c1"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d7ac33585e1f09e7345aa902c281bd777fdb792432d27fca857f39b70e5dd31c"}, - {file = "numpy-1.19.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:04c7d4ebc5ff93d9822075ddb1751ff392a4375e5885299445fcebf877f179d5"}, - {file = "numpy-1.19.2-cp38-cp38-win32.whl", hash = "sha256:51ee93e1fac3fe08ef54ff1c7f329db64d8a9c5557e6c8e908be9497ac76374b"}, - {file = "numpy-1.19.2-cp38-cp38-win_amd64.whl", hash = "sha256:1669ec8e42f169ff715a904c9b2105b6640f3f2a4c4c2cb4920ae8b2785dac65"}, - {file = "numpy-1.19.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:0bfd85053d1e9f60234f28f63d4a5147ada7f432943c113a11afcf3e65d9d4c8"}, - {file = "numpy-1.19.2.zip", hash = "sha256:0d310730e1e793527065ad7dde736197b705d0e4c9999775f212b03c44a8484c"}, + {file = "numpy-1.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9b30d4bd69498fc0c3fe9db5f62fffbb06b8eb9321f92cc970f2969be5e3949"}, + {file = "numpy-1.19.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fedbd128668ead37f33917820b704784aff695e0019309ad446a6d0b065b57e4"}, + {file = "numpy-1.19.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8ece138c3a16db8c1ad38f52eb32be6086cc72f403150a79336eb2045723a1ad"}, + {file = "numpy-1.19.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:64324f64f90a9e4ef732be0928be853eee378fd6a01be21a0a8469c4f2682c83"}, + {file = "numpy-1.19.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ad6f2ff5b1989a4899bf89800a671d71b1612e5ff40866d1f4d8bcf48d4e5764"}, + {file = 
"numpy-1.19.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d6c7bb82883680e168b55b49c70af29b84b84abb161cbac2800e8fcb6f2109b6"}, + {file = "numpy-1.19.4-cp36-cp36m-win32.whl", hash = "sha256:13d166f77d6dc02c0a73c1101dd87fdf01339febec1030bd810dcd53fff3b0f1"}, + {file = "numpy-1.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:448ebb1b3bf64c0267d6b09a7cba26b5ae61b6d2dbabff7c91b660c7eccf2bdb"}, + {file = "numpy-1.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:27d3f3b9e3406579a8af3a9f262f5339005dd25e0ecf3cf1559ff8a49ed5cbf2"}, + {file = "numpy-1.19.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:16c1b388cc31a9baa06d91a19366fb99ddbe1c7b205293ed072211ee5bac1ed2"}, + {file = "numpy-1.19.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e5b6ed0f0b42317050c88022349d994fe72bfe35f5908617512cd8c8ef9da2a9"}, + {file = "numpy-1.19.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:18bed2bcb39e3f758296584337966e68d2d5ba6aab7e038688ad53c8f889f757"}, + {file = "numpy-1.19.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:fe45becb4c2f72a0907c1d0246ea6449fe7a9e2293bb0e11c4e9a32bb0930a15"}, + {file = "numpy-1.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:6d7593a705d662be5bfe24111af14763016765f43cb6923ed86223f965f52387"}, + {file = "numpy-1.19.4-cp37-cp37m-win32.whl", hash = "sha256:6ae6c680f3ebf1cf7ad1d7748868b39d9f900836df774c453c11c5440bc15b36"}, + {file = "numpy-1.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9eeb7d1d04b117ac0d38719915ae169aa6b61fca227b0b7d198d43728f0c879c"}, + {file = "numpy-1.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb1017eec5257e9ac6209ac172058c430e834d5d2bc21961dceeb79d111e5909"}, + {file = "numpy-1.19.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:edb01671b3caae1ca00881686003d16c2209e07b7ef8b7639f1867852b948f7c"}, + {file = "numpy-1.19.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f29454410db6ef8126c83bd3c968d143304633d45dc57b51252afbd79d700893"}, + {file = 
"numpy-1.19.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:ec149b90019852266fec2341ce1db513b843e496d5a8e8cdb5ced1923a92faab"}, + {file = "numpy-1.19.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1aeef46a13e51931c0b1cf8ae1168b4a55ecd282e6688fdb0a948cc5a1d5afb9"}, + {file = "numpy-1.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:08308c38e44cc926bdfce99498b21eec1f848d24c302519e64203a8da99a97db"}, + {file = "numpy-1.19.4-cp38-cp38-win32.whl", hash = "sha256:5734bdc0342aba9dfc6f04920988140fb41234db42381cf7ccba64169f9fe7ac"}, + {file = "numpy-1.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:09c12096d843b90eafd01ea1b3307e78ddd47a55855ad402b157b6c4862197ce"}, + {file = "numpy-1.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e452dc66e08a4ce642a961f134814258a082832c78c90351b75c41ad16f79f63"}, + {file = "numpy-1.19.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a5d897c14513590a85774180be713f692df6fa8ecf6483e561a6d47309566f37"}, + {file = "numpy-1.19.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a09f98011236a419ee3f49cedc9ef27d7a1651df07810ae430a6b06576e0b414"}, + {file = "numpy-1.19.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:50e86c076611212ca62e5a59f518edafe0c0730f7d9195fec718da1a5c2bb1fc"}, + {file = "numpy-1.19.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f0d3929fe88ee1c155129ecd82f981b8856c5d97bcb0d5f23e9b4242e79d1de3"}, + {file = "numpy-1.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c42c4b73121caf0ed6cd795512c9c09c52a7287b04d105d112068c1736d7c753"}, + {file = "numpy-1.19.4-cp39-cp39-win32.whl", hash = "sha256:8cac8790a6b1ddf88640a9267ee67b1aee7a57dfa2d2dd33999d080bc8ee3a0f"}, + {file = "numpy-1.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:4377e10b874e653fe96985c05feed2225c912e328c8a26541f7fc600fb9c637b"}, + {file = "numpy-1.19.4-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:2a2740aa9733d2e5b2dfb33639d98a64c3b0f24765fed86b0fd2aec07f6a0a08"}, + {file = "numpy-1.19.4.zip", hash = 
"sha256:141ec3a3300ab89c7f2b0775289954d193cc8edb621ea05f99db9cb181530512"}, ] packaging = [ {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, ] pandas = [ - {file = "pandas-1.1.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:eb0ac2fd04428f18b547716f70c699a7cc9c65a6947ed8c7e688d96eb91e3db8"}, - {file = "pandas-1.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:02ec9f5f0b7df7227931a884569ef0b6d32d76789c84bcac1a719dafd1f912e8"}, - {file = "pandas-1.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1edf6c254d2d138188e9987159978ee70e23362fe9197f3f100844a197f7e1e4"}, - {file = "pandas-1.1.2-cp36-cp36m-win32.whl", hash = "sha256:b821f239514a9ce46dd1cd6c9298a03ed58d0235d414ea264aacc1b14916bbe4"}, - {file = "pandas-1.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:ab6ea0f3116f408a8a59cd50158bfd19d2a024f4e221f14ab1bcd2da4f0c6fdf"}, - {file = "pandas-1.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:474fa53e3b2f3a543cbca81f7457bd1f44e7eb1be7171067636307e21b624e9c"}, - {file = "pandas-1.1.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:9e135ce9929cd0f0ba24f0545936af17ba935f844d4c3a2b979354a73c9440e0"}, - {file = "pandas-1.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:188cdfbf8399bc144fa95040536b5ce3429d2eda6c9c8b238c987af7df9f128c"}, - {file = "pandas-1.1.2-cp37-cp37m-win32.whl", hash = "sha256:08783a33989a6747317766b75be30a594a9764b9f145bb4bcc06e337930d9807"}, - {file = "pandas-1.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:f7008ec22b92d771b145150978d930a28fab8da3a10131b01bbf39574acdad0b"}, - {file = "pandas-1.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:59df9f0276aa4854d8bff28c5e5aeb74d9c6bb4d9f55d272b7124a7df40e47d0"}, - {file = "pandas-1.1.2-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:eeb64c5b3d4f2ea072ca8afdeb2b946cd681a863382ca79734f1b520b8d2fa26"}, - {file = "pandas-1.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c9235b37489168ed6b173551c816b50aa89f03c24a8549a8b4d47d8dc79bfb1e"}, - {file = "pandas-1.1.2-cp38-cp38-win32.whl", hash = "sha256:0936991228241db937e87f82ec552a33888dd04a2e0d5a2fa3c689f92fab09e0"}, - {file = "pandas-1.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:026d764d0b86ee53183aa4c0b90774b6146123eeada4e24946d7d24290777be1"}, - {file = "pandas-1.1.2.tar.gz", hash = "sha256:b64ffd87a2cfd31b40acd4b92cb72ea9a52a48165aec4c140e78fd69c45d1444"}, + {file = "pandas-1.1.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e2b8557fe6d0a18db4d61c028c6af61bfed44ef90e419ed6fadbdc079eba141e"}, + {file = "pandas-1.1.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:3aa8e10768c730cc1b610aca688f588831fa70b65a26cb549fbb9f35049a05e0"}, + {file = "pandas-1.1.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:185cf8c8f38b169dbf7001e1a88c511f653fbb9dfa3e048f5e19c38049e991dc"}, + {file = "pandas-1.1.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0d9a38a59242a2f6298fff45d09768b78b6eb0c52af5919ea9e45965d7ba56d9"}, + {file = "pandas-1.1.4-cp36-cp36m-win32.whl", hash = "sha256:8b4c2055ebd6e497e5ecc06efa5b8aa76f59d15233356eb10dad22a03b757805"}, + {file = "pandas-1.1.4-cp36-cp36m-win_amd64.whl", hash = "sha256:5dac3aeaac5feb1016e94bde851eb2012d1733a222b8afa788202b836c97dad5"}, + {file = "pandas-1.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d2b5b58e7df46b2c010ec78d7fb9ab20abf1d306d0614d3432e7478993fbdb0"}, + {file = "pandas-1.1.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c681e8fcc47a767bf868341d8f0d76923733cbdcabd6ec3a3560695c69f14a1e"}, + {file = "pandas-1.1.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c5a3597880a7a29a31ebd39b73b2c824316ae63a05c3c8a5ce2aea3fc68afe35"}, + {file = "pandas-1.1.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:6613c7815ee0b20222178ad32ec144061cb07e6a746970c9160af1ebe3ad43b4"}, + {file = "pandas-1.1.4-cp37-cp37m-win32.whl", hash = "sha256:43cea38cbcadb900829858884f49745eb1f42f92609d368cabcc674b03e90efc"}, + {file = "pandas-1.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:5378f58172bd63d8c16dd5d008d7dcdd55bf803fcdbe7da2dcb65dbbf322f05b"}, + {file = "pandas-1.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a7d2547b601ecc9a53fd41561de49a43d2231728ad65c7713d6b616cd02ddbed"}, + {file = "pandas-1.1.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:41746d520f2b50409dffdba29a15c42caa7babae15616bcf80800d8cfcae3d3e"}, + {file = "pandas-1.1.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a15653480e5b92ee376f8458197a58cca89a6e95d12cccb4c2d933df5cecc63f"}, + {file = "pandas-1.1.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5fdb2a61e477ce58d3f1fdf2470ee142d9f0dde4969032edaf0b8f1a9dafeaa2"}, + {file = "pandas-1.1.4-cp38-cp38-win32.whl", hash = "sha256:8a5d7e57b9df2c0a9a202840b2881bb1f7a648eba12dd2d919ac07a33a36a97f"}, + {file = "pandas-1.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:54404abb1cd3f89d01f1fb5350607815326790efb4789be60508f458cdd5ccbf"}, + {file = "pandas-1.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:112c5ba0f9ea0f60b2cc38c25f87ca1d5ca10f71efbee8e0f1bee9cf584ed5d5"}, + {file = "pandas-1.1.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cf135a08f306ebbcfea6da8bf775217613917be23e5074c69215b91e180caab4"}, + {file = "pandas-1.1.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b1f8111635700de7ac350b639e7e452b06fc541a328cf6193cf8fc638804bab8"}, + {file = "pandas-1.1.4-cp39-cp39-win32.whl", hash = "sha256:09e0503758ad61afe81c9069505f8cb8c1e36ea8cc1e6826a95823ef5b327daf"}, + {file = "pandas-1.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a11a6290ef3667575cbd4785a1b62d658c25a2fd70a5adedba32e156a8f1773"}, + {file = "pandas-1.1.4.tar.gz", hash = "sha256:a979d0404b135c63954dea79e6246c45dd45371a88631cdbb4877d844e6de3b6"}, ] pandocfilters = [ - {file = 
"pandocfilters-1.4.2.tar.gz", hash = "sha256:b3dd70e169bb5449e6bc6ff96aea89c5eea8c5f6ab5e207fc2f521a2cf4a0da9"}, + {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, ] parso = [ {file = "parso-0.7.1-py2.py3-none-any.whl", hash = "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea"}, @@ -2361,8 +2376,8 @@ pathspec = [ {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, ] pbr = [ - {file = "pbr-5.5.0-py2.py3-none-any.whl", hash = "sha256:5adc0f9fc64319d8df5ca1e4e06eea674c26b80e6f00c530b18ce6a6592ead15"}, - {file = "pbr-5.5.0.tar.gz", hash = "sha256:14bfd98f51c78a3dd22a1ef45cf194ad79eee4a19e8e1a0d5c7f8e81ffe182ea"}, + {file = "pbr-5.5.1-py2.py3-none-any.whl", hash = "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"}, + {file = "pbr-5.5.1.tar.gz", hash = "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9"}, ] pep8-naming = [ {file = "pep8-naming-0.9.1.tar.gz", hash = "sha256:a33d38177056321a167decd6ba70b890856ba5025f0a8eca6a3eda607da93caf"}, @@ -2381,16 +2396,16 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] pre-commit = [ - {file = "pre_commit-2.7.1-py2.py3-none-any.whl", hash = "sha256:810aef2a2ba4f31eed1941fc270e72696a1ad5590b9751839c90807d0fff6b9a"}, - {file = "pre_commit-2.7.1.tar.gz", hash = "sha256:c54fd3e574565fe128ecc5e7d2f91279772ddb03f8729645fa812fe809084a70"}, + {file = "pre_commit-2.8.2-py2.py3-none-any.whl", hash = "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315"}, + {file = "pre_commit-2.8.2.tar.gz", hash = "sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6"}, ] prometheus-client = [ {file = "prometheus_client-0.8.0-py2.py3-none-any.whl", hash = "sha256:983c7ac4b47478720db338f1491ef67a100b474e3bc7dafcbaefb7d0b8f9b01c"}, {file = 
"prometheus_client-0.8.0.tar.gz", hash = "sha256:c6e6b706833a6bd1fd51711299edee907857be10ece535126a158f911ee80915"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.7-py3-none-any.whl", hash = "sha256:83074ee28ad4ba6af190593d4d4c607ff525272a504eb159199b6dd9f950c950"}, - {file = "prompt_toolkit-3.0.7.tar.gz", hash = "sha256:822f4605f28f7d2ba6b0b09a31e25e140871e96364d1d377667b547bb3bf4489"}, + {file = "prompt_toolkit-3.0.8-py3-none-any.whl", hash = "sha256:7debb9a521e0b1ee7d2fe96ee4bd60ef03c6492784de0547337ca4433e46aa63"}, + {file = "prompt_toolkit-3.0.8.tar.gz", hash = "sha256:25c95d2ac813909f813c93fde734b6e44406d1477a9faef7c915ff37d39c0a8c"}, ] psycopg2 = [ {file = "psycopg2-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:068115e13c70dc5982dfc00c5d70437fe37c014c808acce119b5448361c03725"}, @@ -2432,8 +2447,8 @@ pyflakes = [ {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, ] pygments = [ - {file = "Pygments-2.7.1-py3-none-any.whl", hash = "sha256:307543fe65c0947b126e83dd5a61bd8acbd84abec11f43caebaf5534cbc17998"}, - {file = "Pygments-2.7.1.tar.gz", hash = "sha256:926c3f319eda178d1bd90851e4317e6d8cdb5e292a3386aac9bd75eca29cf9c7"}, + {file = "Pygments-2.7.2-py3-none-any.whl", hash = "sha256:88a0bbcd659fcb9573703957c6b9cff9fab7295e6e76db54c9d00ae42df32773"}, + {file = "Pygments-2.7.2.tar.gz", hash = "sha256:381985fcc551eb9d37c52088a32914e00517e57f4a21609f48141ba08e193fa0"}, ] pylint = [ {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, @@ -2447,8 +2462,8 @@ pyrsistent = [ {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-6.1.0-py3-none-any.whl", hash = "sha256:1cd09785c0a50f9af72220dd12aa78cfa49cbffc356c61eab009ca189e018a33"}, - {file = "pytest-6.1.0.tar.gz", hash = 
"sha256:d010e24666435b39a4cf48740b039885642b6c273a3f77be3e7e03554d2806b7"}, + {file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"}, + {file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"}, ] pytest-cov = [ {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, @@ -2473,8 +2488,8 @@ python-editor = [ {file = "python_editor-1.0.4-py3.5.egg", hash = "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77"}, ] pytz = [ - {file = "pytz-2020.1-py2.py3-none-any.whl", hash = "sha256:a494d53b6d39c3c6e44c3bec237336e14305e4f29bbf800b599253057fbb79ed"}, - {file = "pytz-2020.1.tar.gz", hash = "sha256:c35965d010ce31b23eeb663ed3cc8c906275d6be1a34393a1d73a41febf4a048"}, + {file = "pytz-2020.4-py2.py3-none-any.whl", hash = "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"}, + {file = "pytz-2020.4.tar.gz", hash = "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268"}, ] pywin32 = [ {file = "pywin32-228-cp27-cp27m-win32.whl", hash = "sha256:37dc9935f6a383cc744315ae0c2882ba1768d9b06700a70f35dc1ce73cd4ba9c"}, @@ -2546,27 +2561,49 @@ pyzmq = [ {file = "pyzmq-19.0.2.tar.gz", hash = "sha256:296540a065c8c21b26d63e3cea2d1d57902373b16e4256afe46422691903a438"}, ] regex = [ - {file = "regex-2020.9.27-cp27-cp27m-win32.whl", hash = "sha256:d23a18037313714fb3bb5a94434d3151ee4300bae631894b1ac08111abeaa4a3"}, - {file = "regex-2020.9.27-cp27-cp27m-win_amd64.whl", hash = "sha256:84e9407db1b2eb368b7ecc283121b5e592c9aaedbe8c78b1a2f1102eb2e21d19"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5f18875ac23d9aa2f060838e8b79093e8bb2313dbaaa9f54c6d8e52a5df097be"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ae91972f8ac958039920ef6e8769277c084971a142ce2b660691793ae44aae6b"}, - {file = 
"regex-2020.9.27-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:9a02d0ae31d35e1ec12a4ea4d4cca990800f66a917d0fb997b20fbc13f5321fc"}, - {file = "regex-2020.9.27-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ebbe29186a3d9b0c591e71b7393f1ae08c83cb2d8e517d2a822b8f7ec99dfd8b"}, - {file = "regex-2020.9.27-cp36-cp36m-win32.whl", hash = "sha256:4707f3695b34335afdfb09be3802c87fa0bc27030471dbc082f815f23688bc63"}, - {file = "regex-2020.9.27-cp36-cp36m-win_amd64.whl", hash = "sha256:9bc13e0d20b97ffb07821aa3e113f9998e84994fe4d159ffa3d3a9d1b805043b"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f1b3afc574a3db3b25c89161059d857bd4909a1269b0b3cb3c904677c8c4a3f7"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5533a959a1748a5c042a6da71fe9267a908e21eded7a4f373efd23a2cbdb0ecc"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:1fe0a41437bbd06063aa184c34804efa886bcc128222e9916310c92cd54c3b4c"}, - {file = "regex-2020.9.27-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:c570f6fa14b9c4c8a4924aaad354652366577b4f98213cf76305067144f7b100"}, - {file = "regex-2020.9.27-cp37-cp37m-win32.whl", hash = "sha256:eda4771e0ace7f67f58bc5b560e27fb20f32a148cbc993b0c3835970935c2707"}, - {file = "regex-2020.9.27-cp37-cp37m-win_amd64.whl", hash = "sha256:60b0e9e6dc45683e569ec37c55ac20c582973841927a85f2d8a7d20ee80216ab"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux1_i686.whl", hash = "sha256:088afc8c63e7bd187a3c70a94b9e50ab3f17e1d3f52a32750b5b77dbe99ef5ef"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:eaf548d117b6737df379fdd53bdde4f08870e66d7ea653e230477f071f861121"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:41bb65f54bba392643557e617316d0d899ed5b4946dccee1cb6696152b29844b"}, - {file = "regex-2020.9.27-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:8d69cef61fa50c8133382e61fd97439de1ae623fe943578e477e76a9d9471637"}, - {file = 
"regex-2020.9.27-cp38-cp38-win32.whl", hash = "sha256:f2388013e68e750eaa16ccbea62d4130180c26abb1d8e5d584b9baf69672b30f"}, - {file = "regex-2020.9.27-cp38-cp38-win_amd64.whl", hash = "sha256:4318d56bccfe7d43e5addb272406ade7a2274da4b70eb15922a071c58ab0108c"}, - {file = "regex-2020.9.27.tar.gz", hash = "sha256:a6f32aea4260dfe0e55dc9733ea162ea38f0ea86aa7d0f77b15beac5bf7b369d"}, + {file = "regex-2020.10.28-cp27-cp27m-win32.whl", hash = "sha256:4b5a9bcb56cc146c3932c648603b24514447eafa6ce9295234767bf92f69b504"}, + {file = "regex-2020.10.28-cp27-cp27m-win_amd64.whl", hash = "sha256:c13d311a4c4a8d671f5860317eb5f09591fbe8259676b86a85769423b544451e"}, + {file = "regex-2020.10.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c454ad88e56e80e44f824ef8366bb7e4c3def12999151fd5c0ea76a18fe9aa3e"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c8a2b7ccff330ae4c460aff36626f911f918555660cc28163417cb84ffb25789"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4afa350f162551cf402bfa3cd8302165c8e03e689c897d185f16a167328cc6dd"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b88fa3b8a3469f22b4f13d045d9bd3eda797aa4e406fde0a2644bc92bbdd4bdd"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f43109822df2d3faac7aad79613f5f02e4eab0fc8ad7932d2e70e2a83bd49c26"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:de7fd57765398d141949946c84f3590a68cf5887dac3fc52388df0639b01eda4"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:9b6305295b6591e45f069d3553c54d50cc47629eb5c218aac99e0f7fafbf90a1"}, + {file = "regex-2020.10.28-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:bd904c0dec29bbd0769887a816657491721d5f545c29e30fd9d7a1a275dc80ab"}, + {file = "regex-2020.10.28-cp36-cp36m-win32.whl", hash = "sha256:8092a5a06ad9a7a247f2a76ace121183dc4e1a84c259cf9c2ce3bbb69fac3582"}, + {file = 
"regex-2020.10.28-cp36-cp36m-win_amd64.whl", hash = "sha256:49461446b783945597c4076aea3f49aee4b4ce922bd241e4fcf62a3e7c61794c"}, + {file = "regex-2020.10.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:297116e79074ec2a2f885d22db00ce6e88b15f75162c5e8b38f66ea734e73c64"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8ca9dca965bd86ea3631b975d63b0693566d3cc347e55786d5514988b6f5b84c"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ea37320877d56a7f0a1e6a625d892cf963aa7f570013499f5b8d5ab8402b5625"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:3a5f08039eee9ea195a89e180c5762bfb55258bfb9abb61a20d3abee3b37fd12"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:cb905f3d2e290a8b8f1579d3984f2cfa7c3a29cc7cba608540ceeed18513f520"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:96f99219dddb33e235a37283306834700b63170d7bb2a1ee17e41c6d589c8eb9"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:227a8d2e5282c2b8346e7f68aa759e0331a0b4a890b55a5cfbb28bd0261b84c0"}, + {file = "regex-2020.10.28-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:2564def9ce0710d510b1fc7e5178ce2d20f75571f788b5197b3c8134c366f50c"}, + {file = "regex-2020.10.28-cp37-cp37m-win32.whl", hash = "sha256:a62162be05edf64f819925ea88d09d18b09bebf20971b363ce0c24e8b4aa14c0"}, + {file = "regex-2020.10.28-cp37-cp37m-win_amd64.whl", hash = "sha256:03855ee22980c3e4863dc84c42d6d2901133362db5daf4c36b710dd895d78f0a"}, + {file = "regex-2020.10.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf4f896c42c63d1f22039ad57de2644c72587756c0cfb3cc3b7530cfe228277f"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux1_i686.whl", hash = "sha256:625116aca6c4b57c56ea3d70369cacc4d62fead4930f8329d242e4fe7a58ce4b"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:2dc522e25e57e88b4980d2bdd334825dbf6fa55f28a922fc3bfa60cc09e5ef53"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:119e0355dbdd4cf593b17f2fc5dbd4aec2b8899d0057e4957ba92f941f704bf5"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:cfcf28ed4ce9ced47b9b9670a4f0d3d3c0e4d4779ad4dadb1ad468b097f808aa"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b45bab9f224de276b7bc916f6306b86283f6aa8afe7ed4133423efb42015a898"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:52e83a5f28acd621ba8e71c2b816f6541af7144b69cc5859d17da76c436a5427"}, + {file = "regex-2020.10.28-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:aacc8623ffe7999a97935eeabbd24b1ae701d08ea8f874a6ff050e93c3e658cf"}, + {file = "regex-2020.10.28-cp38-cp38-win32.whl", hash = "sha256:06b52815d4ad38d6524666e0d50fe9173533c9cc145a5779b89733284e6f688f"}, + {file = "regex-2020.10.28-cp38-cp38-win_amd64.whl", hash = "sha256:c3466a84fce42c2016113101018a9981804097bacbab029c2d5b4fcb224b89de"}, + {file = "regex-2020.10.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:127a9e0c0d91af572fbb9e56d00a504dbd4c65e574ddda3d45b55722462210de"}, + {file = "regex-2020.10.28-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c2c6c56ee97485a127555c9595c069201b5161de9d05495fbe2132b5ac104786"}, + {file = "regex-2020.10.28-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1ec66700a10e3c75f1f92cbde36cca0d3aaee4c73dfa26699495a3a30b09093c"}, + {file = "regex-2020.10.28-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:11116d424734fe356d8777f89d625f0df783251ada95d6261b4c36ad27a394bb"}, + {file = "regex-2020.10.28-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f1fce1e4929157b2afeb4bb7069204d4370bab9f4fc03ca1fbec8bd601f8c87d"}, + {file = "regex-2020.10.28-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:3dfca201fa6b326239e1bccb00b915e058707028809b8ecc0cf6819ad233a740"}, + {file = 
"regex-2020.10.28-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b8a686a6c98872007aa41fdbb2e86dc03b287d951ff4a7f1da77fb7f14113e4d"}, + {file = "regex-2020.10.28-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c32c91a0f1ac779cbd73e62430de3d3502bbc45ffe5bb6c376015acfa848144b"}, + {file = "regex-2020.10.28-cp39-cp39-win32.whl", hash = "sha256:832339223b9ce56b7b15168e691ae654d345ac1635eeb367ade9ecfe0e66bee0"}, + {file = "regex-2020.10.28-cp39-cp39-win_amd64.whl", hash = "sha256:654c1635f2313d0843028487db2191530bca45af61ca85d0b16555c399625b0e"}, + {file = "regex-2020.10.28.tar.gz", hash = "sha256:dd3e6547ecf842a29cf25123fbf8d2461c53c8d37aa20d87ecee130c89b7079b"}, ] requests = [ {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, @@ -2592,12 +2629,12 @@ snowballstemmer = [ {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, ] sphinx = [ - {file = "Sphinx-3.2.1-py3-none-any.whl", hash = "sha256:ce6fd7ff5b215af39e2fcd44d4a321f6694b4530b6f2b2109b64d120773faea0"}, - {file = "Sphinx-3.2.1.tar.gz", hash = "sha256:321d6d9b16fa381a5306e5a0b76cd48ffbc588e6340059a729c6fdd66087e0e8"}, + {file = "Sphinx-3.3.0-py3-none-any.whl", hash = "sha256:3abdb2c57a65afaaa4f8573cbabd5465078eb6fd282c1e4f87f006875a7ec0c7"}, + {file = "Sphinx-3.3.0.tar.gz", hash = "sha256:1c21e7c5481a31b531e6cbf59c3292852ccde175b504b00ce2ff0b8f4adc3649"}, ] sphinx-autodoc-typehints = [ - {file = "sphinx-autodoc-typehints-1.11.0.tar.gz", hash = "sha256:bbf0b203f1019b0f9843ee8eef0cff856dc04b341f6dbe1113e37f2ebf243e11"}, - {file = "sphinx_autodoc_typehints-1.11.0-py3-none-any.whl", hash = "sha256:89e19370a55db4aef1be2094d8fb1fb500ca455c55b3fcc8d2600ff805227e04"}, + {file = "sphinx-autodoc-typehints-1.11.1.tar.gz", hash = "sha256:244ba6d3e2fdb854622f643c7763d6f95b6886eba24bec28e86edf205e4ddb20"}, + {file = "sphinx_autodoc_typehints-1.11.1-py3-none-any.whl", hash = 
"sha256:da049791d719f4c9813642496ee4764203e317f0697eb75446183fa2a68e3f77"}, ] sphinxcontrib-applehelp = [ {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, @@ -2624,38 +2661,44 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.3.19-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:f2e8a9c0c8813a468aa659a01af6592f71cd30237ec27c4cc0683f089f90dcfc"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:33d29ae8f1dc7c75b191bb6833f55a19c932514b9b5ce8c3ab9bc3047da5db36"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:3292a28344922415f939ee7f4fc0c186f3d5a0bf02192ceabd4f1129d71b08de"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-win32.whl", hash = "sha256:883c9fb62cebd1e7126dd683222b3b919657590c3e2db33bdc50ebbad53e0338"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27m-win_amd64.whl", hash = "sha256:860d0fe234922fd5552b7f807fbb039e3e7ca58c18c8d38aa0d0a95ddf4f6c23"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:73a40d4fcd35fdedce07b5885905753d5d4edf413fbe53544dd871f27d48bd4f"}, - {file = "SQLAlchemy-1.3.19-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5a49e8473b1ab1228302ed27365ea0fadd4bf44bc0f9e73fe38e10fdd3d6b4fc"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:6547b27698b5b3bbfc5210233bd9523de849b2bb8a0329cd754c9308fc8a05ce"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:107d4af989831d7b091e382d192955679ec07a9209996bf8090f1f539ffc5804"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:eb1d71643e4154398b02e88a42fc8b29db8c44ce4134cf0f4474bfc5cb5d4dac"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-manylinux2014_aarch64.whl", 
hash = "sha256:b6ff91356354b7ff3bd208adcf875056d3d886ed7cef90c571aef2ab8a554b12"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-win32.whl", hash = "sha256:96f51489ac187f4bab588cf51f9ff2d40b6d170ac9a4270ffaed535c8404256b"}, - {file = "SQLAlchemy-1.3.19-cp35-cp35m-win_amd64.whl", hash = "sha256:618db68745682f64cedc96ca93707805d1f3a031747b5a0d8e150cfd5055ae4d"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:6557af9e0d23f46b8cd56f8af08eaac72d2e3c632ac8d5cf4e20215a8dca7cea"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8280f9dae4adb5889ce0bb3ec6a541bf05434db5f9ab7673078c00713d148365"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:b595e71c51657f9ee3235db8b53d0b57c09eee74dfb5b77edff0e46d2218dc02"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:51064ee7938526bab92acd049d41a1dc797422256086b39c08bafeffb9d304c6"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-win32.whl", hash = "sha256:8afcb6f4064d234a43fea108859942d9795c4060ed0fbd9082b0f280181a15c1"}, - {file = "SQLAlchemy-1.3.19-cp36-cp36m-win_amd64.whl", hash = "sha256:e49947d583fe4d29af528677e4f0aa21f5e535ca2ae69c48270ebebd0d8843c0"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:9e865835e36dfbb1873b65e722ea627c096c11b05f796831e3a9b542926e979e"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:276936d41111a501cf4a1a0543e25449108d87e9f8c94714f7660eaea89ae5fe"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:c7adb1f69a80573698c2def5ead584138ca00fff4ad9785a4b0b2bf927ba308d"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:465c999ef30b1c7525f81330184121521418a67189053bcf585824d833c05b66"}, - {file = "SQLAlchemy-1.3.19-cp37-cp37m-win32.whl", hash = "sha256:aa0554495fe06172b550098909be8db79b5accdf6ffb59611900bea345df5eba"}, - {file = 
"SQLAlchemy-1.3.19-cp37-cp37m-win_amd64.whl", hash = "sha256:15c0bcd3c14f4086701c33a9e87e2c7ceb3bcb4a246cd88ec54a49cf2a5bd1a6"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:fe7fe11019fc3e6600819775a7d55abc5446dda07e9795f5954fdbf8a49e1c37"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:c898b3ebcc9eae7b36bd0b4bbbafce2d8076680f6868bcbacee2d39a7a9726a7"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:072766c3bd09294d716b2d114d46ffc5ccf8ea0b714a4e1c48253014b771c6bb"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:26c5ca9d09f0e21b8671a32f7d83caad5be1f6ff45eef5ec2f6fd0db85fc5dc0"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-win32.whl", hash = "sha256:b70bad2f1a5bd3460746c3fb3ab69e4e0eb5f59d977a23f9b66e5bdc74d97b86"}, - {file = "SQLAlchemy-1.3.19-cp38-cp38-win_amd64.whl", hash = "sha256:83469ad15262402b0e0974e612546bc0b05f379b5aa9072ebf66d0f8fef16bea"}, - {file = "SQLAlchemy-1.3.19.tar.gz", hash = "sha256:3bba2e9fbedb0511769780fe1d63007081008c5c2d7d715e91858c94dbaa260e"}, + {file = "SQLAlchemy-1.3.20-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bad73f9888d30f9e1d57ac8829f8a12091bdee4949b91db279569774a866a18e"}, + {file = "SQLAlchemy-1.3.20-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:e32e3455db14602b6117f0f422f46bc297a3853ae2c322ecd1e2c4c04daf6ed5"}, + {file = "SQLAlchemy-1.3.20-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:5cdfe54c1e37279dc70d92815464b77cd8ee30725adc9350f06074f91dbfeed2"}, + {file = "SQLAlchemy-1.3.20-cp27-cp27m-win32.whl", hash = "sha256:2e9bd5b23bba8ae8ce4219c9333974ff5e103c857d9ff0e4b73dc4cb244c7d86"}, + {file = "SQLAlchemy-1.3.20-cp27-cp27m-win_amd64.whl", hash = "sha256:5d92c18458a4aa27497a986038d5d797b5279268a2de303cd00910658e8d149c"}, + {file = "SQLAlchemy-1.3.20-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:53fd857c6c8ffc0aa6a5a3a2619f6a74247e42ec9e46b836a8ffa4abe7aab327"}, + {file = 
"SQLAlchemy-1.3.20-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:0a92745bb1ebbcb3985ed7bda379b94627f0edbc6c82e9e4bac4fb5647ae609a"}, + {file = "SQLAlchemy-1.3.20-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:b6f036ecc017ec2e2cc2a40615b41850dc7aaaea6a932628c0afc73ab98ba3fb"}, + {file = "SQLAlchemy-1.3.20-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3aa6d45e149a16aa1f0c46816397e12313d5e37f22205c26e06975e150ffcf2a"}, + {file = "SQLAlchemy-1.3.20-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:ed53209b5f0f383acb49a927179fa51a6e2259878e164273ebc6815f3a752465"}, + {file = "SQLAlchemy-1.3.20-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:d3b709d64b5cf064972b3763b47139e4a0dc4ae28a36437757f7663f67b99710"}, + {file = "SQLAlchemy-1.3.20-cp35-cp35m-win32.whl", hash = "sha256:950f0e17ffba7a7ceb0dd056567bc5ade22a11a75920b0e8298865dc28c0eff6"}, + {file = "SQLAlchemy-1.3.20-cp35-cp35m-win_amd64.whl", hash = "sha256:8dcbf377529a9af167cbfc5b8acec0fadd7c2357fc282a1494c222d3abfc9629"}, + {file = "SQLAlchemy-1.3.20-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0157c269701d88f5faf1fa0e4560e4d814f210c01a5b55df3cab95e9346a8bcc"}, + {file = "SQLAlchemy-1.3.20-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:7cd40cb4bc50d9e87b3540b23df6e6b24821ba7e1f305c1492b0806c33dbdbec"}, + {file = "SQLAlchemy-1.3.20-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:c092fe282de83d48e64d306b4bce03114859cdbfe19bf8a978a78a0d44ddadb1"}, + {file = "SQLAlchemy-1.3.20-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:166917a729b9226decff29416f212c516227c2eb8a9c9f920d69ced24e30109f"}, + {file = "SQLAlchemy-1.3.20-cp36-cp36m-win32.whl", hash = "sha256:632b32183c0cb0053194a4085c304bc2320e5299f77e3024556fa2aa395c2a8b"}, + {file = "SQLAlchemy-1.3.20-cp36-cp36m-win_amd64.whl", hash = "sha256:bbc58fca72ce45a64bb02b87f73df58e29848b693869e58bd890b2ddbb42d83b"}, + {file = "SQLAlchemy-1.3.20-cp37-cp37m-macosx_10_14_x86_64.whl", hash = 
"sha256:b15002b9788ffe84e42baffc334739d3b68008a973d65fad0a410ca5d0531980"}, + {file = "SQLAlchemy-1.3.20-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9e379674728f43a0cd95c423ac0e95262500f9bfd81d33b999daa8ea1756d162"}, + {file = "SQLAlchemy-1.3.20-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:2b5dafed97f778e9901b79cc01b88d39c605e0545b4541f2551a2fd785adc15b"}, + {file = "SQLAlchemy-1.3.20-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:fcdb3755a7c355bc29df1b5e6fb8226d5c8b90551d202d69d0076a8a5649d68b"}, + {file = "SQLAlchemy-1.3.20-cp37-cp37m-win32.whl", hash = "sha256:bca4d367a725694dae3dfdc86cf1d1622b9f414e70bd19651f5ac4fb3aa96d61"}, + {file = "SQLAlchemy-1.3.20-cp37-cp37m-win_amd64.whl", hash = "sha256:f605f348f4e6a2ba00acb3399c71d213b92f27f2383fc4abebf7a37368c12142"}, + {file = "SQLAlchemy-1.3.20-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:84f0ac4a09971536b38cc5d515d6add7926a7e13baa25135a1dbb6afa351a376"}, + {file = "SQLAlchemy-1.3.20-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2909dffe5c9a615b7e6c92d1ac2d31e3026dc436440a4f750f4749d114d88ceb"}, + {file = "SQLAlchemy-1.3.20-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:c3ab23ee9674336654bf9cac30eb75ac6acb9150dc4b1391bec533a7a4126471"}, + {file = "SQLAlchemy-1.3.20-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:009e8388d4d551a2107632921320886650b46332f61dc935e70c8bcf37d8e0d6"}, + {file = "SQLAlchemy-1.3.20-cp38-cp38-win32.whl", hash = "sha256:bf53d8dddfc3e53a5bda65f7f4aa40fae306843641e3e8e701c18a5609471edf"}, + {file = "SQLAlchemy-1.3.20-cp38-cp38-win_amd64.whl", hash = "sha256:7c735c7a6db8ee9554a3935e741cf288f7dcbe8706320251eb38c412e6a4281d"}, + {file = "SQLAlchemy-1.3.20-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4bdbdb8ca577c6c366d15791747c1de6ab14529115a2eb52774240c412a7b403"}, + {file = "SQLAlchemy-1.3.20-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:ce64a44c867d128ab8e675f587aae7f61bd2db836a3c4ba522d884cd7c298a77"}, + {file = 
"SQLAlchemy-1.3.20-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be41d5de7a8e241864189b7530ca4aaf56a5204332caa70555c2d96379e18079"}, + {file = "SQLAlchemy-1.3.20-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1f5f369202912be72fdf9a8f25067a5ece31a2b38507bb869306f173336348da"}, + {file = "SQLAlchemy-1.3.20-cp39-cp39-win32.whl", hash = "sha256:0cca1844ba870e81c03633a99aa3dc62256fb96323431a5dec7d4e503c26372d"}, + {file = "SQLAlchemy-1.3.20-cp39-cp39-win_amd64.whl", hash = "sha256:d05cef4a164b44ffda58200efcb22355350979e000828479971ebca49b82ddb1"}, + {file = "SQLAlchemy-1.3.20.tar.gz", hash = "sha256:d2f25c7f410338d31666d7ddedfa67570900e248b940d186b48461bd4e5569a1"}, ] stevedore = [ {file = "stevedore-3.2.2-py3-none-any.whl", hash = "sha256:5e1ab03eaae06ef6ce23859402de785f08d97780ed774948ef16c4652c41bc62"}, @@ -2666,31 +2709,63 @@ terminado = [ {file = "terminado-0.9.1.tar.gz", hash = "sha256:3da72a155b807b01c9e8a5babd214e052a0a45a975751da3521a1c3381ce6d76"}, ] testfixtures = [ - {file = "testfixtures-6.14.2-py2.py3-none-any.whl", hash = "sha256:816557888877f498081c1b5c572049b4a2ddffedb77401308ff4cdc1bb9147b7"}, - {file = "testfixtures-6.14.2.tar.gz", hash = "sha256:14d9907390f5f9c7189b3d511b64f34f1072d07cc13b604a57e1bb79029376e3"}, + {file = "testfixtures-6.15.0-py2.py3-none-any.whl", hash = "sha256:e17f4f526fc90b0ac9bc7f8ca62b7dec17d9faf3d721f56bda4f0fd94d02f85a"}, + {file = "testfixtures-6.15.0.tar.gz", hash = "sha256:409f77cfbdad822d12a8ce5c4aa8fb4d0bb38073f4a5444fede3702716a2cec2"}, ] testpath = [ {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, {file = "testpath-0.4.4.tar.gz", hash = "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e"}, ] toml = [ - {file = "toml-0.10.1-py2.py3-none-any.whl", hash = "sha256:bda89d5935c2eac546d648028b9901107a595863cb36bae0c73ac804a9b4ce88"}, - {file = "toml-0.10.1.tar.gz", hash = 
"sha256:926b612be1e5ce0634a2ca03470f95169cf16f939018233a670519cb4ac58b0f"}, + {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, + {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tornado = [ - {file = "tornado-6.0.4-cp35-cp35m-win32.whl", hash = "sha256:5217e601700f24e966ddab689f90b7ea4bd91ff3357c3600fa1045e26d68e55d"}, - {file = "tornado-6.0.4-cp35-cp35m-win_amd64.whl", hash = "sha256:c98232a3ac391f5faea6821b53db8db461157baa788f5d6222a193e9456e1740"}, - {file = "tornado-6.0.4-cp36-cp36m-win32.whl", hash = "sha256:5f6a07e62e799be5d2330e68d808c8ac41d4a259b9cea61da4101b83cb5dc673"}, - {file = "tornado-6.0.4-cp36-cp36m-win_amd64.whl", hash = "sha256:c952975c8ba74f546ae6de2e226ab3cc3cc11ae47baf607459a6728585bb542a"}, - {file = "tornado-6.0.4-cp37-cp37m-win32.whl", hash = "sha256:2c027eb2a393d964b22b5c154d1a23a5f8727db6fda837118a776b29e2b8ebc6"}, - {file = "tornado-6.0.4-cp37-cp37m-win_amd64.whl", hash = "sha256:5618f72e947533832cbc3dec54e1dffc1747a5cb17d1fd91577ed14fa0dc081b"}, - {file = "tornado-6.0.4-cp38-cp38-win32.whl", hash = "sha256:22aed82c2ea340c3771e3babc5ef220272f6fd06b5108a53b4976d0d722bcd52"}, - {file = "tornado-6.0.4-cp38-cp38-win_amd64.whl", hash = "sha256:c58d56003daf1b616336781b26d184023ea4af13ae143d9dda65e31e534940b9"}, - {file = "tornado-6.0.4.tar.gz", hash = "sha256:0fe2d45ba43b00a41cd73f8be321a44936dc1aba233dee979f17a042b83eb6dc"}, + {file = "tornado-6.1-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:d371e811d6b156d82aa5f9a4e08b58debf97c302a35714f6f45e35139c332e32"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0d321a39c36e5f2c4ff12b4ed58d41390460f798422c4504e09eb5678e09998c"}, + {file = "tornado-6.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9de9e5188a782be6b1ce866e8a51bc76a0fbaa0e16613823fc38e4fc2556ad05"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_i686.whl", hash 
= "sha256:61b32d06ae8a036a6607805e6720ef00a3c98207038444ba7fd3d169cd998910"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:3e63498f680547ed24d2c71e6497f24bca791aca2fe116dbc2bd0ac7f191691b"}, + {file = "tornado-6.1-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:6c77c9937962577a6a76917845d06af6ab9197702a42e1346d8ae2e76b5e3675"}, + {file = "tornado-6.1-cp35-cp35m-win32.whl", hash = "sha256:6286efab1ed6e74b7028327365cf7346b1d777d63ab30e21a0f4d5b275fc17d5"}, + {file = "tornado-6.1-cp35-cp35m-win_amd64.whl", hash = "sha256:fa2ba70284fa42c2a5ecb35e322e68823288a4251f9ba9cc77be04ae15eada68"}, + {file = "tornado-6.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:0a00ff4561e2929a2c37ce706cb8233b7907e0cdc22eab98888aca5dd3775feb"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:748290bf9112b581c525e6e6d3820621ff020ed95af6f17fedef416b27ed564c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:e385b637ac3acaae8022e7e47dfa7b83d3620e432e3ecb9a3f7f58f150e50921"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:25ad220258349a12ae87ede08a7b04aca51237721f63b1808d39bdb4b2164558"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:65d98939f1a2e74b58839f8c4dab3b6b3c1ce84972ae712be02845e65391ac7c"}, + {file = "tornado-6.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e519d64089b0876c7b467274468709dadf11e41d65f63bba207e04217f47c085"}, + {file = "tornado-6.1-cp36-cp36m-win32.whl", hash = "sha256:b87936fd2c317b6ee08a5741ea06b9d11a6074ef4cc42e031bc6403f82a32575"}, + {file = "tornado-6.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cc0ee35043162abbf717b7df924597ade8e5395e7b66d18270116f8745ceb795"}, + {file = "tornado-6.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:7250a3fa399f08ec9cb3f7b1b987955d17e044f1ade821b32e5f435130250d7f"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:ed3ad863b1b40cd1d4bd21e7498329ccaece75db5a5bf58cd3c9f130843e7102"}, + {file = "tornado-6.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:dcef026f608f678c118779cd6591c8af6e9b4155c44e0d1bc0c87c036fb8c8c4"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:70dec29e8ac485dbf57481baee40781c63e381bebea080991893cd297742b8fd"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:d3f7594930c423fd9f5d1a76bee85a2c36fd8b4b16921cae7e965f22575e9c01"}, + {file = "tornado-6.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:3447475585bae2e77ecb832fc0300c3695516a47d46cefa0528181a34c5b9d3d"}, + {file = "tornado-6.1-cp37-cp37m-win32.whl", hash = "sha256:e7229e60ac41a1202444497ddde70a48d33909e484f96eb0da9baf8dc68541df"}, + {file = "tornado-6.1-cp37-cp37m-win_amd64.whl", hash = "sha256:cb5ec8eead331e3bb4ce8066cf06d2dfef1bfb1b2a73082dfe8a161301b76e37"}, + {file = "tornado-6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:20241b3cb4f425e971cb0a8e4ffc9b0a861530ae3c52f2b0434e6c1b57e9fd95"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:c77da1263aa361938476f04c4b6c8916001b90b2c2fdd92d8d535e1af48fba5a"}, + {file = "tornado-6.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fba85b6cd9c39be262fcd23865652920832b61583de2a2ca907dbd8e8a8c81e5"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:1e8225a1070cd8eec59a996c43229fe8f95689cb16e552d130b9793cb570a288"}, + {file = "tornado-6.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:d14d30e7f46a0476efb0deb5b61343b1526f73ebb5ed84f23dc794bdb88f9d9f"}, + {file = "tornado-6.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f959b26f2634a091bb42241c3ed8d3cedb506e7c27b8dd5c7b9f745318ddbb6"}, + {file = "tornado-6.1-cp38-cp38-win32.whl", hash = "sha256:34ca2dac9e4d7afb0bed4677512e36a52f09caa6fded70b4e3e1c89dbd92c326"}, + {file = "tornado-6.1-cp38-cp38-win_amd64.whl", hash = 
"sha256:6196a5c39286cc37c024cd78834fb9345e464525d8991c21e908cc046d1cc02c"}, + {file = "tornado-6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0ba29bafd8e7e22920567ce0d232c26d4d47c8b5cf4ed7b562b5db39fa199c5"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:33892118b165401f291070100d6d09359ca74addda679b60390b09f8ef325ffe"}, + {file = "tornado-6.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7da13da6f985aab7f6f28debab00c67ff9cbacd588e8477034c0652ac141feea"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:e0791ac58d91ac58f694d8d2957884df8e4e2f6687cdf367ef7eb7497f79eaa2"}, + {file = "tornado-6.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:66324e4e1beede9ac79e60f88de548da58b1f8ab4b2f1354d8375774f997e6c0"}, + {file = "tornado-6.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:a48900ecea1cbb71b8c71c620dee15b62f85f7c14189bdeee54966fbd9a0c5bd"}, + {file = "tornado-6.1-cp39-cp39-win32.whl", hash = "sha256:d3d20ea5782ba63ed13bc2b8c291a053c8d807a8fa927d941bd718468f7b950c"}, + {file = "tornado-6.1-cp39-cp39-win_amd64.whl", hash = "sha256:548430be2740e327b3fe0201abe471f314741efcb0067ec4f2d7dcfb4825f3e4"}, + {file = "tornado-6.1.tar.gz", hash = "sha256:33c6e81d7bd55b468d2e793517c909b139960b6c790a60b7991b9b6b76fb9791"}, ] traitlets = [ - {file = "traitlets-5.0.4-py3-none-any.whl", hash = "sha256:9664ec0c526e48e7b47b7d14cd6b252efa03e0129011de0a9c1d70315d4309c3"}, - {file = "traitlets-5.0.4.tar.gz", hash = "sha256:86c9351f94f95de9db8a04ad8e892da299a088a64fd283f9f6f18770ae5eae1b"}, + {file = "traitlets-5.0.5-py3-none-any.whl", hash = "sha256:69ff3f9d5351f31a7ad80443c2674b7099df13cc41fc5fa6e2f6d3b0330b0426"}, + {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, ] typed-ast = [ {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, @@ -2721,12 +2796,12 @@ 
typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.25.10-py2.py3-none-any.whl", hash = "sha256:e7983572181f5e1522d9c98453462384ee92a0be7fac5f1413a1e35c56cc0461"}, - {file = "urllib3-1.25.10.tar.gz", hash = "sha256:91056c15fa70756691db97756772bb1eb9678fa585d9184f24534b100dc60f4a"}, + {file = "urllib3-1.25.11-py2.py3-none-any.whl", hash = "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"}, + {file = "urllib3-1.25.11.tar.gz", hash = "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2"}, ] virtualenv = [ - {file = "virtualenv-20.0.31-py2.py3-none-any.whl", hash = "sha256:e0305af10299a7fb0d69393d8f04cb2965dda9351140d11ac8db4e5e3970451b"}, - {file = "virtualenv-20.0.31.tar.gz", hash = "sha256:43add625c53c596d38f971a465553f6318decc39d98512bc100fa1b1e839c8dc"}, + {file = "virtualenv-20.1.0-py2.py3-none-any.whl", hash = "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2"}, + {file = "virtualenv-20.1.0.tar.gz", hash = "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, From 51bb7e823516317cc51b5439f9e1775fa04ee111 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 7 Nov 2020 16:42:35 +0100 Subject: [PATCH 04/72] Adjust the branch reference fixer task's logic ... ... to assume a feature branch if the branch name does not start with 'release' or 'research' and change all references into 'develop'. 
--- noxfile.py | 18 ++++++++++++++---- 1 file changed, 14 insertions(+), 4 deletions(-) diff --git a/noxfile.py b/noxfile.py index 6a9620d..a86cb05 100644 --- a/noxfile.py +++ b/noxfile.py @@ -328,7 +328,7 @@ def test_suite(session): @nox.session(name='fix-branch-references', python=PYTHON, venv_backend='none') -def fix_branch_references(session): # noqa:WPS210 +def fix_branch_references(session): # noqa:WPS210,WPS231 """Replace branch references with the current branch. Intended to be run as a pre-commit hook. @@ -336,9 +336,15 @@ def fix_branch_references(session): # noqa:WPS210 Many files in the project (e.g., README.md) contain links to resources on github.com or nbviewer.jupyter.org that contain branch labels. - This task rewrites these links such that they contain the branch reference - of the current branch. If the branch is only a temporary one that is to be - merged into the 'main' branch, all references are adjusted to 'main' as well. + This task rewrites these links such that they contain branch references + that make sense given the context: + + - If the branch is only a temporary one that is to be merged into + the 'main' branch, all references are adjusted to 'main' as well. + + - If the branch is not named after a default branch in the GitFlow + model, it is interpreted as a feature branch and the references + are adjusted into 'develop'. This task may be called with one positional argument that is interpreted as the branch to which all references are changed into. @@ -362,6 +368,10 @@ def fix_branch_references(session): # noqa:WPS210 # into 'main', we adjust all branch references to 'main' as well. if branch.startswith('release') or branch.startswith('research'): branch = 'main' + # If the current branch appears to be a feature branch, we adjust + # all branch references to 'develop'. + elif branch != 'main': + branch = 'develop' # If a "--branch=BRANCH_NAME" argument is passed in # as the only positional argument, we use BRANCH_NAME. 
# Note: The --branch is required as session.posargs contains From 143ecba98e85aa5990d24f2cc976781f2e7b7dfb Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 13:46:45 +0100 Subject: [PATCH 05/72] Update submodule for demand-forecasting paper The paper is now published. --- papers/demand-forecasting | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/papers/demand-forecasting b/papers/demand-forecasting index 1184c54..9ee3396 160000 --- a/papers/demand-forecasting +++ b/papers/demand-forecasting @@ -1 +1 @@ -Subproject commit 1184c54377f45f6cabbf6d977b84ff85ec4da9eb +Subproject commit 9ee3396a24ce20c9886b4cde5cfe2665fd5a8102 From 570cb0112e0c9b27dd0572e9748198e23b7b6bfe Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 13:54:26 +0100 Subject: [PATCH 06/72] Pin the dependencies ... ... after upgrading: - dev dependencies + packaging + pre-commit + pytest + sphinx - research dependencies + pandas - transient dependencies + appnode + argcomplete + babel + bandit + certifi + cffi + colorlog + darglint + identify + ipykernel + jupyter-core + nest-asyncio + pathspec + prometheus-client + py + pygments + pywin32 + pyzmq + regex + requests + restructedtext-lint + stevedore + testfixtures + urllib3 + virtualenv --- poetry.lock | 469 ++++++++++++++++++++++++++-------------------------- 1 file changed, 234 insertions(+), 235 deletions(-) diff --git a/poetry.lock b/poetry.lock index c142320..b44af50 100644 --- a/poetry.lock +++ b/poetry.lock @@ -30,15 +30,15 @@ python-versions = "*" [[package]] name = "appnope" -version = "0.1.0" -description = "Disable App Nap on OS X 10.9" +version = "0.1.2" +description = "Disable App Nap on macOS >= 10.9" category = "main" optional = true python-versions = "*" [[package]] name = "argcomplete" -version = "1.12.1" +version = "1.12.2" description = "Bash tab completion for argparse" category = "dev" optional = false @@ -139,7 +139,7 @@ pyflakes = ">=1.1.0" [[package]] name = "babel" -version = 
"2.8.0" +version = "2.9.0" description = "Internationalization utilities" category = "dev" optional = false @@ -158,16 +158,16 @@ python-versions = "*" [[package]] name = "bandit" -version = "1.6.2" +version = "1.7.0" description = "Security oriented static analyser for python code." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" [package.dependencies] colorama = {version = ">=0.3.9", markers = "platform_system == \"Windows\""} GitPython = ">=1.0.1" -PyYAML = ">=3.13" +PyYAML = ">=5.3.1" six = ">=1.10.0" stevedore = ">=1.20.0" @@ -206,7 +206,7 @@ webencodings = "*" [[package]] name = "certifi" -version = "2020.6.20" +version = "2020.12.5" description = "Python package for providing Mozilla's CA Bundle." category = "main" optional = false @@ -214,7 +214,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.3" +version = "1.14.4" description = "Foreign Function Interface for Python calling C code." category = "main" optional = true @@ -257,7 +257,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "colorlog" -version = "4.5.0" +version = "4.6.2" description = "Log formatting with colors!" category = "dev" optional = false @@ -279,11 +279,11 @@ toml = ["toml"] [[package]] name = "darglint" -version = "1.5.5" +version = "1.5.8" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." 
category = "dev" optional = false -python-versions = ">=3.5,<4.0" +python-versions = ">=3.6,<4.0" [[package]] name = "decorator" @@ -589,7 +589,7 @@ gitdb = ">=4.0.1,<5" [[package]] name = "identify" -version = "1.5.9" +version = "1.5.10" description = "File identification library for Python" category = "dev" optional = false @@ -624,7 +624,7 @@ python-versions = "*" [[package]] name = "ipykernel" -version = "5.3.4" +version = "5.4.2" description = "IPython Kernel for Jupyter" category = "main" optional = true @@ -773,11 +773,11 @@ test = ["ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-gener [[package]] name = "jupyter-core" -version = "4.6.3" +version = "4.7.0" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "main" optional = true -python-versions = "!=3.0,!=3.1,!=3.2,!=3.3,!=3.4,>=2.7" +python-versions = ">=3.6" [package.dependencies] pywin32 = {version = ">=1.0", markers = "sys_platform == \"win32\""} @@ -982,7 +982,7 @@ test = ["fastjsonschema", "testpath", "pytest", "pytest-cov"] [[package]] name = "nest-asyncio" -version = "1.4.2" +version = "1.4.3" description = "Patch asyncio to allow nested event loops" category = "main" optional = true @@ -1051,7 +1051,7 @@ python-versions = ">=3.6" [[package]] name = "packaging" -version = "20.4" +version = "20.8" description = "Core utilities for Python packages" category = "main" optional = false @@ -1059,11 +1059,10 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [package.dependencies] pyparsing = ">=2.0.2" -six = "*" [[package]] name = "pandas" -version = "1.1.4" +version = "1.1.5" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = true @@ -1098,7 +1097,7 @@ testing = ["docopt", "pytest (>=3.0.7)"] [[package]] name = "pathspec" -version = "0.8.0" +version = "0.8.1" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false @@ -1155,7 +1154,7 @@ dev = ["pre-commit", "tox"] [[package]] name = "pre-commit" -version = "2.8.2" +version = "2.9.3" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1171,7 +1170,7 @@ virtualenv = ">=20.0.8" [[package]] name = "prometheus-client" -version = "0.8.0" +version = "0.9.0" description = "Python client for the Prometheus monitoring system." category = "main" optional = true @@ -1209,9 +1208,9 @@ python-versions = "*" [[package]] name = "py" -version = "1.9.0" +version = "1.10.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -1252,7 +1251,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.7.2" +version = "2.7.3" description = "Pygments is a syntax highlighting package written in Python." 
category = "main" optional = false @@ -1291,24 +1290,23 @@ python-versions = ">=3.5" [[package]] name = "pytest" -version = "6.1.2" +version = "6.2.0" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} -attrs = ">=17.4.0" +attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0" +pluggy = ">=0.12,<1.0.0a1" py = ">=1.8.2" toml = "*" [package.extras] -checkqa_mypy = ["mypy (==0.780)"] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] @@ -1377,7 +1375,7 @@ python-versions = "*" [[package]] name = "pywin32" -version = "228" +version = "300" description = "Python for Window Extensions" category = "main" optional = true @@ -1401,15 +1399,19 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pyzmq" -version = "19.0.2" +version = "20.0.0" description = "Python bindings for 0MQ" category = "main" optional = true -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*" +python-versions = ">=3.5" + +[package.dependencies] +cffi = {version = "*", markers = "implementation_name === \"pypy\""} +py = {version = "*", markers = "implementation_name === \"pypy\""} [[package]] name = "regex" -version = "2020.10.28" +version = "2020.11.13" description = "Alternative regular expression module, to replace re." category = "dev" optional = false @@ -1417,7 +1419,7 @@ python-versions = "*" [[package]] name = "requests" -version = "2.24.0" +version = "2.25.0" description = "Python HTTP for Humans." 
category = "main" optional = false @@ -1427,7 +1429,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" certifi = ">=2017.4.17" chardet = ">=3.0.2,<4" idna = ">=2.5,<3" -urllib3 = ">=1.21.1,<1.25.0 || >1.25.0,<1.25.1 || >1.25.1,<1.26" +urllib3 = ">=1.21.1,<1.27" [package.extras] security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] @@ -1435,7 +1437,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] [[package]] name = "restructuredtext-lint" -version = "1.3.1" +version = "1.3.2" description = "reStructuredText linter" category = "dev" optional = false @@ -1478,7 +1480,7 @@ python-versions = "*" [[package]] name = "sphinx" -version = "3.3.0" +version = "3.3.1" description = "Python documentation generator" category = "dev" optional = false @@ -1615,7 +1617,7 @@ pymysql = ["pymysql"] [[package]] name = "stevedore" -version = "3.2.2" +version = "3.3.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false @@ -1639,7 +1641,7 @@ tornado = ">=4" [[package]] name = "testfixtures" -version = "6.15.0" +version = "6.16.0" description = "A collection of helpers and mock objects for unit tests and doc tests." category = "dev" optional = false @@ -1709,7 +1711,7 @@ python-versions = "*" [[package]] name = "urllib3" -version = "1.25.11" +version = "1.26.2" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false @@ -1722,7 +1724,7 @@ socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] [[package]] name = "virtualenv" -version = "20.1.0" +version = "20.2.2" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -1831,12 +1833,12 @@ appdirs = [ {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] appnope = [ - {file = "appnope-0.1.0-py2.py3-none-any.whl", hash = "sha256:5b26757dc6f79a3b7dc9fab95359328d5747fcb2409d331ea66d0272b90ab2a0"}, - {file = "appnope-0.1.0.tar.gz", hash = "sha256:8b995ffe925347a2138d7ac0fe77155e4311a0ea6d6da4f5128fe4b3cbe5ed71"}, + {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, + {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, ] argcomplete = [ - {file = "argcomplete-1.12.1-py2.py3-none-any.whl", hash = "sha256:5cd1ac4fc49c29d6016fc2cc4b19a3c08c3624544503495bf25989834c443898"}, - {file = "argcomplete-1.12.1.tar.gz", hash = "sha256:849c2444c35bb2175aea74100ca5f644c29bf716429399c0f2203bb5d9a8e4e6"}, + {file = "argcomplete-1.12.2-py2.py3-none-any.whl", hash = "sha256:17f01a9b9b9ece3e6b07058eae737ad6e10de8b4e149105f84614783913aba71"}, + {file = "argcomplete-1.12.2.tar.gz", hash = "sha256:de0e1282330940d52ea92a80fea2e4b9e0da1932aaa570f84d268939d1897b04"}, ] argon2-cffi = [ {file = "argon2-cffi-20.1.0.tar.gz", hash = "sha256:d8029b2d3e4b4cea770e9e5a0104dd8fa185c1724a0f01528ae4826a6d25f97d"}, @@ -1884,16 +1886,16 @@ autoflake = [ {file = "autoflake-1.4.tar.gz", hash = "sha256:61a353012cff6ab94ca062823d1fb2f692c4acda51c76ff83a8d77915fba51ea"}, ] babel = [ - {file = "Babel-2.8.0-py2.py3-none-any.whl", hash = "sha256:d670ea0b10f8b723672d3a6abeb87b565b244da220d76b4dba1b66269ec152d4"}, - {file = "Babel-2.8.0.tar.gz", hash = "sha256:1aac2ae2d0d8ea368fa90906567f5c08463d98ade155c0c4bfedd6a0f7160e38"}, 
+ {file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"}, + {file = "Babel-2.9.0.tar.gz", hash = "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"}, ] backcall = [ {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, ] bandit = [ - {file = "bandit-1.6.2-py2.py3-none-any.whl", hash = "sha256:336620e220cf2d3115877685e264477ff9d9abaeb0afe3dc7264f55fa17a3952"}, - {file = "bandit-1.6.2.tar.gz", hash = "sha256:41e75315853507aa145d62a78a2a6c5e3240fe14ee7c601459d0df9418196065"}, + {file = "bandit-1.7.0-py3-none-any.whl", hash = "sha256:216be4d044209fa06cf2a3e51b319769a51be8318140659719aa7a115c35ed07"}, + {file = "bandit-1.7.0.tar.gz", hash = "sha256:8a4c7415254d75df8ff3c3b15cfe9042ecee628a1e40b44c15a98890fbfc2608"}, ] black = [ {file = "black-19.10b0-py36-none-any.whl", hash = "sha256:1b30e59be925fafc1ee4565e5e08abef6b03fe455102883820fe5ee2e4734e0b"}, @@ -1904,46 +1906,46 @@ bleach = [ {file = "bleach-3.2.1.tar.gz", hash = "sha256:52b5919b81842b1854196eaae5ca29679a2f2e378905c346d3ca8227c2c66080"}, ] certifi = [ - {file = "certifi-2020.6.20-py2.py3-none-any.whl", hash = "sha256:8fc0819f1f30ba15bdb34cceffb9ef04d99f420f68eb75d901e9560b8749fc41"}, - {file = "certifi-2020.6.20.tar.gz", hash = "sha256:5930595817496dd21bb8dc35dad090f1c2cd0adfaf21204bf6732ca5d8ee34d3"}, + {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, + {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, ] cffi = [ - {file = "cffi-1.14.3-2-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3eeeb0405fd145e714f7633a5173318bd88d8bbfc3dd0a5751f8c4f70ae629bc"}, - {file = 
"cffi-1.14.3-2-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:cb763ceceae04803adcc4e2d80d611ef201c73da32d8f2722e9d0ab0c7f10768"}, - {file = "cffi-1.14.3-2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f60519595eaca110f248e5017363d751b12782a6f2bd6a7041cba275215f5d"}, - {file = "cffi-1.14.3-2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c53af463f4a40de78c58b8b2710ade243c81cbca641e34debf3396a9640d6ec1"}, - {file = "cffi-1.14.3-2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:33c6cdc071ba5cd6d96769c8969a0531be2d08c2628a0143a10a7dcffa9719ca"}, - {file = "cffi-1.14.3-2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c11579638288e53fc94ad60022ff1b67865363e730ee41ad5e6f0a17188b327a"}, - {file = "cffi-1.14.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:3cb3e1b9ec43256c4e0f8d2837267a70b0e1ca8c4f456685508ae6106b1f504c"}, - {file = "cffi-1.14.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f0620511387790860b249b9241c2f13c3a80e21a73e0b861a2df24e9d6f56730"}, - {file = "cffi-1.14.3-cp27-cp27m-win32.whl", hash = "sha256:005f2bfe11b6745d726dbb07ace4d53f057de66e336ff92d61b8c7e9c8f4777d"}, - {file = "cffi-1.14.3-cp27-cp27m-win_amd64.whl", hash = "sha256:2f9674623ca39c9ebe38afa3da402e9326c245f0f5ceff0623dccdac15023e05"}, - {file = "cffi-1.14.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:09e96138280241bd355cd585148dec04dbbedb4f46128f340d696eaafc82dd7b"}, - {file = "cffi-1.14.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:3363e77a6176afb8823b6e06db78c46dbc4c7813b00a41300a4873b6ba63b171"}, - {file = "cffi-1.14.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:0ef488305fdce2580c8b2708f22d7785ae222d9825d3094ab073e22e93dfe51f"}, - {file = "cffi-1.14.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:0b1ad452cc824665ddc682400b62c9e4f5b64736a2ba99110712fdee5f2505c4"}, - {file = "cffi-1.14.3-cp35-cp35m-win32.whl", hash = "sha256:85ba797e1de5b48aa5a8427b6ba62cf69607c18c5d4eb747604b7302f1ec382d"}, - {file = "cffi-1.14.3-cp35-cp35m-win_amd64.whl", hash = 
"sha256:e66399cf0fc07de4dce4f588fc25bfe84a6d1285cc544e67987d22663393926d"}, - {file = "cffi-1.14.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:15f351bed09897fbda218e4db5a3d5c06328862f6198d4fb385f3e14e19decb3"}, - {file = "cffi-1.14.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4d7c26bfc1ea9f92084a1d75e11999e97b62d63128bcc90c3624d07813c52808"}, - {file = "cffi-1.14.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:23e5d2040367322824605bc29ae8ee9175200b92cb5483ac7d466927a9b3d537"}, - {file = "cffi-1.14.3-cp36-cp36m-win32.whl", hash = "sha256:a624fae282e81ad2e4871bdb767e2c914d0539708c0f078b5b355258293c98b0"}, - {file = "cffi-1.14.3-cp36-cp36m-win_amd64.whl", hash = "sha256:de31b5164d44ef4943db155b3e8e17929707cac1e5bd2f363e67a56e3af4af6e"}, - {file = "cffi-1.14.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:f92cdecb618e5fa4658aeb97d5eb3d2f47aa94ac6477c6daf0f306c5a3b9e6b1"}, - {file = "cffi-1.14.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:22399ff4870fb4c7ef19fff6eeb20a8bbf15571913c181c78cb361024d574579"}, - {file = "cffi-1.14.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f4eae045e6ab2bb54ca279733fe4eb85f1effda392666308250714e01907f394"}, - {file = "cffi-1.14.3-cp37-cp37m-win32.whl", hash = "sha256:b0358e6fefc74a16f745afa366acc89f979040e0cbc4eec55ab26ad1f6a9bfbc"}, - {file = "cffi-1.14.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6642f15ad963b5092d65aed022d033c77763515fdc07095208f15d3563003869"}, - {file = "cffi-1.14.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2791f68edc5749024b4722500e86303a10d342527e1e3bcac47f35fbd25b764e"}, - {file = "cffi-1.14.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:529c4ed2e10437c205f38f3691a68be66c39197d01062618c55f74294a4a4828"}, - {file = "cffi-1.14.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8f0f1e499e4000c4c347a124fa6a27d37608ced4fe9f7d45070563b7c4c370c9"}, - {file = "cffi-1.14.3-cp38-cp38-win32.whl", hash = "sha256:3b8eaf915ddc0709779889c472e553f0d3e8b7bdf62dab764c8921b09bf94522"}, 
- {file = "cffi-1.14.3-cp38-cp38-win_amd64.whl", hash = "sha256:bbd2f4dfee1079f76943767fce837ade3087b578aeb9f69aec7857d5bf25db15"}, - {file = "cffi-1.14.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cc75f58cdaf043fe6a7a6c04b3b5a0e694c6a9e24050967747251fb80d7bce0d"}, - {file = "cffi-1.14.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:bf39a9e19ce7298f1bd6a9758fa99707e9e5b1ebe5e90f2c3913a47bc548747c"}, - {file = "cffi-1.14.3-cp39-cp39-win32.whl", hash = "sha256:d80998ed59176e8cba74028762fbd9b9153b9afc71ea118e63bbf5d4d0f9552b"}, - {file = "cffi-1.14.3-cp39-cp39-win_amd64.whl", hash = "sha256:c150eaa3dadbb2b5339675b88d4573c1be3cb6f2c33a6c83387e10cc0bf05bd3"}, - {file = "cffi-1.14.3.tar.gz", hash = "sha256:f92f789e4f9241cd262ad7a555ca2c648a98178a953af117ef7fad46aa1d5591"}, + {file = "cffi-1.14.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:ebb253464a5d0482b191274f1c8bf00e33f7e0b9c66405fbffc61ed2c839c775"}, + {file = "cffi-1.14.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:2c24d61263f511551f740d1a065eb0212db1dbbbbd241db758f5244281590c06"}, + {file = "cffi-1.14.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9f7a31251289b2ab6d4012f6e83e58bc3b96bd151f5b5262467f4bb6b34a7c26"}, + {file = "cffi-1.14.4-cp27-cp27m-win32.whl", hash = "sha256:5cf4be6c304ad0b6602f5c4e90e2f59b47653ac1ed9c662ed379fe48a8f26b0c"}, + {file = "cffi-1.14.4-cp27-cp27m-win_amd64.whl", hash = "sha256:f60567825f791c6f8a592f3c6e3bd93dd2934e3f9dac189308426bd76b00ef3b"}, + {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6332685306b6417a91b1ff9fae889b3ba65c2292d64bd9245c093b1b284809d"}, + {file = "cffi-1.14.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d9efd8b7a3ef378dd61a1e77367f1924375befc2eba06168b6ebfa903a5e59ca"}, + {file = "cffi-1.14.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:51a8b381b16ddd370178a65360ebe15fbc1c71cf6f584613a7ea08bfad946698"}, + {file = "cffi-1.14.4-cp35-cp35m-manylinux1_i686.whl", hash = 
"sha256:1d2c4994f515e5b485fd6d3a73d05526aa0fcf248eb135996b088d25dfa1865b"}, + {file = "cffi-1.14.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:af5c59122a011049aad5dd87424b8e65a80e4a6477419c0c1015f73fb5ea0293"}, + {file = "cffi-1.14.4-cp35-cp35m-win32.whl", hash = "sha256:594234691ac0e9b770aee9fcdb8fa02c22e43e5c619456efd0d6c2bf276f3eb2"}, + {file = "cffi-1.14.4-cp35-cp35m-win_amd64.whl", hash = "sha256:64081b3f8f6f3c3de6191ec89d7dc6c86a8a43911f7ecb422c60e90c70be41c7"}, + {file = "cffi-1.14.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f803eaa94c2fcda012c047e62bc7a51b0bdabda1cad7a92a522694ea2d76e49f"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:105abaf8a6075dc96c1fe5ae7aae073f4696f2905fde6aeada4c9d2926752362"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0638c3ae1a0edfb77c6765d487fee624d2b1ee1bdfeffc1f0b58c64d149e7eec"}, + {file = "cffi-1.14.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:7c6b1dece89874d9541fc974917b631406233ea0440d0bdfbb8e03bf39a49b3b"}, + {file = "cffi-1.14.4-cp36-cp36m-win32.whl", hash = "sha256:155136b51fd733fa94e1c2ea5211dcd4c8879869008fc811648f16541bf99668"}, + {file = "cffi-1.14.4-cp36-cp36m-win_amd64.whl", hash = "sha256:6bc25fc545a6b3d57b5f8618e59fc13d3a3a68431e8ca5fd4c13241cd70d0009"}, + {file = "cffi-1.14.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a7711edca4dcef1a75257b50a2fbfe92a65187c47dab5a0f1b9b332c5919a3fb"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:00e28066507bfc3fe865a31f325c8391a1ac2916219340f87dfad602c3e48e5d"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:798caa2a2384b1cbe8a2a139d80734c9db54f9cc155c99d7cc92441a23871c03"}, + {file = "cffi-1.14.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:a5ed8c05548b54b998b9498753fb9cadbfd92ee88e884641377d8a8b291bcc01"}, + {file = "cffi-1.14.4-cp37-cp37m-win32.whl", hash = 
"sha256:00a1ba5e2e95684448de9b89888ccd02c98d512064b4cb987d48f4b40aa0421e"}, + {file = "cffi-1.14.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9cc46bc107224ff5b6d04369e7c595acb700c3613ad7bcf2e2012f62ece80c35"}, + {file = "cffi-1.14.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:df5169c4396adc04f9b0a05f13c074df878b6052430e03f50e68adf3a57aa28d"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:9ffb888f19d54a4d4dfd4b3f29bc2c16aa4972f1c2ab9c4ab09b8ab8685b9c2b"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8d6603078baf4e11edc4168a514c5ce5b3ba6e3e9c374298cb88437957960a53"}, + {file = "cffi-1.14.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:d5ff0621c88ce83a28a10d2ce719b2ee85635e85c515f12bac99a95306da4b2e"}, + {file = "cffi-1.14.4-cp38-cp38-win32.whl", hash = "sha256:b4e248d1087abf9f4c10f3c398896c87ce82a9856494a7155823eb45a892395d"}, + {file = "cffi-1.14.4-cp38-cp38-win_amd64.whl", hash = "sha256:ec80dc47f54e6e9a78181ce05feb71a0353854cc26999db963695f950b5fb375"}, + {file = "cffi-1.14.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:840793c68105fe031f34d6a086eaea153a0cd5c491cde82a74b420edd0a2b909"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:b18e0a9ef57d2b41f5c68beefa32317d286c3d6ac0484efd10d6e07491bb95dd"}, + {file = "cffi-1.14.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:045d792900a75e8b1e1b0ab6787dd733a8190ffcf80e8c8ceb2fb10a29ff238a"}, + {file = "cffi-1.14.4-cp39-cp39-win32.whl", hash = "sha256:ba4e9e0ae13fc41c6b23299545e5ef73055213e466bd107953e4a013a5ddd7e3"}, + {file = "cffi-1.14.4-cp39-cp39-win_amd64.whl", hash = "sha256:f032b34669220030f905152045dfa27741ce1a6db3324a5bc0b96b6c7420c87b"}, + {file = "cffi-1.14.4.tar.gz", hash = "sha256:1a465cbe98a7fd391d47dce4b8f7e5b921e6cd805ef421d04f5f66ba8f06086c"}, ] cfgv = [ {file = "cfgv-3.2.0-py2.py3-none-any.whl", hash = "sha256:32e43d604bbe7896fe7c248a9c2276447dbef840feb28fe20494f62af110211d"}, @@ -1961,8 +1963,8 @@ colorama = [ 
{file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, ] colorlog = [ - {file = "colorlog-4.5.0-py2.py3-none-any.whl", hash = "sha256:677d39a84c4ca37c87532729c143c6a35cb558a1225b9544713fe42a04102ad2"}, - {file = "colorlog-4.5.0.tar.gz", hash = "sha256:72d25293d6d191e5adc851879240ad33a1ce131a7b36b37f940e505681007e65"}, + {file = "colorlog-4.6.2-py2.py3-none-any.whl", hash = "sha256:edd5ada5de03e880e42b2526f8be5570fd9b692f8eb7cf6b1fdcac3e3fb23976"}, + {file = "colorlog-4.6.2.tar.gz", hash = "sha256:54e5f153419c22afc283c130c4201db19a3dbd83221a0f4657d5ee66234a2ea4"}, ] coverage = [ {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, @@ -2001,8 +2003,8 @@ coverage = [ {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, ] darglint = [ - {file = "darglint-1.5.5-py3-none-any.whl", hash = "sha256:cd882c812f28ee3b5577259bfd8d6d25962386dd87fc1f3756eac24370aaa060"}, - {file = "darglint-1.5.5.tar.gz", hash = "sha256:2f12ce2ef3d8189279a8f2eb4c53fd215dbacae50e37765542a91310400a9cd6"}, + {file = "darglint-1.5.8-py3-none-any.whl", hash = "sha256:2e1012945a09d19a15cc87f9d15e7b14c18473ec9cf7769c641951b348de1353"}, + {file = "darglint-1.5.8.tar.gz", hash = "sha256:529f4969029d5ff5f74bfec48adc14b6f003409141f722b6cc4b787dddc8a4dd"}, ] decorator = [ {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, @@ -2110,8 +2112,8 @@ gitpython = [ {file = "GitPython-3.1.11.tar.gz", hash = "sha256:befa4d101f91bad1b632df4308ec64555db684c360bd7d2130b4807d49ce86b8"}, ] identify = [ - {file = "identify-1.5.9-py2.py3-none-any.whl", hash = "sha256:5dd84ac64a9a115b8e0b27d1756b244b882ad264c3c423f42af8235a6e71ca12"}, - {file = "identify-1.5.9.tar.gz", hash = 
"sha256:c9504ba6a043ee2db0a9d69e43246bc138034895f6338d5aed1b41e4a73b1513"}, + {file = "identify-1.5.10-py2.py3-none-any.whl", hash = "sha256:cc86e6a9a390879dcc2976cef169dd9cc48843ed70b7380f321d1b118163c60e"}, + {file = "identify-1.5.10.tar.gz", hash = "sha256:943cd299ac7f5715fcb3f684e2fc1594c1e0f22a90d15398e5888143bd4144b5"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -2126,8 +2128,8 @@ iniconfig = [ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipykernel = [ - {file = "ipykernel-5.3.4-py3-none-any.whl", hash = "sha256:d6fbba26dba3cebd411382bc484f7bc2caa98427ae0ddb4ab37fe8bfeb5c7dd3"}, - {file = "ipykernel-5.3.4.tar.gz", hash = "sha256:9b2652af1607986a1b231c62302d070bc0534f564c393a5d9d130db9abbbe89d"}, + {file = "ipykernel-5.4.2-py3-none-any.whl", hash = "sha256:63b4b96c513e1138874934e3e783a8e5e13c02b9036e37107bfe042ac8955005"}, + {file = "ipykernel-5.4.2.tar.gz", hash = "sha256:e20ceb7e52cb4d250452e1230be76e0b2323f33bd46c6b2bc7abb6601740e182"}, ] ipython = [ {file = "ipython-7.19.0-py3-none-any.whl", hash = "sha256:c987e8178ced651532b3b1ff9965925bfd445c279239697052561a9ab806d28f"}, @@ -2162,8 +2164,8 @@ jupyter-client = [ {file = "jupyter_client-6.1.7.tar.gz", hash = "sha256:49e390b36fe4b4226724704ea28d9fb903f1a3601b6882ce3105221cd09377a1"}, ] jupyter-core = [ - {file = "jupyter_core-4.6.3-py2.py3-none-any.whl", hash = "sha256:a4ee613c060fe5697d913416fc9d553599c05e4492d58fac1192c9a6844abb21"}, - {file = "jupyter_core-4.6.3.tar.gz", hash = "sha256:394fd5dd787e7c8861741880bdf8a00ce39f95de5d18e579c74b882522219e7e"}, + {file = "jupyter_core-4.7.0-py3-none-any.whl", hash = "sha256:0a451c9b295e4db772bdd8d06f2f1eb31caeec0e81fbb77ba37d4a3024e3b315"}, + {file = "jupyter_core-4.7.0.tar.gz", hash = "sha256:aa1f9496ab3abe72da4efe0daab0cb2233997914581f9a071e07498c6add8ed3"}, ] jupyterlab = [ {file = 
"jupyterlab-2.2.9-py3-none-any.whl", hash = "sha256:59af02c26a15ec2d2862a15bc72e41ae304b406a0b0d3f4f705eeb7caf91902b"}, @@ -2283,8 +2285,8 @@ nbformat = [ {file = "nbformat-5.0.8.tar.gz", hash = "sha256:f545b22138865bfbcc6b1ffe89ed5a2b8e2dc5d4fe876f2ca60d8e6f702a30f8"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.4.2-py3-none-any.whl", hash = "sha256:c2d3bdc76ba235a7ad215128afe31d74a320d25790c50cd94685ec5ea221b94d"}, - {file = "nest_asyncio-1.4.2.tar.gz", hash = "sha256:c614fcfaca72b1f04778bc0e73f49c84500b3d045c49d149fc46f1566643c175"}, + {file = "nest_asyncio-1.4.3-py3-none-any.whl", hash = "sha256:dbe032f3e9ff7f120e76be22bf6e7958e867aed1743e6894b8a9585fe8495cc9"}, + {file = "nest_asyncio-1.4.3.tar.gz", hash = "sha256:eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa"}, ] nodeenv = [ {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, @@ -2335,34 +2337,34 @@ numpy = [ {file = "numpy-1.19.4.zip", hash = "sha256:141ec3a3300ab89c7f2b0775289954d193cc8edb621ea05f99db9cb181530512"}, ] packaging = [ - {file = "packaging-20.4-py2.py3-none-any.whl", hash = "sha256:998416ba6962ae7fbd6596850b80e17859a5753ba17c32284f67bfff33784181"}, - {file = "packaging-20.4.tar.gz", hash = "sha256:4357f74f47b9c12db93624a82154e9b120fa8293699949152b22065d556079f8"}, + {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, + {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, ] pandas = [ - {file = "pandas-1.1.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e2b8557fe6d0a18db4d61c028c6af61bfed44ef90e419ed6fadbdc079eba141e"}, - {file = "pandas-1.1.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:3aa8e10768c730cc1b610aca688f588831fa70b65a26cb549fbb9f35049a05e0"}, - {file = "pandas-1.1.4-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:185cf8c8f38b169dbf7001e1a88c511f653fbb9dfa3e048f5e19c38049e991dc"}, - {file = "pandas-1.1.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0d9a38a59242a2f6298fff45d09768b78b6eb0c52af5919ea9e45965d7ba56d9"}, - {file = "pandas-1.1.4-cp36-cp36m-win32.whl", hash = "sha256:8b4c2055ebd6e497e5ecc06efa5b8aa76f59d15233356eb10dad22a03b757805"}, - {file = "pandas-1.1.4-cp36-cp36m-win_amd64.whl", hash = "sha256:5dac3aeaac5feb1016e94bde851eb2012d1733a222b8afa788202b836c97dad5"}, - {file = "pandas-1.1.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6d2b5b58e7df46b2c010ec78d7fb9ab20abf1d306d0614d3432e7478993fbdb0"}, - {file = "pandas-1.1.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c681e8fcc47a767bf868341d8f0d76923733cbdcabd6ec3a3560695c69f14a1e"}, - {file = "pandas-1.1.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:c5a3597880a7a29a31ebd39b73b2c824316ae63a05c3c8a5ce2aea3fc68afe35"}, - {file = "pandas-1.1.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:6613c7815ee0b20222178ad32ec144061cb07e6a746970c9160af1ebe3ad43b4"}, - {file = "pandas-1.1.4-cp37-cp37m-win32.whl", hash = "sha256:43cea38cbcadb900829858884f49745eb1f42f92609d368cabcc674b03e90efc"}, - {file = "pandas-1.1.4-cp37-cp37m-win_amd64.whl", hash = "sha256:5378f58172bd63d8c16dd5d008d7dcdd55bf803fcdbe7da2dcb65dbbf322f05b"}, - {file = "pandas-1.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a7d2547b601ecc9a53fd41561de49a43d2231728ad65c7713d6b616cd02ddbed"}, - {file = "pandas-1.1.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:41746d520f2b50409dffdba29a15c42caa7babae15616bcf80800d8cfcae3d3e"}, - {file = "pandas-1.1.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a15653480e5b92ee376f8458197a58cca89a6e95d12cccb4c2d933df5cecc63f"}, - {file = "pandas-1.1.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:5fdb2a61e477ce58d3f1fdf2470ee142d9f0dde4969032edaf0b8f1a9dafeaa2"}, - {file = "pandas-1.1.4-cp38-cp38-win32.whl", hash = 
"sha256:8a5d7e57b9df2c0a9a202840b2881bb1f7a648eba12dd2d919ac07a33a36a97f"}, - {file = "pandas-1.1.4-cp38-cp38-win_amd64.whl", hash = "sha256:54404abb1cd3f89d01f1fb5350607815326790efb4789be60508f458cdd5ccbf"}, - {file = "pandas-1.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:112c5ba0f9ea0f60b2cc38c25f87ca1d5ca10f71efbee8e0f1bee9cf584ed5d5"}, - {file = "pandas-1.1.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cf135a08f306ebbcfea6da8bf775217613917be23e5074c69215b91e180caab4"}, - {file = "pandas-1.1.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b1f8111635700de7ac350b639e7e452b06fc541a328cf6193cf8fc638804bab8"}, - {file = "pandas-1.1.4-cp39-cp39-win32.whl", hash = "sha256:09e0503758ad61afe81c9069505f8cb8c1e36ea8cc1e6826a95823ef5b327daf"}, - {file = "pandas-1.1.4-cp39-cp39-win_amd64.whl", hash = "sha256:0a11a6290ef3667575cbd4785a1b62d658c25a2fd70a5adedba32e156a8f1773"}, - {file = "pandas-1.1.4.tar.gz", hash = "sha256:a979d0404b135c63954dea79e6246c45dd45371a88631cdbb4877d844e6de3b6"}, + {file = "pandas-1.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bf23a3b54d128b50f4f9d4675b3c1857a688cc6731a32f931837d72effb2698d"}, + {file = "pandas-1.1.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5a780260afc88268a9d3ac3511d8f494fdcf637eece62fb9eb656a63d53eb7ca"}, + {file = "pandas-1.1.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b61080750d19a0122469ab59b087380721d6b72a4e7d962e4d7e63e0c4504814"}, + {file = "pandas-1.1.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0de3ddb414d30798cbf56e642d82cac30a80223ad6fe484d66c0ce01a84d6f2f"}, + {file = "pandas-1.1.5-cp36-cp36m-win32.whl", hash = "sha256:70865f96bb38fec46f7ebd66d4b5cfd0aa6b842073f298d621385ae3898d28b5"}, + {file = "pandas-1.1.5-cp36-cp36m-win_amd64.whl", hash = "sha256:19a2148a1d02791352e9fa637899a78e371a3516ac6da5c4edc718f60cbae648"}, + {file = "pandas-1.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26fa92d3ac743a149a31b21d6f4337b0594b6302ea5575b37af9ca9611e8981a"}, + {file = 
"pandas-1.1.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c16d59c15d946111d2716856dd5479221c9e4f2f5c7bc2d617f39d870031e086"}, + {file = "pandas-1.1.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3be7a7a0ca71a2640e81d9276f526bca63505850add10206d0da2e8a0a325dae"}, + {file = "pandas-1.1.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:573fba5b05bf2c69271a32e52399c8de599e4a15ab7cec47d3b9c904125ab788"}, + {file = "pandas-1.1.5-cp37-cp37m-win32.whl", hash = "sha256:21b5a2b033380adbdd36b3116faaf9a4663e375325831dac1b519a44f9e439bb"}, + {file = "pandas-1.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:24c7f8d4aee71bfa6401faeba367dd654f696a77151a8a28bc2013f7ced4af98"}, + {file = "pandas-1.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2860a97cbb25444ffc0088b457da0a79dc79f9c601238a3e0644312fcc14bf11"}, + {file = "pandas-1.1.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5008374ebb990dad9ed48b0f5d0038124c73748f5384cc8c46904dace27082d9"}, + {file = "pandas-1.1.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2c2f7c670ea4e60318e4b7e474d56447cf0c7d83b3c2a5405a0dbb2600b9c48e"}, + {file = "pandas-1.1.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0a643bae4283a37732ddfcecab3f62dd082996021b980f580903f4e8e01b3c5b"}, + {file = "pandas-1.1.5-cp38-cp38-win32.whl", hash = "sha256:5447ea7af4005b0daf695a316a423b96374c9c73ffbd4533209c5ddc369e644b"}, + {file = "pandas-1.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:4c62e94d5d49db116bef1bd5c2486723a292d79409fc9abd51adf9e05329101d"}, + {file = "pandas-1.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:731568be71fba1e13cae212c362f3d2ca8932e83cb1b85e3f1b4dd77d019254a"}, + {file = "pandas-1.1.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c61c043aafb69329d0f961b19faa30b1dab709dd34c9388143fc55680059e55a"}, + {file = "pandas-1.1.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2b1c6cd28a0dfda75c7b5957363333f01d370936e4c6276b7b8e696dd500582a"}, + {file = "pandas-1.1.5-cp39-cp39-win32.whl", hash = 
"sha256:c94ff2780a1fd89f190390130d6d36173ca59fcfb3fe0ff596f9a56518191ccb"}, + {file = "pandas-1.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:edda9bacc3843dfbeebaf7a701763e68e741b08fccb889c003b0a52f0ee95782"}, + {file = "pandas-1.1.5.tar.gz", hash = "sha256:f10fc41ee3c75a474d3bdf68d396f10782d013d7f67db99c0efbfd0acb99701b"}, ] pandocfilters = [ {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, @@ -2372,8 +2374,8 @@ parso = [ {file = "parso-0.7.1.tar.gz", hash = "sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"}, ] pathspec = [ - {file = "pathspec-0.8.0-py2.py3-none-any.whl", hash = "sha256:7d91249d21749788d07a2d0f94147accd8f845507400749ea19c1ec9054a12b0"}, - {file = "pathspec-0.8.0.tar.gz", hash = "sha256:da45173eb3a6f2a5a487efba21f050af2b41948be6ab52b6a1e3ff22bb8b7061"}, + {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, + {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, ] pbr = [ {file = "pbr-5.5.1-py2.py3-none-any.whl", hash = "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"}, @@ -2396,12 +2398,12 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] pre-commit = [ - {file = "pre_commit-2.8.2-py2.py3-none-any.whl", hash = "sha256:22e6aa3bd571debb01eb7d34483f11c01b65237be4eebbf30c3d4fb65762d315"}, - {file = "pre_commit-2.8.2.tar.gz", hash = "sha256:905ebc9b534b991baec87e934431f2d0606ba27f2b90f7f652985f5a5b8b6ae6"}, + {file = "pre_commit-2.9.3-py2.py3-none-any.whl", hash = "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0"}, + {file = "pre_commit-2.9.3.tar.gz", hash = "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4"}, ] prometheus-client = [ - {file = 
"prometheus_client-0.8.0-py2.py3-none-any.whl", hash = "sha256:983c7ac4b47478720db338f1491ef67a100b474e3bc7dafcbaefb7d0b8f9b01c"}, - {file = "prometheus_client-0.8.0.tar.gz", hash = "sha256:c6e6b706833a6bd1fd51711299edee907857be10ece535126a158f911ee80915"}, + {file = "prometheus_client-0.9.0-py2.py3-none-any.whl", hash = "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35"}, + {file = "prometheus_client-0.9.0.tar.gz", hash = "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03"}, ] prompt-toolkit = [ {file = "prompt_toolkit-3.0.8-py3-none-any.whl", hash = "sha256:7debb9a521e0b1ee7d2fe96ee4bd60ef03c6492784de0547337ca4433e46aa63"}, @@ -2427,8 +2429,8 @@ ptyprocess = [ {file = "ptyprocess-0.6.0.tar.gz", hash = "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"}, ] py = [ - {file = "py-1.9.0-py2.py3-none-any.whl", hash = "sha256:366389d1db726cd2fcfc79732e75410e5fe4d31db13692115529d34069a043c2"}, - {file = "py-1.9.0.tar.gz", hash = "sha256:9ca6883ce56b4e8da7e79ac18787889fa5206c79dcc67fb065376cd2fe03f342"}, + {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, + {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pycodestyle = [ {file = "pycodestyle-2.6.0-py2.py3-none-any.whl", hash = "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367"}, @@ -2447,8 +2449,8 @@ pyflakes = [ {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, ] pygments = [ - {file = "Pygments-2.7.2-py3-none-any.whl", hash = "sha256:88a0bbcd659fcb9573703957c6b9cff9fab7295e6e76db54c9d00ae42df32773"}, - {file = "Pygments-2.7.2.tar.gz", hash = "sha256:381985fcc551eb9d37c52088a32914e00517e57f4a21609f48141ba08e193fa0"}, + {file = "Pygments-2.7.3-py3-none-any.whl", hash = 
"sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08"}, + {file = "Pygments-2.7.3.tar.gz", hash = "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716"}, ] pylint = [ {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, @@ -2462,8 +2464,8 @@ pyrsistent = [ {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-6.1.2-py3-none-any.whl", hash = "sha256:4288fed0d9153d9646bfcdf0c0428197dba1ecb27a33bb6e031d002fa88653fe"}, - {file = "pytest-6.1.2.tar.gz", hash = "sha256:c0a7e94a8cdbc5422a51ccdad8e6f1024795939cc89159a0ae7f0b316ad3823e"}, + {file = "pytest-6.2.0-py3-none-any.whl", hash = "sha256:d69e1a80b34fe4d596c9142f35d9e523d98a2838976f1a68419a8f051b24cec6"}, + {file = "pytest-6.2.0.tar.gz", hash = "sha256:b12e09409c5bdedc28d308469e156127004a436b41e9b44f9bff6446cbab9152"}, ] pytest-cov = [ {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, @@ -2492,18 +2494,16 @@ pytz = [ {file = "pytz-2020.4.tar.gz", hash = "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268"}, ] pywin32 = [ - {file = "pywin32-228-cp27-cp27m-win32.whl", hash = "sha256:37dc9935f6a383cc744315ae0c2882ba1768d9b06700a70f35dc1ce73cd4ba9c"}, - {file = "pywin32-228-cp27-cp27m-win_amd64.whl", hash = "sha256:11cb6610efc2f078c9e6d8f5d0f957620c333f4b23466931a247fb945ed35e89"}, - {file = "pywin32-228-cp35-cp35m-win32.whl", hash = "sha256:1f45db18af5d36195447b2cffacd182fe2d296849ba0aecdab24d3852fbf3f80"}, - {file = "pywin32-228-cp35-cp35m-win_amd64.whl", hash = "sha256:6e38c44097a834a4707c1b63efa9c2435f5a42afabff634a17f563bc478dfcc8"}, - {file = "pywin32-228-cp36-cp36m-win32.whl", hash = "sha256:ec16d44b49b5f34e99eb97cf270806fdc560dff6f84d281eb2fcb89a014a56a9"}, - {file = "pywin32-228-cp36-cp36m-win_amd64.whl", hash = 
"sha256:a60d795c6590a5b6baeacd16c583d91cce8038f959bd80c53bd9a68f40130f2d"}, - {file = "pywin32-228-cp37-cp37m-win32.whl", hash = "sha256:af40887b6fc200eafe4d7742c48417529a8702dcc1a60bf89eee152d1d11209f"}, - {file = "pywin32-228-cp37-cp37m-win_amd64.whl", hash = "sha256:00eaf43dbd05ba6a9b0080c77e161e0b7a601f9a3f660727a952e40140537de7"}, - {file = "pywin32-228-cp38-cp38-win32.whl", hash = "sha256:fa6ba028909cfc64ce9e24bcf22f588b14871980d9787f1e2002c99af8f1850c"}, - {file = "pywin32-228-cp38-cp38-win_amd64.whl", hash = "sha256:9b3466083f8271e1a5eb0329f4e0d61925d46b40b195a33413e0905dccb285e8"}, - {file = "pywin32-228-cp39-cp39-win32.whl", hash = "sha256:ed74b72d8059a6606f64842e7917aeee99159ebd6b8d6261c518d002837be298"}, - {file = "pywin32-228-cp39-cp39-win_amd64.whl", hash = "sha256:8319bafdcd90b7202c50d6014efdfe4fde9311b3ff15fd6f893a45c0868de203"}, + {file = "pywin32-300-cp35-cp35m-win32.whl", hash = "sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"}, + {file = "pywin32-300-cp35-cp35m-win_amd64.whl", hash = "sha256:350c5644775736351b77ba68da09a39c760d75d2467ecec37bd3c36a94fbed64"}, + {file = "pywin32-300-cp36-cp36m-win32.whl", hash = "sha256:a3b4c48c852d4107e8a8ec980b76c94ce596ea66d60f7a697582ea9dce7e0db7"}, + {file = "pywin32-300-cp36-cp36m-win_amd64.whl", hash = "sha256:27a30b887afbf05a9cbb05e3ffd43104a9b71ce292f64a635389dbad0ed1cd85"}, + {file = "pywin32-300-cp37-cp37m-win32.whl", hash = "sha256:d7e8c7efc221f10d6400c19c32a031add1c4a58733298c09216f57b4fde110dc"}, + {file = "pywin32-300-cp37-cp37m-win_amd64.whl", hash = "sha256:8151e4d7a19262d6694162d6da85d99a16f8b908949797fd99c83a0bfaf5807d"}, + {file = "pywin32-300-cp38-cp38-win32.whl", hash = "sha256:fbb3b1b0fbd0b4fc2a3d1d81fe0783e30062c1abed1d17c32b7879d55858cfae"}, + {file = "pywin32-300-cp38-cp38-win_amd64.whl", hash = "sha256:60a8fa361091b2eea27f15718f8eb7f9297e8d51b54dbc4f55f3d238093d5190"}, + {file = "pywin32-300-cp39-cp39-win32.whl", hash = 
"sha256:638b68eea5cfc8def537e43e9554747f8dee786b090e47ead94bfdafdb0f2f50"}, + {file = "pywin32-300-cp39-cp39-win_amd64.whl", hash = "sha256:b1609ce9bd5c411b81f941b246d683d6508992093203d4eb7f278f4ed1085c3f"}, ] pywinpty = [ {file = "pywinpty-0.5.7-cp27-cp27m-win32.whl", hash = "sha256:b358cb552c0f6baf790de375fab96524a0498c9df83489b8c23f7f08795e966b"}, @@ -2531,86 +2531,85 @@ pyyaml = [ {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, ] pyzmq = [ - {file = "pyzmq-19.0.2-cp27-cp27m-macosx_10_9_intel.whl", hash = "sha256:59f1e54627483dcf61c663941d94c4af9bf4163aec334171686cdaee67974fe5"}, - {file = "pyzmq-19.0.2-cp27-cp27m-win32.whl", hash = "sha256:c36ffe1e5aa35a1af6a96640d723d0d211c5f48841735c2aa8d034204e87eb87"}, - {file = "pyzmq-19.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:0a422fc290d03958899743db091f8154958410fc76ce7ee0ceb66150f72c2c97"}, - {file = "pyzmq-19.0.2-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c20dd60b9428f532bc59f2ef6d3b1029a28fc790d408af82f871a7db03e722ff"}, - {file = "pyzmq-19.0.2-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d46fb17f5693244de83e434648b3dbb4f4b0fec88415d6cbab1c1452b6f2ae17"}, - {file = "pyzmq-19.0.2-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:f1a25a61495b6f7bb986accc5b597a3541d9bd3ef0016f50be16dbb32025b302"}, - {file = "pyzmq-19.0.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:ab0d01148d13854de716786ca73701012e07dff4dfbbd68c4e06d8888743526e"}, - {file = "pyzmq-19.0.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:720d2b6083498a9281eaee3f2927486e9fe02cd16d13a844f2e95217f243efea"}, - {file = "pyzmq-19.0.2-cp35-cp35m-win32.whl", hash = "sha256:29d51279060d0a70f551663bc592418bcad7f4be4eea7b324f6dd81de05cb4c1"}, - {file = "pyzmq-19.0.2-cp35-cp35m-win_amd64.whl", hash = "sha256:5120c64646e75f6db20cc16b9a94203926ead5d633de9feba4f137004241221d"}, - {file = "pyzmq-19.0.2-cp36-cp36m-macosx_10_9_intel.whl", hash = 
"sha256:8a6ada5a3f719bf46a04ba38595073df8d6b067316c011180102ba2a1925f5b5"}, - {file = "pyzmq-19.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fa411b1d8f371d3a49d31b0789eb6da2537dadbb2aef74a43aa99a78195c3f76"}, - {file = "pyzmq-19.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:00dca814469436455399660247d74045172955459c0bd49b54a540ce4d652185"}, - {file = "pyzmq-19.0.2-cp36-cp36m-win32.whl", hash = "sha256:046b92e860914e39612e84fa760fc3f16054d268c11e0e25dcb011fb1bc6a075"}, - {file = "pyzmq-19.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99cc0e339a731c6a34109e5c4072aaa06d8e32c0b93dc2c2d90345dd45fa196c"}, - {file = "pyzmq-19.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e36f12f503511d72d9bdfae11cadbadca22ff632ff67c1b5459f69756a029c19"}, - {file = "pyzmq-19.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c40fbb2b9933369e994b837ee72193d6a4c35dfb9a7c573257ef7ff28961272c"}, - {file = "pyzmq-19.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5d9fc809aa8d636e757e4ced2302569d6e60e9b9c26114a83f0d9d6519c40493"}, - {file = "pyzmq-19.0.2-cp37-cp37m-win32.whl", hash = "sha256:3fa6debf4bf9412e59353defad1f8035a1e68b66095a94ead8f7a61ae90b2675"}, - {file = "pyzmq-19.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:73483a2caaa0264ac717af33d6fb3f143d8379e60a422730ee8d010526ce1913"}, - {file = "pyzmq-19.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:36ab114021c0cab1a423fe6689355e8f813979f2c750968833b318c1fa10a0fd"}, - {file = "pyzmq-19.0.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:8b66b94fe6243d2d1d89bca336b2424399aac57932858b9a30309803ffc28112"}, - {file = "pyzmq-19.0.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:654d3e06a4edc566b416c10293064732516cf8871a4522e0a2ba00cc2a2e600c"}, - {file = "pyzmq-19.0.2-cp38-cp38-win32.whl", hash = "sha256:276ad604bffd70992a386a84bea34883e696a6b22e7378053e5d3227321d9702"}, - {file = "pyzmq-19.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:09d24a80ccb8cbda1af6ed8eb26b005b6743e58e9290566d2a6841f4e31fa8e0"}, - 
{file = "pyzmq-19.0.2-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:c1a31cd42905b405530e92bdb70a8a56f048c8a371728b8acf9d746ecd4482c0"}, - {file = "pyzmq-19.0.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a7e7f930039ee0c4c26e4dfee015f20bd6919cd8b97c9cd7afbde2923a5167b6"}, - {file = "pyzmq-19.0.2.tar.gz", hash = "sha256:296540a065c8c21b26d63e3cea2d1d57902373b16e4256afe46422691903a438"}, + {file = "pyzmq-20.0.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:523d542823cabb94065178090e05347bd204365f6e7cb260f0071c995d392fc2"}, + {file = "pyzmq-20.0.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:225774a48ed7414c0395335e7123ef8c418dbcbe172caabdc2496133b03254c2"}, + {file = "pyzmq-20.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:bc7dd697356b31389d5118b9bcdef3e8d8079e8181800c4e8d72dccd56e1ff68"}, + {file = "pyzmq-20.0.0-cp35-cp35m-win32.whl", hash = "sha256:d81184489369ec325bd50ba1c935361e63f31f578430b9ad95471899361a8253"}, + {file = "pyzmq-20.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:7113eb93dcd0a5750c65d123ed0099e036a3a3f2dcb48afedd025ffa125c983b"}, + {file = "pyzmq-20.0.0-cp36-cp36m-macosx_10_9_intel.whl", hash = "sha256:b62113eeb9a0649cebed9b21fd578f3a0175ef214a2a91dcb7b31bbf55805295"}, + {file = "pyzmq-20.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f0beef935efe78a63c785bb21ed56c1c24448511383e3994927c8bb2caf5e714"}, + {file = "pyzmq-20.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:46250789730489009fe139cbf576679557c070a6a3628077d09a4153d52fd381"}, + {file = "pyzmq-20.0.0-cp36-cp36m-win32.whl", hash = "sha256:bf755905a7d30d2749079611b9a89924c1f2da2695dc09ce221f42122c9808e3"}, + {file = "pyzmq-20.0.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2742e380d186673eee6a570ef83d4568741945434ba36d92b98d36cdbfedbd44"}, + {file = "pyzmq-20.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1e9b75a119606732023a305d1c214146c09a91f8116f6aff3e8b7d0a60b6f0ff"}, + {file = "pyzmq-20.0.0-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:03638e46d486dd1c118e03c8bf9c634bdcae679600eac6573ae1e54906de7c2f"}, + {file = "pyzmq-20.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:63ee08e35be72fdd7568065a249a5b5cf51a2e8ab6ee63cf9f73786fcb9e710b"}, + {file = "pyzmq-20.0.0-cp37-cp37m-win32.whl", hash = "sha256:c95dda497a7c1b1e734b5e8353173ca5dd7b67784d8821d13413a97856588057"}, + {file = "pyzmq-20.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cc09c5cd1a4332611c8564d65e6a432dc6db3e10793d0254da9fa1e31d9ffd6d"}, + {file = "pyzmq-20.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6e24907857c80dc67692e31f5bf3ad5bf483ee0142cec95b3d47e2db8c43bdda"}, + {file = "pyzmq-20.0.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:53706f4a792cdae422121fb6a5e65119bad02373153364fc9d004cf6a90394de"}, + {file = "pyzmq-20.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:895695be380f0f85d2e3ec5ccf68a93c92d45bd298567525ad5633071589872c"}, + {file = "pyzmq-20.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:d92c7f41a53ece82b91703ea433c7d34143248cf0cead33aa11c5fc621c764bf"}, + {file = "pyzmq-20.0.0-cp38-cp38-win32.whl", hash = "sha256:309d763d89ec1845c0e0fa14e1fb6558fd8c9ef05ed32baec27d7a8499cc7bb0"}, + {file = "pyzmq-20.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:0e554fd390021edbe0330b67226325a820b0319c5b45e1b0a59bf22ccc36e793"}, + {file = "pyzmq-20.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cfa54a162a7b32641665e99b2c12084555afe9fc8fe80ec8b2f71a57320d10e1"}, + {file = "pyzmq-20.0.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:dc2f48b575dff6edefd572f1ac84cf0c3f18ad5fcf13384de32df740a010594a"}, + {file = "pyzmq-20.0.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:5efe02bdcc5eafcac0aab531292294298f0ab8d28ed43be9e507d0e09173d1a4"}, + {file = "pyzmq-20.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0af84f34f27b5c6a0e906c648bdf46d4caebf9c8e6e16db0728f30a58141cad6"}, + {file = "pyzmq-20.0.0-cp39-cp39-win32.whl", hash = 
"sha256:c63fafd2556d218368c51d18588f8e6f8d86d09d493032415057faf6de869b34"}, + {file = "pyzmq-20.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f110a4d3f8f01209eec304ed542f6c8054cce9b0f16dfe3d571e57c290e4e133"}, + {file = "pyzmq-20.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4d9259a5eb3f71abbaf61f165cacf42240bfeea3783bebd8255341abdfe206f1"}, + {file = "pyzmq-20.0.0.tar.gz", hash = "sha256:824ad5888331aadeac772bce27e1c2fbcab82fade92edbd234542c4e12f0dca9"}, ] regex = [ - {file = "regex-2020.10.28-cp27-cp27m-win32.whl", hash = "sha256:4b5a9bcb56cc146c3932c648603b24514447eafa6ce9295234767bf92f69b504"}, - {file = "regex-2020.10.28-cp27-cp27m-win_amd64.whl", hash = "sha256:c13d311a4c4a8d671f5860317eb5f09591fbe8259676b86a85769423b544451e"}, - {file = "regex-2020.10.28-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c454ad88e56e80e44f824ef8366bb7e4c3def12999151fd5c0ea76a18fe9aa3e"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c8a2b7ccff330ae4c460aff36626f911f918555660cc28163417cb84ffb25789"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4afa350f162551cf402bfa3cd8302165c8e03e689c897d185f16a167328cc6dd"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:b88fa3b8a3469f22b4f13d045d9bd3eda797aa4e406fde0a2644bc92bbdd4bdd"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:f43109822df2d3faac7aad79613f5f02e4eab0fc8ad7932d2e70e2a83bd49c26"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:de7fd57765398d141949946c84f3590a68cf5887dac3fc52388df0639b01eda4"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:9b6305295b6591e45f069d3553c54d50cc47629eb5c218aac99e0f7fafbf90a1"}, - {file = "regex-2020.10.28-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:bd904c0dec29bbd0769887a816657491721d5f545c29e30fd9d7a1a275dc80ab"}, - {file = "regex-2020.10.28-cp36-cp36m-win32.whl", hash = 
"sha256:8092a5a06ad9a7a247f2a76ace121183dc4e1a84c259cf9c2ce3bbb69fac3582"}, - {file = "regex-2020.10.28-cp36-cp36m-win_amd64.whl", hash = "sha256:49461446b783945597c4076aea3f49aee4b4ce922bd241e4fcf62a3e7c61794c"}, - {file = "regex-2020.10.28-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:297116e79074ec2a2f885d22db00ce6e88b15f75162c5e8b38f66ea734e73c64"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:8ca9dca965bd86ea3631b975d63b0693566d3cc347e55786d5514988b6f5b84c"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ea37320877d56a7f0a1e6a625d892cf963aa7f570013499f5b8d5ab8402b5625"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:3a5f08039eee9ea195a89e180c5762bfb55258bfb9abb61a20d3abee3b37fd12"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:cb905f3d2e290a8b8f1579d3984f2cfa7c3a29cc7cba608540ceeed18513f520"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:96f99219dddb33e235a37283306834700b63170d7bb2a1ee17e41c6d589c8eb9"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:227a8d2e5282c2b8346e7f68aa759e0331a0b4a890b55a5cfbb28bd0261b84c0"}, - {file = "regex-2020.10.28-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:2564def9ce0710d510b1fc7e5178ce2d20f75571f788b5197b3c8134c366f50c"}, - {file = "regex-2020.10.28-cp37-cp37m-win32.whl", hash = "sha256:a62162be05edf64f819925ea88d09d18b09bebf20971b363ce0c24e8b4aa14c0"}, - {file = "regex-2020.10.28-cp37-cp37m-win_amd64.whl", hash = "sha256:03855ee22980c3e4863dc84c42d6d2901133362db5daf4c36b710dd895d78f0a"}, - {file = "regex-2020.10.28-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bf4f896c42c63d1f22039ad57de2644c72587756c0cfb3cc3b7530cfe228277f"}, - {file = "regex-2020.10.28-cp38-cp38-manylinux1_i686.whl", hash = "sha256:625116aca6c4b57c56ea3d70369cacc4d62fead4930f8329d242e4fe7a58ce4b"}, - {file = 
"regex-2020.10.28-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2dc522e25e57e88b4980d2bdd334825dbf6fa55f28a922fc3bfa60cc09e5ef53"}, - {file = "regex-2020.10.28-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:119e0355dbdd4cf593b17f2fc5dbd4aec2b8899d0057e4957ba92f941f704bf5"}, - {file = "regex-2020.10.28-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:cfcf28ed4ce9ced47b9b9670a4f0d3d3c0e4d4779ad4dadb1ad468b097f808aa"}, - {file = "regex-2020.10.28-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b45bab9f224de276b7bc916f6306b86283f6aa8afe7ed4133423efb42015a898"}, - {file = "regex-2020.10.28-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:52e83a5f28acd621ba8e71c2b816f6541af7144b69cc5859d17da76c436a5427"}, - {file = "regex-2020.10.28-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:aacc8623ffe7999a97935eeabbd24b1ae701d08ea8f874a6ff050e93c3e658cf"}, - {file = "regex-2020.10.28-cp38-cp38-win32.whl", hash = "sha256:06b52815d4ad38d6524666e0d50fe9173533c9cc145a5779b89733284e6f688f"}, - {file = "regex-2020.10.28-cp38-cp38-win_amd64.whl", hash = "sha256:c3466a84fce42c2016113101018a9981804097bacbab029c2d5b4fcb224b89de"}, - {file = "regex-2020.10.28-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:127a9e0c0d91af572fbb9e56d00a504dbd4c65e574ddda3d45b55722462210de"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c2c6c56ee97485a127555c9595c069201b5161de9d05495fbe2132b5ac104786"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:1ec66700a10e3c75f1f92cbde36cca0d3aaee4c73dfa26699495a3a30b09093c"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:11116d424734fe356d8777f89d625f0df783251ada95d6261b4c36ad27a394bb"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f1fce1e4929157b2afeb4bb7069204d4370bab9f4fc03ca1fbec8bd601f8c87d"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:3dfca201fa6b326239e1bccb00b915e058707028809b8ecc0cf6819ad233a740"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:b8a686a6c98872007aa41fdbb2e86dc03b287d951ff4a7f1da77fb7f14113e4d"}, - {file = "regex-2020.10.28-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:c32c91a0f1ac779cbd73e62430de3d3502bbc45ffe5bb6c376015acfa848144b"}, - {file = "regex-2020.10.28-cp39-cp39-win32.whl", hash = "sha256:832339223b9ce56b7b15168e691ae654d345ac1635eeb367ade9ecfe0e66bee0"}, - {file = "regex-2020.10.28-cp39-cp39-win_amd64.whl", hash = "sha256:654c1635f2313d0843028487db2191530bca45af61ca85d0b16555c399625b0e"}, - {file = "regex-2020.10.28.tar.gz", hash = "sha256:dd3e6547ecf842a29cf25123fbf8d2461c53c8d37aa20d87ecee130c89b7079b"}, + {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa"}, + {file = "regex-2020.11.13-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6"}, + {file = "regex-2020.11.13-cp36-cp36m-win32.whl", hash = 
"sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e"}, + {file = "regex-2020.11.13-cp36-cp36m-win_amd64.whl", hash = "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884"}, + {file = "regex-2020.11.13-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba"}, + {file = "regex-2020.11.13-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538"}, + {file = "regex-2020.11.13-cp37-cp37m-win32.whl", hash = "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4"}, + {file = "regex-2020.11.13-cp37-cp37m-win_amd64.whl", hash = "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444"}, + {file = "regex-2020.11.13-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d"}, + {file = 
"regex-2020.11.13-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5"}, + {file = "regex-2020.11.13-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b"}, + {file = "regex-2020.11.13-cp38-cp38-win32.whl", hash = "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c"}, + {file = "regex-2020.11.13-cp38-cp38-win_amd64.whl", hash = "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683"}, + {file = "regex-2020.11.13-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9"}, + {file = "regex-2020.11.13-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c"}, + {file = "regex-2020.11.13-cp39-cp39-win32.whl", hash = "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f"}, + {file = "regex-2020.11.13-cp39-cp39-win_amd64.whl", hash = "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d"}, + {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, ] requests = [ - {file = "requests-2.24.0-py2.py3-none-any.whl", hash = "sha256:fe75cc94a9443b9246fc7049224f75604b113c36acb93f87b80ed42c44cbb898"}, - {file = "requests-2.24.0.tar.gz", hash = "sha256:b3559a131db72c33ee969480840fff4bb6dd111de7dd27c8ee1f820f4f00231b"}, + {file = "requests-2.25.0-py2.py3-none-any.whl", hash = "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"}, + {file = "requests-2.25.0.tar.gz", hash = "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"}, ] restructuredtext-lint = [ - {file = "restructuredtext_lint-1.3.1.tar.gz", hash = "sha256:470e53b64817211a42805c3a104d2216f6f5834b22fe7adb637d1de4d6501fb8"}, + {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, ] send2trash = [ {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, @@ -2629,8 +2628,8 @@ snowballstemmer = [ {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, ] sphinx = [ - {file = "Sphinx-3.3.0-py3-none-any.whl", hash = 
"sha256:3abdb2c57a65afaaa4f8573cbabd5465078eb6fd282c1e4f87f006875a7ec0c7"}, - {file = "Sphinx-3.3.0.tar.gz", hash = "sha256:1c21e7c5481a31b531e6cbf59c3292852ccde175b504b00ce2ff0b8f4adc3649"}, + {file = "Sphinx-3.3.1-py3-none-any.whl", hash = "sha256:d4e59ad4ea55efbb3c05cde3bfc83bfc14f0c95aa95c3d75346fcce186a47960"}, + {file = "Sphinx-3.3.1.tar.gz", hash = "sha256:1e8d592225447104d1172be415bc2972bd1357e3e12fdc76edf2261105db4300"}, ] sphinx-autodoc-typehints = [ {file = "sphinx-autodoc-typehints-1.11.1.tar.gz", hash = "sha256:244ba6d3e2fdb854622f643c7763d6f95b6886eba24bec28e86edf205e4ddb20"}, @@ -2701,16 +2700,16 @@ sqlalchemy = [ {file = "SQLAlchemy-1.3.20.tar.gz", hash = "sha256:d2f25c7f410338d31666d7ddedfa67570900e248b940d186b48461bd4e5569a1"}, ] stevedore = [ - {file = "stevedore-3.2.2-py3-none-any.whl", hash = "sha256:5e1ab03eaae06ef6ce23859402de785f08d97780ed774948ef16c4652c41bc62"}, - {file = "stevedore-3.2.2.tar.gz", hash = "sha256:f845868b3a3a77a2489d226568abe7328b5c2d4f6a011cc759dfa99144a521f0"}, + {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, + {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, ] terminado = [ {file = "terminado-0.9.1-py3-none-any.whl", hash = "sha256:c55f025beb06c2e2669f7ba5a04f47bb3304c30c05842d4981d8f0fc9ab3b4e3"}, {file = "terminado-0.9.1.tar.gz", hash = "sha256:3da72a155b807b01c9e8a5babd214e052a0a45a975751da3521a1c3381ce6d76"}, ] testfixtures = [ - {file = "testfixtures-6.15.0-py2.py3-none-any.whl", hash = "sha256:e17f4f526fc90b0ac9bc7f8ca62b7dec17d9faf3d721f56bda4f0fd94d02f85a"}, - {file = "testfixtures-6.15.0.tar.gz", hash = "sha256:409f77cfbdad822d12a8ce5c4aa8fb4d0bb38073f4a5444fede3702716a2cec2"}, + {file = "testfixtures-6.16.0-py2.py3-none-any.whl", hash = "sha256:017f1924f464189915e67162f530758537175ddd1461b211c666f0587ebc2939"}, + {file = "testfixtures-6.16.0.tar.gz", hash = 
"sha256:f3f567f35b3d004b0e19ee7dff61e75e3c05f3ffc1c41580d967b1fe144b3de9"}, ] testpath = [ {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, @@ -2796,12 +2795,12 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] urllib3 = [ - {file = "urllib3-1.25.11-py2.py3-none-any.whl", hash = "sha256:f5321fbe4bf3fefa0efd0bfe7fb14e90909eb62a48ccda331726b4319897dd5e"}, - {file = "urllib3-1.25.11.tar.gz", hash = "sha256:8d7eaa5a82a1cac232164990f04874c594c9453ec55eef02eab885aa02fc17a2"}, + {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, + {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, ] virtualenv = [ - {file = "virtualenv-20.1.0-py2.py3-none-any.whl", hash = "sha256:b0011228208944ce71052987437d3843e05690b2f23d1c7da4263fde104c97a2"}, - {file = "virtualenv-20.1.0.tar.gz", hash = "sha256:b8d6110f493af256a40d65e29846c69340a947669eec8ce784fcf3dd3af28380"}, + {file = "virtualenv-20.2.2-py2.py3-none-any.whl", hash = "sha256:54b05fc737ea9c9ee9f8340f579e5da5b09fb64fd010ab5757eb90268616907c"}, + {file = "virtualenv-20.2.2.tar.gz", hash = "sha256:b7a8ec323ee02fb2312f098b6b4c9de99559b462775bc8fe3627a73706603c1b"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, From 9ee9c04a69c67f85ae952861024ad8440d462af8 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 14:26:12 +0100 Subject: [PATCH 07/72] Remove python-dotenv from the dependencies zsh-dotenv automatically loads the environment variables upon entering the project's root. 
--- poetry.lock | 17 +---------------- pyproject.toml | 1 - setup.cfg | 2 -- src/urban_meal_delivery/configuration.py | 5 ----- 4 files changed, 1 insertion(+), 24 deletions(-) diff --git a/poetry.lock b/poetry.lock index b44af50..58c6815 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1346,17 +1346,6 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" [package.dependencies] six = ">=1.5" -[[package]] -name = "python-dotenv" -version = "0.14.0" -description = "Add .env support to your django/flask apps in development and deployments" -category = "main" -optional = false -python-versions = "*" - -[package.extras] -cli = ["click (>=5.0)"] - [[package]] name = "python-editor" version = "1.0.4" @@ -1817,7 +1806,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "eba980d4335eef2012a1e7ce27941731149eb224cdfad856aa0bcd7701e9e557" +content-hash = "5f49faba0f11ddf2c4439fe0aad8113a68cbd504b2f68e57dfbec38db3cbe474" [metadata.files] alabaster = [ @@ -2478,10 +2467,6 @@ python-dateutil = [ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, ] -python-dotenv = [ - {file = "python-dotenv-0.14.0.tar.gz", hash = "sha256:8c10c99a1b25d9a68058a1ad6f90381a62ba68230ca93966882a4dbc3bc9c33d"}, - {file = "python_dotenv-0.14.0-py2.py3-none-any.whl", hash = "sha256:c10863aee750ad720f4f43436565e4c1698798d763b63234fb5021b6c616e423"}, -] python-editor = [ {file = "python-editor-1.0.4.tar.gz", hash = "sha256:51fda6bcc5ddbbb7063b2af7509e43bd84bfc32a4ff71349ec7847713882327b"}, {file = "python_editor-1.0.4-py2-none-any.whl", hash = "sha256:5f98b069316ea1c2ed3f67e7f5df6c0d8f10b689964a4a811ff64f0106819ec8"}, diff --git a/pyproject.toml b/pyproject.toml index ec5414b..2656f96 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -31,7 +31,6 @@ python = "^3.8" alembic = "^1.4.2" click = "^7.1.2" psycopg2 = "^2.8.5" # adapter for PostgreSQL -python-dotenv = "^0.14.0" sqlalchemy = "^1.3.18" # Jupyter Lab => notebooks with analyses using the developed package diff --git a/setup.cfg b/setup.cfg index 5bbd00d..f764328 100644 --- a/setup.cfg +++ b/setup.cfg @@ -217,8 +217,6 @@ single_line_exclusions = typing [mypy] cache_dir = .cache/mypy -[mypy-dotenv] -ignore_missing_imports = true [mypy-nox.*] ignore_missing_imports = true [mypy-packaging] diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 0e6eefa..c4cc451 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -13,11 +13,6 @@ import random import string import warnings -import dotenv - - -dotenv.load_dotenv() - def random_schema_name() -> str: """Generate a random PostgreSQL schema name for testing.""" From c1064673aaa83d52c8542e41941b7ed66bad63ca Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 15:15:08 +0100 Subject: [PATCH 08/72] Isolate configuration related code better - create the global `config` object inside the `urban_meal_delivery.configuration` module - streamline documentation and comments --- setup.cfg | 2 +- src/urban_meal_delivery/__init__.py | 16 +++------------- src/urban_meal_delivery/configuration.py | 8 ++++++-- tests/conftest.py | 2 ++ 4 files changed, 12 insertions(+), 16 deletions(-) diff --git a/setup.cfg b/setup.cfg index f764328..2a467c6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -272,4 +272,4 @@ console_output_style = count env = TESTING=true markers = - e2e: integration tests, inlc., for example, tests touching a database + e2e: integration tests, incl., for example, tests touching the database diff --git a/src/urban_meal_delivery/__init__.py b/src/urban_meal_delivery/__init__.py index 943ba9b..ad34978 100644 --- a/src/urban_meal_delivery/__init__.py +++ 
b/src/urban_meal_delivery/__init__.py @@ -5,11 +5,12 @@ Example: >>> umd.__version__ != '0.0.0' True """ +# The config object must come before all other project-internal imports. +from urban_meal_delivery.configuration import config # noqa:F401 isort:skip -import os as _os from importlib import metadata as _metadata -from urban_meal_delivery import configuration as _configuration +from urban_meal_delivery import db # noqa:F401 try: @@ -24,14 +25,3 @@ else: __author__ = _pkg_info['author'] __pkg_name__ = _pkg_info['name'] __version__ = _pkg_info['version'] - - -# Global `config` object to be used in the package. -config: _configuration.Config = _configuration.make_config( - 'testing' if _os.getenv('TESTING') else 'production', -) - - -# Import `db` down here as it depends on `config`. -# pylint:disable=wrong-import-position -from urban_meal_delivery import db # noqa:E402,F401 isort:skip diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index c4cc451..72c10d3 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -73,7 +73,7 @@ def make_config(env: str = 'production') -> Config: """Create a new `Config` object. 
Args: - env: either 'production' or 'testing'; defaults to the first + env: either 'production' or 'testing' Returns: config: a namespace with all configurations @@ -81,7 +81,8 @@ def make_config(env: str = 'production') -> Config: Raises: ValueError: if `env` is not as specified """ # noqa:DAR203 - config: Config + config: Config # otherwise mypy is confused + if env.strip().lower() == 'production': config = ProductionConfig() elif env.strip().lower() == 'testing': @@ -94,3 +95,6 @@ def make_config(env: str = 'production') -> Config: warnings.warn('Bad configurartion: no DATABASE_URI set in the environment') return config + + +config = make_config('testing' if os.getenv('TESTING') else 'production') diff --git a/tests/conftest.py b/tests/conftest.py index 1b91688..b58c430 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,6 +5,8 @@ import os from urban_meal_delivery import config +# The TESTING environment variable is set +# in setup.cfg in pytest's config section. if not os.getenv('TESTING'): raise RuntimeError('Tests must be executed with TESTING set in the environment') From 86ad139c7b997defa263ce3ebc2efde17b77e009 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 15:26:57 +0100 Subject: [PATCH 09/72] Fix --require-hashes mode in GitHub Actions - GitHub Actions complains about missing hashes in poetry's export of pinned dependencies - as an example, see https://github.com/webartifex/urban-meal-delivery/runs/1550750320 --- noxfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/noxfile.py b/noxfile.py index a86cb05..3c9d3e0 100644 --- a/noxfile.py +++ b/noxfile.py @@ -529,6 +529,7 @@ def _install_packages(session: Session, *packages_or_pip_args: str, **kwargs) -> '--dev', '--format=requirements.txt', f'--output={requirements_txt.name}', + '--without-hashes', external=True, ) session.install( From 671d209cc510cd244e389f84a855af4ebe74ce67 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 16:21:12 +0100 Subject: 
[PATCH 10/72] Move submodule with demand-forecasting paper into research folder --- .gitmodules | 4 ++-- {papers => research/papers}/demand-forecasting | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename {papers => research/papers}/demand-forecasting (100%) diff --git a/.gitmodules b/.gitmodules index e01b84e..8300932 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,3 +1,3 @@ -[submodule "papers/demand-forecasting"] - path = papers/demand-forecasting +[submodule "research/papers/demand-forecasting"] + path = research/papers/demand-forecasting url = git@github.com:webartifex/urban-meal-delivery-demand-forecasting.git diff --git a/papers/demand-forecasting b/research/papers/demand-forecasting similarity index 100% rename from papers/demand-forecasting rename to research/papers/demand-forecasting From b9c3697434781d0f16d641ec5598382a41bc3942 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 14 Dec 2020 16:56:27 +0100 Subject: [PATCH 11/72] Move notebooks into the research folder --- README.md | 4 ++-- notebooks/00_clean_data.ipynb => research/clean_data.ipynb | 0 2 files changed, 2 insertions(+), 2 deletions(-) rename notebooks/00_clean_data.ipynb => research/clean_data.ipynb (100%) diff --git a/README.md b/README.md index f34cf37..537aa98 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ that iteratively build on each other. ### Data Cleaning The UDP provided its raw data as a PostgreSQL dump. -This [notebook](https://nbviewer.jupyter.org/github/webartifex/urban-meal-delivery/blob/develop/notebooks/00_clean_data.ipynb) +This [notebook](https://nbviewer.jupyter.org/github/webartifex/urban-meal-delivery/blob/develop/research/clean_data.ipynb) cleans the data extensively and maps them onto the [ORM models](https://github.com/webartifex/urban-meal-delivery/tree/develop/src/urban_meal_delivery/db) defined in the `urban-meal-delivery` package @@ -25,7 +25,7 @@ and contains all source code to drive the analyses. 
Due to a non-disclosure agreement with the UDP, neither the raw nor the cleaned data are published as of now. -However, previews of the data can be seen throughout the [notebooks/](https://github.com/webartifex/urban-meal-delivery/tree/develop/notebooks) folders. +However, previews of the data can be seen throughout the [research/](https://github.com/webartifex/urban-meal-delivery/tree/develop/research) folder. ### Real-time Demand Forecasting diff --git a/notebooks/00_clean_data.ipynb b/research/clean_data.ipynb similarity index 100% rename from notebooks/00_clean_data.ipynb rename to research/clean_data.ipynb From 0aefa226660bcbbadd3af74e52d50bd025f6e838 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 15 Dec 2020 11:35:05 +0100 Subject: [PATCH 12/72] Integrate pytest-randomly into the test suite As a lot of the integration tests populate the database with test data, it is deemed safer to run the tests in random order to uncover potential dependencies between distinct test cases. Because of how the `db_session` fixture is designed, this should already be taken care of. --- noxfile.py | 2 ++ poetry.lock | 17 ++++++++++++++++- pyproject.toml | 1 + 3 files changed, 19 insertions(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index 3c9d3e0..b516587 100644 --- a/noxfile.py +++ b/noxfile.py @@ -226,6 +226,7 @@ def test(session): 'pytest', 'pytest-cov', 'pytest-env', + 'pytest-randomly', 'xdoctest[optional]', ) @@ -240,6 +241,7 @@ def test(session): '--cov-branch', '--cov-fail-under=100', '--cov-report=term-missing:skip-covered', + '--randomly-seed=4287', '-k', 'not e2e', PYTEST_LOCATION, diff --git a/poetry.lock b/poetry.lock index 58c6815..805e302 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1335,6 +1335,17 @@ python-versions = "*" [package.dependencies] pytest = ">=2.6.0" +[[package]] +name = "pytest-randomly" +version = "3.5.0" +description = "Pytest plugin to randomly order tests and control random.seed." 
+category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +pytest = "*" + [[package]] name = "python-dateutil" version = "2.8.1" @@ -1806,7 +1817,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "5f49faba0f11ddf2c4439fe0aad8113a68cbd504b2f68e57dfbec38db3cbe474" +content-hash = "28c08518fa365282d2b06ea2b78784906c69114f05da597ba5df4bcc64fe4aaa" [metadata.files] alabaster = [ @@ -2463,6 +2474,10 @@ pytest-cov = [ pytest-env = [ {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, ] +pytest-randomly = [ + {file = "pytest-randomly-3.5.0.tar.gz", hash = "sha256:440cec143fd9b0adeb072006c71e0294402a2bc2ccd08079c2341087ba4cf2d1"}, + {file = "pytest_randomly-3.5.0-py3-none-any.whl", hash = "sha256:9db10d160237f3f8ee60cef72e4cb9ea88d2893c9dd5c8aa334b060cdeb67c3a"}, +] python-dateutil = [ {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, diff --git a/pyproject.toml b/pyproject.toml index 2656f96..6d0808d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -75,6 +75,7 @@ packaging = "^20.4" # used to test the packaged version pytest = "^6.0.1" pytest-cov = "^2.10.0" pytest-env = "^0.6.2" +pytest-randomly = "^3.5.0" xdoctest = { version="^0.13.0", extras=["optional"] } # Documentation From 8345579b6c6a11c3cf3028f93381f4cdfbd5e97e Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 15 Dec 2020 12:23:45 +0100 Subject: [PATCH 13/72] Add factory_boy to the dev dependencies --- poetry.lock | 49 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 49 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 805e302..cd0debf 100644 --- 
a/poetry.lock +++ b/poetry.lock @@ -333,6 +333,33 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "factory-boy" +version = "3.1.0" +description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +Faker = ">=0.7.0" + +[package.extras] +dev = ["coverage", "django", "flake8", "isort", "pillow", "sqlalchemy", "mongoengine", "wheel (>=0.32.0)", "tox", "zest.releaser"] +doc = ["sphinx", "sphinx-rtd-theme"] + +[[package]] +name = "faker" +version = "5.0.1" +description = "Faker is a Python package that generates fake data for you." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +python-dateutil = ">=2.4" +text-unidecode = "1.3" + [[package]] name = "filelock" version = "3.0.12" @@ -1663,6 +1690,14 @@ python-versions = "*" [package.extras] test = ["pathlib2"] +[[package]] +name = "text-unidecode" +version = "1.3" +description = "The most basic Text::Unidecode port" +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "toml" version = "0.10.2" @@ -1817,7 +1852,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "28c08518fa365282d2b06ea2b78784906c69114f05da597ba5df4bcc64fe4aaa" +content-hash = "d361e5db0ef8817013d51be2d62b09f4eea2eb6bcb6bca2ba4d8e409ef990072" [metadata.files] alabaster = [ @@ -2029,6 +2064,14 @@ entrypoints = [ eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, ] +factory-boy = [ + {file = "factory_boy-3.1.0-py2.py3-none-any.whl", hash = "sha256:d8626622550c8ba31392f9e19fdbcef9f139cf1ad643c5923f20490a7b3e2e3d"}, + {file = "factory_boy-3.1.0.tar.gz", hash = "sha256:ded73e49135c24bd4d3f45bf1eb168f8d290090f5cf4566b8df3698317dc9c08"}, +] +faker = [ + {file = 
"Faker-5.0.1-py3-none-any.whl", hash = "sha256:1fcb415562ee6e2395b041e85fa6901d4708d30b84d54015226fa754ed0822c3"}, + {file = "Faker-5.0.1.tar.gz", hash = "sha256:e8beccb398ee9b8cc1a91d9295121d66512b6753b4846eb1e7370545d46b3311"}, +] filelock = [ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, @@ -2715,6 +2758,10 @@ testpath = [ {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, {file = "testpath-0.4.4.tar.gz", hash = "sha256:60e0a3261c149755f4399a1fff7d37523179a70fdc3abdf78de9fc2604aeec7e"}, ] +text-unidecode = [ + {file = "text-unidecode-1.3.tar.gz", hash = "sha256:bad6603bb14d279193107714b288be206cac565dfa49aa5b105294dd5c4aab93"}, + {file = "text_unidecode-1.3-py2.py3-none-any.whl", hash = "sha256:1311f10e8b895935241623731c2ba64f4c455287888b18189350b67134a822e8"}, +] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, diff --git a/pyproject.toml b/pyproject.toml index 6d0808d..14282a6 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,7 @@ pylint = "^2.5.3" wemake-python-styleguide = "^0.14.1" # flake8 plug-in # Test Suite +factory-boy = "^3.1.0" packaging = "^20.4" # used to test the packaged version pytest = "^6.0.1" pytest-cov = "^2.10.0" From 2ddd43053483cc3734d6be292955a049ee0d9d01 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 15 Dec 2020 19:07:14 +0100 Subject: [PATCH 14/72] Add Faker to the dev dependencies --- poetry.lock | 8 ++++---- pyproject.toml | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index cd0debf..ec26666 100644 
--- a/poetry.lock +++ b/poetry.lock @@ -1317,7 +1317,7 @@ python-versions = ">=3.5" [[package]] name = "pytest" -version = "6.2.0" +version = "6.2.1" description = "pytest: simple powerful testing with Python" category = "dev" optional = false @@ -1852,7 +1852,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "d361e5db0ef8817013d51be2d62b09f4eea2eb6bcb6bca2ba4d8e409ef990072" +content-hash = "f067b44ed9a0fb333cddfcefa02d30516180c8c4af148ff4cb083abdd532f8e9" [metadata.files] alabaster = [ @@ -2507,8 +2507,8 @@ pyrsistent = [ {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-6.2.0-py3-none-any.whl", hash = "sha256:d69e1a80b34fe4d596c9142f35d9e523d98a2838976f1a68419a8f051b24cec6"}, - {file = "pytest-6.2.0.tar.gz", hash = "sha256:b12e09409c5bdedc28d308469e156127004a436b41e9b44f9bff6446cbab9152"}, + {file = "pytest-6.2.1-py3-none-any.whl", hash = "sha256:1969f797a1a0dbd8ccf0fecc80262312729afea9c17f1d70ebf85c5e76c6f7c8"}, + {file = "pytest-6.2.1.tar.gz", hash = "sha256:66e419b1899bc27346cb2c993e12c5e5e8daba9073c1fbce33b9807abc95c306"}, ] pytest-cov = [ {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, diff --git a/pyproject.toml b/pyproject.toml index 14282a6..c6f6d68 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,6 +71,7 @@ pylint = "^2.5.3" wemake-python-styleguide = "^0.14.1" # flake8 plug-in # Test Suite +Faker = "^5.0.1" factory-boy = "^3.1.0" packaging = "^20.4" # used to test the packaged version pytest = "^6.0.1" From 3e0300cb0e955c7f101702ac175ec9eb7c0ef027 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Wed, 16 Dec 2020 11:04:43 +0100 Subject: [PATCH 15/72] Disable too-few-public-methods error in pylint --- setup.cfg | 2 ++ src/urban_meal_delivery/configuration.py | 6 ------ 
src/urban_meal_delivery/db/couriers.py | 2 -- src/urban_meal_delivery/db/customers.py | 2 -- src/urban_meal_delivery/db/restaurants.py | 2 -- 5 files changed, 2 insertions(+), 12 deletions(-) diff --git a/setup.cfg b/setup.cfg index 2a467c6..c3342a7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -259,6 +259,8 @@ disable = undefined-variable, unused-import, unused-variable, # wemake-python-styleguide redefined-outer-name, + # A lot of classes in the test suite and `Meta` classes cause complaints. + too-few-public-methods, [pylint.REPORTS] score = no diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 72c10d3..d20320a 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -26,8 +26,6 @@ def random_schema_name() -> str: class Config: """Configuration that applies in all situations.""" - # pylint:disable=too-few-public-methods - CUTOFF_DAY = datetime.datetime(2017, 2, 1) # If a scheduled pre-order is made within this @@ -53,16 +51,12 @@ class Config: class ProductionConfig(Config): """Configuration for the real dataset.""" - # pylint:disable=too-few-public-methods - TESTING = False class TestingConfig(Config): """Configuration for the test suite.""" - # pylint:disable=too-few-public-methods - TESTING = True DATABASE_URI = os.getenv('DATABASE_URI_TESTING') or Config.DATABASE_URI diff --git a/src/urban_meal_delivery/db/couriers.py b/src/urban_meal_delivery/db/couriers.py index be065a5..9087982 100644 --- a/src/urban_meal_delivery/db/couriers.py +++ b/src/urban_meal_delivery/db/couriers.py @@ -10,8 +10,6 @@ from urban_meal_delivery.db import meta class Courier(meta.Base): """A Courier working for the UDP.""" - # pylint:disable=too-few-public-methods - __tablename__ = 'couriers' # Columns diff --git a/src/urban_meal_delivery/db/customers.py b/src/urban_meal_delivery/db/customers.py index e96361a..451ec92 100644 --- a/src/urban_meal_delivery/db/customers.py +++ 
b/src/urban_meal_delivery/db/customers.py @@ -9,8 +9,6 @@ from urban_meal_delivery.db import meta class Customer(meta.Base): """A Customer of the UDP.""" - # pylint:disable=too-few-public-methods - __tablename__ = 'customers' # Columns diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index 4531d09..f31d7af 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -9,8 +9,6 @@ from urban_meal_delivery.db import meta class Restaurant(meta.Base): """A Restaurant selling meals on the UDP.""" - # pylint:disable=too-few-public-methods - __tablename__ = 'restaurants' # Columns From 416a58f9dc2ccb5585b952df71c98993285b9a4c Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 28 Dec 2020 15:52:08 +0100 Subject: [PATCH 16/72] Add geopy to the dev dependencies --- poetry.lock | 38 +++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index ec26666..673ea5f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -592,6 +592,34 @@ python-versions = "*" [package.dependencies] flake8 = "*" +[[package]] +name = "geographiclib" +version = "1.50" +description = "The geodesic routines from GeographicLib" +category = "dev" +optional = false +python-versions = "*" + +[[package]] +name = "geopy" +version = "2.1.0" +description = "Python Geocoding Toolbox" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +geographiclib = ">=1.49,<2" + +[package.extras] +aiohttp = ["aiohttp"] +dev = ["async-generator", "flake8 (>=3.8.0,<3.9.0)", "isort (>=5.6.0,<5.7.0)", "coverage", "pytest-aiohttp", "pytest (>=3.10)", "readme-renderer", "sphinx", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-docs = ["readme-renderer", "sphinx", "sphinx-issues", "sphinx-rtd-theme (>=0.5.0)"] +dev-lint = ["async-generator", "flake8 (>=3.8.0,<3.9.0)", "isort (>=5.6.0,<5.7.0)"] +dev-test = 
["async-generator", "coverage", "pytest-aiohttp", "pytest (>=3.10)"] +requests = ["urllib3 (>=1.24.2)", "requests (>=2.16.2)"] +timezone = ["pytz"] + [[package]] name = "gitdb" version = "4.0.5" @@ -1852,7 +1880,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "f067b44ed9a0fb333cddfcefa02d30516180c8c4af148ff4cb083abdd532f8e9" +content-hash = "03305636d62193eba71a06cc802846a4b00f5cdcaa3f71ceb4d35f55ead764b0" [metadata.files] alabaster = [ @@ -2146,6 +2174,14 @@ flake8-string-format = [ {file = "flake8-string-format-0.2.3.tar.gz", hash = "sha256:774d56103d9242ed968897455ef49b7d6de272000cfa83de5814273a868832f1"}, {file = "flake8_string_format-0.2.3-py2.py3-none-any.whl", hash = "sha256:68ea72a1a5b75e7018cae44d14f32473c798cf73d75cbaed86c6a9a907b770b2"}, ] +geographiclib = [ + {file = "geographiclib-1.50-py3-none-any.whl", hash = "sha256:51cfa698e7183792bce27d8fb63ac8e83689cd8170a730bf35e1a5c5bf8849b9"}, + {file = "geographiclib-1.50.tar.gz", hash = "sha256:12bd46ee7ec25b291ea139b17aa991e7ef373e21abd053949b75c0e9ca55c632"}, +] +geopy = [ + {file = "geopy-2.1.0-py3-none-any.whl", hash = "sha256:4db8a2b79a2b3358a7d020ea195be639251a831a1b429c0d1b20c9f00c67c788"}, + {file = "geopy-2.1.0.tar.gz", hash = "sha256:892b219413e7955587b029949af3a1949c6fbac9d5ad17b79d850718f6a9550f"}, +] gitdb = [ {file = "gitdb-4.0.5-py3-none-any.whl", hash = "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac"}, {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, diff --git a/pyproject.toml b/pyproject.toml index c6f6d68..390f852 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -73,6 +73,7 @@ wemake-python-styleguide = "^0.14.1" # flake8 plug-in # Test Suite Faker = "^5.0.1" factory-boy = "^3.1.0" +geopy = "^2.1.0" packaging = "^20.4" # used to test the packaged version pytest = "^6.0.1" pytest-cov = "^2.10.0" From 
78dba23d5d21bad3671f0bd3734dbaec00316882 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 29 Dec 2020 14:37:37 +0100 Subject: [PATCH 17/72] Re-factor the ORM tests to use randomized fake data - create `*Factory` classes with fakerboy and faker that generate randomized instances of the ORM models - add new pytest marker: "db" are the integration tests involving the database whereas "e2e" will be all other integration tests - streamline the docstrings in the ORM models --- noxfile.py | 7 +- setup.cfg | 16 +- src/urban_meal_delivery/db/addresses.py | 8 +- src/urban_meal_delivery/db/cities.py | 4 +- src/urban_meal_delivery/db/couriers.py | 4 +- src/urban_meal_delivery/db/customers.py | 4 +- src/urban_meal_delivery/db/orders.py | 76 ++-- src/urban_meal_delivery/db/restaurants.py | 9 +- tests/db/conftest.py | 232 ++-------- tests/db/fake_data/__init__.py | 14 + tests/db/fake_data/factories.py | 366 ++++++++++++++++ tests/db/fake_data/fixture_makers.py | 105 +++++ tests/db/fake_data/static_fixtures.py | 58 +++ tests/db/test_addresses.py | 119 +++-- tests/db/test_cities.py | 86 ++-- tests/db/test_couriers.py | 83 ++-- tests/db/test_customer.py | 46 +- tests/db/test_orders.py | 502 +++++++++++++--------- tests/db/test_restaurants.py | 74 ++-- 19 files changed, 1092 insertions(+), 721 deletions(-) create mode 100644 tests/db/fake_data/__init__.py create mode 100644 tests/db/fake_data/factories.py create mode 100644 tests/db/fake_data/fixture_makers.py create mode 100644 tests/db/fake_data/static_fixtures.py diff --git a/noxfile.py b/noxfile.py index b516587..cd65168 100644 --- a/noxfile.py +++ b/noxfile.py @@ -222,6 +222,9 @@ def test(session): session.run('poetry', 'install', '--no-dev', external=True) _install_packages( session, + 'Faker', + 'factory-boy', + 'geopy', 'packaging', 'pytest', 'pytest-cov', @@ -242,8 +245,8 @@ def test(session): '--cov-fail-under=100', '--cov-report=term-missing:skip-covered', '--randomly-seed=4287', - '-k', - 'not e2e', + '-m', + 'not 
(db or e2e)', PYTEST_LOCATION, ) session.run('pytest', '--version') diff --git a/setup.cfg b/setup.cfg index c3342a7..cfc1969 100644 --- a/setup.cfg +++ b/setup.cfg @@ -89,6 +89,8 @@ extend-ignore = # Comply with black's style. # Source: https://github.com/psf/black/blob/master/docs/compatible_configs.md#flake8 E203, W503, WPS348, + # Allow underscores in numbers. + WPS303, # f-strings are ok. WPS305, # Classes should not have to specify a base class. @@ -139,14 +141,24 @@ per-file-ignores = tests/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, + # The `Meta` class inside the factory_boy models do not need a docstring. + D106, # `assert` statements are ok in the test suite. S101, + # The `random` module is not used for cryptography. + S311, # Shadowing outer scopes occurs naturally with mocks. WPS442, # Modules may have many test cases. WPS202,WPS204,WPS214, + # Do not check for Jones complexity in the test suite. + WPS221, # No overuse of string constants (e.g., '__version__'). WPS226, + # We do not care about the number of "# noqa"s in the test suite. + WPS402, + # Allow closures. + WPS430, # Numbers are normal in test cases as expected results. 
WPS432, @@ -166,6 +178,7 @@ show-source = true # wemake-python-styleguide's settings # =================================== allowed-domain-names = + data, obj, param, result, @@ -274,4 +287,5 @@ console_output_style = count env = TESTING=true markers = - e2e: integration tests, incl., for example, tests touching the database + db: tests touching the database + e2e: non-db integration tests diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index d9bfa48..8a9337b 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Address model.""" +"""Provide the ORM's `Address` model.""" import sqlalchemy as sa from sqlalchemy import orm @@ -9,7 +9,7 @@ from urban_meal_delivery.db import meta class Address(meta.Base): - """An Address of a Customer or a Restaurant on the UDP.""" + """An address of a `Customer` or a `Restaurant` on the UDP.""" __tablename__ = 'addresses' @@ -72,11 +72,11 @@ class Address(meta.Base): @hybrid.hybrid_property def is_primary(self) -> bool: - """If an Address object is the earliest one entered at its location. + """If an `Address` object is the earliest one entered at its location. Street addresses may have been entered several times with different versions/spellings of the street name and/or different floors. - `is_primary` indicates the first in a group of addresses. + `.is_primary` indicates the first in a group of `Address` objects. 
""" return self.id == self._primary_id diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index 00305b2..2a36ced 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,4 +1,4 @@ -"""Provide the ORM's City model.""" +"""Provide the ORM's `City` model.""" from typing import Dict @@ -10,7 +10,7 @@ from urban_meal_delivery.db import meta class City(meta.Base): - """A City where the UDP operates in.""" + """A city where the UDP operates in.""" __tablename__ = 'cities' diff --git a/src/urban_meal_delivery/db/couriers.py b/src/urban_meal_delivery/db/couriers.py index 9087982..a4c85ca 100644 --- a/src/urban_meal_delivery/db/couriers.py +++ b/src/urban_meal_delivery/db/couriers.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Courier model.""" +"""Provide the ORM's `Courier` model.""" import sqlalchemy as sa from sqlalchemy import orm @@ -8,7 +8,7 @@ from urban_meal_delivery.db import meta class Courier(meta.Base): - """A Courier working for the UDP.""" + """A courier working for the UDP.""" __tablename__ = 'couriers' diff --git a/src/urban_meal_delivery/db/customers.py b/src/urban_meal_delivery/db/customers.py index 451ec92..2a96d9a 100644 --- a/src/urban_meal_delivery/db/customers.py +++ b/src/urban_meal_delivery/db/customers.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Customer model.""" +"""Provide the ORM's `Customer` model.""" import sqlalchemy as sa from sqlalchemy import orm @@ -7,7 +7,7 @@ from urban_meal_delivery.db import meta class Customer(meta.Base): - """A Customer of the UDP.""" + """A customer of the UDP.""" __tablename__ = 'customers' diff --git a/src/urban_meal_delivery/db/orders.py b/src/urban_meal_delivery/db/orders.py index 5bb617c..d3adcdf 100644 --- a/src/urban_meal_delivery/db/orders.py +++ b/src/urban_meal_delivery/db/orders.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Order model.""" +"""Provide the ORM's `Order` model.""" import datetime @@ -10,7 +10,7 @@ from 
urban_meal_delivery.db import meta class Order(meta.Base): # noqa:WPS214 - """An Order by a Customer of the UDP.""" + """An order by a `Customer` of the UDP.""" __tablename__ = 'orders' @@ -325,12 +325,12 @@ class Order(meta.Base): # noqa:WPS214 @property def scheduled(self) -> bool: - """Inverse of Order.ad_hoc.""" + """Inverse of `.ad_hoc`.""" return not self.ad_hoc @property def completed(self) -> bool: - """Inverse of Order.cancelled.""" + """Inverse of `.cancelled`.""" return not self.cancelled @property @@ -353,9 +353,9 @@ class Order(meta.Base): # noqa:WPS214 @property def time_to_accept(self) -> datetime.timedelta: - """Time until a courier accepted an order. + """Time until the `.courier` accepted the order. - This adds the time it took the UDP to notify a courier. + This measures the time it took the UDP to notify the `.courier` after dispatch. """ if not self.dispatch_at: raise RuntimeError('dispatch_at is not set') @@ -365,9 +365,9 @@ class Order(meta.Base): # noqa:WPS214 @property def time_to_react(self) -> datetime.timedelta: - """Time a courier took to accept an order. + """Time the `.courier` took to accept an order. - This time is a subset of Order.time_to_accept. + A subset of `.time_to_accept`. 
""" if not self.courier_notified_at: raise RuntimeError('courier_notified_at is not set') @@ -377,7 +377,7 @@ class Order(meta.Base): # noqa:WPS214 @property def time_to_pickup(self) -> datetime.timedelta: - """Time from a courier's acceptance to arrival at the pickup location.""" + """Time from the `.courier`'s acceptance to arrival at `.pickup_address`.""" if not self.courier_accepted_at: raise RuntimeError('courier_accepted_at is not set') if not self.reached_pickup_at: @@ -386,7 +386,7 @@ class Order(meta.Base): # noqa:WPS214 @property def time_at_pickup(self) -> datetime.timedelta: - """Time a courier stayed at the pickup location.""" + """Time the `.courier` stayed at the `.pickup_address`.""" if not self.reached_pickup_at: raise RuntimeError('reached_pickup_at is not set') if not self.pickup_at: @@ -405,13 +405,13 @@ class Order(meta.Base): # noqa:WPS214 @property def courier_early(self) -> datetime.timedelta: - """Time by which a courier is early for pickup. + """Time by which the `.courier` is early for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the courier is on time or late. + `datetime.timedelta(seconds=0)` if the `.courier` is on time or late. - Goes together with Order.courier_late. + Goes together with `.courier_late`. """ return max( datetime.timedelta(), self.scheduled_pickup_at - self.reached_pickup_at, @@ -419,13 +419,13 @@ class Order(meta.Base): # noqa:WPS214 @property def courier_late(self) -> datetime.timedelta: - """Time by which a courier is late for pickup. + """Time by which the `.courier` is late for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the courier is on time or early. + `datetime.timedelta(seconds=0)` if the `.courier` is on time or early. - Goes together with Order.courier_early. + Goes together with `.courier_early`. 
""" return max( datetime.timedelta(), self.reached_pickup_at - self.scheduled_pickup_at, @@ -433,31 +433,31 @@ class Order(meta.Base): # noqa:WPS214 @property def restaurant_early(self) -> datetime.timedelta: - """Time by which a restaurant is early for pickup. + """Time by which the `.restaurant` is early for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the restaurant is on time or late. + `datetime.timedelta(seconds=0)` if the `.restaurant` is on time or late. - Goes together with Order.restaurant_late. + Goes together with `.restaurant_late`. """ return max(datetime.timedelta(), self.scheduled_pickup_at - self.pickup_at) @property def restaurant_late(self) -> datetime.timedelta: - """Time by which a restaurant is late for pickup. + """Time by which the `.restaurant` is late for pickup. - Measured relative to Order.scheduled_pickup_at. + Measured relative to `.scheduled_pickup_at`. - 0 if the restaurant is on time or early. + `datetime.timedelta(seconds=0)` if the `.restaurant` is on time or early. - Goes together with Order.restaurant_early. + Goes together with `.restaurant_early`. 
""" return max(datetime.timedelta(), self.pickup_at - self.scheduled_pickup_at) @property def time_to_delivery(self) -> datetime.timedelta: - """Time a courier took from pickup to delivery location.""" + """Time the `.courier` took from `.pickup_address` to `.delivery_address`.""" if not self.pickup_at: raise RuntimeError('pickup_at is not set') if not self.reached_delivery_at: @@ -466,7 +466,7 @@ class Order(meta.Base): # noqa:WPS214 @property def time_at_delivery(self) -> datetime.timedelta: - """Time a courier stayed at the delivery location.""" + """Time the `.courier` stayed at the `.delivery_address`.""" if not self.reached_delivery_at: raise RuntimeError('reached_delivery_at is not set') if not self.delivery_at: @@ -475,20 +475,20 @@ class Order(meta.Base): # noqa:WPS214 @property def courier_waited_at_delivery(self) -> datetime.timedelta: - """Time a courier waited at the delivery location.""" + """Time the `.courier` waited at the `.delivery_address`.""" if self._courier_waited_at_delivery: return self.time_at_delivery return datetime.timedelta() @property def delivery_early(self) -> datetime.timedelta: - """Time by which a scheduled order was early. + """Time by which a `.scheduled` order was early. - Measured relative to Order.scheduled_delivery_at. + Measured relative to `.scheduled_delivery_at`. - 0 if the delivery is on time or late. + `datetime.timedelta(seconds=0)` if the delivery is on time or late. - Goes together with Order.delivery_late. + Goes together with `.delivery_late`. """ if not self.scheduled: raise AttributeError('Makes sense only for scheduled orders') @@ -496,13 +496,13 @@ class Order(meta.Base): # noqa:WPS214 @property def delivery_late(self) -> datetime.timedelta: - """Time by which a scheduled order was late. + """Time by which a `.scheduled` order was late. - Measured relative to Order.scheduled_delivery_at. + Measured relative to `.scheduled_delivery_at`. - 0 if the delivery is on time or early. 
+ `datetime.timedelta(seconds=0)` if the delivery is on time or early. - Goes together with Order.delivery_early. + Goes together with `.delivery_early`. """ if not self.scheduled: raise AttributeError('Makes sense only for scheduled orders') @@ -510,7 +510,7 @@ class Order(meta.Base): # noqa:WPS214 @property def total_time(self) -> datetime.timedelta: - """Time from order placement to delivery for an ad-hoc order.""" + """Time from order placement to delivery for an `.ad_hoc` order.""" if self.scheduled: raise AttributeError('Scheduled orders have no total_time') if self.cancelled: diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index f31d7af..1319b56 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -1,4 +1,4 @@ -"""Provide the ORM's Restaurant model.""" +"""Provide the ORM's `Restaurant` model.""" import sqlalchemy as sa from sqlalchemy import orm @@ -7,7 +7,12 @@ from urban_meal_delivery.db import meta class Restaurant(meta.Base): - """A Restaurant selling meals on the UDP.""" + """A restaurant selling meals on the UDP. + + In the historic dataset, a `Restaurant` may have changed its `Address` + throughout its life time. The ORM model only stores the current one, + which in most cases is also the only one. 
+ """ __tablename__ = 'restaurants' diff --git a/tests/db/conftest.py b/tests/db/conftest.py index 2508161..fcacfe7 100644 --- a/tests/db/conftest.py +++ b/tests/db/conftest.py @@ -1,23 +1,23 @@ """Utils for testing the ORM layer.""" -import datetime - import pytest from alembic import command as migrations_cmd from alembic import config as migrations_config +from sqlalchemy import orm +from tests.db import fake_data from urban_meal_delivery import config from urban_meal_delivery import db @pytest.fixture(scope='session', params=['all_at_once', 'sequentially']) -def db_engine(request): +def db_connection(request): """Create all tables given the ORM models. The tables are put into a distinct PostgreSQL schema that is removed after all tests are over. - The engine used to do that is yielded. + The database connection used to do that is yielded. There are two modes for this fixture: @@ -27,38 +27,40 @@ def db_engine(request): This ensures that Alembic's migration files are consistent. """ engine = db.make_engine() + connection = engine.connect() if request.param == 'all_at_once': engine.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};') - db.Base.metadata.create_all(engine) + db.Base.metadata.create_all(connection) else: cfg = migrations_config.Config('alembic.ini') migrations_cmd.upgrade(cfg, 'head') try: - yield engine + yield connection finally: - engine.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;') + connection.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;') if request.param == 'sequentially': tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}' - engine.execute( + connection.execute( f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};', ) + connection.close() + @pytest.fixture -def db_session(db_engine): +def db_session(db_connection): """A SQLAlchemy session that rolls back everything after a test case.""" - connection = db_engine.connect() # Begin the outer most transaction # that is rolled back at the end of the 
test. - transaction = connection.begin() + transaction = db_connection.begin() # Create a session bound on the same connection as the transaction. # Using any other session would not work. - Session = db.make_session_factory() # noqa:N806 - session = Session(bind=connection) + session_factory = orm.sessionmaker() + session = session_factory(bind=db_connection) try: yield session @@ -66,198 +68,20 @@ def db_session(db_engine): finally: session.close() transaction.rollback() - connection.close() -@pytest.fixture -def address_data(): - """The data for an Address object in Paris.""" - return { - 'id': 1, - '_primary_id': 1, # => "itself" - 'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5), - 'place_id': 'ChIJxSr71vZt5kcRoFHY4caCCxw', - 'latitude': 48.85313, - 'longitude': 2.37461, - '_city_id': 1, - 'city_name': 'St. German', - 'zip_code': '75011', - 'street': '42 Rue De Charonne', - 'floor': None, - } +# Import the fixtures from the `fake_data` sub-package. +make_address = fake_data.make_address +make_courier = fake_data.make_courier +make_customer = fake_data.make_customer +make_order = fake_data.make_order +make_restaurant = fake_data.make_restaurant -@pytest.fixture -def address(address_data, city): - """An Address object.""" - address = db.Address(**address_data) - address.city = city - return address - - -@pytest.fixture -def address2_data(): - """The data for an Address object in Paris.""" - return { - 'id': 2, - '_primary_id': 2, # => "itself" - 'created_at': datetime.datetime(2020, 1, 2, 4, 5, 6), - 'place_id': 'ChIJs-9a6QZy5kcRY8Wwk9Ywzl8', - 'latitude': 48.852196, - 'longitude': 2.373937, - '_city_id': 1, - 'city_name': 'Paris', - 'zip_code': '75011', - 'street': 'Rue De Charonne 3', - 'floor': 2, - } - - -@pytest.fixture -def address2(address2_data, city): - """An Address object.""" - address2 = db.Address(**address2_data) - address2.city = city - return address2 - - -@pytest.fixture -def city_data(): - """The data for the City object modeling Paris.""" 
- return { - 'id': 1, - 'name': 'Paris', - 'kml': " ...", - '_center_latitude': 48.856614, - '_center_longitude': 2.3522219, - '_northeast_latitude': 48.9021449, - '_northeast_longitude': 2.4699208, - '_southwest_latitude': 48.815573, - '_southwest_longitude': 2.225193, - 'initial_zoom': 12, - } - - -@pytest.fixture -def city(city_data): - """A City object.""" - return db.City(**city_data) - - -@pytest.fixture -def courier_data(): - """The data for a Courier object.""" - return { - 'id': 1, - 'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5), - 'vehicle': 'bicycle', - 'historic_speed': 7.89, - 'capacity': 100, - 'pay_per_hour': 750, - 'pay_per_order': 200, - } - - -@pytest.fixture -def courier(courier_data): - """A Courier object.""" - return db.Courier(**courier_data) - - -@pytest.fixture -def customer_data(): - """The data for the Customer object.""" - return {'id': 1} - - -@pytest.fixture -def customer(customer_data): - """A Customer object.""" - return db.Customer(**customer_data) - - -@pytest.fixture -def order_data(): - """The data for an ad-hoc Order object.""" - return { - 'id': 1, - '_delivery_id': 1, - '_customer_id': 1, - 'placed_at': datetime.datetime(2020, 1, 2, 11, 55, 11), - 'ad_hoc': True, - 'scheduled_delivery_at': None, - 'scheduled_delivery_at_corrected': None, - 'first_estimated_delivery_at': datetime.datetime(2020, 1, 2, 12, 35, 0), - 'cancelled': False, - 'cancelled_at': None, - 'cancelled_at_corrected': None, - 'sub_total': 2000, - 'delivery_fee': 250, - 'total': 2250, - '_restaurant_id': 1, - 'restaurant_notified_at': datetime.datetime(2020, 1, 2, 12, 5, 5), - 'restaurant_notified_at_corrected': False, - 'restaurant_confirmed_at': datetime.datetime(2020, 1, 2, 12, 5, 25), - 'restaurant_confirmed_at_corrected': False, - 'estimated_prep_duration': 900, - 'estimated_prep_duration_corrected': False, - 'estimated_prep_buffer': 480, - '_courier_id': 1, - 'dispatch_at': datetime.datetime(2020, 1, 2, 12, 5, 1), - 'dispatch_at_corrected': False, - 
'courier_notified_at': datetime.datetime(2020, 1, 2, 12, 6, 2), - 'courier_notified_at_corrected': False, - 'courier_accepted_at': datetime.datetime(2020, 1, 2, 12, 6, 17), - 'courier_accepted_at_corrected': False, - 'utilization': 50, - '_pickup_address_id': 1, - 'reached_pickup_at': datetime.datetime(2020, 1, 2, 12, 16, 21), - 'pickup_at': datetime.datetime(2020, 1, 2, 12, 18, 1), - 'pickup_at_corrected': False, - 'pickup_not_confirmed': False, - 'left_pickup_at': datetime.datetime(2020, 1, 2, 12, 19, 45), - 'left_pickup_at_corrected': False, - '_delivery_address_id': 2, - 'reached_delivery_at': datetime.datetime(2020, 1, 2, 12, 27, 33), - 'delivery_at': datetime.datetime(2020, 1, 2, 12, 29, 55), - 'delivery_at_corrected': False, - 'delivery_not_confirmed': False, - '_courier_waited_at_delivery': False, - 'logged_delivery_distance': 500, - 'logged_avg_speed': 7.89, - 'logged_avg_speed_distance': 490, - } - - -@pytest.fixture -def order( # noqa:WPS211 pylint:disable=too-many-arguments - order_data, customer, restaurant, courier, address, address2, -): - """An Order object.""" - order = db.Order(**order_data) - order.customer = customer - order.restaurant = restaurant - order.courier = courier - order.pickup_address = address - order.delivery_address = address2 - return order - - -@pytest.fixture -def restaurant_data(): - """The data for the Restaurant object.""" - return { - 'id': 1, - 'created_at': datetime.datetime(2020, 1, 2, 3, 4, 5), - 'name': 'Vevay', - '_address_id': 1, - 'estimated_prep_duration': 1000, - } - - -@pytest.fixture -def restaurant(restaurant_data, address): - """A Restaurant object.""" - restaurant = db.Restaurant(**restaurant_data) - restaurant.address = address - return restaurant +address = fake_data.address +city = fake_data.city +city_data = fake_data.city_data +courier = fake_data.courier +customer = fake_data.customer +order = fake_data.order +restaurant = fake_data.restaurant diff --git a/tests/db/fake_data/__init__.py 
b/tests/db/fake_data/__init__.py new file mode 100644 index 0000000..f6b879c --- /dev/null +++ b/tests/db/fake_data/__init__.py @@ -0,0 +1,14 @@ +"""Fixtures for testing the ORM layer with fake data.""" + +from tests.db.fake_data.fixture_makers import make_address # noqa:F401 +from tests.db.fake_data.fixture_makers import make_courier # noqa:F401 +from tests.db.fake_data.fixture_makers import make_customer # noqa:F401 +from tests.db.fake_data.fixture_makers import make_order # noqa:F401 +from tests.db.fake_data.fixture_makers import make_restaurant # noqa:F401 +from tests.db.fake_data.static_fixtures import address # noqa:F401 +from tests.db.fake_data.static_fixtures import city # noqa:F401 +from tests.db.fake_data.static_fixtures import city_data # noqa:F401 +from tests.db.fake_data.static_fixtures import courier # noqa:F401 +from tests.db.fake_data.static_fixtures import customer # noqa:F401 +from tests.db.fake_data.static_fixtures import order # noqa:F401 +from tests.db.fake_data.static_fixtures import restaurant # noqa:F401 diff --git a/tests/db/fake_data/factories.py b/tests/db/fake_data/factories.py new file mode 100644 index 0000000..0758667 --- /dev/null +++ b/tests/db/fake_data/factories.py @@ -0,0 +1,366 @@ +"""Factories to create instances for the SQLAlchemy models.""" + +import datetime as dt +import random +import string + +import factory +import faker +from factory import alchemy +from geopy import distance + +from urban_meal_delivery import db + + +def _random_timespan( # noqa:WPS211 + *, + min_hours=0, + min_minutes=0, + min_seconds=0, + max_hours=0, + max_minutes=0, + max_seconds=0, +): + """A randomized `timedelta` object between the specified arguments.""" + total_min_seconds = min_hours * 3600 + min_minutes * 60 + min_seconds + total_max_seconds = max_hours * 3600 + max_minutes * 60 + max_seconds + return dt.timedelta(seconds=random.randint(total_min_seconds, total_max_seconds)) + + +# The test day. 
+_YEAR, _MONTH, _DAY = 2020, 1, 1 + + +def _early_in_the_morning(): + """A randomized `datetime` object early in the morning.""" + return dt.datetime(_YEAR, _MONTH, _DAY, 3, 0) + _random_timespan(max_hours=2) + + +class AddressFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Address` model.""" + + class Meta: + model = db.Address + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + created_at = factory.LazyFunction(_early_in_the_morning) + + # When testing, all addresses are considered primary ones. + # As non-primary addresses have no different behavior and + # the property is only kept from the original dataset for + # completeness sake, that is ok to do. + _primary_id = factory.LazyAttribute(lambda obj: obj.id) + + # Mimic a Google Maps Place ID with just random characters. + place_id = factory.LazyFunction( + lambda: ''.join(random.choice(string.ascii_lowercase) for _ in range(20)), + ) + + # Place the addresses somewhere in downtown Paris. 
+ latitude = factory.Faker('coordinate', center=48.855, radius=0.01) + longitude = factory.Faker('coordinate', center=2.34, radius=0.03) + # city -> set by the `make_address` fixture as there is only one `city` + city_name = 'Paris' + zip_code = factory.LazyFunction(lambda: random.randint(75001, 75020)) + street = factory.Faker('street_address', locale='fr_FR') + + +class CourierFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Courier` model.""" + + class Meta: + model = db.Courier + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + created_at = factory.LazyFunction(_early_in_the_morning) + vehicle = 'bicycle' + historic_speed = 7.89 + capacity = 100 + pay_per_hour = 750 + pay_per_order = 200 + + +class CustomerFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Customer` model.""" + + class Meta: + model = db.Customer + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + + +_restaurant_names = faker.Faker() + + +class RestaurantFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Restaurant` model.""" + + class Meta: + model = db.Restaurant + sqlalchemy_get_or_create = ('id',) + + id = factory.Sequence(lambda num: num) # noqa:WPS125 + created_at = factory.LazyFunction(_early_in_the_morning) + name = factory.LazyFunction( + lambda: f"{_restaurant_names.first_name()}'s Restaurant", + ) + # address -> set by the `make_restaurant` fixture as there is only one `city` + estimated_prep_duration = 1000 + + +class AdHocOrderFactory(alchemy.SQLAlchemyModelFactory): + """Create instances of the `db.Order` model. + + This factory creates ad-hoc `Order`s while the `ScheduledOrderFactory` + below creates pre-orders. They are split into two classes mainly + because the logic regarding how the timestamps are calculated from + each other differs. 
+ + See the docstring in the contained `Params` class for + flags to adapt how the `Order` is created. + """ + + # pylint:disable=too-many-instance-attributes + + class Meta: + model = db.Order + sqlalchemy_get_or_create = ('id',) + + class Params: + """Define flags that overwrite some attributes. + + The `factory.Trait` objects in this class are executed after all + the normal attributes in the `OrderFactory` classes were evaluated. + + Flags: + cancel_before_pickup + cancel_after_pickup + """ + + # Timestamps after `cancelled_at` are discarded + # by the `post_generation` hook at the end of the `OrderFactory`. + cancel_ = factory.Trait( # noqa:WPS120 -> leading underscore does not work + cancelled=True, cancelled_at_corrected=False, + ) + cancel_before_pickup = factory.Trait( + cancel_=True, + cancelled_at=factory.LazyAttribute( + lambda obj: obj.dispatch_at + + _random_timespan( + max_seconds=(obj.pickup_at - obj.dispatch_at).total_seconds(), + ), + ), + ) + cancel_after_pickup = factory.Trait( + cancel_=True, + cancelled_at=factory.LazyAttribute( + lambda obj: obj.pickup_at + + _random_timespan( + max_seconds=(obj.delivery_at - obj.pickup_at).total_seconds(), + ), + ), + ) + + # Generic attributes + id = factory.Sequence(lambda num: num) # noqa:WPS125 + # customer -> set by the `make_order` fixture for better control + + # Attributes regarding the specialization of an `Order`: ad-hoc or scheduled. + # Ad-hoc `Order`s are placed between 11.45 and 14.15. + placed_at = factory.LazyFunction( + lambda: dt.datetime(_YEAR, _MONTH, _DAY, 11, 45) + + _random_timespan(max_hours=2, max_minutes=30), + ) + ad_hoc = True + scheduled_delivery_at = None + scheduled_delivery_at_corrected = None + # Without statistical info, we assume an ad-hoc `Order` delivered after 45 minutes. + first_estimated_delivery_at = factory.LazyAttribute( + lambda obj: obj.placed_at + dt.timedelta(minutes=45), + ) + + # Attributes regarding the cancellation of an `Order`. 
+ # May be overwritten with the `cancel_before_pickup` or `cancel_after_pickup` flags. + cancelled = False + cancelled_at = None + cancelled_at_corrected = None + + # Price-related attributes -> sample realistic prices + sub_total = factory.LazyFunction(lambda: 100 * random.randint(15, 25)) + delivery_fee = 250 + total = factory.LazyAttribute(lambda obj: obj.sub_total + obj.delivery_fee) + + # Restaurant-related attributes + # restaurant -> set by the `make_order` fixture for better control + restaurant_notified_at = factory.LazyAttribute( + lambda obj: obj.placed_at + _random_timespan(min_seconds=30, max_seconds=90), + ) + restaurant_notified_at_corrected = False + restaurant_confirmed_at = factory.LazyAttribute( + lambda obj: obj.restaurant_notified_at + + _random_timespan(min_seconds=30, max_seconds=150), + ) + restaurant_confirmed_at_corrected = False + # Use the database defaults of the historic data. + estimated_prep_duration = 900 + estimated_prep_duration_corrected = False + estimated_prep_buffer = 480 + + # Dispatch-related columns + # courier -> set by the `make_order` fixture for better control + dispatch_at = factory.LazyAttribute( + lambda obj: obj.placed_at + _random_timespan(min_seconds=600, max_seconds=1080), + ) + dispatch_at_corrected = False + courier_notified_at = factory.LazyAttribute( + lambda obj: obj.dispatch_at + + _random_timespan(min_seconds=100, max_seconds=140), + ) + courier_notified_at_corrected = False + courier_accepted_at = factory.LazyAttribute( + lambda obj: obj.courier_notified_at + + _random_timespan(min_seconds=15, max_seconds=45), + ) + courier_accepted_at_corrected = False + # Sample a realistic utilization. 
+ utilization = factory.LazyFunction(lambda: random.choice([50, 60, 70, 80, 90, 100])) + + # Pickup-related attributes + # pickup_address -> aligned with `restaurant.address` by the `make_order` fixture + reached_pickup_at = factory.LazyAttribute( + lambda obj: obj.courier_accepted_at + + _random_timespan(min_seconds=300, max_seconds=600), + ) + pickup_at = factory.LazyAttribute( + lambda obj: obj.reached_pickup_at + + _random_timespan(min_seconds=120, max_seconds=600), + ) + pickup_at_corrected = False + pickup_not_confirmed = False + left_pickup_at = factory.LazyAttribute( + lambda obj: obj.pickup_at + _random_timespan(min_seconds=60, max_seconds=180), + ) + left_pickup_at_corrected = False + + # Delivery-related attributes + # delivery_address -> set by the `make_order` fixture as there is only one `city` + reached_delivery_at = factory.LazyAttribute( + lambda obj: obj.left_pickup_at + + _random_timespan(min_seconds=240, max_seconds=480), + ) + delivery_at = factory.LazyAttribute( + lambda obj: obj.reached_delivery_at + + _random_timespan(min_seconds=240, max_seconds=660), + ) + delivery_at_corrected = False + delivery_not_confirmed = False + _courier_waited_at_delivery = factory.LazyAttribute( + lambda obj: False if obj.delivery_at else None, + ) + + # Statistical attributes -> calculate realistic stats + logged_delivery_distance = factory.LazyAttribute( + lambda obj: distance.great_circle( # noqa:WPS317 + (obj.pickup_address.latitude, obj.pickup_address.longitude), + (obj.delivery_address.latitude, obj.delivery_address.longitude), + ).meters, + ) + logged_avg_speed = factory.LazyAttribute( # noqa:ECE001 + lambda obj: round( + ( + obj.logged_avg_speed_distance + / (obj.delivery_at - obj.pickup_at).total_seconds() + ), + 2, + ), + ) + logged_avg_speed_distance = factory.LazyAttribute( + lambda obj: 0.95 * obj.logged_delivery_distance, + ) + + @factory.post_generation + def post( # noqa:C901,WPS23 pylint:disable=unused-argument + obj, create, extracted, **kwargs, 
# noqa:B902,N805 + ): + """Discard timestamps that occur after cancellation.""" + if obj.cancelled: + if obj.cancelled_at <= obj.restaurant_notified_at: + obj.restaurant_notified_at = None + obj.restaurant_notified_at_corrected = None + if obj.cancelled_at <= obj.restaurant_confirmed_at: + obj.restaurant_confirmed_at = None + obj.restaurant_confirmed_at_corrected = None + if obj.cancelled_at <= obj.dispatch_at: + obj.dispatch_at = None + obj.dispatch_at_corrected = None + if obj.cancelled_at <= obj.courier_notified_at: + obj.courier_notified_at = None + obj.courier_notified_at_corrected = None + if obj.cancelled_at <= obj.courier_accepted_at: + obj.courier_accepted_at = None + obj.courier_accepted_at_corrected = None + if obj.cancelled_at <= obj.reached_pickup_at: + obj.reached_pickup_at = None + if obj.cancelled_at <= obj.pickup_at: + obj.pickup_at = None + obj.pickup_at_corrected = None + obj.pickup_not_confirmed = None + if obj.cancelled_at <= obj.left_pickup_at: + obj.left_pickup_at = None + obj.left_pickup_at_corrected = None + if obj.cancelled_at <= obj.reached_delivery_at: + obj.reached_delivery_at = None + if obj.cancelled_at <= obj.delivery_at: + obj.delivery_at = None + obj.delivery_at_corrected = None + obj.delivery_not_confirmed = None + obj._courier_waited_at_delivery = None # noqa:WPS437 + + +class ScheduledOrderFactory(AdHocOrderFactory): + """Create instances of the `db.Order` model. + + This class takes care of the various timestamps for pre-orders. + + Pre-orders are placed long before the test day's lunch time starts. + All timestamps are relative to either `.dispatch_at` or `.restaurant_notified_at` + and calculated backwards from `.scheduled_delivery_at`. + """ + + # Attributes regarding the specialization of an `Order`: ad-hoc or scheduled. + placed_at = factory.LazyFunction(_early_in_the_morning) + ad_hoc = False + # Discrete `datetime` objects in the "core" lunch time are enough. 
+ scheduled_delivery_at = factory.LazyFunction( + lambda: random.choice( + [ + dt.datetime(_YEAR, _MONTH, _DAY, 12, 0), + dt.datetime(_YEAR, _MONTH, _DAY, 12, 15), + dt.datetime(_YEAR, _MONTH, _DAY, 12, 30), + dt.datetime(_YEAR, _MONTH, _DAY, 12, 45), + dt.datetime(_YEAR, _MONTH, _DAY, 13, 0), + dt.datetime(_YEAR, _MONTH, _DAY, 13, 15), + dt.datetime(_YEAR, _MONTH, _DAY, 13, 30), + ], + ), + ) + scheduled_delivery_at_corrected = False + # Assume the `Order` is on time. + first_estimated_delivery_at = factory.LazyAttribute( + lambda obj: obj.scheduled_delivery_at, + ) + + # Restaurant-related attributes + restaurant_notified_at = factory.LazyAttribute( + lambda obj: obj.scheduled_delivery_at + - _random_timespan(min_minutes=45, max_minutes=50), + ) + + # Dispatch-related attributes + dispatch_at = factory.LazyAttribute( + lambda obj: obj.scheduled_delivery_at + - _random_timespan(min_minutes=40, max_minutes=45), + ) diff --git a/tests/db/fake_data/fixture_makers.py b/tests/db/fake_data/fixture_makers.py new file mode 100644 index 0000000..9a5419b --- /dev/null +++ b/tests/db/fake_data/fixture_makers.py @@ -0,0 +1,105 @@ +"""Fixture factories for testing the ORM layer with fake data.""" + +import pytest + +from tests.db.fake_data import factories + + +@pytest.fixture +def make_address(city): + """Replaces `AddressFactory.build()`: Create an `Address` in the `city`.""" + # Reset the identifiers before every test. + factories.AddressFactory.reset_sequence(1) + + def func(**kwargs): + """Create an `Address` object in the `city`.""" + return factories.AddressFactory.build(city=city, **kwargs) + + return func + + +@pytest.fixture +def make_courier(): + """Replaces `CourierFactory.build()`: Create a `Courier`.""" + # Reset the identifiers before every test. 
+ factories.CourierFactory.reset_sequence(1) + + def func(**kwargs): + """Create a new `Courier` object.""" + return factories.CourierFactory.build(**kwargs) + + return func + + +@pytest.fixture +def make_customer(): + """Replaces `CustomerFactory.build()`: Create a `Customer`.""" + # Reset the identifiers before every test. + factories.CustomerFactory.reset_sequence(1) + + def func(**kwargs): + """Create a new `Customer` object.""" + return factories.CustomerFactory.build(**kwargs) + + return func + + +@pytest.fixture +def make_restaurant(make_address): + """Replaces `RestaurantFactory.build()`: Create a `Restaurant`.""" + # Reset the identifiers before every test. + factories.RestaurantFactory.reset_sequence(1) + + def func(address=None, **kwargs): + """Create a new `Restaurant` object. + + If no `address` is provided, a new `Address` is created. + """ + if address is None: + address = make_address() + + return factories.RestaurantFactory.build(address=address, **kwargs) + + return func + + +@pytest.fixture +def make_order(make_address, make_courier, make_customer, make_restaurant): + """Replaces `OrderFactory.build()`: Create a `Order`.""" + # Reset the identifiers before every test. + factories.AdHocOrderFactory.reset_sequence(1) + + def func(scheduled=False, restaurant=None, courier=None, **kwargs): + """Create a new `Order` object. + + Each `Order` is made by a new `Customer` with a unique `Address` for delivery. 
+ + Args: + scheduled: if an `Order` is a pre-order + restaurant: who receives the `Order`; defaults to a new `Restaurant` + courier: who delivered the `Order`; defaults to a new `Courier` + kwargs: additional keyword arguments forwarded to the `OrderFactory` + + Returns: + order + """ + if scheduled: + factory_cls = factories.ScheduledOrderFactory + else: + factory_cls = factories.AdHocOrderFactory + + if restaurant is None: + restaurant = make_restaurant() + if courier is None: + courier = make_courier() + + return factory_cls.build( + customer=make_customer(), # assume a unique `Customer` per order + restaurant=restaurant, + courier=courier, + pickup_address=restaurant.address, # no `Address` history + delivery_address=make_address(), # unique `Customer` => new `Address` + **kwargs, + ) + + return func diff --git a/tests/db/fake_data/static_fixtures.py b/tests/db/fake_data/static_fixtures.py new file mode 100644 index 0000000..df7d5b7 --- /dev/null +++ b/tests/db/fake_data/static_fixtures.py @@ -0,0 +1,58 @@ +"""Fake data for testing the ORM layer.""" + +import pytest + +from urban_meal_delivery import db + + +@pytest.fixture +def city_data(): + """The data for the one and only `City` object as a `dict`.""" + return { + 'id': 1, + 'name': 'Paris', + 'kml': " ...", + '_center_latitude': 48.856614, + '_center_longitude': 2.3522219, + '_northeast_latitude': 48.9021449, + '_northeast_longitude': 2.4699208, + '_southwest_latitude': 48.815573, + '_southwest_longitude': 2.225193, + 'initial_zoom': 12, + } + + +@pytest.fixture +def city(city_data): + """The one and only `City` object.""" + return db.City(**city_data) + + +@pytest.fixture +def address(make_address): + """An `Address` object in the `city`.""" + return make_address() + + +@pytest.fixture +def courier(make_courier): + """A `Courier` object.""" + return make_courier() + + +@pytest.fixture +def customer(make_customer): + """A `Customer` object.""" + return make_customer() + + +@pytest.fixture +def 
restaurant(address, make_restaurant): + """A `Restaurant` object located at the `address`.""" + return make_restaurant(address=address) + + +@pytest.fixture +def order(make_order, restaurant): + """An `Order` object for the `restaurant`.""" + return make_order(restaurant=restaurant) diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index ffb5618..4086f9c 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -1,140 +1,123 @@ -"""Test the ORM's Address model.""" +"""Test the ORM's `Address` model.""" +# pylint:disable=no-self-use,protected-access import pytest +import sqlalchemy as sqla from sqlalchemy import exc as sa_exc -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Address.""" + """Test special methods in `Address`.""" - # pylint:disable=no-self-use - - def test_create_address(self, address_data): - """Test instantiation of a new Address object.""" - result = db.Address(**address_data) - - assert result is not None - - def test_text_representation(self, address_data): - """Address has a non-literal text representation.""" - address = db.Address(**address_data) - street = address_data['street'] - city_name = address_data['city_name'] + def test_create_address(self, address): + """Test instantiation of a new `Address` object.""" + assert address is not None + def test_text_representation(self, address): + """`Address` has a non-literal text representation.""" result = repr(address) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Address.""" + """Test the database constraints defined in `Address`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, address): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Address).count() == 0 - def 
test_insert_into_database(self, address, db_session): - """Insert an instance into the database.""" db_session.add(address) db_session.commit() - def test_dublicate_primary_key(self, address, address_data, city, db_session): - """Can only add a record once.""" + assert db_session.query(db.Address).count() == 1 + + def test_delete_a_referenced_address(self, db_session, address, make_address): + """Remove a record that is referenced with a FK.""" db_session.add(address) + # Fake another_address that has the same `._primary_id` as `address`. + db_session.add(make_address(_primary_id=address.id)) db_session.commit() - another_address = db.Address(**address_data) - another_address.city = city - db_session.add(another_address) + db_session.delete(address) - with pytest.raises(orm_exc.FlushError): + with pytest.raises( + sa_exc.IntegrityError, match='fk_addresses_to_addresses_via_primary_id', + ): db_session.commit() - def test_delete_a_referenced_address(self, address, address_data, db_session): + def test_delete_a_referenced_city(self, db_session, address): """Remove a record that is referenced with a FK.""" db_session.add(address) db_session.commit() - # Fake a second address that belongs to the same primary address. - address_data['id'] += 1 - another_address = db.Address(**address_data) - db_session.add(another_address) - db_session.commit() + # Must delete without ORM as otherwise an UPDATE statement is emitted. 
+ stmt = sqla.delete(db.City).where(db.City.id == address.city.id) - with pytest.raises(sa_exc.IntegrityError): - db_session.execute( - db.Address.__table__.delete().where( # noqa:WPS609 - db.Address.id == address.id, - ), - ) - - def test_delete_a_referenced_city(self, address, city, db_session): - """Remove a record that is referenced with a FK.""" - db_session.add(address) - db_session.commit() - - with pytest.raises(sa_exc.IntegrityError): - db_session.execute( - db.City.__table__.delete().where(db.City.id == city.id), # noqa:WPS609 - ) + with pytest.raises( + sa_exc.IntegrityError, match='fk_addresses_to_cities_via_city_id', + ): + db_session.execute(stmt) @pytest.mark.parametrize('latitude', [-91, 91]) - def test_invalid_latitude(self, address, db_session, latitude): + def test_invalid_latitude(self, db_session, address, latitude): """Insert an instance with invalid data.""" address.latitude = latitude db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises( + sa_exc.IntegrityError, match='latitude_between_90_degrees', + ): db_session.commit() @pytest.mark.parametrize('longitude', [-181, 181]) - def test_invalid_longitude(self, address, db_session, longitude): + def test_invalid_longitude(self, db_session, address, longitude): """Insert an instance with invalid data.""" address.longitude = longitude db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises( + sa_exc.IntegrityError, match='longitude_between_180_degrees', + ): db_session.commit() @pytest.mark.parametrize('zip_code', [-1, 0, 9999, 100000]) - def test_invalid_zip_code(self, address, db_session, zip_code): + def test_invalid_zip_code(self, db_session, address, zip_code): """Insert an instance with invalid data.""" address.zip_code = zip_code db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='valid_zip_code'): db_session.commit() @pytest.mark.parametrize('floor', 
[-1, 41]) - def test_invalid_floor(self, address, db_session, floor): + def test_invalid_floor(self, db_session, address, floor): """Insert an instance with invalid data.""" address.floor = floor db_session.add(address) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_floor'): db_session.commit() class TestProperties: - """Test properties in Address.""" + """Test properties in `Address`.""" - # pylint:disable=no-self-use - - def test_is_primary(self, address_data): - """Test Address.is_primary property.""" - address = db.Address(**address_data) + def test_is_primary(self, address): + """Test `Address.is_primary` property.""" + assert address.id == address._primary_id # noqa:WPS437 result = address.is_primary assert result is True - def test_is_not_primary(self, address_data): - """Test Address.is_primary property.""" - address_data['_primary_id'] = 999 - address = db.Address(**address_data) + def test_is_not_primary(self, address): + """Test `Address.is_primary` property.""" + address._primary_id = 999 # noqa:WPS437 result = address.is_primary diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index 50a7ecb..51aefc7 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -1,65 +1,45 @@ -"""Test the ORM's City model.""" +"""Test the ORM's `City` model.""" +# pylint:disable=no-self-use import pytest -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in City.""" + """Test special methods in `City`.""" - # pylint:disable=no-self-use - - def test_create_city(self, city_data): - """Test instantiation of a new City object.""" - result = db.City(**city_data) - - assert result is not None - - def test_text_representation(self, city_data): - """City has a non-literal text representation.""" - city = db.City(**city_data) - name = city_data['name'] + def test_create_city(self, city): + """Test instantiation of a new 
`City` object.""" + assert city is not None + def test_text_representation(self, city): + """`City` has a non-literal text representation.""" result = repr(city) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in City.""" + """Test the database constraints defined in `City`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, city): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.City).count() == 0 - def test_insert_into_database(self, city, db_session): - """Insert an instance into the database.""" db_session.add(city) db_session.commit() - def test_dublicate_primary_key(self, city, city_data, db_session): - """Can only add a record once.""" - db_session.add(city) - db_session.commit() - - another_city = db.City(**city_data) - db_session.add(another_city) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() + assert db_session.query(db.City).count() == 1 class TestProperties: - """Test properties in City.""" - - # pylint:disable=no-self-use - - def test_location_data(self, city_data): - """Test City.location property.""" - city = db.City(**city_data) + """Test properties in `City`.""" + def test_location_data(self, city, city_data): + """Test `City.location` property.""" result = city.location assert isinstance(result, dict) @@ -67,33 +47,19 @@ class TestProperties: assert result['latitude'] == pytest.approx(city_data['_center_latitude']) assert result['longitude'] == pytest.approx(city_data['_center_longitude']) - def test_viewport_data_overall(self, city_data): - """Test City.viewport property.""" - city = db.City(**city_data) - + def test_viewport_data_overall(self, city): + """Test `City.viewport` property.""" result = city.viewport assert isinstance(result, dict) assert len(result) == 2 - def test_viewport_data_northeast(self, city_data): - """Test City.viewport 
property.""" - city = db.City(**city_data) - - result = city.viewport['northeast'] + @pytest.mark.parametrize('corner', ['northeast', 'southwest']) + def test_viewport_data_corners(self, city, city_data, corner): + """Test `City.viewport` property.""" + result = city.viewport[corner] assert isinstance(result, dict) assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data['_northeast_latitude']) - assert result['longitude'] == pytest.approx(city_data['_northeast_longitude']) - - def test_viewport_data_southwest(self, city_data): - """Test City.viewport property.""" - city = db.City(**city_data) - - result = city.viewport['southwest'] - - assert isinstance(result, dict) - assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data['_southwest_latitude']) - assert result['longitude'] == pytest.approx(city_data['_southwest_longitude']) + assert result['latitude'] == pytest.approx(city_data[f'_{corner}_latitude']) + assert result['longitude'] == pytest.approx(city_data[f'_{corner}_longitude']) diff --git a/tests/db/test_couriers.py b/tests/db/test_couriers.py index a3ba103..3db047e 100644 --- a/tests/db/test_couriers.py +++ b/tests/db/test_couriers.py @@ -1,125 +1,108 @@ -"""Test the ORM's Courier model.""" +"""Test the ORM's `Courier` model.""" +# pylint:disable=no-self-use import pytest from sqlalchemy import exc as sa_exc -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Courier.""" + """Test special methods in `Courier`.""" - # pylint:disable=no-self-use - - def test_create_courier(self, courier_data): - """Test instantiation of a new Courier object.""" - result = db.Courier(**courier_data) - - assert result is not None - - def test_text_representation(self, courier_data): - """Courier has a non-literal text representation.""" - courier_data['id'] = 1 - courier = db.Courier(**courier_data) - id_ = courier_data['id'] + def 
test_create_courier(self, courier): + """Test instantiation of a new `Courier` object.""" + assert courier is not None + def test_text_representation(self, courier): + """`Courier` has a non-literal text representation.""" result = repr(courier) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Courier.""" + """Test the database constraints defined in `Courier`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, courier): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Courier).count() == 0 - def test_insert_into_database(self, courier, db_session): - """Insert an instance into the database.""" db_session.add(courier) db_session.commit() - def test_dublicate_primary_key(self, courier, courier_data, db_session): - """Can only add a record once.""" - db_session.add(courier) - db_session.commit() + assert db_session.query(db.Courier).count() == 1 - another_courier = db.Courier(**courier_data) - db_session.add(another_courier) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() - - def test_invalid_vehicle(self, courier, db_session): + def test_invalid_vehicle(self, db_session, courier): """Insert an instance with invalid data.""" courier.vehicle = 'invalid' db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='available_vehicle_types'): db_session.commit() - def test_negative_speed(self, courier, db_session): + def test_negative_speed(self, db_session, courier): """Insert an instance with invalid data.""" courier.historic_speed = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_speed'): db_session.commit() - def test_unrealistic_speed(self, courier, db_session): + def test_unrealistic_speed(self, db_session, courier): 
"""Insert an instance with invalid data.""" courier.historic_speed = 999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_speed'): db_session.commit() - def test_negative_capacity(self, courier, db_session): + def test_negative_capacity(self, db_session, courier): """Insert an instance with invalid data.""" courier.capacity = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='capacity_under_200_liters'): db_session.commit() - def test_too_much_capacity(self, courier, db_session): + def test_too_much_capacity(self, db_session, courier): """Insert an instance with invalid data.""" courier.capacity = 999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='capacity_under_200_liters'): db_session.commit() - def test_negative_pay_per_hour(self, courier, db_session): + def test_negative_pay_per_hour(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_hour = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_hour'): db_session.commit() - def test_too_much_pay_per_hour(self, courier, db_session): + def test_too_much_pay_per_hour(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_hour = 9999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_hour'): db_session.commit() - def test_negative_pay_per_order(self, courier, db_session): + def test_negative_pay_per_order(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_order = -1 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_order'): 
db_session.commit() - def test_too_much_pay_per_order(self, courier, db_session): + def test_too_much_pay_per_order(self, db_session, courier): """Insert an instance with invalid data.""" courier.pay_per_order = 999 db_session.add(courier) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises(sa_exc.IntegrityError, match='realistic_pay_per_order'): db_session.commit() diff --git a/tests/db/test_customer.py b/tests/db/test_customer.py index 487a11c..5c74f68 100644 --- a/tests/db/test_customer.py +++ b/tests/db/test_customer.py @@ -1,51 +1,35 @@ -"""Test the ORM's Customer model.""" +"""Test the ORM's `Customer` model.""" +# pylint:disable=no-self-use import pytest -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Customer.""" + """Test special methods in `Customer`.""" - # pylint:disable=no-self-use - - def test_create_customer(self, customer_data): - """Test instantiation of a new Customer object.""" - result = db.Customer(**customer_data) - - assert result is not None - - def test_text_representation(self, customer_data): - """Customer has a non-literal text representation.""" - customer = db.Customer(**customer_data) - id_ = customer_data['id'] + def test_create_customer(self, customer): + """Test instantiation of a new `Customer` object.""" + assert customer is not None + def test_text_representation(self, customer): + """`Customer` has a non-literal text representation.""" result = repr(customer) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Customer.""" + """Test the database constraints defined in `Customer`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, customer): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Customer).count() == 0 - def 
test_insert_into_database(self, customer, db_session): - """Insert an instance into the database.""" db_session.add(customer) db_session.commit() - def test_dublicate_primary_key(self, customer, customer_data, db_session): - """Can only add a record once.""" - db_session.add(customer) - db_session.commit() - - another_customer = db.Customer(**customer_data) - db_session.add(another_customer) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() + assert db_session.query(db.Customer).count() == 1 diff --git a/tests/db/test_orders.py b/tests/db/test_orders.py index fa36072..f23e9bb 100644 --- a/tests/db/test_orders.py +++ b/tests/db/test_orders.py @@ -1,57 +1,41 @@ -"""Test the ORM's Order model.""" +"""Test the ORM's `Order` model.""" +# pylint:disable=no-self-use,protected-access import datetime +import random import pytest -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Order.""" + """Test special methods in `Order`.""" - # pylint:disable=no-self-use - - def test_create_order(self, order_data): - """Test instantiation of a new Order object.""" - result = db.Order(**order_data) - - assert result is not None - - def test_text_representation(self, order_data): - """Order has a non-literal text representation.""" - order = db.Order(**order_data) - id_ = order_data['id'] + def test_create_order(self, order): + """Test instantiation of a new `Order` object.""" + assert order is not None + def test_text_representation(self, order): + """`Order` has a non-literal text representation.""" result = repr(order) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Order.""" + """Test the database constraints defined in `Order`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, order): + """Insert an instance into the (empty) 
database.""" + assert db_session.query(db.Order).count() == 0 - def test_insert_into_database(self, order, db_session): - """Insert an instance into the database.""" db_session.add(order) db_session.commit() - def test_dublicate_primary_key(self, order, order_data, city, db_session): - """Can only add a record once.""" - db_session.add(order) - db_session.commit() - - another_order = db.Order(**order_data) - another_order.city = city - db_session.add(another_order) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() + assert db_session.query(db.Order).count() == 1 # TODO (order-constraints): the various Foreign Key and Check Constraints # should be tested eventually. This is not of highest importance as @@ -59,339 +43,431 @@ class TestConstraints: class TestProperties: - """Test properties in Order.""" + """Test properties in `Order`. + + The `order` fixture uses the defaults specified in `factories.OrderFactory` + and provided by the `make_order` fixture. + """ # pylint:disable=no-self-use,too-many-public-methods - def test_is_not_scheduled(self, order_data): - """Test Order.scheduled property.""" - order = db.Order(**order_data) + def test_is_ad_hoc(self, order): + """Test `Order.scheduled` property.""" + assert order.ad_hoc is True result = order.scheduled assert result is False - def test_is_scheduled(self, order_data): - """Test Order.scheduled property.""" - order_data['ad_hoc'] = False - order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0) - order_data['scheduled_delivery_at_corrected'] = False - order = db.Order(**order_data) + def test_is_scheduled(self, make_order): + """Test `Order.scheduled` property.""" + order = make_order(scheduled=True) + assert order.ad_hoc is False result = order.scheduled assert result is True - def test_is_completed(self, order_data): - """Test Order.completed property.""" - order = db.Order(**order_data) - + def test_is_completed(self, order): + """Test `Order.completed` property.""" 
result = order.completed assert result is True - def test_is_not_completed(self, order_data): - """Test Order.completed property.""" - order_data['cancelled'] = True - order_data['cancelled_at'] = datetime.datetime(2020, 1, 2, 12, 15, 0) - order_data['cancelled_at_corrected'] = False - order = db.Order(**order_data) + def test_is_not_completed1(self, make_order): + """Test `Order.completed` property.""" + order = make_order(cancel_before_pickup=True) + assert order.cancelled is True result = order.completed assert result is False - def test_is_corrected(self, order_data): - """Test Order.corrected property.""" - order_data['dispatch_at_corrected'] = True - order = db.Order(**order_data) + def test_is_not_completed2(self, make_order): + """Test `Order.completed` property.""" + order = make_order(cancel_after_pickup=True) + assert order.cancelled is True + + result = order.completed + + assert result is False + + def test_is_not_corrected(self, order): + """Test `Order.corrected` property.""" + # By default, the `OrderFactory` sets all `.*_corrected` attributes to `False`. 
+ result = order.corrected + + assert result is False + + @pytest.mark.parametrize( + 'column', + [ + 'scheduled_delivery_at', + 'cancelled_at', + 'restaurant_notified_at', + 'restaurant_confirmed_at', + 'dispatch_at', + 'courier_notified_at', + 'courier_accepted_at', + 'pickup_at', + 'left_pickup_at', + 'delivery_at', + ], + ) + def test_is_corrected(self, order, column): + """Test `Order.corrected` property.""" + setattr(order, f'{column}_corrected', True) result = order.corrected assert result is True - def test_time_to_accept_no_dispatch_at(self, order_data): - """Test Order.time_to_accept property.""" - order_data['dispatch_at'] = None - order = db.Order(**order_data) + def test_time_to_accept_no_dispatch_at(self, order): + """Test `Order.time_to_accept` property.""" + order.dispatch_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_accept) - def test_time_to_accept_no_courier_accepted(self, order_data): - """Test Order.time_to_accept property.""" - order_data['courier_accepted_at'] = None - order = db.Order(**order_data) + def test_time_to_accept_no_courier_accepted(self, order): + """Test `Order.time_to_accept` property.""" + order.courier_accepted_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_accept) - def test_time_to_accept_success(self, order_data): - """Test Order.time_to_accept property.""" - order = db.Order(**order_data) - + def test_time_to_accept_success(self, order): + """Test `Order.time_to_accept` property.""" result = order.time_to_accept - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_to_react_no_courier_notified(self, order_data): - """Test Order.time_to_react property.""" - order_data['courier_notified_at'] = None - order = db.Order(**order_data) + def test_time_to_react_no_courier_notified(self, order): + """Test `Order.time_to_react` property.""" + order.courier_notified_at = None with pytest.raises(RuntimeError, match='not 
set'): int(order.time_to_react) - def test_time_to_react_no_courier_accepted(self, order_data): - """Test Order.time_to_react property.""" - order_data['courier_accepted_at'] = None - order = db.Order(**order_data) + def test_time_to_react_no_courier_accepted(self, order): + """Test `Order.time_to_react` property.""" + order.courier_accepted_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_react) - def test_time_to_react_success(self, order_data): - """Test Order.time_to_react property.""" - order = db.Order(**order_data) - + def test_time_to_react_success(self, order): + """Test `Order.time_to_react` property.""" result = order.time_to_react - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_to_pickup_no_reached_pickup_at(self, order_data): - """Test Order.time_to_pickup property.""" - order_data['reached_pickup_at'] = None - order = db.Order(**order_data) + def test_time_to_pickup_no_reached_pickup_at(self, order): + """Test `Order.time_to_pickup` property.""" + order.reached_pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_pickup) - def test_time_to_pickup_no_courier_accepted(self, order_data): - """Test Order.time_to_pickup property.""" - order_data['courier_accepted_at'] = None - order = db.Order(**order_data) + def test_time_to_pickup_no_courier_accepted(self, order): + """Test `Order.time_to_pickup` property.""" + order.courier_accepted_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_pickup) - def test_time_to_pickup_success(self, order_data): - """Test Order.time_to_pickup property.""" - order = db.Order(**order_data) - + def test_time_to_pickup_success(self, order): + """Test `Order.time_to_pickup` property.""" result = order.time_to_pickup - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_at_pickup_no_reached_pickup_at(self, order_data): - """Test 
Order.time_at_pickup property.""" - order_data['reached_pickup_at'] = None - order = db.Order(**order_data) + def test_time_at_pickup_no_reached_pickup_at(self, order): + """Test `Order.time_at_pickup` property.""" + order.reached_pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_at_pickup) - def test_time_at_pickup_no_pickup_at(self, order_data): - """Test Order.time_at_pickup property.""" - order_data['pickup_at'] = None - order = db.Order(**order_data) + def test_time_at_pickup_no_pickup_at(self, order): + """Test `Order.time_at_pickup` property.""" + order.pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_at_pickup) - def test_time_at_pickup_success(self, order_data): - """Test Order.time_at_pickup property.""" - order = db.Order(**order_data) - + def test_time_at_pickup_success(self, order): + """Test `Order.time_at_pickup` property.""" result = order.time_at_pickup - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_scheduled_pickup_at_no_restaurant_notified( # noqa:WPS118 - self, order_data, - ): - """Test Order.scheduled_pickup_at property.""" - order_data['restaurant_notified_at'] = None - order = db.Order(**order_data) + def test_scheduled_pickup_at_no_restaurant_notified(self, order): # noqa:WPS118 + """Test `Order.scheduled_pickup_at` property.""" + order.restaurant_notified_at = None with pytest.raises(RuntimeError, match='not set'): int(order.scheduled_pickup_at) - def test_scheduled_pickup_at_no_est_prep_duration(self, order_data): # noqa:WPS118 - """Test Order.scheduled_pickup_at property.""" - order_data['estimated_prep_duration'] = None - order = db.Order(**order_data) + def test_scheduled_pickup_at_no_est_prep_duration(self, order): # noqa:WPS118 + """Test `Order.scheduled_pickup_at` property.""" + order.estimated_prep_duration = None with pytest.raises(RuntimeError, match='not set'): int(order.scheduled_pickup_at) - def 
test_scheduled_pickup_at_success(self, order_data): - """Test Order.scheduled_pickup_at property.""" - order = db.Order(**order_data) - + def test_scheduled_pickup_at_success(self, order): + """Test `Order.scheduled_pickup_at` property.""" result = order.scheduled_pickup_at - assert isinstance(result, datetime.datetime) + assert order.placed_at < result < order.delivery_at - def test_if_courier_early_at_pickup(self, order_data): - """Test Order.courier_early property.""" - order = db.Order(**order_data) + def test_courier_is_early_at_pickup(self, order): + """Test `Order.courier_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 999_999 result = order.courier_early assert bool(result) is True - def test_if_courier_late_at_pickup(self, order_data): - """Test Order.courier_late property.""" - # Opposite of test case before. - order = db.Order(**order_data) + def test_courier_is_not_early_at_pickup(self, order): + """Test `Order.courier_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.courier_early + + assert bool(result) is False + + def test_courier_is_late_at_pickup(self, order): + """Test `Order.courier_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.courier_late + + assert bool(result) is True + + def test_courier_is_not_late_at_pickup(self, order): + """Test `Order.courier_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. 
+ order.estimated_prep_duration = 999_999 result = order.courier_late assert bool(result) is False - def test_if_restaurant_early_at_pickup(self, order_data): - """Test Order.restaurant_early property.""" - order = db.Order(**order_data) + def test_restaurant_early_at_pickup(self, order): + """Test `Order.restaurant_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 999_999 result = order.restaurant_early assert bool(result) is True - def test_if_restaurant_late_at_pickup(self, order_data): - """Test Order.restaurant_late property.""" - # Opposite of test case before. - order = db.Order(**order_data) + def test_restaurant_is_not_early_at_pickup(self, order): + """Test `Order.restaurant_early` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.restaurant_early + + assert bool(result) is False + + def test_restaurant_is_late_at_pickup(self, order): + """Test `Order.restaurant_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. + order.estimated_prep_duration = 1 + + result = order.restaurant_late + + assert bool(result) is True + + def test_restaurant_is_not_late_at_pickup(self, order): + """Test `Order.restaurant_late` property.""" + # Manipulate the attribute that determines `Order.scheduled_pickup_at`. 
+ order.estimated_prep_duration = 999_999 result = order.restaurant_late assert bool(result) is False - def test_time_to_delivery_no_reached_delivery_at(self, order_data): # noqa:WPS118 - """Test Order.time_to_delivery property.""" - order_data['reached_delivery_at'] = None - order = db.Order(**order_data) + def test_time_to_delivery_no_reached_delivery_at(self, order): # noqa:WPS118 + """Test `Order.time_to_delivery` property.""" + order.reached_delivery_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_delivery) - def test_time_to_delivery_no_pickup_at(self, order_data): - """Test Order.time_to_delivery property.""" - order_data['pickup_at'] = None - order = db.Order(**order_data) + def test_time_to_delivery_no_pickup_at(self, order): + """Test `Order.time_to_delivery` property.""" + order.pickup_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_to_delivery) - def test_time_to_delivery_success(self, order_data): - """Test Order.time_to_delivery property.""" - order = db.Order(**order_data) - + def test_time_to_delivery_success(self, order): + """Test `Order.time_to_delivery` property.""" result = order.time_to_delivery - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_time_at_delivery_no_reached_delivery_at(self, order_data): # noqa:WPS118 - """Test Order.time_at_delivery property.""" - order_data['reached_delivery_at'] = None - order = db.Order(**order_data) + def test_time_at_delivery_no_reached_delivery_at(self, order): # noqa:WPS118 + """Test `Order.time_at_delivery` property.""" + order.reached_delivery_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_at_delivery) - def test_time_at_delivery_no_delivery_at(self, order_data): - """Test Order.time_at_delivery property.""" - order_data['delivery_at'] = None - order = db.Order(**order_data) + def test_time_at_delivery_no_delivery_at(self, order): + """Test `Order.time_at_delivery` 
property.""" + order.delivery_at = None with pytest.raises(RuntimeError, match='not set'): int(order.time_at_delivery) - def test_time_at_delivery_success(self, order_data): - """Test Order.time_at_delivery property.""" - order = db.Order(**order_data) - + def test_time_at_delivery_success(self, order): + """Test `Order.time_at_delivery` property.""" result = order.time_at_delivery - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) - def test_courier_waited_at_delviery(self, order_data): - """Test Order.courier_waited_at_delivery property.""" - order_data['_courier_waited_at_delivery'] = True - order = db.Order(**order_data) + def test_courier_waited_at_delviery(self, order): + """Test `Order.courier_waited_at_delivery` property.""" + order._courier_waited_at_delivery = True # noqa:WPS437 - result = int(order.courier_waited_at_delivery.total_seconds()) + result = order.courier_waited_at_delivery.total_seconds() assert result > 0 - def test_courier_did_not_wait_at_delivery(self, order_data): - """Test Order.courier_waited_at_delivery property.""" - order_data['_courier_waited_at_delivery'] = False - order = db.Order(**order_data) + def test_courier_did_not_wait_at_delivery(self, order): + """Test `Order.courier_waited_at_delivery` property.""" + order._courier_waited_at_delivery = False # noqa:WPS437 - result = int(order.courier_waited_at_delivery.total_seconds()) + result = order.courier_waited_at_delivery.total_seconds() assert result == 0 - def test_if_delivery_early_success(self, order_data): - """Test Order.delivery_early property.""" - order_data['ad_hoc'] = False - order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0) - order_data['scheduled_delivery_at_corrected'] = False - order = db.Order(**order_data) + def test_ad_hoc_order_cannot_be_early(self, order): + """Test `Order.delivery_early` property.""" + # By default, the `OrderFactory` creates ad-hoc orders. 
+ with pytest.raises(AttributeError, match='scheduled'): + int(order.delivery_early) + + def test_scheduled_order_delivered_early(self, make_order): + """Test `Order.delivery_early` property.""" + order = make_order(scheduled=True) + # Schedule the order to a lot later. + order.scheduled_delivery_at += datetime.timedelta(hours=2) result = order.delivery_early assert bool(result) is True - def test_if_delivery_early_failure(self, order_data): - """Test Order.delivery_early property.""" - order = db.Order(**order_data) + def test_scheduled_order_not_delivered_early(self, make_order): + """Test `Order.delivery_early` property.""" + order = make_order(scheduled=True) + # Schedule the order to a lot earlier. + order.scheduled_delivery_at -= datetime.timedelta(hours=2) - with pytest.raises(AttributeError, match='scheduled'): - int(order.delivery_early) + result = order.delivery_early - def test_if_delivery_late_success(self, order_data): + assert bool(result) is False + + def test_ad_hoc_order_cannot_be_late(self, order): """Test Order.delivery_late property.""" - order_data['ad_hoc'] = False - order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0) - order_data['scheduled_delivery_at_corrected'] = False - order = db.Order(**order_data) + # By default, the `OrderFactory` creates ad-hoc orders. + with pytest.raises(AttributeError, match='scheduled'): + int(order.delivery_late) + + def test_scheduled_order_delivered_late(self, make_order): + """Test `Order.delivery_early` property.""" + order = make_order(scheduled=True) + # Schedule the order to a lot earlier. + order.scheduled_delivery_at -= datetime.timedelta(hours=2) + + result = order.delivery_late + + assert bool(result) is True + + def test_scheduled_order_not_delivered_late(self, make_order): + """Test `Order.delivery_early` property.""" + order = make_order(scheduled=True) + # Schedule the order to a lot later. 
+ order.scheduled_delivery_at += datetime.timedelta(hours=2) result = order.delivery_late assert bool(result) is False - def test_if_delivery_late_failure(self, order_data): - """Test Order.delivery_late property.""" - order = db.Order(**order_data) - - with pytest.raises(AttributeError, match='scheduled'): - int(order.delivery_late) - - def test_no_total_time_for_pre_order(self, order_data): - """Test Order.total_time property.""" - order_data['ad_hoc'] = False - order_data['scheduled_delivery_at'] = datetime.datetime(2020, 1, 2, 12, 30, 0) - order_data['scheduled_delivery_at_corrected'] = False - order = db.Order(**order_data) + def test_no_total_time_for_scheduled_order(self, make_order): + """Test `Order.total_time` property.""" + order = make_order(scheduled=True) with pytest.raises(AttributeError, match='Scheduled'): int(order.total_time) - def test_no_total_time_for_cancelled_order(self, order_data): - """Test Order.total_time property.""" - order_data['cancelled'] = True - order_data['cancelled_at'] = datetime.datetime(2020, 1, 2, 12, 15, 0) - order_data['cancelled_at_corrected'] = False - order = db.Order(**order_data) + def test_no_total_time_for_cancelled_order(self, make_order): + """Test `Order.total_time` property.""" + order = make_order(cancel_before_pickup=True) with pytest.raises(RuntimeError, match='Cancelled'): int(order.total_time) - def test_total_time_success(self, order_data): - """Test Order.total_time property.""" - order = db.Order(**order_data) - + def test_total_time_success(self, order): + """Test `Order.total_time` property.""" result = order.total_time - assert isinstance(result, datetime.timedelta) + assert result > datetime.timedelta(0) + + +@pytest.mark.db +@pytest.mark.no_cover +def test_make_random_orders( # noqa:C901,WPS211,WPS210,WPS213,WPS231 + db_session, make_address, make_courier, make_restaurant, make_order, +): + """Sanity check the all the `make_*` fixtures. 
+ + Ensure that all generated `Address`, `Courier`, `Customer`, `Restauarant`, + and `Order` objects adhere to the database constraints. + """ # noqa:D202 + # Generate a large number of `Order`s to obtain a large variance of data. + for _ in range(1_000): # noqa:WPS122 + + # Ad-hoc `Order`s are far more common than pre-orders. + scheduled = random.choice([True, False, False, False, False]) + + # Randomly pass a `address` argument to `make_restaurant()` and + # a `restaurant` argument to `make_order()`. + if random.random() < 0.5: + address = random.choice([None, make_address()]) + restaurant = make_restaurant(address=address) + else: + restaurant = None + + # Randomly pass a `courier` argument to `make_order()`. + courier = random.choice([None, make_courier()]) + + # A tiny fraction of `Order`s get cancelled. + if random.random() < 0.05: + if random.random() < 0.5: + cancel_before_pickup, cancel_after_pickup = True, False + else: + cancel_before_pickup, cancel_after_pickup = False, True + else: + cancel_before_pickup, cancel_after_pickup = False, False + + # Write all the generated objects to the database. + # This should already trigger an `IntegrityError` if the data are flawed. 
+ order = make_order( + scheduled=scheduled, + restaurant=restaurant, + courier=courier, + cancel_before_pickup=cancel_before_pickup, + cancel_after_pickup=cancel_after_pickup, + ) + db_session.add(order) + + db_session.commit() diff --git a/tests/db/test_restaurants.py b/tests/db/test_restaurants.py index 4662346..536d6f0 100644 --- a/tests/db/test_restaurants.py +++ b/tests/db/test_restaurants.py @@ -1,80 +1,70 @@ -"""Test the ORM's Restaurant model.""" +"""Test the ORM's `Restaurant` model.""" +# pylint:disable=no-self-use import pytest +import sqlalchemy as sqla from sqlalchemy import exc as sa_exc -from sqlalchemy.orm import exc as orm_exc from urban_meal_delivery import db class TestSpecialMethods: - """Test special methods in Restaurant.""" + """Test special methods in `Restaurant`.""" - # pylint:disable=no-self-use - - def test_create_restaurant(self, restaurant_data): - """Test instantiation of a new Restaurant object.""" - result = db.Restaurant(**restaurant_data) - - assert result is not None - - def test_text_representation(self, restaurant_data): - """Restaurant has a non-literal text representation.""" - restaurant = db.Restaurant(**restaurant_data) - name = restaurant_data['name'] + def test_create_restaurant(self, restaurant): + """Test instantiation of a new `Restaurant` object.""" + assert restaurant is not None + def test_text_representation(self, restaurant): + """`Restaurant` has a non-literal text representation.""" result = repr(restaurant) - assert result == f'' + assert result == f'' -@pytest.mark.e2e +@pytest.mark.db @pytest.mark.no_cover class TestConstraints: - """Test the database constraints defined in Restaurant.""" + """Test the database constraints defined in `Restaurant`.""" - # pylint:disable=no-self-use + def test_insert_into_database(self, db_session, restaurant): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Restaurant).count() == 0 - def test_insert_into_database(self, restaurant, 
db_session): - """Insert an instance into the database.""" db_session.add(restaurant) db_session.commit() - def test_dublicate_primary_key(self, restaurant, restaurant_data, db_session): - """Can only add a record once.""" - db_session.add(restaurant) - db_session.commit() + assert db_session.query(db.Restaurant).count() == 1 - another_restaurant = db.Restaurant(**restaurant_data) - db_session.add(another_restaurant) - - with pytest.raises(orm_exc.FlushError): - db_session.commit() - - def test_delete_a_referenced_address(self, restaurant, address, db_session): + def test_delete_a_referenced_address(self, db_session, restaurant): """Remove a record that is referenced with a FK.""" db_session.add(restaurant) db_session.commit() - with pytest.raises(sa_exc.IntegrityError): - db_session.execute( - db.Address.__table__.delete().where( # noqa:WPS609 - db.Address.id == address.id, - ), - ) + # Must delete without ORM as otherwise an UPDATE statement is emitted. + stmt = sqla.delete(db.Address).where(db.Address.id == restaurant.address.id) - def test_negative_prep_duration(self, restaurant, db_session): + with pytest.raises( + sa_exc.IntegrityError, match='fk_restaurants_to_addresses_via_address_id', + ): + db_session.execute(stmt) + + def test_negative_prep_duration(self, db_session, restaurant): """Insert an instance with invalid data.""" restaurant.estimated_prep_duration = -1 db_session.add(restaurant) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises( + sa_exc.IntegrityError, match='realistic_estimated_prep_duration', + ): db_session.commit() - def test_too_high_prep_duration(self, restaurant, db_session): + def test_too_high_prep_duration(self, db_session, restaurant): """Insert an instance with invalid data.""" restaurant.estimated_prep_duration = 2500 db_session.add(restaurant) - with pytest.raises(sa_exc.IntegrityError): + with pytest.raises( + sa_exc.IntegrityError, match='realistic_estimated_prep_duration', + ): db_session.commit() From 
556b9d36a38bbb18b0973d227bdd260ab5054709 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Wed, 30 Dec 2020 17:37:51 +0100 Subject: [PATCH 18/72] Add shapely to the dependencies --- poetry.lock | 39 ++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 39 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 673ea5f..be6e1cf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1509,6 +1509,19 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "shapely" +version = "1.7.1" +description = "Geometric objects, predicates, and operations" +category = "main" +optional = false +python-versions = "*" + +[package.extras] +all = ["numpy", "pytest", "pytest-cov"] +test = ["pytest", "pytest-cov"] +vectorized = ["numpy"] + [[package]] name = "six" version = "1.15.0" @@ -1880,7 +1893,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "03305636d62193eba71a06cc802846a4b00f5cdcaa3f71ceb4d35f55ead764b0" +content-hash = "4031594478b5dd997157985531c62d4a69545afff70a280daff91413a61a1c8f" [metadata.files] alabaster = [ @@ -2694,6 +2707,30 @@ send2trash = [ {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, ] +shapely = [ + {file = "Shapely-1.7.1-1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:46da0ea527da9cf9503e66c18bab6981c5556859e518fe71578b47126e54ca93"}, + {file = "Shapely-1.7.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4c10f317e379cc404f8fc510cd9982d5d3e7ba13a9cfd39aa251d894c6366798"}, + {file = "Shapely-1.7.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:17df66e87d0fe0193910aeaa938c99f0b04f67b430edb8adae01e7be557b141b"}, + {file = "Shapely-1.7.1-cp35-cp35m-manylinux1_x86_64.whl", hash = 
"sha256:da38ed3d65b8091447dc3717e5218cc336d20303b77b0634b261bc5c1aa2bae8"}, + {file = "Shapely-1.7.1-cp35-cp35m-win32.whl", hash = "sha256:8e7659dd994792a0aad8fb80439f59055a21163e236faf2f9823beb63a380e19"}, + {file = "Shapely-1.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:791477edb422692e7dc351c5ed6530eb0e949a31b45569946619a0d9cd5f53cb"}, + {file = "Shapely-1.7.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e3afccf0437edc108eef1e2bb9cc4c7073e7705924eb4cd0bf7715cd1ef0ce1b"}, + {file = "Shapely-1.7.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8f15b6ce67dcc05b61f19c689b60f3fe58550ba994290ff8332f711f5aaa9840"}, + {file = "Shapely-1.7.1-cp36-cp36m-win32.whl", hash = "sha256:60e5b2282619249dbe8dc5266d781cc7d7fb1b27fa49f8241f2167672ad26719"}, + {file = "Shapely-1.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:de618e67b64a51a0768d26a9963ecd7d338a2cf6e9e7582d2385f88ad005b3d1"}, + {file = "Shapely-1.7.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:182716ffb500d114b5d1b75d7fd9d14b7d3414cef3c38c0490534cc9ce20981a"}, + {file = "Shapely-1.7.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:4f3c59f6dbf86a9fc293546de492f5e07344e045f9333f3a753f2dda903c45d1"}, + {file = "Shapely-1.7.1-cp37-cp37m-win32.whl", hash = "sha256:6871acba8fbe744efa4f9f34e726d070bfbf9bffb356a8f6d64557846324232b"}, + {file = "Shapely-1.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:35be1c5d869966569d3dfd4ec31832d7c780e9df760e1fe52131105685941891"}, + {file = "Shapely-1.7.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:052eb5b9ba756808a7825e8a8020fb146ec489dd5c919e7d139014775411e688"}, + {file = "Shapely-1.7.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:90a3e2ae0d6d7d50ff2370ba168fbd416a53e7d8448410758c5d6a5920646c1d"}, + {file = "Shapely-1.7.1-cp38-cp38-win32.whl", hash = "sha256:a3774516c8a83abfd1ddffb8b6ec1b0935d7fe6ea0ff5c31a18bfdae567b4eba"}, + {file = "Shapely-1.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:6593026cd3f5daaea12bcc51ae5c979318070fefee210e7990cb8ac2364e79a1"}, + 
{file = "Shapely-1.7.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b40cc7bb089ae4aa9ddba1db900b4cd1bce3925d2a4b5837b639e49de054784f"}, + {file = "Shapely-1.7.1-cp39-cp39-win32.whl", hash = "sha256:2df5260d0f2983309776cb41bfa85c464ec07018d88c0ecfca23d40bfadae2f1"}, + {file = "Shapely-1.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a5c3a50d823c192f32615a2a6920e8c046b09e07a58eba220407335a9cd2e8ea"}, + {file = "Shapely-1.7.1.tar.gz", hash = "sha256:1641724c1055459a7e2b8bbe47ba25bdc89554582e62aec23cb3f3ca25f9b129"}, +] six = [ {file = "six-1.15.0-py2.py3-none-any.whl", hash = "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced"}, {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, diff --git a/pyproject.toml b/pyproject.toml index 390f852..4a877ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -28,6 +28,7 @@ repository = "https://github.com/webartifex/urban-meal-delivery" python = "^3.8" # Package => code developed in *.py files and packaged under src/urban_meal_delivery +Shapely = "^1.7.1" alembic = "^1.4.2" click = "^7.1.2" psycopg2 = "^2.8.5" # adapter for PostgreSQL From 755677db462ee91b180cac48edf5f03d3a2f3c0b Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Fri, 1 Jan 2021 17:59:15 +0100 Subject: [PATCH 19/72] Add utm to the dependencies --- poetry.lock | 29 ++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 29 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index be6e1cf..e4862bf 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1798,6 +1798,14 @@ brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] +[[package]] +name = "utm" +version = "0.7.0" +description = "Bidirectional UTM-WGS84 converter for python" +category = "main" +optional = false +python-versions = "*" + [[package]] name = "virtualenv" version = "20.2.2" @@ 
-1893,7 +1901,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "4031594478b5dd997157985531c62d4a69545afff70a280daff91413a61a1c8f" +content-hash = "b2671b1aecf282fa648190111c9ca79e7a40b13d0915f59a4f65e7986e737fb5" [metadata.files] alabaster = [ @@ -1933,6 +1941,8 @@ argon2-cffi = [ {file = "argon2_cffi-20.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:6678bb047373f52bcff02db8afab0d2a77d83bde61cfecea7c5c62e2335cb203"}, {file = "argon2_cffi-20.1.0-cp38-cp38-win32.whl", hash = "sha256:77e909cc756ef81d6abb60524d259d959bab384832f0c651ed7dcb6e5ccdbb78"}, {file = "argon2_cffi-20.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:9dfd5197852530294ecb5795c97a823839258dfd5eb9420233c7cfedec2058f2"}, + {file = "argon2_cffi-20.1.0-cp39-cp39-win32.whl", hash = "sha256:e2db6e85c057c16d0bd3b4d2b04f270a7467c147381e8fd73cbbe5bc719832be"}, + {file = "argon2_cffi-20.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:8a84934bd818e14a17943de8099d41160da4a336bcc699bb4c394bbb9b94bd32"}, ] astor = [ {file = "astor-0.8.1-py2.py3-none-any.whl", hash = "sha256:070a54e890cefb5b3739d19f30f5a5ec840ffc9c50ffa7d23cc9fc1a38ebbfc5"}, @@ -2037,6 +2047,7 @@ click = [ ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] colorlog = [ {file = "colorlog-4.6.2-py2.py3-none-any.whl", hash = "sha256:edd5ada5de03e880e42b2526f8be5570fd9b692f8eb7cf6b1fdcac3e3fb23976"}, @@ -2514,6 +2525,8 @@ psycopg2 = [ {file = "psycopg2-2.8.6-cp37-cp37m-win_amd64.whl", hash = "sha256:56fee7f818d032f802b8eed81ef0c1232b8b42390df189cab9cfa87573fe52c5"}, {file = "psycopg2-2.8.6-cp38-cp38-win32.whl", hash = "sha256:ad2fe8a37be669082e61fb001c185ffb58867fdbb3e7a6b0b0d2ffe232353a3e"}, {file = "psycopg2-2.8.6-cp38-cp38-win_amd64.whl", hash = 
"sha256:56007a226b8e95aa980ada7abdea6b40b75ce62a433bd27cec7a8178d57f4051"}, + {file = "psycopg2-2.8.6-cp39-cp39-win32.whl", hash = "sha256:2c93d4d16933fea5bbacbe1aaf8fa8c1348740b2e50b3735d1b0bf8154cbf0f3"}, + {file = "psycopg2-2.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:d5062ae50b222da28253059880a871dc87e099c25cb68acf613d9d227413d6f7"}, {file = "psycopg2-2.8.6.tar.gz", hash = "sha256:fb23f6c71107c37fd667cb4ea363ddeb936b348bbd6449278eb92c189699f543"}, ] ptyprocess = [ @@ -2620,6 +2633,8 @@ pyyaml = [ {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, + {file = "PyYAML-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a"}, + {file = "PyYAML-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e"}, {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, ] pyzmq = [ @@ -2894,19 +2909,28 @@ typed-ast = [ {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, + {file = "typed_ast-1.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fcf135e17cc74dbfbc05894ebca928ffeb23d9790b3167a674921db19082401f"}, {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = 
"sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, + {file = "typed_ast-1.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f208eb7aff048f6bea9586e61af041ddf7f9ade7caed625742af423f6bae3298"}, {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, + {file = "typed_ast-1.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7e4c9d7658aaa1fc80018593abdf8598bf91325af6af5cce4ce7c73bc45ea53d"}, {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, + {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92c325624e304ebf0e025d1224b77dd4e6393f18aab8d829b5b7e04afe9b7a2c"}, + {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d648b8e3bf2fe648745c8ffcee3db3ff903d0817a01a12dd6a6ea7a8f4889072"}, + {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fac11badff8313e23717f3dada86a15389d0708275bddf766cca67a84ead3e91"}, + {file = "typed_ast-1.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d8110d78a5736e16e26213114a38ca35cb15b6515d535413b090bd50951556d"}, + {file = "typed_ast-1.4.1-cp39-cp39-win32.whl", hash = "sha256:b52ccf7cfe4ce2a1064b18594381bccf4179c2ecf7f513134ec2f993dd4ab395"}, + {file = "typed_ast-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3742b32cf1c6ef124d57f95be609c473d7ec4c14d0090e5a5e05a15269fb4d0c"}, {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, ] typing-extensions = [ @@ -2918,6 +2942,9 @@ urllib3 = [ {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, ] +utm = [ + {file = "utm-0.7.0.tar.gz", hash = "sha256:3c9a3650e98bb6eecec535418d0dfd4db8f88c8ceaca112a0ff0787e116566e2"}, +] virtualenv = [ {file = "virtualenv-20.2.2-py2.py3-none-any.whl", hash = "sha256:54b05fc737ea9c9ee9f8340f579e5da5b09fb64fd010ab5757eb90268616907c"}, {file = "virtualenv-20.2.2.tar.gz", hash = "sha256:b7a8ec323ee02fb2312f098b6b4c9de99559b462775bc8fe3627a73706603c1b"}, diff --git a/pyproject.toml b/pyproject.toml index 4a877ff..f17ce00 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -33,6 +33,7 @@ alembic = "^1.4.2" click = "^7.1.2" psycopg2 = "^2.8.5" # adapter for PostgreSQL sqlalchemy = "^1.3.18" +utm = "^0.7.0" # 
Jupyter Lab => notebooks with analyses using the developed package # IMPORTANT: must be kept in sync with the "research" extra below From 6f9935072ed84e932f56d6211264b2ef9e631ed6 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 2 Jan 2021 14:31:59 +0100 Subject: [PATCH 20/72] Add UTMCoordinate class - the class is a utility to abstract working with latitude-longitude coordinates in their UTM representation (~ "cartesian plane") - the class's .x and .y properties enable working with simple x-y coordinates where the (0, 0) origin is the lower-left of a city's viewport --- setup.cfg | 7 + src/urban_meal_delivery/db/utils/__init__.py | 3 + .../db/utils/coordinates.py | 130 +++++++++++ tests/db/utils/__init__.py | 1 + tests/db/utils/test_coordinates.py | 206 ++++++++++++++++++ 5 files changed, 347 insertions(+) create mode 100644 src/urban_meal_delivery/db/utils/__init__.py create mode 100644 src/urban_meal_delivery/db/utils/coordinates.py create mode 100644 tests/db/utils/__init__.py create mode 100644 tests/db/utils/test_coordinates.py diff --git a/setup.cfg b/setup.cfg index cfc1969..91f2727 100644 --- a/setup.cfg +++ b/setup.cfg @@ -149,6 +149,8 @@ per-file-ignores = S311, # Shadowing outer scopes occurs naturally with mocks. WPS442, + # Test names may be longer than 40 characters. + WPS118, # Modules may have many test cases. WPS202,WPS204,WPS214, # Do not check for Jones complexity in the test suite. @@ -167,6 +169,9 @@ per-file-ignores = # Source: https://en.wikipedia.org/wiki/Cyclomatic_complexity#Limiting_complexity_during_development max-complexity = 10 +# Allow more than wemake-python-styleguide's 7 methods per class. +max-methods = 12 + # Comply with black's style. 
# Source: https://github.com/psf/black/blob/master/docs/the_black_code_style.md#line-length max-line-length = 88 @@ -238,6 +243,8 @@ ignore_missing_imports = true ignore_missing_imports = true [mypy-sqlalchemy.*] ignore_missing_imports = true +[mypy-utm.*] +ignore_missing_imports = true [pylint.FORMAT] diff --git a/src/urban_meal_delivery/db/utils/__init__.py b/src/urban_meal_delivery/db/utils/__init__.py new file mode 100644 index 0000000..3fe2e82 --- /dev/null +++ b/src/urban_meal_delivery/db/utils/__init__.py @@ -0,0 +1,3 @@ +"""Utilities used by the ORM models.""" + +from urban_meal_delivery.db.utils.coordinates import UTMCoordinate # noqa:F401 diff --git a/src/urban_meal_delivery/db/utils/coordinates.py b/src/urban_meal_delivery/db/utils/coordinates.py new file mode 100644 index 0000000..17a60e6 --- /dev/null +++ b/src/urban_meal_delivery/db/utils/coordinates.py @@ -0,0 +1,130 @@ +"""A `UTMCoordinate` class to unify working with coordinates.""" + +from __future__ import annotations + +from typing import Optional + +import utm + + +class UTMCoordinate: + """A GPS location represented in UTM coordinates. + + For further info, we refer to this comprehensive article on the UTM system: + https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system + """ + + # pylint:disable=too-many-instance-attributes + + def __init__( + self, latitude: float, longitude: float, relative_to: UTMCoordinate = None, + ) -> None: + """Cast a WGS84-conforming `latitude`-`longitude` pair as UTM coordinates.""" + # The SQLAlchemy columns come as `Decimal`s due to the `DOUBLE_PRECISION`. + self._latitude = float(latitude) + self._longitude = float(longitude) + + easting, northing, zone, band = utm.from_latlon(self._latitude, self._longitude) + + # `.easting` and `.northing` as `int`s are precise enough. 
+ self._easting = int(easting) + self._northing = int(northing) + self._zone = zone + self._band = band.upper() + + self._normalized_easting: Optional[int] = None + self._normalized_northing: Optional[int] = None + + if relative_to: + try: + self.relate_to(relative_to) + except TypeError: + raise TypeError( + '`relative_to` must be a `UTMCoordinate` object', + ) from None + except ValueError: + raise ValueError( + '`relative_to` must be in the same UTM zone as the `latitude`-`longitude` pair', # noqa:E501 + ) from None + + def __repr__(self) -> str: + """A non-literal text representation. + + Convention is {ZONE} {EASTING} {NORTHING}. + + Example: + `'` + """ + return f'' # noqa:WPS221 + + @property + def easting(self) -> int: + """The easting of the location in meters.""" + return self._easting + + @property + def northing(self) -> int: + """The northing of the location in meters.""" + return self._northing + + @property + def zone(self) -> str: + """The UTM zone of the location.""" + return f'{self._zone}{self._band}' + + def __eq__(self, other: object) -> bool: + """Check if two `UTMCoordinate` objects are the same location.""" + if not isinstance(other, UTMCoordinate): + return NotImplemented + + if self.zone != other.zone: + raise ValueError('locations must be in the same zone, including the band') + + return (self.easting, self.northing) == (other.easting, other.northing) + + @property + def x(self) -> int: # noqa:WPS111 + """The `.easting` of the location in meters, relative to some origin. + + The origin, which defines the `(0, 0)` coordinate, is set with `.relate_to()`. + """ + if self._normalized_easting is None: + raise RuntimeError('an origin to relate to must be set first') + + return self._normalized_easting + + @property + def y(self) -> int: # noqa:WPS111 + """The `.northing` of the location in meters, relative to some origin. + + The origin, which defines the `(0, 0)` coordinate, is set with `.relate_to()`. 
+ """ + if self._normalized_northing is None: + raise RuntimeError('an origin to relate to must be set first') + + return self._normalized_northing + + def relate_to(self, other: UTMCoordinate) -> None: + """Make the origin in the lower-left corner relative to `other`. + + The `.x` and `.y` properties are the `.easting` and `.northing` values + of `self` minus the ones from `other`. So, `.x` and `.y` make up a + Cartesian coordinate system where the `other` origin is `(0, 0)`. + + This method is implicitly called by `.__init__()` if that is called + with a `relative_to` argument. + + To prevent semantic errors in calculations based on the `.x` and `.y` + properties, the `other` origin may only be set once! + """ + if self._normalized_easting is not None: + raise RuntimeError('the `other` origin may only be set once') + + if not isinstance(other, UTMCoordinate): + raise TypeError('`other` is not a `UTMCoordinate` object') + + if self.zone != other.zone: + raise ValueError('`other` must be in the same zone, including the band') + + self._normalized_easting = self.easting - other.easting + self._normalized_northing = self.northing - other.northing diff --git a/tests/db/utils/__init__.py b/tests/db/utils/__init__.py new file mode 100644 index 0000000..4a95f0a --- /dev/null +++ b/tests/db/utils/__init__.py @@ -0,0 +1 @@ +"""Test the utilities for the ORM layer.""" diff --git a/tests/db/utils/test_coordinates.py b/tests/db/utils/test_coordinates.py new file mode 100644 index 0000000..6909240 --- /dev/null +++ b/tests/db/utils/test_coordinates.py @@ -0,0 +1,206 @@ +"""Test the `UTMCoordinate` class.""" +# pylint:disable=no-self-use + +import pytest + +from urban_meal_delivery.db import utils + + +# All tests take place in Paris. 
+MIN_EASTING, MAX_EASTING = 443_100, 461_200 +MIN_NORTHING, MAX_NORTHING = 5_407_200, 5_416_800 +ZONE = '31U' + + +@pytest.fixture +def location(address): + """A `UTMCoordinate` object based off the `address` fixture.""" + obj = utils.UTMCoordinate(address.latitude, address.longitude) + + assert obj.zone == ZONE # sanity check + + return obj + + +@pytest.fixture +def faraway_location(): + """A `UTMCoordinate` object far away from the `location`.""" + obj = utils.UTMCoordinate(latitude=0, longitude=0) + + assert obj.zone != ZONE # sanity check + + return obj + + +@pytest.fixture +def origin(city): + """A `UTMCoordinate` object based off the one and only `city`.""" + # Use the `city`'s lower left viewport corner as the `(0, 0)` origin. + lower_left = city.viewport['southwest'] + obj = utils.UTMCoordinate(lower_left['latitude'], lower_left['longitude']) + + assert obj.zone == ZONE # sanity check + + return obj + + +class TestSpecialMethods: + """Test special methods in `UTMCoordinate`.""" + + def test_create_utm_coordinates(self, location): + """Test instantiation of a new `UTMCoordinate` object.""" + assert location is not None + + def test_create_utm_coordinates_with_origin(self, address, origin): + """Test instantiation with a `relate_to` argument.""" + result = utils.UTMCoordinate( + latitude=address.latitude, longitude=address.longitude, relative_to=origin, + ) + + assert result is not None + + def test_create_utm_coordinates_with_non_utm_origin(self): + """Test instantiation with a `relate_to` argument of the wrong type.""" + with pytest.raises(TypeError, match='UTMCoordinate'): + utils.UTMCoordinate( + latitude=0, longitude=0, relative_to=object(), + ) + + def test_create_utm_coordinates_with_invalid_origin( + self, address, faraway_location, + ): + """Test instantiation with a `relate_to` argument at an invalid location.""" + with pytest.raises(ValueError, match='must be in the same UTM zone'): + utils.UTMCoordinate( + latitude=address.latitude, + 
longitude=address.longitude, + relative_to=faraway_location, + ) + + def test_text_representation(self, location): + """The text representation is a non-literal.""" + result = repr(location) + + assert result.startswith('') + + @pytest.mark.e2e + def test_coordinates_in_the_text_representation(self, location): + """Test the UTM convention in the non-literal text `repr()`. + + Example Format: + `'` + """ + result = repr(location) + + parts = result.split(' ') + zone = parts[1] + easting = int(parts[2]) + northing = int(parts[3][:-1]) # strip the ending ">" + + assert zone == location.zone + assert MIN_EASTING < easting < MAX_EASTING + assert MIN_NORTHING < northing < MAX_NORTHING + + def test_compare_utm_coordinates_to_different_data_type(self, location): + """Test `UTMCoordinate.__eq__()`.""" + result = location == object() + + assert result is False + + def test_compare_utm_coordinates_to_far_away_coordinates( + self, location, faraway_location, + ): + """Test `UTMCoordinate.__eq__()`.""" + with pytest.raises(ValueError, match='must be in the same zone'): + bool(location == faraway_location) + + def test_compare_utm_coordinates_to_equal_coordinates(self, location, address): + """Test `UTMCoordinate.__eq__()`.""" + same_location = utils.UTMCoordinate(address.latitude, address.longitude) + + result = location == same_location + + assert result is True + + def test_compare_utm_coordinates_to_themselves(self, location): + """Test `UTMCoordinate.__eq__()`.""" + # pylint:disable=comparison-with-itself + result = location == location # noqa:WPS312 + + assert result is True + + def test_compare_utm_coordinates_to_different_coordinates(self, location, origin): + """Test `UTMCoordinate.__eq__()`.""" + result = location == origin + + assert result is False + + +class TestProperties: + """Test properties in `UTMCoordinate`.""" + + def test_easting(self, location): + """Test `UTMCoordinate.easting` property.""" + result = location.easting + + assert MIN_EASTING < result < 
MAX_EASTING + + def test_northing(self, location): + """Test `UTMCoordinate.northing` property.""" + result = location.northing + + assert MIN_NORTHING < result < MAX_NORTHING + + def test_zone(self, location): + """Test `UTMCoordinate.zone` property.""" + result = location.zone + + assert result == ZONE + + +class TestRelateTo: + """Test the `UTMCoordinate.relate_to()` method and the `.x` and `.y` properties.""" + + def test_run_relate_to_twice(self, location, origin): + """The `.relate_to()` method must only be run once.""" + location.relate_to(origin) + + with pytest.raises(RuntimeError, match='once'): + location.relate_to(origin) + + def test_call_relate_to_with_wrong_other_type(self, location): + """`other` must be another `UTMCoordinate`.""" + with pytest.raises(TypeError, match='UTMCoordinate'): + location.relate_to(object()) + + def test_call_relate_to_with_far_away_other( + self, location, faraway_location, + ): + """The `other` origin must be in the same UTM zone.""" + with pytest.raises(ValueError, match='must be in the same zone'): + location.relate_to(faraway_location) + + def test_access_x_without_origin(self, location): + """`.relate_to()` must be called before `.x` can be accessed.""" + with pytest.raises(RuntimeError, match='origin to relate to must be set'): + int(location.x) + + def test_access_y_without_origin(self, location): + """`.relate_to()` must be called before `.y` can be accessed.""" + with pytest.raises(RuntimeError, match='origin to relate to must be set'): + int(location.y) + + def test_origin_must_be_lower_left_when_relating_to_oneself(self, location): + """`.x` and `.y` must be `== (0, 0)` when oneself is the origin.""" + location.relate_to(location) + + assert (location.x, location.y) == (0, 0) + + @pytest.mark.e2e + def test_x_and_y_must_not_be_lower_left_for_address_in_city(self, location, origin): + """`.x` and `.y` must be `> (0, 0)` when oneself is the origin.""" + location.relate_to(origin) + + assert location.x > 0 + assert 
location.y > 0 From 6cb4be80f67d26ed423275bfc223145f2142b88e Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 2 Jan 2021 16:29:50 +0100 Subject: [PATCH 21/72] Add Address.x and Address.y coordinates - the Address.x and Address.y properties use the UTMCoordinate class behind the scenes - x and y are simple coordinates in an x-y plane - the (0, 0) origin is the southwest corner of Address.city.viewport --- setup.cfg | 3 +++ src/urban_meal_delivery/db/addresses.py | 22 ++++++++++++++++++++++ src/urban_meal_delivery/db/cities.py | 24 +++++++++++++++++++++++- tests/db/test_addresses.py | 12 ++++++++++++ tests/db/test_cities.py | 9 +++++++++ 5 files changed, 69 insertions(+), 1 deletion(-) diff --git a/setup.cfg b/setup.cfg index 91f2727..7443dc5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -134,8 +134,11 @@ per-file-ignores = WPS115, # Numbers are normal in config files. WPS432, + # No real string constant over-use. src/urban_meal_delivery/db/addresses.py: WPS226, + src/urban_meal_delivery/db/cities.py: + WPS226, src/urban_meal_delivery/db/orders.py: WPS226, tests/*.py: diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index 8a9337b..bfb848c 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -1,11 +1,14 @@ """Provide the ORM's `Address` model.""" +from typing import Any + import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql from sqlalchemy.ext import hybrid from urban_meal_delivery.db import meta +from urban_meal_delivery.db import utils class Address(meta.Base): @@ -64,6 +67,15 @@ class Address(meta.Base): foreign_keys='[Order._delivery_address_id]', ) + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Create a new address.""" + # Call SQLAlchemy's default `.__init__()` method. 
+ super().__init__(*args, **kwargs) + + self._utm_coordinates = utils.UTMCoordinate( + self.latitude, self.longitude, relative_to=self.city.as_origin, + ) + def __repr__(self) -> str: """Non-literal text representation.""" return '<{cls}({street} in {city})>'.format( @@ -80,3 +92,13 @@ class Address(meta.Base): `.is_primary` indicates the first in a group of `Address` objects. """ return self.id == self._primary_id + + @property + def x(self) -> int: # noqa=WPS111 + """The `.easting` of the address in meters, relative to the `.city`.""" + return self._utm_coordinates.x + + @property + def y(self) -> int: # noqa=WPS111 + """The `.northing` of the address in meters, relative to the `.city`.""" + return self._utm_coordinates.y diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index 2a36ced..a8e1360 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,12 +1,13 @@ """Provide the ORM's `City` model.""" -from typing import Dict +from typing import Any, Dict import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql from urban_meal_delivery.db import meta +from urban_meal_delivery.db import utils class City(meta.Base): @@ -45,6 +46,18 @@ class City(meta.Base): # Relationships addresses = orm.relationship('Address', back_populates='city') + def __init__(self, *args: Any, **kwargs: Any) -> None: + """Create a new city.""" + # Call SQLAlchemy's default `.__init__()` method. + super().__init__(*args, **kwargs) + + # Take the "lower left" of the viewport as the origin + # of a Cartesian coordinate system. 
+ lower_left = self.viewport['southwest'] + self._origin = utils.UTMCoordinate( + lower_left['latitude'], lower_left['longitude'], + ) + def __repr__(self) -> str: """Non-literal text representation.""" return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name) @@ -81,3 +94,12 @@ class City(meta.Base): 'longitude': self._southwest_longitude, }, } + + @property + def as_origin(self) -> utils.UTMCoordinate: + """The lower left corner of the `.viewport` in UTM coordinates. + + This property serves as the `relative_to` argument to the + `UTMConstructor` when representing an `Address` in the x-y plane. + """ + return self._origin diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index 4086f9c..2f51bb8 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -122,3 +122,15 @@ class TestProperties: result = address.is_primary assert result is False + + def test_x_is_positive(self, address): + """Test `Address.x` property.""" + result = address.x + + assert result > 0 + + def test_y_is_positive(self, address): + """Test `Address.y` property.""" + result = address.y + + assert result > 0 diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index 51aefc7..94d69fe 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -3,6 +3,7 @@ import pytest +from tests.db.utils import test_coordinates as consts from urban_meal_delivery import db @@ -63,3 +64,11 @@ class TestProperties: assert len(result) == 2 assert result['latitude'] == pytest.approx(city_data[f'_{corner}_latitude']) assert result['longitude'] == pytest.approx(city_data[f'_{corner}_longitude']) + + def test_city_in_utm_coordinates(self, city): + """Test `City.as_origin` property.""" + result = city.as_origin + + assert result.zone == consts.ZONE + assert consts.MIN_EASTING < result.easting < consts.MAX_EASTING + assert consts.MIN_NORTHING < result.northing < consts.MAX_NORTHING From f996376b13c066d00279620bff8ae71d34032476 Mon Sep 17 
00:00:00 2001 From: Alexander Hess Date: Sun, 3 Jan 2021 19:33:36 +0100 Subject: [PATCH 22/72] Add ORM models for the pixel grids - add Grid, Pixel, and AddressPixelAssociation ORM models - each Grid belongs to a City an is characterized by the side_length of all the square Pixels contained in it - Pixels aggregate Addresses => many-to-many relationship (that is modeled with SQLAlchemy's Association Pattern to implement a couple of constraints) --- migrations/env.py | 2 +- ...20210102_18_888e352d7526_add_pixel_grid.py | 163 ++++++++++++++++++ setup.cfg | 21 +-- src/urban_meal_delivery/db/__init__.py | 21 ++- src/urban_meal_delivery/db/addresses.py | 4 +- .../db/addresses_pixels.py | 56 ++++++ src/urban_meal_delivery/db/cities.py | 1 + src/urban_meal_delivery/db/grids.py | 48 ++++++ src/urban_meal_delivery/db/pixels.py | 59 +++++++ tests/db/conftest.py | 2 + tests/db/fake_data/__init__.py | 26 +-- tests/db/fake_data/static_fixtures.py | 12 ++ tests/db/test_addresses_pixels.py | 136 +++++++++++++++ tests/db/test_grids.py | 60 +++++++ tests/db/test_pixels.py | 90 ++++++++++ 15 files changed, 665 insertions(+), 36 deletions(-) create mode 100644 migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py create mode 100644 src/urban_meal_delivery/db/addresses_pixels.py create mode 100644 src/urban_meal_delivery/db/grids.py create mode 100644 src/urban_meal_delivery/db/pixels.py create mode 100644 tests/db/test_addresses_pixels.py create mode 100644 tests/db/test_grids.py create mode 100644 tests/db/test_pixels.py diff --git a/migrations/env.py b/migrations/env.py index 15c79e3..4c62bc9 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -21,7 +21,7 @@ log_config.fileConfig(context.config.config_file_name) def include_object(obj, _name, type_, _reflected, _compare_to): """Only include the clean schema into --autogenerate migrations.""" - if type_ in {'table', 'column'} and obj.schema != umd_config.DATABASE_SCHEMA: + if type_ in {'table', 'column'} and 
obj.schema != umd_config.CLEAN_SCHEMA: return False return True diff --git a/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py new file mode 100644 index 0000000..d1a9d34 --- /dev/null +++ b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py @@ -0,0 +1,163 @@ +"""Add pixel grid. + +Revision: #888e352d7526 at 2021-01-02 18:11:02 +Revises: #f11cd76d2f45 +""" + +import os + +import sqlalchemy as sa +from alembic import op + +from urban_meal_delivery import configuration + + +revision = '888e352d7526' +down_revision = 'f11cd76d2f45' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision 888e352d7526.""" + op.create_table( + 'grids', + sa.Column('id', sa.SmallInteger(), autoincrement=True, nullable=False), + sa.Column('city_id', sa.SmallInteger(), nullable=False), + sa.Column('side_length', sa.SmallInteger(), nullable=True), + sa.PrimaryKeyConstraint('id', name=op.f('pk_grids')), + sa.ForeignKeyConstraint( + ['city_id'], + [f'{config.CLEAN_SCHEMA}.cities.id'], + name=op.f('fk_grids_to_cities_via_city_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.UniqueConstraint('side_length', name=op.f('uq_grids_on_side_length')), + schema=config.CLEAN_SCHEMA, + ) + + op.create_table( + 'pixels', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('grid_id', sa.SmallInteger(), nullable=False), + sa.Column('n_x', sa.SmallInteger(), nullable=False), + sa.Column('n_y', sa.SmallInteger(), nullable=False), + sa.CheckConstraint('0 <= n_x', name=op.f('ck_pixels_on_n_x_is_positive')), + sa.CheckConstraint('0 <= n_y', name=op.f('ck_pixels_on_n_y_is_positive')), + sa.ForeignKeyConstraint( + ['grid_id'], + [f'{config.CLEAN_SCHEMA}.grids.id'], + name=op.f('fk_pixels_to_grids_via_grid_id'), + onupdate='RESTRICT', + 
ondelete='RESTRICT', + ), + sa.PrimaryKeyConstraint('id', name=op.f('pk_pixels')), + sa.UniqueConstraint( + 'grid_id', 'n_x', 'n_y', name=op.f('uq_pixels_on_grid_id_n_x_n_y'), + ), + sa.UniqueConstraint('id', 'grid_id', name=op.f('uq_pixels_on_id_grid_id')), + schema=config.CLEAN_SCHEMA, + ) + + op.create_index( + op.f('ix_pixels_on_grid_id'), + 'pixels', + ['grid_id'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + op.create_index( + op.f('ix_pixels_on_n_x'), + 'pixels', + ['n_x'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + op.create_index( + op.f('ix_pixels_on_n_y'), + 'pixels', + ['n_y'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + + # These `UniqueConstraints`s are needed by the `addresses_pixels` table below. + op.create_unique_constraint( + 'uq_addresses_on_id_city_id', + 'addresses', + ['id', 'city_id'], + schema=config.CLEAN_SCHEMA, + ) + op.create_unique_constraint( + 'uq_grids_on_id_city_id', + 'grids', + ['id', 'city_id'], + schema=config.CLEAN_SCHEMA, + ) + + op.create_table( + 'addresses_pixels', + sa.Column('address_id', sa.Integer(), nullable=False), + sa.Column('city_id', sa.SmallInteger(), nullable=False), + sa.Column('grid_id', sa.SmallInteger(), nullable=False), + sa.Column('pixel_id', sa.Integer(), nullable=False), + sa.ForeignKeyConstraint( + ['address_id', 'city_id'], + [ + f'{config.CLEAN_SCHEMA}.addresses.id', + f'{config.CLEAN_SCHEMA}.addresses.city_id', + ], + name=op.f('fk_addresses_pixels_to_addresses_via_address_id_city_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.ForeignKeyConstraint( + ['grid_id', 'city_id'], + [ + f'{config.CLEAN_SCHEMA}.grids.id', + f'{config.CLEAN_SCHEMA}.grids.city_id', + ], + name=op.f('fk_addresses_pixels_to_grids_via_grid_id_city_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.ForeignKeyConstraint( + ['pixel_id', 'grid_id'], + [ + f'{config.CLEAN_SCHEMA}.pixels.id', + f'{config.CLEAN_SCHEMA}.pixels.grid_id', + ], + 
name=op.f('fk_addresses_pixels_to_pixels_via_pixel_id_grid_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.PrimaryKeyConstraint( + 'address_id', 'pixel_id', name=op.f('pk_addresses_pixels'), + ), + sa.UniqueConstraint( + 'address_id', + 'grid_id', + name=op.f('uq_addresses_pixels_on_address_id_grid_id'), + ), + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision f11cd76d2f45.""" + op.drop_table('addresses_pixels', schema=config.CLEAN_SCHEMA) + op.drop_index( + op.f('ix_pixels_on_n_y'), table_name='pixels', schema=config.CLEAN_SCHEMA, + ) + op.drop_index( + op.f('ix_pixels_on_n_x'), table_name='pixels', schema=config.CLEAN_SCHEMA, + ) + op.drop_index( + op.f('ix_pixels_on_grid_id'), table_name='pixels', schema=config.CLEAN_SCHEMA, + ) + op.drop_table('pixels', schema=config.CLEAN_SCHEMA) + op.drop_table('grids', schema=config.CLEAN_SCHEMA) diff --git a/setup.cfg b/setup.cfg index 7443dc5..e68c2da 100644 --- a/setup.cfg +++ b/setup.cfg @@ -89,6 +89,8 @@ extend-ignore = # Comply with black's style. # Source: https://github.com/psf/black/blob/master/docs/compatible_configs.md#flake8 E203, W503, WPS348, + # String constant over-use is checked visually by the programmer. + WPS226, # Allow underscores in numbers. WPS303, # f-strings are ok. @@ -114,8 +116,6 @@ per-file-ignores = WPS114,WPS118, # Revisions may have too many expressions. WPS204,WPS213, - # No overuse of string constants (e.g., 'RESTRICT'). - WPS226, # Too many noqa's are ok. WPS402, noxfile.py: @@ -125,8 +125,6 @@ per-file-ignores = WPS202, # TODO (isort): Remove after simplifying the nox session "lint". WPS213, - # No overuse of string constants (e.g., '--version'). - WPS226, # The noxfile is rather long => allow many noqa's. WPS402, src/urban_meal_delivery/configuration.py: @@ -134,13 +132,9 @@ per-file-ignores = WPS115, # Numbers are normal in config files. WPS432, - # No real string constant over-use. 
- src/urban_meal_delivery/db/addresses.py: - WPS226, - src/urban_meal_delivery/db/cities.py: - WPS226, - src/urban_meal_delivery/db/orders.py: - WPS226, + src/urban_meal_delivery/db/__init__.py: + # Top-level of a sub-packages is intended to import a lot. + F401, tests/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, @@ -158,14 +152,15 @@ per-file-ignores = WPS202,WPS204,WPS214, # Do not check for Jones complexity in the test suite. WPS221, - # No overuse of string constants (e.g., '__version__'). - WPS226, # We do not care about the number of "# noqa"s in the test suite. WPS402, # Allow closures. WPS430, # Numbers are normal in test cases as expected results. WPS432, + tests/db/fake_data/__init__.py: + # Top-level of a sub-packages is intended to import a lot. + F401,WPS201, # Explicitly set mccabe's maximum complexity to 10 as recommended by # Thomas McCabe, the inventor of the McCabe complexity, and the NIST. diff --git a/src/urban_meal_delivery/db/__init__.py b/src/urban_meal_delivery/db/__init__.py index 8b9f0b4..a73f40e 100644 --- a/src/urban_meal_delivery/db/__init__.py +++ b/src/urban_meal_delivery/db/__init__.py @@ -1,11 +1,14 @@ """Provide the ORM models and a connection to the database.""" -from urban_meal_delivery.db.addresses import Address # noqa:F401 -from urban_meal_delivery.db.cities import City # noqa:F401 -from urban_meal_delivery.db.connection import make_engine # noqa:F401 -from urban_meal_delivery.db.connection import make_session_factory # noqa:F401 -from urban_meal_delivery.db.couriers import Courier # noqa:F401 -from urban_meal_delivery.db.customers import Customer # noqa:F401 -from urban_meal_delivery.db.meta import Base # noqa:F401 -from urban_meal_delivery.db.orders import Order # noqa:F401 -from urban_meal_delivery.db.restaurants import Restaurant # noqa:F401 +from urban_meal_delivery.db.addresses import Address +from urban_meal_delivery.db.addresses_pixels import AddressPixelAssociation +from 
urban_meal_delivery.db.cities import City +from urban_meal_delivery.db.connection import make_engine +from urban_meal_delivery.db.connection import make_session_factory +from urban_meal_delivery.db.couriers import Courier +from urban_meal_delivery.db.customers import Customer +from urban_meal_delivery.db.grids import Grid +from urban_meal_delivery.db.meta import Base +from urban_meal_delivery.db.orders import Order +from urban_meal_delivery.db.pixels import Pixel +from urban_meal_delivery.db.restaurants import Restaurant diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index bfb848c..f392207 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -46,6 +46,8 @@ class Address(meta.Base): '-180 <= longitude AND longitude <= 180', name='longitude_between_180_degrees', ), + # Needed by a `ForeignKeyConstraint` in `AddressPixelAssociation`. + sa.UniqueConstraint('id', 'city_id'), sa.CheckConstraint( '30000 <= zip_code AND zip_code <= 99999', name='valid_zip_code', ), @@ -60,12 +62,12 @@ class Address(meta.Base): back_populates='pickup_address', foreign_keys='[Order._pickup_address_id]', ) - orders_delivered = orm.relationship( 'Order', back_populates='delivery_address', foreign_keys='[Order._delivery_address_id]', ) + pixels = orm.relationship('AddressPixelAssociation', back_populates='address') def __init__(self, *args: Any, **kwargs: Any) -> None: """Create a new address.""" diff --git a/src/urban_meal_delivery/db/addresses_pixels.py b/src/urban_meal_delivery/db/addresses_pixels.py new file mode 100644 index 0000000..3ba198f --- /dev/null +++ b/src/urban_meal_delivery/db/addresses_pixels.py @@ -0,0 +1,56 @@ +"""Model for the many-to-many relationship between `Address` and `Pixel` objects.""" + +import sqlalchemy as sa +from sqlalchemy import orm + +from urban_meal_delivery.db import meta + + +class AddressPixelAssociation(meta.Base): + """Association pattern between `Address` 
and `Pixel`. + + This approach is needed here mainly because it implicitly + updates the `_city_id` and `_grid_id` columns. + + Further info: + https://docs.sqlalchemy.org/en/stable/orm/basic_relationships.html#association-object # noqa:E501 + """ + + __tablename__ = 'addresses_pixels' + + # Columns + _address_id = sa.Column('address_id', sa.Integer, primary_key=True) + _city_id = sa.Column('city_id', sa.SmallInteger, nullable=False) + _grid_id = sa.Column('grid_id', sa.SmallInteger, nullable=False) + _pixel_id = sa.Column('pixel_id', sa.Integer, primary_key=True) + + # Constraints + __table_args__ = ( + # An `Address` can only be on a `Grid` ... + sa.ForeignKeyConstraint( + ['address_id', 'city_id'], + ['addresses.id', 'addresses.city_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + # ... if their `.city` attributes match. + sa.ForeignKeyConstraint( + ['grid_id', 'city_id'], + ['grids.id', 'grids.city_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + # Each `Address` can only be on a `Grid` once. + sa.UniqueConstraint('address_id', 'grid_id'), + # An association must reference an existing `Grid`-`Pixel` pair. 
+ sa.ForeignKeyConstraint( + ['pixel_id', 'grid_id'], + ['pixels.id', 'pixels.grid_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + ) + + # Relationships + address = orm.relationship('Address', back_populates='pixels') + pixel = orm.relationship('Pixel', back_populates='addresses') diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index a8e1360..d0d7422 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -45,6 +45,7 @@ class City(meta.Base): # Relationships addresses = orm.relationship('Address', back_populates='city') + grids = orm.relationship('Grid', back_populates='city') def __init__(self, *args: Any, **kwargs: Any) -> None: """Create a new city.""" diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py new file mode 100644 index 0000000..26a7cea --- /dev/null +++ b/src/urban_meal_delivery/db/grids.py @@ -0,0 +1,48 @@ +"""Provide the ORM's `Grid` model.""" + +import sqlalchemy as sa +from sqlalchemy import orm + +from urban_meal_delivery.db import meta + + +class Grid(meta.Base): + """A grid of `Pixel`s to partition a `City`. + + A grid is characterized by the uniform size of the `Pixel`s it contains. + That is configures via the `Grid.side_length` attribute. + """ + + __tablename__ = 'grids' + + # Columns + id = sa.Column( # noqa:WPS125 + sa.SmallInteger, primary_key=True, autoincrement=True, + ) + _city_id = sa.Column('city_id', sa.SmallInteger, nullable=False) + side_length = sa.Column(sa.SmallInteger, nullable=False, unique=True) + + # Constraints + __table_args__ = ( + sa.ForeignKeyConstraint( + ['city_id'], ['cities.id'], onupdate='RESTRICT', ondelete='RESTRICT', + ), + # Needed by a `ForeignKeyConstraint` in `address_pixel_association`. 
+ sa.UniqueConstraint('id', 'city_id'), + ) + + # Relationships + city = orm.relationship('City', back_populates='grids') + pixels = orm.relationship('Pixel', back_populates='grid') + + def __repr__(self) -> str: + """Non-literal text representation.""" + return '<{cls}: {area}>'.format( + cls=self.__class__.__name__, area=self.pixel_area, + ) + + # Convenience properties + @property + def pixel_area(self) -> float: + """The area of a `Pixel` on the grid in square kilometers.""" + return (self.side_length ** 2) / 1_000_000 # noqa:WPS432 diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py new file mode 100644 index 0000000..6d28227 --- /dev/null +++ b/src/urban_meal_delivery/db/pixels.py @@ -0,0 +1,59 @@ +"""Provide the ORM's `Pixel` model.""" + +import sqlalchemy as sa +from sqlalchemy import orm + +from urban_meal_delivery.db import meta + + +class Pixel(meta.Base): + """A pixel in a `Grid`. + + Square pixels aggregate `Address` objects within a `City`. + Every `Address` belongs to exactly one `Pixel` in a `Grid`. + + Every `Pixel` has a unique "coordinate" within the `Grid`. + """ + + __tablename__ = 'pixels' + + # Columns + id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) # noqa:WPS125 + _grid_id = sa.Column('grid_id', sa.SmallInteger, nullable=False, index=True) + n_x = sa.Column(sa.SmallInteger, nullable=False, index=True) + n_y = sa.Column(sa.SmallInteger, nullable=False, index=True) + + # Constraints + __table_args__ = ( + sa.ForeignKeyConstraint( + ['grid_id'], ['grids.id'], onupdate='RESTRICT', ondelete='RESTRICT', + ), + sa.CheckConstraint('0 <= n_x', name='n_x_is_positive'), + sa.CheckConstraint('0 <= n_y', name='n_y_is_positive'), + # Needed by a `ForeignKeyConstraint` in `AddressPixelAssociation`. + sa.UniqueConstraint('id', 'grid_id'), + # Each coordinate within the same `grid` is used at most once. 
+ sa.UniqueConstraint('grid_id', 'n_x', 'n_y'), + ) + + # Relationships + grid = orm.relationship('Grid', back_populates='pixels') + addresses = orm.relationship('AddressPixelAssociation', back_populates='pixel') + + def __repr__(self) -> str: + """Non-literal text representation.""" + return '<{cls}: ({x}, {y})>'.format( + cls=self.__class__.__name__, x=self.n_x, y=self.n_y, + ) + + # Convenience properties + + @property + def side_length(self) -> int: + """The length of one side of a pixel in meters.""" + return self.grid.side_length + + @property + def area(self) -> float: + """The area of a pixel in square kilometers.""" + return self.grid.pixel_area diff --git a/tests/db/conftest.py b/tests/db/conftest.py index fcacfe7..8d2e3d1 100644 --- a/tests/db/conftest.py +++ b/tests/db/conftest.py @@ -85,3 +85,5 @@ courier = fake_data.courier customer = fake_data.customer order = fake_data.order restaurant = fake_data.restaurant +grid = fake_data.grid +pixel = fake_data.pixel diff --git a/tests/db/fake_data/__init__.py b/tests/db/fake_data/__init__.py index f6b879c..80a7be3 100644 --- a/tests/db/fake_data/__init__.py +++ b/tests/db/fake_data/__init__.py @@ -1,14 +1,16 @@ """Fixtures for testing the ORM layer with fake data.""" -from tests.db.fake_data.fixture_makers import make_address # noqa:F401 -from tests.db.fake_data.fixture_makers import make_courier # noqa:F401 -from tests.db.fake_data.fixture_makers import make_customer # noqa:F401 -from tests.db.fake_data.fixture_makers import make_order # noqa:F401 -from tests.db.fake_data.fixture_makers import make_restaurant # noqa:F401 -from tests.db.fake_data.static_fixtures import address # noqa:F401 -from tests.db.fake_data.static_fixtures import city # noqa:F401 -from tests.db.fake_data.static_fixtures import city_data # noqa:F401 -from tests.db.fake_data.static_fixtures import courier # noqa:F401 -from tests.db.fake_data.static_fixtures import customer # noqa:F401 -from tests.db.fake_data.static_fixtures import order # 
noqa:F401 -from tests.db.fake_data.static_fixtures import restaurant # noqa:F401 +from tests.db.fake_data.fixture_makers import make_address +from tests.db.fake_data.fixture_makers import make_courier +from tests.db.fake_data.fixture_makers import make_customer +from tests.db.fake_data.fixture_makers import make_order +from tests.db.fake_data.fixture_makers import make_restaurant +from tests.db.fake_data.static_fixtures import address +from tests.db.fake_data.static_fixtures import city +from tests.db.fake_data.static_fixtures import city_data +from tests.db.fake_data.static_fixtures import courier +from tests.db.fake_data.static_fixtures import customer +from tests.db.fake_data.static_fixtures import grid +from tests.db.fake_data.static_fixtures import order +from tests.db.fake_data.static_fixtures import pixel +from tests.db.fake_data.static_fixtures import restaurant diff --git a/tests/db/fake_data/static_fixtures.py b/tests/db/fake_data/static_fixtures.py index df7d5b7..ee6682d 100644 --- a/tests/db/fake_data/static_fixtures.py +++ b/tests/db/fake_data/static_fixtures.py @@ -56,3 +56,15 @@ def restaurant(address, make_restaurant): def order(make_order, restaurant): """An `Order` object for the `restaurant`.""" return make_order(restaurant=restaurant) + + +@pytest.fixture +def grid(city): + """A `Grid` with a pixel area of 1 square kilometer.""" + return db.Grid(city=city, side_length=1000) + + +@pytest.fixture +def pixel(grid): + """The `Pixel` in the lower-left corner of the `grid`.""" + return db.Pixel(grid=grid, n_x=0, n_y=0) diff --git a/tests/db/test_addresses_pixels.py b/tests/db/test_addresses_pixels.py new file mode 100644 index 0000000..40e41f8 --- /dev/null +++ b/tests/db/test_addresses_pixels.py @@ -0,0 +1,136 @@ +"""Test the ORM's `AddressPixelAssociation` model. + +Implementation notes: + The test suite has 100% coverage without the test cases in this module. 
+ That is so as the `AddressPixelAssociation` model is imported into the + `urban_meal_delivery.db` namespace so that the `Address` and `Pixel` models + can find it upon initialization. Yet, none of the other unit tests run any + code associated with it. Therefore, we test it here as non-e2e tests and do + not measure its coverage. +""" +# pylint:disable=no-self-use + +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from urban_meal_delivery import db + + +@pytest.fixture +def assoc(address, pixel): + """An association between `address` and `pixel`.""" + return db.AddressPixelAssociation(address=address, pixel=pixel) + + +@pytest.mark.no_cover +class TestSpecialMethods: + """Test special methods in `Pixel`.""" + + def test_create_an_address_pixel_association(self, assoc): + """Test instantiation of a new `AddressPixelAssociation` object.""" + assert assoc is not None + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `AddressPixelAssociation`. + + The foreign keys to `City` and `Grid` are tested via INSERT and not + DELETE statements as the latter would already fail because of foreign + keys defined in `Address` and `Pixel`. + """ + + def test_insert_into_database(self, db_session, assoc): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.AddressPixelAssociation).count() == 0 + + db_session.add(assoc) + db_session.commit() + + assert db_session.query(db.AddressPixelAssociation).count() == 1 + + def test_delete_a_referenced_address(self, db_session, assoc): + """Remove a record that is referenced with a FK.""" + db_session.add(assoc) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. 
+ stmt = sqla.delete(db.Address).where(db.Address.id == assoc.address.id) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_addresses_via_address_id_city_id', + ): + db_session.execute(stmt) + + def test_reference_an_invalid_city(self, db_session, address, pixel): + """Insert a record with an invalid foreign key.""" + db_session.add(address) + db_session.add(pixel) + db_session.commit() + + # Must insert without ORM as otherwise SQLAlchemy figures out + # that something is wrong before any query is sent to the database. + stmt = sqla.insert(db.AddressPixelAssociation).values( + address_id=address.id, + city_id=999, + grid_id=pixel.grid.id, + pixel_id=pixel.id, + ) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_addresses_via_address_id_city_id', + ): + db_session.execute(stmt) + + def test_reference_an_invalid_grid(self, db_session, address, pixel): + """Insert a record with an invalid foreign key.""" + db_session.add(address) + db_session.add(pixel) + db_session.commit() + + # Must insert without ORM as otherwise SQLAlchemy figures out + # that something is wrong before any query is sent to the database. + stmt = sqla.insert(db.AddressPixelAssociation).values( + address_id=address.id, + city_id=address.city.id, + grid_id=999, + pixel_id=pixel.id, + ) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_grids_via_grid_id_city_id', + ): + db_session.execute(stmt) + + def test_delete_a_referenced_pixel(self, db_session, assoc): + """Remove a record that is referenced with a FK.""" + db_session.add(assoc) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. 
+ stmt = sqla.delete(db.Pixel).where(db.Pixel.id == assoc.pixel.id) + + with pytest.raises( + sa_exc.IntegrityError, + match='fk_addresses_pixels_to_pixels_via_pixel_id_grid_id', + ): + db_session.execute(stmt) + + def test_put_an_address_on_a_grid_twice(self, db_session, address, assoc, pixel): + """Insert a record that violates a unique constraint.""" + db_session.add(assoc) + db_session.commit() + + # Create a neighboring `Pixel` and put the same `address` as in `pixel` in it. + neighbor = db.Pixel(grid=pixel.grid, n_x=pixel.n_x, n_y=pixel.n_y + 1) + another_assoc = db.AddressPixelAssociation(address=address, pixel=neighbor) + + db_session.add(another_assoc) + + with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): + db_session.commit() diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py new file mode 100644 index 0000000..0333b64 --- /dev/null +++ b/tests/db/test_grids.py @@ -0,0 +1,60 @@ +"""Test the ORM's `Grid` model.""" +# pylint:disable=no-self-use + +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from urban_meal_delivery import db + + +class TestSpecialMethods: + """Test special methods in `Grid`.""" + + def test_create_grid(self, grid): + """Test instantiation of a new `Grid` object.""" + assert grid is not None + + def test_text_representation(self, grid): + """`Grid` has a non-literal text representation.""" + result = repr(grid) + + assert result == f'' + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `Grid`.""" + + def test_insert_into_database(self, db_session, grid): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Grid).count() == 0 + + db_session.add(grid) + db_session.commit() + + assert db_session.query(db.Grid).count() == 1 + + def test_delete_a_referenced_city(self, db_session, grid): + """Remove a record that is referenced with a FK.""" + db_session.add(grid) + db_session.commit() + 
+ # Must delete without ORM as otherwise an UPDATE statement is emitted. + stmt = sqla.delete(db.City).where(db.City.id == grid.city.id) + + with pytest.raises( + sa_exc.IntegrityError, match='fk_grids_to_cities_via_city_id', + ): + db_session.execute(stmt) + + +class TestProperties: + """Test properties in `Grid`.""" + + def test_pixel_area(self, grid): + """Test `Grid.pixel_area` property.""" + result = grid.pixel_area + + assert result == 1.0 diff --git a/tests/db/test_pixels.py b/tests/db/test_pixels.py new file mode 100644 index 0000000..878d6cc --- /dev/null +++ b/tests/db/test_pixels.py @@ -0,0 +1,90 @@ +"""Test the ORM's `Pixel` model.""" +# pylint:disable=no-self-use + +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from urban_meal_delivery import db + + +class TestSpecialMethods: + """Test special methods in `Pixel`.""" + + def test_create_pixel(self, pixel): + """Test instantiation of a new `Pixel` object.""" + assert pixel is not None + + def test_text_representation(self, pixel): + """`Pixel` has a non-literal text representation.""" + result = repr(pixel) + + assert result == f'' + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `Pixel`.""" + + def test_insert_into_database(self, db_session, pixel): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Pixel).count() == 0 + + db_session.add(pixel) + db_session.commit() + + assert db_session.query(db.Pixel).count() == 1 + + def test_delete_a_referenced_grid(self, db_session, pixel): + """Remove a record that is referenced with a FK.""" + db_session.add(pixel) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. 
+ stmt = sqla.delete(db.Grid).where(db.Grid.id == pixel.grid.id) + + with pytest.raises( + sa_exc.IntegrityError, match='fk_pixels_to_grids_via_grid_id', + ): + db_session.execute(stmt) + + def test_negative_n_x(self, db_session, pixel): + """Insert an instance with invalid data.""" + pixel.n_x = -1 + db_session.add(pixel) + + with pytest.raises(sa_exc.IntegrityError, match='n_x_is_positive'): + db_session.commit() + + def test_negative_n_y(self, db_session, pixel): + """Insert an instance with invalid data.""" + pixel.n_y = -1 + db_session.add(pixel) + + with pytest.raises(sa_exc.IntegrityError, match='n_y_is_positive'): + db_session.commit() + + def test_non_unique_coordinates_within_a_grid(self, db_session, pixel): + """Insert an instance with invalid data.""" + another_pixel = db.Pixel(grid=pixel.grid, n_x=pixel.n_x, n_y=pixel.n_y) + db_session.add(another_pixel) + + with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): + db_session.commit() + + +class TestProperties: + """Test properties in `Pixel`.""" + + def test_side_length(self, pixel): + """Test `Pixel.side_length` property.""" + result = pixel.side_length + + assert result == 1_000 + + def test_area(self, pixel): + """Test `Pixel.area` property.""" + result = pixel.area + + assert result == 1.0 From 2e3ccd14d574d38caeb4524a46a59b6b7e9a5b28 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 4 Jan 2021 18:50:26 +0100 Subject: [PATCH 23/72] Use globals for the database connection - remove the factory functions for creating engines and sessions - define global engine, connection, and session objects to be used everywhere in the urban_meal_delivery package --- noxfile.py | 10 +++++++++ research/clean_data.ipynb | 3 +-- setup.cfg | 2 +- src/urban_meal_delivery/configuration.py | 5 ++++- src/urban_meal_delivery/db/__init__.py | 5 +++-- src/urban_meal_delivery/db/connection.py | 27 +++++++++++++++-------- tests/db/conftest.py | 28 +++++++++++++++++------- tests/test_config.py | 18 
++++++++++++++- 8 files changed, 74 insertions(+), 24 deletions(-) diff --git a/noxfile.py b/noxfile.py index cd65168..1557320 100644 --- a/noxfile.py +++ b/noxfile.py @@ -254,6 +254,12 @@ def test(session): # For xdoctest, the default arguments are different from pytest. args = posargs or [PACKAGE_IMPORT_NAME] + + # The "TESTING" environment variable forces the global `engine`, `connection`, + # and `session` objects to be set to `None` and avoid any database connection. + # For pytest above this is not necessary as pytest sets this variable itself. + session.env['TESTING'] = 'true' + session.run('xdoctest', '--version') session.run('xdoctest', '--quiet', *args) # --quiet => less verbose output @@ -297,6 +303,10 @@ def docs(session): session.run('poetry', 'install', '--no-dev', external=True) _install_packages(session, 'sphinx', 'sphinx-autodoc-typehints') + # The "TESTING" environment variable forces the global `engine`, `connection`, + # and `session` objects to be set to `None` and avoid any database connection. + session.env['TESTING'] = 'true' + session.run('sphinx-build', DOCS_SRC, DOCS_BUILD) # Verify all external links return 200 OK. session.run('sphinx-build', '-b', 'linkcheck', DOCS_SRC, DOCS_BUILD) diff --git a/research/clean_data.ipynb b/research/clean_data.ipynb index a02ec10..3c7fdee 100644 --- a/research/clean_data.ipynb +++ b/research/clean_data.ipynb @@ -103,8 +103,7 @@ "metadata": {}, "outputs": [], "source": [ - "_engine = db.make_engine()\n", - "connection = _engine.connect()" + "connection = db.connection" ] }, { diff --git a/setup.cfg b/setup.cfg index e68c2da..af25bbf 100644 --- a/setup.cfg +++ b/setup.cfg @@ -134,7 +134,7 @@ per-file-ignores = WPS432, src/urban_meal_delivery/db/__init__.py: # Top-level of a sub-packages is intended to import a lot. - F401, + F401,WPS201, tests/*.py: # Type annotations are not strictly enforced. 
ANN0, ANN2, diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index d20320a..0354da6 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -85,7 +85,10 @@ def make_config(env: str = 'production') -> Config: raise ValueError("Must be either 'production' or 'testing'") # Without a PostgreSQL database the package cannot work. - if config.DATABASE_URI is None: + # As pytest sets the "TESTING" environment variable explicitly, + # the warning is only emitted if the code is not run by pytest. + # We see the bad configuration immediately as all "db" tests fail. + if config.DATABASE_URI is None and not os.getenv('TESTING'): warnings.warn('Bad configurartion: no DATABASE_URI set in the environment') return config diff --git a/src/urban_meal_delivery/db/__init__.py b/src/urban_meal_delivery/db/__init__.py index a73f40e..aae8516 100644 --- a/src/urban_meal_delivery/db/__init__.py +++ b/src/urban_meal_delivery/db/__init__.py @@ -3,8 +3,9 @@ from urban_meal_delivery.db.addresses import Address from urban_meal_delivery.db.addresses_pixels import AddressPixelAssociation from urban_meal_delivery.db.cities import City -from urban_meal_delivery.db.connection import make_engine -from urban_meal_delivery.db.connection import make_session_factory +from urban_meal_delivery.db.connection import connection +from urban_meal_delivery.db.connection import engine +from urban_meal_delivery.db.connection import session from urban_meal_delivery.db.couriers import Courier from urban_meal_delivery.db.customers import Customer from urban_meal_delivery.db.grids import Grid diff --git a/src/urban_meal_delivery/db/connection.py b/src/urban_meal_delivery/db/connection.py index 460ef9d..de32ab9 100644 --- a/src/urban_meal_delivery/db/connection.py +++ b/src/urban_meal_delivery/db/connection.py @@ -1,17 +1,26 @@ -"""Provide connection utils for the ORM layer.""" +"""Provide connection utils for the ORM layer. 
+ +This module defines fully configured `engine`, `connection`, and `session` +objects to be used as globals within the `urban_meal_delivery` package. + +If a database is not guaranteed to be available, they are set to `None`. +That is the case on the CI server. +""" + +import os import sqlalchemy as sa -from sqlalchemy import engine from sqlalchemy import orm import urban_meal_delivery -def make_engine() -> engine.Engine: # pragma: no cover - """Provide a configured Engine object.""" - return sa.create_engine(urban_meal_delivery.config.DATABASE_URI) +if os.getenv('TESTING'): + engine = None + connection = None + session = None - -def make_session_factory() -> orm.Session: # pragma: no cover - """Provide a configured Session factory.""" - return orm.sessionmaker(bind=make_engine()) +else: # pragma: no cover + engine = sa.create_engine(urban_meal_delivery.config.DATABASE_URI) + connection = engine.connect() + session = orm.sessionmaker(bind=connection)() diff --git a/tests/db/conftest.py b/tests/db/conftest.py index 8d2e3d1..3d8c676 100644 --- a/tests/db/conftest.py +++ b/tests/db/conftest.py @@ -1,6 +1,7 @@ """Utils for testing the ORM layer.""" import pytest +import sqlalchemy as sa from alembic import command as migrations_cmd from alembic import config as migrations_config from sqlalchemy import orm @@ -26,11 +27,18 @@ def db_connection(request): This ensures that Alembic's migration files are consistent. """ - engine = db.make_engine() + # We need a fresh database connection for each of the two `params`. + # Otherwise, the first test of the parameter run second will fail. + engine = sa.create_engine(config.DATABASE_URI) connection = engine.connect() + # Monkey patch the package's global `engine` and `connection` objects, + # just in case if it is used somewhere in the code base. 
+ db.engine = engine + db.connection = connection + if request.param == 'all_at_once': - engine.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};') + connection.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};') db.Base.metadata.create_all(connection) else: cfg = migrations_config.Config('alembic.ini') @@ -54,13 +62,17 @@ def db_connection(request): @pytest.fixture def db_session(db_connection): """A SQLAlchemy session that rolls back everything after a test case.""" - # Begin the outer most transaction - # that is rolled back at the end of the test. + # Begin the outermost transaction + # that is rolled back at the end of each test case. transaction = db_connection.begin() - # Create a session bound on the same connection as the transaction. - # Using any other session would not work. - session_factory = orm.sessionmaker() - session = session_factory(bind=db_connection) + + # Create a session bound to the same connection as the `transaction`. + # Using any other session would not result in the roll back. + session = orm.sessionmaker()(bind=db_connection) + + # Monkey patch the package's global `session` object, + # which is used heavily in the code base. 
+ db.session = session try: yield session diff --git a/tests/test_config.py b/tests/test_config.py index 6569161..9251d48 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -36,11 +36,27 @@ def test_database_uri_set(env, monkeypatch): @pytest.mark.parametrize('env', envs) -def test_no_database_uri_set(env, monkeypatch): +def test_no_database_uri_set_with_testing_env_var(env, monkeypatch): """Package does not work without DATABASE_URI set in the environment.""" monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', None) monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', None) + monkeypatch.setenv('TESTING', 'true') + + with pytest.warns(None) as record: + configuration.make_config(env) + + assert len(record) == 0 # noqa:WPS441,WPS507 + + +@pytest.mark.parametrize('env', envs) +def test_no_database_uri_set_without_testing_env_var(env, monkeypatch): + """Package does not work without DATABASE_URI set in the environment.""" + monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', None) + monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', None) + + monkeypatch.delenv('TESTING', raising=False) + with pytest.warns(UserWarning, match='no DATABASE_URI'): configuration.make_config(env) From a1cbb808fd0cbf0398d8ab0f360c50241c03c4ba Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 4 Jan 2021 20:33:10 +0100 Subject: [PATCH 24/72] Integrate the new Location class - the old `UTMCoordinate` class becomes the new `Location` class - its main purpose is to represent locations in both lat-long coordinates as well as in the UTM system - remove `Address.__init__()` and `City.__init__()` methods as they are not executed for entries retrieved from the database - simplfiy the `Location.__init__()` => remove `relative_to` argument --- setup.cfg | 3 + src/urban_meal_delivery/db/addresses.py | 47 +++++++--- src/urban_meal_delivery/db/cities.py | 74 +++++++--------- src/urban_meal_delivery/db/utils/__init__.py | 2 +- 
.../db/utils/{coordinates.py => locations.py} | 77 +++++++++-------- tests/db/test_addresses.py | 19 ++++ tests/db/test_cities.py | 47 ++++++---- ...{test_coordinates.py => test_locations.py} | 86 ++++++++----------- 8 files changed, 198 insertions(+), 157 deletions(-) rename src/urban_meal_delivery/db/utils/{coordinates.py => locations.py} (63%) rename tests/db/utils/{test_coordinates.py => test_locations.py} (63%) diff --git a/setup.cfg b/setup.cfg index af25bbf..c5aa9dd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -135,6 +135,9 @@ per-file-ignores = src/urban_meal_delivery/db/__init__.py: # Top-level of a sub-packages is intended to import a lot. F401,WPS201, + src/urban_meal_delivery/db/utils/__init__.py: + # Top-level of a sub-packages is intended to import a lot. + F401, tests/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index f392207..f4b853c 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -1,6 +1,5 @@ """Provide the ORM's `Address` model.""" -from typing import Any import sqlalchemy as sa from sqlalchemy import orm @@ -69,14 +68,9 @@ class Address(meta.Base): ) pixels = orm.relationship('AddressPixelAssociation', back_populates='address') - def __init__(self, *args: Any, **kwargs: Any) -> None: - """Create a new address.""" - # Call SQLAlchemy's default `.__init__()` method. - super().__init__(*args, **kwargs) - - self._utm_coordinates = utils.UTMCoordinate( - self.latitude, self.longitude, relative_to=self.city.as_origin, - ) + # We do not implement a `.__init__()` method and leave that to SQLAlchemy. + # Instead, we use `hasattr()` to check for uninitialized attributes. 
+ # grep:b1f68d24 pylint:disable=attribute-defined-outside-init def __repr__(self) -> str: """Non-literal text representation.""" @@ -95,12 +89,39 @@ class Address(meta.Base): """ return self.id == self._primary_id + @property + def location(self) -> utils.Location: + """The location of the address. + + The returned `Location` object relates to `.city.viewport.southwest`. + + See also the `.x` and `.y` properties that are shortcuts for + `.location.x` and `.location.y`. + + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. + """ + if not hasattr(self, '_location'): # noqa:WPS421 note:b1f68d24 + self._location = utils.Location(self.latitude, self.longitude) + self._location.relate_to(self.city.as_xy_origin) + return self._location + @property def x(self) -> int: # noqa=WPS111 - """The `.easting` of the address in meters, relative to the `.city`.""" - return self._utm_coordinates.x + """The relative x-coordinate within the `.city` in meters. + + On the implied x-y plane, the `.city`'s southwest corner is the origin. + + Shortcut for `.location.x`. + """ + return self.location.x @property def y(self) -> int: # noqa=WPS111 - """The `.northing` of the address in meters, relative to the `.city`.""" - return self._utm_coordinates.y + """The relative y-coordinate within the `.city` in meters. + + On the implied x-y plane, the `.city`'s southwest corner is the origin. + + Shortcut for `.location.y`. 
+ """ + return self.location.y diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index d0d7422..11175ad 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,6 +1,6 @@ """Provide the ORM's `City` model.""" -from typing import Any, Dict +from typing import Dict import sqlalchemy as sa from sqlalchemy import orm @@ -47,60 +47,52 @@ class City(meta.Base): addresses = orm.relationship('Address', back_populates='city') grids = orm.relationship('Grid', back_populates='city') - def __init__(self, *args: Any, **kwargs: Any) -> None: - """Create a new city.""" - # Call SQLAlchemy's default `.__init__()` method. - super().__init__(*args, **kwargs) - - # Take the "lower left" of the viewport as the origin - # of a Cartesian coordinate system. - lower_left = self.viewport['southwest'] - self._origin = utils.UTMCoordinate( - lower_left['latitude'], lower_left['longitude'], - ) + # We do not implement a `.__init__()` method and leave that to SQLAlchemy. + # Instead, we use `hasattr()` to check for uninitialized attributes. + # grep:d334120e pylint:disable=attribute-defined-outside-init def __repr__(self) -> str: """Non-literal text representation.""" return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name) @property - def location(self) -> Dict[str, float]: - """GPS location of the city's center. + def center(self) -> utils.Location: + """Location of the city's center. - Example: - {"latitude": 48.856614, "longitude": 2.3522219} + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. 
""" - return { - 'latitude': self._center_latitude, - 'longitude': self._center_longitude, - } + if not hasattr(self, '_center'): # noqa:WPS421 note:d334120e + self._center = utils.Location( + self._center_latitude, self._center_longitude, + ) + return self._center @property - def viewport(self) -> Dict[str, Dict[str, float]]: + def viewport(self) -> Dict[str, utils.Location]: """Google Maps viewport of the city. - Example: - { - 'northeast': {'latitude': 48.9021449, 'longitude': 2.4699208}, - 'southwest': {'latitude': 48.815573, 'longitude': 2.225193}, + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. + """ + if not hasattr(self, '_viewport'): # noqa:WPS421 note:d334120e + self._viewport = { + 'northeast': utils.Location( + self._northeast_latitude, self._northeast_longitude, + ), + 'southwest': utils.Location( + self._southwest_latitude, self._southwest_longitude, + ), } - """ # noqa:RST203 - return { - 'northeast': { - 'latitude': self._northeast_latitude, - 'longitude': self._northeast_longitude, - }, - 'southwest': { - 'latitude': self._southwest_latitude, - 'longitude': self._southwest_longitude, - }, - } + + return self._viewport @property - def as_origin(self) -> utils.UTMCoordinate: - """The lower left corner of the `.viewport` in UTM coordinates. + def as_xy_origin(self) -> utils.Location: + """The southwest corner of the `.viewport`. - This property serves as the `relative_to` argument to the - `UTMConstructor` when representing an `Address` in the x-y plane. + This property serves, for example, as the `other` argument to the + `Location.relate_to()` method when representing an `Address` + in the x-y plane. 
""" - return self._origin + return self.viewport['southwest'] diff --git a/src/urban_meal_delivery/db/utils/__init__.py b/src/urban_meal_delivery/db/utils/__init__.py index 3fe2e82..59ade94 100644 --- a/src/urban_meal_delivery/db/utils/__init__.py +++ b/src/urban_meal_delivery/db/utils/__init__.py @@ -1,3 +1,3 @@ """Utilities used by the ORM models.""" -from urban_meal_delivery.db.utils.coordinates import UTMCoordinate # noqa:F401 +from urban_meal_delivery.db.utils.locations import Location diff --git a/src/urban_meal_delivery/db/utils/coordinates.py b/src/urban_meal_delivery/db/utils/locations.py similarity index 63% rename from src/urban_meal_delivery/db/utils/coordinates.py rename to src/urban_meal_delivery/db/utils/locations.py index 17a60e6..f789bc3 100644 --- a/src/urban_meal_delivery/db/utils/coordinates.py +++ b/src/urban_meal_delivery/db/utils/locations.py @@ -1,4 +1,4 @@ -"""A `UTMCoordinate` class to unify working with coordinates.""" +"""A `Location` class to unify working with coordinates.""" from __future__ import annotations @@ -7,19 +7,27 @@ from typing import Optional import utm -class UTMCoordinate: - """A GPS location represented in UTM coordinates. +class Location: + """A location represented in WGS84 and UTM coordinates. 
- For further info, we refer to this comprehensive article on the UTM system: - https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system + WGS84: + - "conventional" system with latitude-longitude pairs + - assumes earth is a sphere and models the location in 3D + + UTM: + - the Universal Transverse Mercator sytem + - projects WGS84 coordinates onto a 2D map + - can be used for visualizations and calculations directly + - distances are in meters + + Further info how WGS84 and UTM are related: + https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system """ # pylint:disable=too-many-instance-attributes - def __init__( - self, latitude: float, longitude: float, relative_to: UTMCoordinate = None, - ) -> None: - """Cast a WGS84-conforming `latitude`-`longitude` pair as UTM coordinates.""" + def __init__(self, latitude: float, longitude: float) -> None: + """Create a location from a WGS84-conforming `latitude`-`longitude` pair.""" # The SQLAlchemy columns come as `Decimal`s due to the `DOUBLE_PRECISION`. self._latitude = float(latitude) self._longitude = float(longitude) @@ -35,36 +43,40 @@ class UTMCoordinate: self._normalized_easting: Optional[int] = None self._normalized_northing: Optional[int] = None - if relative_to: - try: - self.relate_to(relative_to) - except TypeError: - raise TypeError( - '`relative_to` must be a `UTMCoordinate` object', - ) from None - except ValueError: - raise ValueError( - '`relative_to` must be in the same UTM zone as the `latitude`-`longitude` pair', # noqa:E501 - ) from None - def __repr__(self) -> str: - """A non-literal text representation. + """A non-literal text representation in the UTM system. Convention is {ZONE} {EASTING} {NORTHING}. Example: - `'` + `'` """ - return f'' # noqa:WPS221 + return f'' # noqa:WPS221 + + @property + def latitude(self) -> float: + """The latitude of the location in degrees (WGS84). + + Between -90 and +90 degrees. 
+ """ + return self._latitude + + @property + def longitude(self) -> float: + """The longitude of the location in degrees (WGS84). + + Between -180 and +180 degrees. + """ + return self._longitude @property def easting(self) -> int: - """The easting of the location in meters.""" + """The easting of the location in meters (UTM).""" return self._easting @property def northing(self) -> int: - """The northing of the location in meters.""" + """The northing of the location in meters (UTM).""" return self._northing @property @@ -73,8 +85,8 @@ class UTMCoordinate: return f'{self._zone}{self._band}' def __eq__(self, other: object) -> bool: - """Check if two `UTMCoordinate` objects are the same location.""" - if not isinstance(other, UTMCoordinate): + """Check if two `Location` objects are the same location.""" + if not isinstance(other, Location): return NotImplemented if self.zone != other.zone: @@ -104,24 +116,21 @@ class UTMCoordinate: return self._normalized_northing - def relate_to(self, other: UTMCoordinate) -> None: + def relate_to(self, other: Location) -> None: """Make the origin in the lower-left corner relative to `other`. The `.x` and `.y` properties are the `.easting` and `.northing` values of `self` minus the ones from `other`. So, `.x` and `.y` make up a Cartesian coordinate system where the `other` origin is `(0, 0)`. - This method is implicitly called by `.__init__()` if that is called - with a `relative_to` argument. - To prevent semantic errors in calculations based on the `.x` and `.y` properties, the `other` origin may only be set once! 
""" if self._normalized_easting is not None: raise RuntimeError('the `other` origin may only be set once') - if not isinstance(other, UTMCoordinate): - raise TypeError('`other` is not a `UTMCoordinate` object') + if not isinstance(other, Location): + raise TypeError('`other` is not a `Location` object') if self.zone != other.zone: raise ValueError('`other` must be in the same zone, including the band') diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index 2f51bb8..8f44352 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -6,6 +6,7 @@ import sqlalchemy as sqla from sqlalchemy import exc as sa_exc from urban_meal_delivery import db +from urban_meal_delivery.db import utils class TestSpecialMethods: @@ -123,6 +124,24 @@ class TestProperties: assert result is False + def test_location(self, address): + """Test `Address.location` property.""" + latitude = float(address.latitude) + longitude = float(address.longitude) + + result = address.location + + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(latitude) + assert result.longitude == pytest.approx(longitude) + + def test_location_is_cached(self, address): + """Test `Address.location` property.""" + result1 = address.location + result2 = address.location + + assert result1 is result2 + def test_x_is_positive(self, address): """Test `Address.x` property.""" result = address.x diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index 94d69fe..a0110e5 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -3,8 +3,9 @@ import pytest -from tests.db.utils import test_coordinates as consts +from tests.db.utils import test_locations as consts from urban_meal_delivery import db +from urban_meal_delivery.db import utils class TestSpecialMethods: @@ -39,16 +40,22 @@ class TestConstraints: class TestProperties: """Test properties in `City`.""" - def test_location_data(self, city, city_data): - """Test `City.location` 
property.""" - result = city.location + def test_center(self, city, city_data): + """Test `City.center` property.""" + result = city.center - assert isinstance(result, dict) - assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data['_center_latitude']) - assert result['longitude'] == pytest.approx(city_data['_center_longitude']) + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(city_data['_center_latitude']) + assert result.longitude == pytest.approx(city_data['_center_longitude']) - def test_viewport_data_overall(self, city): + def test_center_is_cached(self, city): + """Test `City.center` property.""" + result1 = city.center + result2 = city.center + + assert result1 is result2 + + def test_viewport_overall(self, city): """Test `City.viewport` property.""" result = city.viewport @@ -56,18 +63,24 @@ class TestProperties: assert len(result) == 2 @pytest.mark.parametrize('corner', ['northeast', 'southwest']) - def test_viewport_data_corners(self, city, city_data, corner): + def test_viewport_corners(self, city, city_data, corner): """Test `City.viewport` property.""" result = city.viewport[corner] - assert isinstance(result, dict) - assert len(result) == 2 - assert result['latitude'] == pytest.approx(city_data[f'_{corner}_latitude']) - assert result['longitude'] == pytest.approx(city_data[f'_{corner}_longitude']) + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(city_data[f'_{corner}_latitude']) + assert result.longitude == pytest.approx(city_data[f'_{corner}_longitude']) - def test_city_in_utm_coordinates(self, city): - """Test `City.as_origin` property.""" - result = city.as_origin + def test_viewport_is_cached(self, city): + """Test `City.viewport` property.""" + result1 = city.viewport + result2 = city.viewport + + assert result1 is result2 + + def test_city_as_xy_origin(self, city): + """Test `City.as_xy_origin` property.""" + result = city.as_xy_origin assert 
result.zone == consts.ZONE assert consts.MIN_EASTING < result.easting < consts.MAX_EASTING diff --git a/tests/db/utils/test_coordinates.py b/tests/db/utils/test_locations.py similarity index 63% rename from tests/db/utils/test_coordinates.py rename to tests/db/utils/test_locations.py index 6909240..1ee4ddf 100644 --- a/tests/db/utils/test_coordinates.py +++ b/tests/db/utils/test_locations.py @@ -1,4 +1,4 @@ -"""Test the `UTMCoordinate` class.""" +"""Test the `Location` class.""" # pylint:disable=no-self-use import pytest @@ -14,8 +14,8 @@ ZONE = '31U' @pytest.fixture def location(address): - """A `UTMCoordinate` object based off the `address` fixture.""" - obj = utils.UTMCoordinate(address.latitude, address.longitude) + """A `Location` object based off the `address` fixture.""" + obj = utils.Location(address.latitude, address.longitude) assert obj.zone == ZONE # sanity check @@ -24,8 +24,8 @@ def location(address): @pytest.fixture def faraway_location(): - """A `UTMCoordinate` object far away from the `location`.""" - obj = utils.UTMCoordinate(latitude=0, longitude=0) + """A `Location` object far away from the `location`.""" + obj = utils.Location(latitude=0, longitude=0) assert obj.zone != ZONE # sanity check @@ -34,10 +34,8 @@ def faraway_location(): @pytest.fixture def origin(city): - """A `UTMCoordinate` object based off the one and only `city`.""" - # Use the `city`'s lower left viewport corner as the `(0, 0)` origin. 
- lower_left = city.viewport['southwest'] - obj = utils.UTMCoordinate(lower_left['latitude'], lower_left['longitude']) + """A `Location` object based off the one and only `city`.""" + obj = city.as_xy_origin assert obj.zone == ZONE # sanity check @@ -45,43 +43,17 @@ def origin(city): class TestSpecialMethods: - """Test special methods in `UTMCoordinate`.""" + """Test special methods in `Location`.""" def test_create_utm_coordinates(self, location): - """Test instantiation of a new `UTMCoordinate` object.""" + """Test instantiation of a new `Location` object.""" assert location is not None - def test_create_utm_coordinates_with_origin(self, address, origin): - """Test instantiation with a `relate_to` argument.""" - result = utils.UTMCoordinate( - latitude=address.latitude, longitude=address.longitude, relative_to=origin, - ) - - assert result is not None - - def test_create_utm_coordinates_with_non_utm_origin(self): - """Test instantiation with a `relate_to` argument of the wrong type.""" - with pytest.raises(TypeError, match='UTMCoordinate'): - utils.UTMCoordinate( - latitude=0, longitude=0, relative_to=object(), - ) - - def test_create_utm_coordinates_with_invalid_origin( - self, address, faraway_location, - ): - """Test instantiation with a `relate_to` argument at an invalid location.""" - with pytest.raises(ValueError, match='must be in the same UTM zone'): - utils.UTMCoordinate( - latitude=address.latitude, - longitude=address.longitude, - relative_to=faraway_location, - ) - def test_text_representation(self, location): """The text representation is a non-literal.""" result = repr(location) - assert result.startswith('') @pytest.mark.e2e @@ -103,7 +75,7 @@ class TestSpecialMethods: assert MIN_NORTHING < northing < MAX_NORTHING def test_compare_utm_coordinates_to_different_data_type(self, location): - """Test `UTMCoordinate.__eq__()`.""" + """Test `Location.__eq__()`.""" result = location == object() assert result is False @@ -111,56 +83,68 @@ class 
TestSpecialMethods: def test_compare_utm_coordinates_to_far_away_coordinates( self, location, faraway_location, ): - """Test `UTMCoordinate.__eq__()`.""" + """Test `Location.__eq__()`.""" with pytest.raises(ValueError, match='must be in the same zone'): bool(location == faraway_location) def test_compare_utm_coordinates_to_equal_coordinates(self, location, address): - """Test `UTMCoordinate.__eq__()`.""" - same_location = utils.UTMCoordinate(address.latitude, address.longitude) + """Test `Location.__eq__()`.""" + same_location = utils.Location(address.latitude, address.longitude) result = location == same_location assert result is True def test_compare_utm_coordinates_to_themselves(self, location): - """Test `UTMCoordinate.__eq__()`.""" + """Test `Location.__eq__()`.""" # pylint:disable=comparison-with-itself result = location == location # noqa:WPS312 assert result is True def test_compare_utm_coordinates_to_different_coordinates(self, location, origin): - """Test `UTMCoordinate.__eq__()`.""" + """Test `Location.__eq__()`.""" result = location == origin assert result is False class TestProperties: - """Test properties in `UTMCoordinate`.""" + """Test properties in `Location`.""" + + def test_latitude(self, location, address): + """Test `Location.latitude` property.""" + result = location.latitude + + assert result == pytest.approx(float(address.latitude)) + + def test_longitude(self, location, address): + """Test `Location.longitude` property.""" + result = location.longitude + + assert result == pytest.approx(float(address.longitude)) def test_easting(self, location): - """Test `UTMCoordinate.easting` property.""" + """Test `Location.easting` property.""" result = location.easting assert MIN_EASTING < result < MAX_EASTING def test_northing(self, location): - """Test `UTMCoordinate.northing` property.""" + """Test `Location.northing` property.""" result = location.northing assert MIN_NORTHING < result < MAX_NORTHING def test_zone(self, location): - """Test 
`UTMCoordinate.zone` property.""" + """Test `Location.zone` property.""" result = location.zone assert result == ZONE class TestRelateTo: - """Test the `UTMCoordinate.relate_to()` method and the `.x` and `.y` properties.""" + """Test the `Location.relate_to()` method and the `.x` and `.y` properties.""" def test_run_relate_to_twice(self, location, origin): """The `.relate_to()` method must only be run once.""" @@ -170,8 +154,8 @@ class TestRelateTo: location.relate_to(origin) def test_call_relate_to_with_wrong_other_type(self, location): - """`other` must be another `UTMCoordinate`.""" - with pytest.raises(TypeError, match='UTMCoordinate'): + """`other` must be another `Location`.""" + with pytest.raises(TypeError, match='Location'): location.relate_to(object()) def test_call_relate_to_with_far_away_other( From 776112d6092b485bab7e0d1f5ade6a6d4fb71df4 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 5 Jan 2021 18:58:48 +0100 Subject: [PATCH 25/72] Add `Grid.gridify()` constructor - the purpose of this constructor method is to generate all `Pixel`s for a `Grid` that have at least one `Address` assigned to them - fix missing `UniqueConstraint` in `Grid` class => it was not possible to create two `Grid`s with the same `.side_length` in different cities - change the `City.viewport` property into two separate `City.southwest` and `City.northeast` properties; also add `City.total_x` and `City.total_y` properties for convenience --- ...20210102_18_888e352d7526_add_pixel_grid.py | 14 ++- noxfile.py | 2 +- setup.cfg | 3 + src/urban_meal_delivery/db/addresses.py | 5 +- src/urban_meal_delivery/db/cities.py | 52 ++++++---- src/urban_meal_delivery/db/grids.py | 48 ++++++++++ tests/db/test_cities.py | 58 ++++++----- tests/db/test_grids.py | 95 +++++++++++++++++++ tests/db/test_orders.py | 2 +- tests/db/utils/test_locations.py | 2 +- 10 files changed, 224 insertions(+), 57 deletions(-) diff --git a/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py 
b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py index d1a9d34..fb1aa16 100644 --- a/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py +++ b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py @@ -36,7 +36,11 @@ def upgrade(): onupdate='RESTRICT', ondelete='RESTRICT', ), - sa.UniqueConstraint('side_length', name=op.f('uq_grids_on_side_length')), + sa.UniqueConstraint( + 'city_id', 'side_length', name=op.f('uq_grids_on_city_id_side_length'), + ), + # This `UniqueConstraint` is needed by the `addresses_pixels` table below. + sa.UniqueConstraint('id', 'city_id', name=op.f('uq_grids_on_id_city_id')), schema=config.CLEAN_SCHEMA, ) @@ -85,19 +89,13 @@ def upgrade(): schema=config.CLEAN_SCHEMA, ) - # These `UniqueConstraints`s are needed by the `addresses_pixels` table below. + # This `UniqueConstraint` is needed by the `addresses_pixels` table below. op.create_unique_constraint( 'uq_addresses_on_id_city_id', 'addresses', ['id', 'city_id'], schema=config.CLEAN_SCHEMA, ) - op.create_unique_constraint( - 'uq_grids_on_id_city_id', - 'grids', - ['id', 'city_id'], - schema=config.CLEAN_SCHEMA, - ) op.create_table( 'addresses_pixels', diff --git a/noxfile.py b/noxfile.py index 1557320..0567f22 100644 --- a/noxfile.py +++ b/noxfile.py @@ -470,7 +470,7 @@ def init_project(session): @nox.session(name='clean-pwd', python=PYTHON, venv_backend='none') -def clean_pwd(session): # noqa:WPS210,WPS231 +def clean_pwd(session): # noqa:WPS231 """Remove (almost) all glob patterns listed in .gitignore. The difference compared to `git clean -X` is that this task diff --git a/setup.cfg b/setup.cfg index c5aa9dd..6b3f076 100644 --- a/setup.cfg +++ b/setup.cfg @@ -170,6 +170,9 @@ per-file-ignores = # Source: https://en.wikipedia.org/wiki/Cyclomatic_complexity#Limiting_complexity_during_development max-complexity = 10 +# Allow more than wemake-python-styleguide's 5 local variables per function. 
+max-local-variables = 8 + # Allow more than wemake-python-styleguide's 7 methods per class. max-methods = 12 diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index f4b853c..10c07ea 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -1,6 +1,5 @@ """Provide the ORM's `Address` model.""" - import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql @@ -93,7 +92,7 @@ class Address(meta.Base): def location(self) -> utils.Location: """The location of the address. - The returned `Location` object relates to `.city.viewport.southwest`. + The returned `Location` object relates to `.city.southwest`. See also the `.x` and `.y` properties that are shortcuts for `.location.x` and `.location.y`. @@ -103,7 +102,7 @@ class Address(meta.Base): """ if not hasattr(self, '_location'): # noqa:WPS421 note:b1f68d24 self._location = utils.Location(self.latitude, self.longitude) - self._location.relate_to(self.city.as_xy_origin) + self._location.relate_to(self.city.southwest) return self._location @property diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index 11175ad..20367aa 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,6 +1,5 @@ """Provide the ORM's `City` model.""" -from typing import Dict import sqlalchemy as sa from sqlalchemy import orm @@ -69,30 +68,45 @@ class City(meta.Base): return self._center @property - def viewport(self) -> Dict[str, utils.Location]: - """Google Maps viewport of the city. + def northeast(self) -> utils.Location: + """The city's northeast corner of the Google Maps viewport. Implementation detail: This property is cached as none of the underlying attributes to calculate the value are to be changed. 
""" - if not hasattr(self, '_viewport'): # noqa:WPS421 note:d334120e - self._viewport = { - 'northeast': utils.Location( - self._northeast_latitude, self._northeast_longitude, - ), - 'southwest': utils.Location( - self._southwest_latitude, self._southwest_longitude, - ), - } + if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e + self._northeast = utils.Location( + self._northeast_latitude, self._northeast_longitude, + ) - return self._viewport + return self._northeast @property - def as_xy_origin(self) -> utils.Location: - """The southwest corner of the `.viewport`. + def southwest(self) -> utils.Location: + """The city's southwest corner of the Google Maps viewport. - This property serves, for example, as the `other` argument to the - `Location.relate_to()` method when representing an `Address` - in the x-y plane. + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. """ - return self.viewport['southwest'] + if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e + self._southwest = utils.Location( + self._southwest_latitude, self._southwest_longitude, + ) + + return self._southwest + + @property + def total_x(self) -> int: + """The horizontal distance from the city's west to east end in meters. + + The city borders refer to the Google Maps viewport. + """ + return self.northeast.easting - self.southwest.easting + + @property + def total_y(self) -> int: + """The vertical distance from the city's south to north end in meters. + + The city borders refer to the Google Maps viewport. 
+ """ + return self.northeast.northing - self.southwest.northing diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index 26a7cea..3f7039b 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -1,8 +1,11 @@ """Provide the ORM's `Grid` model.""" +from __future__ import annotations + import sqlalchemy as sa from sqlalchemy import orm +from urban_meal_delivery import db from urban_meal_delivery.db import meta @@ -27,6 +30,9 @@ class Grid(meta.Base): sa.ForeignKeyConstraint( ['city_id'], ['cities.id'], onupdate='RESTRICT', ondelete='RESTRICT', ), + # Each `Grid`, characterized by its `.side_length`, + # may only exists once for a given `.city`. + sa.UniqueConstraint('city_id', 'side_length'), # Needed by a `ForeignKeyConstraint` in `address_pixel_association`. sa.UniqueConstraint('id', 'city_id'), ) @@ -46,3 +52,45 @@ class Grid(meta.Base): def pixel_area(self) -> float: """The area of a `Pixel` on the grid in square kilometers.""" return (self.side_length ** 2) / 1_000_000 # noqa:WPS432 + + @classmethod + def gridify(cls, city: db.City, side_length: int) -> db.Grid: + """Create a fully populated `Grid` for a `city`. + + The created `Grid` contains only the `Pixel`s for which + there is at least one `Address` in it. + + Args: + city: city for which the grid is created + side_length: the length of a square `Pixel`'s side + + Returns: + grid: including `grid.pixels` with the associated `city.addresses` + """ + grid = cls(city=city, side_length=side_length) + + # Create `Pixel` objects covering the entire `city`. + # Note: `+1` so that `city.northeast` corner is on the grid. + possible_pixels = [ + db.Pixel(n_x=n_x, n_y=n_y) + for n_x in range((city.total_x // side_length) + 1) + for n_y in range((city.total_y // side_length) + 1) + ] + + # For convenient lookup by `.n_x`-`.n_y` coordinates. 
+ pixel_map = {(pixel.n_x, pixel.n_y): pixel for pixel in possible_pixels} + + for address in city.addresses: + # Determine which `pixel` the `address` belongs to. + n_x = address.x // side_length + n_y = address.y // side_length + pixel = pixel_map[n_x, n_y] + + # Create an association between the `address` and `pixel`. + assoc = db.AddressPixelAssociation(address=address, pixel=pixel) + pixel.addresses.append(assoc) + + # Only keep `pixel`s that contain at least one `Address`. + grid.pixels = [pixel for pixel in pixel_map.values() if pixel.addresses] + + return grid diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index a0110e5..d3ae5af 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -3,7 +3,6 @@ import pytest -from tests.db.utils import test_locations as consts from urban_meal_delivery import db from urban_meal_delivery.db import utils @@ -55,33 +54,44 @@ class TestProperties: assert result1 is result2 - def test_viewport_overall(self, city): - """Test `City.viewport` property.""" - result = city.viewport - - assert isinstance(result, dict) - assert len(result) == 2 - - @pytest.mark.parametrize('corner', ['northeast', 'southwest']) - def test_viewport_corners(self, city, city_data, corner): - """Test `City.viewport` property.""" - result = city.viewport[corner] + def test_northeast(self, city, city_data): + """Test `City.northeast` property.""" + result = city.northeast assert isinstance(result, utils.Location) - assert result.latitude == pytest.approx(city_data[f'_{corner}_latitude']) - assert result.longitude == pytest.approx(city_data[f'_{corner}_longitude']) + assert result.latitude == pytest.approx(city_data['_northeast_latitude']) + assert result.longitude == pytest.approx(city_data['_northeast_longitude']) - def test_viewport_is_cached(self, city): - """Test `City.viewport` property.""" - result1 = city.viewport - result2 = city.viewport + def test_northeast_is_cached(self, city): + """Test `City.northeast` property.""" 
+ result1 = city.northeast + result2 = city.northeast assert result1 is result2 - def test_city_as_xy_origin(self, city): - """Test `City.as_xy_origin` property.""" - result = city.as_xy_origin + def test_southwest(self, city, city_data): + """Test `City.southwest` property.""" + result = city.southwest - assert result.zone == consts.ZONE - assert consts.MIN_EASTING < result.easting < consts.MAX_EASTING - assert consts.MIN_NORTHING < result.northing < consts.MAX_NORTHING + assert isinstance(result, utils.Location) + assert result.latitude == pytest.approx(city_data['_southwest_latitude']) + assert result.longitude == pytest.approx(city_data['_southwest_longitude']) + + def test_southwest_is_cached(self, city): + """Test `City.southwest` property.""" + result1 = city.southwest + result2 = city.southwest + + assert result1 is result2 + + def test_total_x(self, city): + """Test `City.total_x` property.""" + result = city.total_x + + assert result > 18_000 + + def test_total_y(self, city): + """Test `City.total_y` property.""" + result = city.total_y + + assert result > 9_000 diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py index 0333b64..bcde3f7 100644 --- a/tests/db/test_grids.py +++ b/tests/db/test_grids.py @@ -49,6 +49,18 @@ class TestConstraints: ): db_session.execute(stmt) + def test_two_grids_with_identical_side_length(self, db_session, grid): + """Insert a record that violates a unique constraint.""" + db_session.add(grid) + db_session.commit() + + # Create a `Grid` with the same `.side_length` in the same `.city`. 
+ another_grid = db.Grid(city=grid.city, side_length=grid.side_length) + db_session.add(another_grid) + + with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): + db_session.commit() + class TestProperties: """Test properties in `Grid`.""" @@ -58,3 +70,86 @@ class TestProperties: result = grid.pixel_area assert result == 1.0 + + +class TestGridification: + """Test the `Grid.gridify()` constructor.""" + + def test_one_pixel_covering_entire_city_without_addresses(self, city): + """At the very least, there must be one `Pixel` ... + + ... if the `side_length` is greater than both the + horizontal and vertical distances of the viewport. + """ + city.addresses = [] + + # `+1` as otherwise there would be a second pixel in one direction. + side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 0 # noqa:WPS507 + + def test_one_pixel_covering_entire_city_with_one_address(self, city, address): + """At the very least, there must be one `Pixel` ... + + ... if the `side_length` is greater than both the + horizontal and vertical distances of the viewport. + """ + city.addresses = [address] + + # `+1` as otherwise there would be a second pixel in one direction. + side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 1 + + def test_four_pixels_with_two_addresses(self, city, make_address): + """Two `Address` objects in distinct `Pixel` objects.""" + # Create two `Address` objects in distinct `Pixel`s. + city.addresses = [ + # One `Address` in the lower-left `Pixel`, ... + make_address(latitude=48.8357377, longitude=2.2517412), + # ... and another one in the upper-right one. 
+ make_address(latitude=48.8898312, longitude=2.4357622), + ] + + side_length = max(city.total_x // 2, city.total_y // 2) + 1 + + # By assumption of the test data. + n_pixels_x = (city.total_x // side_length) + 1 + n_pixels_y = (city.total_y // side_length) + 1 + assert n_pixels_x * n_pixels_y == 4 + + # Create a `Grid` with at most four `Pixel`s. + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 2 + + @pytest.mark.db + @pytest.mark.no_cover + @pytest.mark.parametrize('side_length', [250, 500, 1_000, 2_000, 4_000, 8_000]) + def test_make_random_grids(self, db_session, city, make_address, side_length): + """With 100 random `Address` objects, a grid must have ... + + ... between 1 and a deterministic number of `Pixel` objects. + + This test creates confidence that the created `Grid` + objects adhere to the database constraints. + """ + city.addresses = [make_address() for _ in range(100)] + + n_pixels_x = (city.total_x // side_length) + 1 + n_pixels_y = (city.total_y // side_length) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert 1 <= len(result.pixels) <= n_pixels_x * n_pixels_y + + db_session.add(result) + db_session.commit() diff --git a/tests/db/test_orders.py b/tests/db/test_orders.py index f23e9bb..9fdde79 100644 --- a/tests/db/test_orders.py +++ b/tests/db/test_orders.py @@ -425,7 +425,7 @@ class TestProperties: @pytest.mark.db @pytest.mark.no_cover -def test_make_random_orders( # noqa:C901,WPS211,WPS210,WPS213,WPS231 +def test_make_random_orders( # noqa:C901,WPS211,WPS213,WPS231 db_session, make_address, make_courier, make_restaurant, make_order, ): """Sanity check the all the `make_*` fixtures. 
diff --git a/tests/db/utils/test_locations.py b/tests/db/utils/test_locations.py index 1ee4ddf..fff43d2 100644 --- a/tests/db/utils/test_locations.py +++ b/tests/db/utils/test_locations.py @@ -35,7 +35,7 @@ def faraway_location(): @pytest.fixture def origin(city): """A `Location` object based off the one and only `city`.""" - obj = city.as_xy_origin + obj = city.southwest assert obj.zone == ZONE # sanity check From 992d2bb7d41bf5c1af307d2146d65dbe6ecee4c7 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 5 Jan 2021 19:08:52 +0100 Subject: [PATCH 26/72] Adjust flake8 ... ... to not complain about implementation details when testing. --- setup.cfg | 2 ++ tests/db/fake_data/factories.py | 2 +- tests/db/test_addresses.py | 4 ++-- tests/db/test_orders.py | 4 ++-- 4 files changed, 7 insertions(+), 5 deletions(-) diff --git a/setup.cfg b/setup.cfg index 6b3f076..f6b291f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -161,6 +161,8 @@ per-file-ignores = WPS430, # Numbers are normal in test cases as expected results. WPS432, + # When testing, it is normal to use implementation details. + WPS437, tests/db/fake_data/__init__.py: # Top-level of a sub-packages is intended to import a lot. 
F401,WPS201, diff --git a/tests/db/fake_data/factories.py b/tests/db/fake_data/factories.py index 0758667..d9d23d4 100644 --- a/tests/db/fake_data/factories.py +++ b/tests/db/fake_data/factories.py @@ -317,7 +317,7 @@ class AdHocOrderFactory(alchemy.SQLAlchemyModelFactory): obj.delivery_at = None obj.delivery_at_corrected = None obj.delivery_not_confirmed = None - obj._courier_waited_at_delivery = None # noqa:WPS437 + obj._courier_waited_at_delivery = None class ScheduledOrderFactory(AdHocOrderFactory): diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index 8f44352..2d45343 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -110,7 +110,7 @@ class TestProperties: def test_is_primary(self, address): """Test `Address.is_primary` property.""" - assert address.id == address._primary_id # noqa:WPS437 + assert address.id == address._primary_id result = address.is_primary @@ -118,7 +118,7 @@ class TestProperties: def test_is_not_primary(self, address): """Test `Address.is_primary` property.""" - address._primary_id = 999 # noqa:WPS437 + address._primary_id = 999 result = address.is_primary diff --git a/tests/db/test_orders.py b/tests/db/test_orders.py index 9fdde79..37eb96a 100644 --- a/tests/db/test_orders.py +++ b/tests/db/test_orders.py @@ -336,7 +336,7 @@ class TestProperties: def test_courier_waited_at_delviery(self, order): """Test `Order.courier_waited_at_delivery` property.""" - order._courier_waited_at_delivery = True # noqa:WPS437 + order._courier_waited_at_delivery = True result = order.courier_waited_at_delivery.total_seconds() @@ -344,7 +344,7 @@ class TestProperties: def test_courier_did_not_wait_at_delivery(self, order): """Test `Order.courier_waited_at_delivery` property.""" - order._courier_waited_at_delivery = False # noqa:WPS437 + order._courier_waited_at_delivery = False result = order.courier_waited_at_delivery.total_seconds() From 078355897a8c903b5aadb51ad2d18ccb56702356 Mon Sep 17 00:00:00 2001 From: 
Alexander Hess Date: Tue, 5 Jan 2021 22:32:24 +0100 Subject: [PATCH 27/72] Fix missing unique constraint drop --- .../versions/rev_20210102_18_888e352d7526_add_pixel_grid.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py index fb1aa16..dc5e8d7 100644 --- a/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py +++ b/migrations/versions/rev_20210102_18_888e352d7526_add_pixel_grid.py @@ -148,6 +148,12 @@ def upgrade(): def downgrade(): """Downgrade to revision f11cd76d2f45.""" op.drop_table('addresses_pixels', schema=config.CLEAN_SCHEMA) + op.drop_constraint( + 'uq_addresses_on_id_city_id', + 'addresses', + type_=None, + schema=config.CLEAN_SCHEMA, + ) op.drop_index( op.f('ix_pixels_on_n_y'), table_name='pixels', schema=config.CLEAN_SCHEMA, ) From daa224d04105daf817cb791783861407b64d8d4e Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 5 Jan 2021 22:37:12 +0100 Subject: [PATCH 28/72] Rename `_*_id` columns into just `*_id` --- src/urban_meal_delivery/db/addresses.py | 6 ++-- .../db/addresses_pixels.py | 8 ++--- src/urban_meal_delivery/db/cities.py | 32 ++++++------------- src/urban_meal_delivery/db/grids.py | 2 +- src/urban_meal_delivery/db/orders.py | 20 ++++-------- src/urban_meal_delivery/db/pixels.py | 2 +- src/urban_meal_delivery/db/restaurants.py | 2 +- tests/db/fake_data/static_fixtures.py | 12 +++---- tests/db/test_cities.py | 12 +++---- 9 files changed, 38 insertions(+), 58 deletions(-) diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index 10c07ea..d97b09c 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -23,7 +23,7 @@ class Address(meta.Base): ) latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) - _city_id = 
sa.Column('city_id', sa.SmallInteger, nullable=False, index=True) + city_id = sa.Column(sa.SmallInteger, nullable=False, index=True) city_name = sa.Column('city', sa.Unicode(length=25), nullable=False) # noqa:WPS432 zip_code = sa.Column(sa.Integer, nullable=False, index=True) street = sa.Column(sa.Unicode(length=80), nullable=False) # noqa:WPS432 @@ -58,12 +58,12 @@ class Address(meta.Base): orders_picked_up = orm.relationship( 'Order', back_populates='pickup_address', - foreign_keys='[Order._pickup_address_id]', + foreign_keys='[Order.pickup_address_id]', ) orders_delivered = orm.relationship( 'Order', back_populates='delivery_address', - foreign_keys='[Order._delivery_address_id]', + foreign_keys='[Order.delivery_address_id]', ) pixels = orm.relationship('AddressPixelAssociation', back_populates='address') diff --git a/src/urban_meal_delivery/db/addresses_pixels.py b/src/urban_meal_delivery/db/addresses_pixels.py index 3ba198f..293bde7 100644 --- a/src/urban_meal_delivery/db/addresses_pixels.py +++ b/src/urban_meal_delivery/db/addresses_pixels.py @@ -19,10 +19,10 @@ class AddressPixelAssociation(meta.Base): __tablename__ = 'addresses_pixels' # Columns - _address_id = sa.Column('address_id', sa.Integer, primary_key=True) - _city_id = sa.Column('city_id', sa.SmallInteger, nullable=False) - _grid_id = sa.Column('grid_id', sa.SmallInteger, nullable=False) - _pixel_id = sa.Column('pixel_id', sa.Integer, primary_key=True) + address_id = sa.Column(sa.Integer, primary_key=True) + city_id = sa.Column(sa.SmallInteger, nullable=False) + grid_id = sa.Column(sa.SmallInteger, nullable=False) + pixel_id = sa.Column(sa.Integer, primary_key=True) # Constraints __table_args__ = ( diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index 20367aa..dea5f5f 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -22,24 +22,12 @@ class City(meta.Base): kml = sa.Column(sa.UnicodeText, nullable=False) # Google 
Maps related columns - _center_latitude = sa.Column( - 'center_latitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _center_longitude = sa.Column( - 'center_longitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _northeast_latitude = sa.Column( - 'northeast_latitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _northeast_longitude = sa.Column( - 'northeast_longitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _southwest_latitude = sa.Column( - 'southwest_latitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) - _southwest_longitude = sa.Column( - 'southwest_longitude', postgresql.DOUBLE_PRECISION, nullable=False, - ) + center_latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + center_longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + northeast_latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + northeast_longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + southwest_latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + southwest_longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) initial_zoom = sa.Column(sa.SmallInteger, nullable=False) # Relationships @@ -62,9 +50,7 @@ class City(meta.Base): underlying attributes to calculate the value are to be changed. 
""" if not hasattr(self, '_center'): # noqa:WPS421 note:d334120e - self._center = utils.Location( - self._center_latitude, self._center_longitude, - ) + self._center = utils.Location(self.center_latitude, self.center_longitude) return self._center @property @@ -76,7 +62,7 @@ class City(meta.Base): """ if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e self._northeast = utils.Location( - self._northeast_latitude, self._northeast_longitude, + self.northeast_latitude, self.northeast_longitude, ) return self._northeast @@ -90,7 +76,7 @@ class City(meta.Base): """ if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e self._southwest = utils.Location( - self._southwest_latitude, self._southwest_longitude, + self.southwest_latitude, self.southwest_longitude, ) return self._southwest diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index 3f7039b..389bd5f 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -22,7 +22,7 @@ class Grid(meta.Base): id = sa.Column( # noqa:WPS125 sa.SmallInteger, primary_key=True, autoincrement=True, ) - _city_id = sa.Column('city_id', sa.SmallInteger, nullable=False) + city_id = sa.Column(sa.SmallInteger, nullable=False) side_length = sa.Column(sa.SmallInteger, nullable=False, unique=True) # Constraints diff --git a/src/urban_meal_delivery/db/orders.py b/src/urban_meal_delivery/db/orders.py index d3adcdf..244e4c1 100644 --- a/src/urban_meal_delivery/db/orders.py +++ b/src/urban_meal_delivery/db/orders.py @@ -17,7 +17,7 @@ class Order(meta.Base): # noqa:WPS214 # Generic columns id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) # noqa:WPS125 _delivery_id = sa.Column('delivery_id', sa.Integer, index=True, unique=True) - _customer_id = sa.Column('customer_id', sa.Integer, nullable=False, index=True) + customer_id = sa.Column(sa.Integer, nullable=False, index=True) placed_at = sa.Column(sa.DateTime, nullable=False, index=True) ad_hoc 
= sa.Column(sa.Boolean, nullable=False) scheduled_delivery_at = sa.Column(sa.DateTime, index=True) @@ -33,9 +33,7 @@ class Order(meta.Base): # noqa:WPS214 total = sa.Column(sa.Integer, nullable=False) # Restaurant-related columns - _restaurant_id = sa.Column( - 'restaurant_id', sa.SmallInteger, nullable=False, index=True, - ) + restaurant_id = sa.Column(sa.SmallInteger, nullable=False, index=True) restaurant_notified_at = sa.Column(sa.DateTime) restaurant_notified_at_corrected = sa.Column(sa.Boolean, index=True) restaurant_confirmed_at = sa.Column(sa.DateTime) @@ -45,7 +43,7 @@ class Order(meta.Base): # noqa:WPS214 estimated_prep_buffer = sa.Column(sa.Integer, nullable=False, index=True) # Dispatch-related columns - _courier_id = sa.Column('courier_id', sa.Integer, index=True) + courier_id = sa.Column(sa.Integer, index=True) dispatch_at = sa.Column(sa.DateTime) dispatch_at_corrected = sa.Column(sa.Boolean, index=True) courier_notified_at = sa.Column(sa.DateTime) @@ -55,9 +53,7 @@ class Order(meta.Base): # noqa:WPS214 utilization = sa.Column(sa.SmallInteger, nullable=False) # Pickup-related columns - _pickup_address_id = sa.Column( - 'pickup_address_id', sa.Integer, nullable=False, index=True, - ) + pickup_address_id = sa.Column(sa.Integer, nullable=False, index=True) reached_pickup_at = sa.Column(sa.DateTime) pickup_at = sa.Column(sa.DateTime) pickup_at_corrected = sa.Column(sa.Boolean, index=True) @@ -66,9 +62,7 @@ class Order(meta.Base): # noqa:WPS214 left_pickup_at_corrected = sa.Column(sa.Boolean, index=True) # Delivery-related columns - _delivery_address_id = sa.Column( - 'delivery_address_id', sa.Integer, nullable=False, index=True, - ) + delivery_address_id = sa.Column(sa.Integer, nullable=False, index=True) reached_delivery_at = sa.Column(sa.DateTime) delivery_at = sa.Column(sa.DateTime) delivery_at_corrected = sa.Column(sa.Boolean, index=True) @@ -313,12 +307,12 @@ class Order(meta.Base): # noqa:WPS214 pickup_address = orm.relationship( 'Address', 
back_populates='orders_picked_up', - foreign_keys='[Order._pickup_address_id]', + foreign_keys='[Order.pickup_address_id]', ) delivery_address = orm.relationship( 'Address', back_populates='orders_delivered', - foreign_keys='[Order._delivery_address_id]', + foreign_keys='[Order.delivery_address_id]', ) # Convenience properties diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index 6d28227..5b3f4f3 100644 --- a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -19,7 +19,7 @@ class Pixel(meta.Base): # Columns id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) # noqa:WPS125 - _grid_id = sa.Column('grid_id', sa.SmallInteger, nullable=False, index=True) + grid_id = sa.Column(sa.SmallInteger, nullable=False, index=True) n_x = sa.Column(sa.SmallInteger, nullable=False, index=True) n_y = sa.Column(sa.SmallInteger, nullable=False, index=True) diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index 1319b56..d427540 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -22,7 +22,7 @@ class Restaurant(meta.Base): ) created_at = sa.Column(sa.DateTime, nullable=False) name = sa.Column(sa.Unicode(length=45), nullable=False) # noqa:WPS432 - _address_id = sa.Column('address_id', sa.Integer, nullable=False, index=True) + address_id = sa.Column(sa.Integer, nullable=False, index=True) estimated_prep_duration = sa.Column(sa.SmallInteger, nullable=False) # Constraints diff --git a/tests/db/fake_data/static_fixtures.py b/tests/db/fake_data/static_fixtures.py index ee6682d..6a386de 100644 --- a/tests/db/fake_data/static_fixtures.py +++ b/tests/db/fake_data/static_fixtures.py @@ -12,12 +12,12 @@ def city_data(): 'id': 1, 'name': 'Paris', 'kml': " ...", - '_center_latitude': 48.856614, - '_center_longitude': 2.3522219, - '_northeast_latitude': 48.9021449, - '_northeast_longitude': 2.4699208, - 
'_southwest_latitude': 48.815573, - '_southwest_longitude': 2.225193, + 'center_latitude': 48.856614, + 'center_longitude': 2.3522219, + 'northeast_latitude': 48.9021449, + 'northeast_longitude': 2.4699208, + 'southwest_latitude': 48.815573, + 'southwest_longitude': 2.225193, 'initial_zoom': 12, } diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index d3ae5af..73a0cdb 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -44,8 +44,8 @@ class TestProperties: result = city.center assert isinstance(result, utils.Location) - assert result.latitude == pytest.approx(city_data['_center_latitude']) - assert result.longitude == pytest.approx(city_data['_center_longitude']) + assert result.latitude == pytest.approx(city_data['center_latitude']) + assert result.longitude == pytest.approx(city_data['center_longitude']) def test_center_is_cached(self, city): """Test `City.center` property.""" @@ -59,8 +59,8 @@ class TestProperties: result = city.northeast assert isinstance(result, utils.Location) - assert result.latitude == pytest.approx(city_data['_northeast_latitude']) - assert result.longitude == pytest.approx(city_data['_northeast_longitude']) + assert result.latitude == pytest.approx(city_data['northeast_latitude']) + assert result.longitude == pytest.approx(city_data['northeast_longitude']) def test_northeast_is_cached(self, city): """Test `City.northeast` property.""" @@ -74,8 +74,8 @@ class TestProperties: result = city.southwest assert isinstance(result, utils.Location) - assert result.latitude == pytest.approx(city_data['_southwest_latitude']) - assert result.longitude == pytest.approx(city_data['_southwest_longitude']) + assert result.latitude == pytest.approx(city_data['southwest_latitude']) + assert result.longitude == pytest.approx(city_data['southwest_longitude']) def test_southwest_is_cached(self, city): """Test `City.southwest` property.""" From 54ff37757929a148ae9ca8720e94e902da43a55f Mon Sep 17 00:00:00 2001 From: Alexander Hess 
Date: Wed, 6 Jan 2021 16:17:05 +0100 Subject: [PATCH 29/72] Add CLI script to gridify all cities - reorganize `urban_meal_delivery.console` into a sub-package - move `tests.db.conftest` fixtures into `tests.conftest` => some integration tests regarding CLI scripts need a database - add `urban_meal_delivery.console.decorators.db_revision` decorator to ensure the database is at a certain state before a CLI script runs - refactor the `urban_meal_delivery.db.grids.Grid.gridify()` constructor: - bug fix: even empty `Pixel`s end up in the database temporarily => create `Pixel` objects only if an `Address` is to be assigned to it - streamline code and docstring - add further test cases --- pyproject.toml | 2 +- src/urban_meal_delivery/configuration.py | 2 + src/urban_meal_delivery/console/__init__.py | 9 ++ src/urban_meal_delivery/console/decorators.py | 37 +++++++ src/urban_meal_delivery/console/gridify.py | 48 ++++++++ .../{console.py => console/main.py} | 10 +- src/urban_meal_delivery/db/connection.py | 8 +- src/urban_meal_delivery/db/grids.py | 40 +++---- tests/conftest.py | 104 +++++++++++++++++- tests/console/__init__.py | 5 + tests/console/conftest.py | 10 ++ tests/console/test_gridify.py | 41 +++++++ .../{test_console.py => console/test_main.py} | 45 ++++---- tests/db/conftest.py | 101 ----------------- tests/db/test_grids.py | 70 +++++++++++- 15 files changed, 372 insertions(+), 160 deletions(-) create mode 100644 src/urban_meal_delivery/console/__init__.py create mode 100644 src/urban_meal_delivery/console/decorators.py create mode 100644 src/urban_meal_delivery/console/gridify.py rename src/urban_meal_delivery/{console.py => console/main.py} (80%) create mode 100644 tests/console/__init__.py create mode 100644 tests/console/conftest.py create mode 100644 tests/console/test_gridify.py rename tests/{test_console.py => console/test_main.py} (63%) delete mode 100644 tests/db/conftest.py diff --git a/pyproject.toml b/pyproject.toml index f17ce00..dbddd95 100644 --- 
a/pyproject.toml +++ b/pyproject.toml @@ -88,4 +88,4 @@ sphinx = "^3.1.2" sphinx-autodoc-typehints = "^1.11.0" [tool.poetry.scripts] -umd = "urban_meal_delivery.console:main" +umd = "urban_meal_delivery.console:cli" diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 0354da6..e4cca50 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -32,6 +32,8 @@ class Config: # time horizon, we treat it as an ad-hoc order. QUASI_AD_HOC_LIMIT = datetime.timedelta(minutes=45) + GRID_SIDE_LENGTHS = [707, 1000, 1414] + DATABASE_URI = os.getenv('DATABASE_URI') # The PostgreSQL schema that holds the tables with the original data. diff --git a/src/urban_meal_delivery/console/__init__.py b/src/urban_meal_delivery/console/__init__.py new file mode 100644 index 0000000..60ac801 --- /dev/null +++ b/src/urban_meal_delivery/console/__init__.py @@ -0,0 +1,9 @@ +"""Provide CLI scripts for the project.""" + +from urban_meal_delivery.console import gridify +from urban_meal_delivery.console import main + + +cli = main.entry_point + +cli.add_command(gridify.gridify) diff --git a/src/urban_meal_delivery/console/decorators.py b/src/urban_meal_delivery/console/decorators.py new file mode 100644 index 0000000..ef416dd --- /dev/null +++ b/src/urban_meal_delivery/console/decorators.py @@ -0,0 +1,37 @@ +"""Utils for the CLI scripts.""" + +import functools +import os +import subprocess # noqa:S404 +import sys +from typing import Any, Callable + +import click + + +def db_revision(rev: str) -> Callable: # pragma: no cover -> easy to check visually + """A decorator ensuring the database is at a given revision.""" + + def decorator(func: Callable) -> Callable: + @functools.wraps(func) + def ensure(*args: Any, **kwargs: Any) -> Any: # noqa:WPS430 + """Do not execute the `func` if the revision does not match.""" + if not os.getenv('TESTING'): + result = subprocess.run( # noqa:S603,S607 + ['alembic', 
'current'], + capture_output=True, + check=False, + encoding='utf8', + ) + + if not result.stdout.startswith(rev): + click.echo( + click.style(f'Database is not at revision {rev}', fg='red'), + ) + sys.exit(1) + + return func(*args, **kwargs) + + return ensure + + return decorator diff --git a/src/urban_meal_delivery/console/gridify.py b/src/urban_meal_delivery/console/gridify.py new file mode 100644 index 0000000..44f2fc3 --- /dev/null +++ b/src/urban_meal_delivery/console/gridify.py @@ -0,0 +1,48 @@ +"""CLI script to create pixel grids.""" + +import click + +from urban_meal_delivery import config +from urban_meal_delivery import db +from urban_meal_delivery.console import decorators + + +@click.command() +@decorators.db_revision('888e352d7526') +def gridify() -> None: # pragma: no cover note:b1f68d24 + """Create grids for all cities. + + This command creates grids with pixels of various + side lengths (specified in `urban_meal_delivery.config`). + + Pixels are only generated if they contain at least one + (pickup or delivery) address. + + All data are persisted to the database. + """ + cities = db.session.query(db.City).all() + click.echo(f'{len(cities)} cities retrieved from the database') + + for city in cities: + click.echo(f'\nCreating grids for {city.name}') + + for side_length in config.GRID_SIDE_LENGTHS: + click.echo(f'Creating grid with a side length of {side_length} meters') + + grid = db.Grid.gridify(city=city, side_length=side_length) + db.session.add(grid) + + click.echo(f' -> created {len(grid.pixels)} pixels') + + # The number of assigned addresses is the same across different `side_length`s. 
+ db.session.flush() # necessary for the query to work + n_assigned = ( + db.session.query(db.AddressPixelAssociation) + .filter(db.AddressPixelAssociation.grid_id == grid.id) + .count() + ) + click.echo( + f'=> assigned {n_assigned} out of {len(city.addresses)} addresses in {city.name}', # noqa:E501 + ) + + db.session.commit() diff --git a/src/urban_meal_delivery/console.py b/src/urban_meal_delivery/console/main.py similarity index 80% rename from src/urban_meal_delivery/console.py rename to src/urban_meal_delivery/console/main.py index 0141370..8acb4c3 100644 --- a/src/urban_meal_delivery/console.py +++ b/src/urban_meal_delivery/console/main.py @@ -1,14 +1,14 @@ -"""Provide CLI scripts for the project.""" +"""The entry point for all CLI scripts in the project.""" from typing import Any import click -from click.core import Context +from click import core as cli_core import urban_meal_delivery -def show_version(ctx: Context, _param: Any, value: bool) -> None: +def show_version(ctx: cli_core.Context, _param: Any, value: bool) -> None: """Show the package's version.""" # If --version / -V is NOT passed in, # continue with the command. @@ -24,7 +24,7 @@ def show_version(ctx: Context, _param: Any, value: bool) -> None: ctx.exit() -@click.command() +@click.group() @click.option( '--version', '-V', @@ -33,5 +33,5 @@ def show_version(ctx: Context, _param: Any, value: bool) -> None: is_eager=True, expose_value=False, ) -def main() -> None: +def entry_point() -> None: """The urban-meal-delivery research project.""" diff --git a/src/urban_meal_delivery/db/connection.py b/src/urban_meal_delivery/db/connection.py index de32ab9..9c50709 100644 --- a/src/urban_meal_delivery/db/connection.py +++ b/src/urban_meal_delivery/db/connection.py @@ -10,15 +10,17 @@ That is the case on the CI server. 
import os import sqlalchemy as sa +from sqlalchemy import engine as engine_mod from sqlalchemy import orm import urban_meal_delivery if os.getenv('TESTING'): - engine = None - connection = None - session = None + # Specify the types explicitly to make mypy happy. + engine: engine_mod.Engine = None + connection: engine_mod.Connection = None + session: orm.Session = None else: # pragma: no cover engine = sa.create_engine(urban_meal_delivery.config.DATABASE_URI) diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index 389bd5f..5593892 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -57,8 +57,8 @@ class Grid(meta.Base): def gridify(cls, city: db.City, side_length: int) -> db.Grid: """Create a fully populated `Grid` for a `city`. - The created `Grid` contains only the `Pixel`s for which - there is at least one `Address` in it. + The `Grid` contains only `Pixel`s that have at least one `Address`. + `Address` objects outside the `city`'s viewport are discarded. Args: city: city for which the grid is created @@ -69,28 +69,30 @@ class Grid(meta.Base): """ grid = cls(city=city, side_length=side_length) - # Create `Pixel` objects covering the entire `city`. - # Note: `+1` so that `city.northeast` corner is on the grid. - possible_pixels = [ - db.Pixel(n_x=n_x, n_y=n_y) - for n_x in range((city.total_x // side_length) + 1) - for n_y in range((city.total_y // side_length) + 1) - ] - - # For convenient lookup by `.n_x`-`.n_y` coordinates. - pixel_map = {(pixel.n_x, pixel.n_y): pixel for pixel in possible_pixels} + # `Pixel`s grouped by `.n_x`-`.n_y` coordinates. + pixels = {} for address in city.addresses: - # Determine which `pixel` the `address` belongs to. - n_x = address.x // side_length - n_y = address.y // side_length - pixel = pixel_map[n_x, n_y] + # Check if an `address` is not within the `city`'s viewport, ... 
+ not_within_city_viewport = ( + address.x < 0 + or address.x > city.total_x + or address.y < 0 + or address.y > city.total_y + ) + # ... and, if so, the `address` does not belong to any `Pixel`. + if not_within_city_viewport: + continue + + # Determine which `pixel` the `address` belongs to ... + n_x, n_y = address.x // side_length, address.y // side_length + # ... and create a new `Pixel` object if necessary. + if (n_x, n_y) not in pixels: + pixels[(n_x, n_y)] = db.Pixel(grid=grid, n_x=n_x, n_y=n_y) + pixel = pixels[(n_x, n_y)] # Create an association between the `address` and `pixel`. assoc = db.AddressPixelAssociation(address=address, pixel=pixel) pixel.addresses.append(assoc) - # Only keep `pixel`s that contain at least one `Address`. - grid.pixels = [pixel for pixel in pixel_map.values() if pixel.addresses] - return grid diff --git a/tests/conftest.py b/tests/conftest.py index b58c430..b7bafd5 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,8 +1,20 @@ -"""Utils for testing the entire package.""" +"""Fixtures for testing the entire package. + +The ORM related fixtures are placed here too as some integration tests +in the CLI layer need access to the database. +""" import os +import pytest +import sqlalchemy as sa +from alembic import command as migrations_cmd +from alembic import config as migrations_config +from sqlalchemy import orm + +from tests.db import fake_data from urban_meal_delivery import config +from urban_meal_delivery import db # The TESTING environment variable is set @@ -12,3 +24,93 @@ if not os.getenv('TESTING'): if not config.TESTING: raise RuntimeError('The testing configuration was not loaded') + + +@pytest.fixture(scope='session', params=['all_at_once', 'sequentially']) +def db_connection(request): + """Create all tables given the ORM models. + + The tables are put into a distinct PostgreSQL schema + that is removed after all tests are over. + + The database connection used to do that is yielded. 
+ + There are two modes for this fixture: + + - "all_at_once": build up the tables all at once with MetaData.create_all() + - "sequentially": build up the tables sequentially with `alembic upgrade head` + + This ensures that Alembic's migration files are consistent. + """ + # We need a fresh database connection for each of the two `params`. + # Otherwise, the first test of the parameter run second will fail. + engine = sa.create_engine(config.DATABASE_URI) + connection = engine.connect() + + # Monkey patch the package's global `engine` and `connection` objects, + # just in case if it is used somewhere in the code base. + db.engine = engine + db.connection = connection + + if request.param == 'all_at_once': + connection.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};') + db.Base.metadata.create_all(connection) + else: + cfg = migrations_config.Config('alembic.ini') + migrations_cmd.upgrade(cfg, 'head') + + try: + yield connection + + finally: + connection.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;') + + if request.param == 'sequentially': + tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}' + connection.execute( + f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};', + ) + + connection.close() + + +@pytest.fixture +def db_session(db_connection): + """A SQLAlchemy session that rolls back everything after a test case.""" + # Begin the outermost transaction + # that is rolled back at the end of each test case. + transaction = db_connection.begin() + + # Create a session bound to the same connection as the `transaction`. + # Using any other session would not result in the roll back. + session = orm.sessionmaker()(bind=db_connection) + + # Monkey patch the package's global `session` object, + # which is used heavily in the code base. + db.session = session + + try: + yield session + + finally: + session.close() + transaction.rollback() + + +# Import the fixtures from the `fake_data` sub-package. 
+ +make_address = fake_data.make_address +make_courier = fake_data.make_courier +make_customer = fake_data.make_customer +make_order = fake_data.make_order +make_restaurant = fake_data.make_restaurant + +address = fake_data.address +city = fake_data.city +city_data = fake_data.city_data +courier = fake_data.courier +customer = fake_data.customer +order = fake_data.order +restaurant = fake_data.restaurant +grid = fake_data.grid +pixel = fake_data.pixel diff --git a/tests/console/__init__.py b/tests/console/__init__.py new file mode 100644 index 0000000..49b8d86 --- /dev/null +++ b/tests/console/__init__.py @@ -0,0 +1,5 @@ +"""Test the CLI scripts in the urban-meal-delivery package. + +Some tests require a database. Therefore, the corresponding code is excluded +from coverage reporting with "pragma: no cover" (grep:b1f68d24). +""" diff --git a/tests/console/conftest.py b/tests/console/conftest.py new file mode 100644 index 0000000..d6c2e59 --- /dev/null +++ b/tests/console/conftest.py @@ -0,0 +1,10 @@ +"""Fixture for testing the CLI scripts.""" + +import pytest +from click import testing as click_testing + + +@pytest.fixture +def cli() -> click_testing.CliRunner: + """Initialize Click's CLI Test Runner.""" + return click_testing.CliRunner() diff --git a/tests/console/test_gridify.py b/tests/console/test_gridify.py new file mode 100644 index 0000000..2911a0e --- /dev/null +++ b/tests/console/test_gridify.py @@ -0,0 +1,41 @@ +"""Integration test for the `urban_meal_delivery.console.gridify` module.""" + +import pytest + +import urban_meal_delivery +from urban_meal_delivery import db +from urban_meal_delivery.console import gridify + + +@pytest.mark.db +def test_four_pixels_with_two_addresses( + cli, db_session, monkeypatch, city, make_address, +): + """Two `Address` objects in distinct `Pixel` objects. + + This is roughly the same test case as + `tests.db.test_grids.test_four_pixels_with_two_addresses`. + The difference is that the result is written to the database. 
+ """ + # Create two `Address` objects in distinct `Pixel`s. + city.addresses = [ + # One `Address` in the lower-left `Pixel`, ... + make_address(latitude=48.8357377, longitude=2.2517412), + # ... and another one in the upper-right one. + make_address(latitude=48.8898312, longitude=2.4357622), + ] + + db_session.add(city) + db_session.commit() + + side_length = max(city.total_x // 2, city.total_y // 2) + 1 + + # Hack the configuration regarding the grids to be created. + monkeypatch.setattr(urban_meal_delivery.config, 'GRID_SIDE_LENGTHS', [side_length]) + + result = cli.invoke(gridify.gridify) + + assert result.exit_code == 0 + + assert db_session.query(db.Grid).count() == 1 + assert db_session.query(db.Pixel).count() == 2 diff --git a/tests/test_console.py b/tests/console/test_main.py similarity index 63% rename from tests/test_console.py rename to tests/console/test_main.py index 00c721f..5a35dab 100644 --- a/tests/test_console.py +++ b/tests/console/test_main.py @@ -1,18 +1,17 @@ -"""Test the package's `umd` command-line client.""" +"""Test the package's top-level `umd` CLI command.""" import click import pytest -from click import testing as click_testing -from urban_meal_delivery import console +from urban_meal_delivery.console import main class TestShowVersion: - """Test console.show_version(). + """Test `console.main.show_version()`. The function is used as a callback to a click command option. - show_version() prints the name and version of the installed package to + `show_version()` prints the name and version of the installed package to stdout. The output looks like this: "{pkg_name}, version {version}". 
Development (= non-final) versions are indicated by appending a @@ -23,12 +22,12 @@ class TestShowVersion: @pytest.fixture def ctx(self) -> click.Context: - """Context around the console.main Command.""" - return click.Context(console.main) + """Context around the `main.entry_point` Command.""" + return click.Context(main.entry_point) def test_no_version(self, capsys, ctx): - """The the early exit branch without any output.""" - console.show_version(ctx, _param='discarded', value=False) + """Test the early exit branch without any output.""" + main.show_version(ctx, _param='discarded', value=False) captured = capsys.readouterr() @@ -37,10 +36,10 @@ class TestShowVersion: def test_final_version(self, capsys, ctx, monkeypatch): """For final versions, NO "development" warning is emitted.""" version = '1.2.3' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) with pytest.raises(click.exceptions.Exit): - console.show_version(ctx, _param='discarded', value=True) + main.show_version(ctx, _param='discarded', value=True) captured = capsys.readouterr() @@ -49,18 +48,18 @@ class TestShowVersion: def test_develop_version(self, capsys, ctx, monkeypatch): """For develop versions, a warning thereof is emitted.""" version = '1.2.3.dev0' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) with pytest.raises(click.exceptions.Exit): - console.show_version(ctx, _param='discarded', value=True) + main.show_version(ctx, _param='discarded', value=True) captured = capsys.readouterr() assert captured.out.strip().endswith(f', version {version} (development)') -class TestCLI: - """Test the `umd` CLI utility. +class TestCLIWithoutCommand: + """Test the `umd` CLI utility, invoked without any specific command. The test cases are integration tests. Therefore, they are not considered for coverage reporting. 
@@ -68,18 +67,12 @@ class TestCLI: # pylint:disable=no-self-use - @pytest.fixture - def cli(self) -> click_testing.CliRunner: - """Initialize Click's CLI Test Runner.""" - return click_testing.CliRunner() - @pytest.mark.no_cover def test_no_options(self, cli): """Exit with 0 status code and no output if run without options.""" - result = cli.invoke(console.main) + result = cli.invoke(main.entry_point) assert result.exit_code == 0 - assert result.output == '' # The following test cases validate the --version / -V option. @@ -90,9 +83,9 @@ class TestCLI: def test_final_version(self, cli, monkeypatch, option): """For final versions, NO "development" warning is emitted.""" version = '1.2.3' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) - result = cli.invoke(console.main, option) + result = cli.invoke(main.entry_point, option) assert result.exit_code == 0 assert result.output.strip().endswith(f', version {version}') @@ -102,9 +95,9 @@ class TestCLI: def test_develop_version(self, cli, monkeypatch, option): """For develop versions, a warning thereof is emitted.""" version = '1.2.3.dev0' - monkeypatch.setattr(console.urban_meal_delivery, '__version__', version) + monkeypatch.setattr(main.urban_meal_delivery, '__version__', version) - result = cli.invoke(console.main, option) + result = cli.invoke(main.entry_point, option) assert result.exit_code == 0 assert result.output.strip().endswith(f', version {version} (development)') diff --git a/tests/db/conftest.py b/tests/db/conftest.py deleted file mode 100644 index 3d8c676..0000000 --- a/tests/db/conftest.py +++ /dev/null @@ -1,101 +0,0 @@ -"""Utils for testing the ORM layer.""" - -import pytest -import sqlalchemy as sa -from alembic import command as migrations_cmd -from alembic import config as migrations_config -from sqlalchemy import orm - -from tests.db import fake_data -from urban_meal_delivery import config -from 
urban_meal_delivery import db - - -@pytest.fixture(scope='session', params=['all_at_once', 'sequentially']) -def db_connection(request): - """Create all tables given the ORM models. - - The tables are put into a distinct PostgreSQL schema - that is removed after all tests are over. - - The database connection used to do that is yielded. - - There are two modes for this fixture: - - - "all_at_once": build up the tables all at once with MetaData.create_all() - - "sequentially": build up the tables sequentially with `alembic upgrade head` - - This ensures that Alembic's migration files are consistent. - """ - # We need a fresh database connection for each of the two `params`. - # Otherwise, the first test of the parameter run second will fail. - engine = sa.create_engine(config.DATABASE_URI) - connection = engine.connect() - - # Monkey patch the package's global `engine` and `connection` objects, - # just in case if it is used somewhere in the code base. - db.engine = engine - db.connection = connection - - if request.param == 'all_at_once': - connection.execute(f'CREATE SCHEMA {config.CLEAN_SCHEMA};') - db.Base.metadata.create_all(connection) - else: - cfg = migrations_config.Config('alembic.ini') - migrations_cmd.upgrade(cfg, 'head') - - try: - yield connection - - finally: - connection.execute(f'DROP SCHEMA {config.CLEAN_SCHEMA} CASCADE;') - - if request.param == 'sequentially': - tmp_alembic_version = f'{config.ALEMBIC_TABLE}_{config.CLEAN_SCHEMA}' - connection.execute( - f'DROP TABLE {config.ALEMBIC_TABLE_SCHEMA}.{tmp_alembic_version};', - ) - - connection.close() - - -@pytest.fixture -def db_session(db_connection): - """A SQLAlchemy session that rolls back everything after a test case.""" - # Begin the outermost transaction - # that is rolled back at the end of each test case. - transaction = db_connection.begin() - - # Create a session bound to the same connection as the `transaction`. - # Using any other session would not result in the roll back. 
- session = orm.sessionmaker()(bind=db_connection) - - # Monkey patch the package's global `session` object, - # which is used heavily in the code base. - db.session = session - - try: - yield session - - finally: - session.close() - transaction.rollback() - - -# Import the fixtures from the `fake_data` sub-package. - -make_address = fake_data.make_address -make_courier = fake_data.make_courier -make_customer = fake_data.make_customer -make_order = fake_data.make_order -make_restaurant = fake_data.make_restaurant - -address = fake_data.address -city = fake_data.city -city_data = fake_data.city_data -courier = fake_data.courier -customer = fake_data.customer -order = fake_data.order -restaurant = fake_data.restaurant -grid = fake_data.grid -pixel = fake_data.pixel diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py index bcde3f7..8bdb0c5 100644 --- a/tests/db/test_grids.py +++ b/tests/db/test_grids.py @@ -75,11 +75,15 @@ class TestProperties: class TestGridification: """Test the `Grid.gridify()` constructor.""" - def test_one_pixel_covering_entire_city_without_addresses(self, city): + @pytest.mark.no_cover + def test_one_pixel_without_addresses(self, city): """At the very least, there must be one `Pixel` ... ... if the `side_length` is greater than both the horizontal and vertical distances of the viewport. + + This test case skips the `for`-loop inside `Grid.gridify()`. + Interestingly, coverage.py does not see this. """ city.addresses = [] @@ -91,7 +95,7 @@ class TestGridification: assert isinstance(result, db.Grid) assert len(result.pixels) == 0 # noqa:WPS507 - def test_one_pixel_covering_entire_city_with_one_address(self, city, address): + def test_one_pixel_with_one_address(self, city, address): """At the very least, there must be one `Pixel` ... ... 
if the `side_length` is greater than both the @@ -107,8 +111,66 @@ class TestGridification: assert isinstance(result, db.Grid) assert len(result.pixels) == 1 + def test_one_pixel_with_two_addresses(self, city, make_address): + """At the very least, there must be one `Pixel` ... + + ... if the `side_length` is greater than both the + horizontal and vertical distances of the viewport. + + This test case is necessary as `test_one_pixel_with_one_address` + does not have to re-use an already created `Pixel` object internally. + """ + city.addresses = [make_address(), make_address()] + + # `+1` as otherwise there would be a second pixel in one direction. + side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 1 + + def test_one_pixel_with_address_too_far_south(self, city, address): + """An `address` outside the `city`'s viewport is discarded.""" + # Move the `address` just below `city.southwest`. + address.latitude = city.southwest.latitude - 0.1 + + city.addresses = [address] + + # `+1` as otherwise there would be a second pixel in one direction. + side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 0 # noqa:WPS507 + + @pytest.mark.no_cover + def test_one_pixel_with_address_too_far_west(self, city, address): + """An `address` outside the `city`'s viewport is discarded. + + This test is a logical sibling to `test_one_pixel_with_address_too_far_south` + and therefore redundant. + """ + # Move the `address` just left to `city.southwest`. + address.longitude = city.southwest.longitude - 0.1 + + city.addresses = [address] + + # `+1` as otherwise there would be a second pixel in one direction. 
+ side_length = max(city.total_x, city.total_y) + 1 + + result = db.Grid.gridify(city=city, side_length=side_length) + + assert isinstance(result, db.Grid) + assert len(result.pixels) == 0 # noqa:WPS507 + + @pytest.mark.no_cover def test_four_pixels_with_two_addresses(self, city, make_address): - """Two `Address` objects in distinct `Pixel` objects.""" + """Two `Address` objects in distinct `Pixel` objects. + + This test is more of a sanity check. + """ # Create two `Address` objects in distinct `Pixel`s. city.addresses = [ # One `Address` in the lower-left `Pixel`, ... @@ -136,7 +198,7 @@ class TestGridification: def test_make_random_grids(self, db_session, city, make_address, side_length): """With 100 random `Address` objects, a grid must have ... - ... between 1 and a deterministic number of `Pixel` objects. + ... between 1 and a deterministic upper bound of `Pixel` objects. This test creates confidence that the created `Grid` objects adhere to the database constraints. From e8c97dd7da1ccbe6857dde48423469ca0ffa073b Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 7 Jan 2021 12:45:32 +0100 Subject: [PATCH 30/72] Add `Forecast` model to ORM layer - the model handles the caching of demand forecasting results - include the database migration script --- ..._19_e40623e10405_add_demand_forecasting.py | 96 ++++++++++++ src/urban_meal_delivery/db/__init__.py | 1 + src/urban_meal_delivery/db/forecasts.py | 66 ++++++++ src/urban_meal_delivery/db/pixels.py | 1 + tests/db/test_forecasts.py | 147 ++++++++++++++++++ 5 files changed, 311 insertions(+) create mode 100644 migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py create mode 100644 src/urban_meal_delivery/db/forecasts.py create mode 100644 tests/db/test_forecasts.py diff --git a/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py b/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py new file mode 100644 index 0000000..1579190 --- /dev/null +++ 
b/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py @@ -0,0 +1,96 @@ +"""Add demand forecasting. + +Revision: #e40623e10405 at 2021-01-06 19:55:56 +Revises: #888e352d7526 +""" + +import os + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +from urban_meal_delivery import configuration + + +revision = 'e40623e10405' +down_revision = '888e352d7526' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision e40623e10405.""" + op.create_table( + 'forecasts', + sa.Column('id', sa.Integer(), autoincrement=True, nullable=False), + sa.Column('pixel_id', sa.Integer(), nullable=False), + sa.Column('start_at', sa.DateTime(), nullable=False), + sa.Column('time_step', sa.SmallInteger(), nullable=False), + sa.Column('training_horizon', sa.SmallInteger(), nullable=False), + sa.Column('method', sa.Unicode(length=20), nullable=False), # noqa:WPS432 + sa.Column('prediction', postgresql.DOUBLE_PRECISION(), nullable=False), + sa.PrimaryKeyConstraint('id', name=op.f('pk_forecasts')), + sa.ForeignKeyConstraint( + ['pixel_id'], + [f'{config.CLEAN_SCHEMA}.pixels.id'], + name=op.f('fk_forecasts_to_pixels_via_pixel_id'), + onupdate='RESTRICT', + ondelete='RESTRICT', + ), + sa.CheckConstraint( + """ + NOT ( + EXTRACT(HOUR FROM start_at) < 11 + OR + EXTRACT(HOUR FROM start_at) > 22 + ) + """, + name=op.f('ck_forecasts_on_start_at_must_be_within_operating_hours'), + ), + sa.CheckConstraint( + 'CAST(EXTRACT(MINUTES FROM start_at) AS INTEGER) % 15 = 0', + name=op.f('ck_forecasts_on_start_at_minutes_must_be_quarters_of_the_hour'), + ), + sa.CheckConstraint( + 'CAST(EXTRACT(MICROSECONDS FROM start_at) AS INTEGER) % 1000000 = 0', + name=op.f('ck_forecasts_on_start_at_allows_no_microseconds'), + ), + sa.CheckConstraint( + 'EXTRACT(SECONDS FROM start_at) = 0', + 
name=op.f('ck_forecasts_on_start_at_allows_no_seconds'), + ), + sa.CheckConstraint( + 'time_step > 0', name=op.f('ck_forecasts_on_time_step_must_be_positive'), + ), + sa.CheckConstraint( + 'training_horizon > 0', + name=op.f('ck_forecasts_on_training_horizon_must_be_positive'), + ), + sa.UniqueConstraint( + 'pixel_id', + 'start_at', + 'time_step', + 'training_horizon', + 'method', + name=op.f( + 'uq_forecasts_on_pixel_id_start_at_time_step_training_horizon_method', + ), + ), + schema=config.CLEAN_SCHEMA, + ) + op.create_index( + op.f('ix_forecasts_on_pixel_id'), + 'forecasts', + ['pixel_id'], + unique=False, + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision 888e352d7526.""" + op.drop_table('forecasts', schema=config.CLEAN_SCHEMA) diff --git a/src/urban_meal_delivery/db/__init__.py b/src/urban_meal_delivery/db/__init__.py index aae8516..ecd9fa1 100644 --- a/src/urban_meal_delivery/db/__init__.py +++ b/src/urban_meal_delivery/db/__init__.py @@ -8,6 +8,7 @@ from urban_meal_delivery.db.connection import engine from urban_meal_delivery.db.connection import session from urban_meal_delivery.db.couriers import Courier from urban_meal_delivery.db.customers import Customer +from urban_meal_delivery.db.forecasts import Forecast from urban_meal_delivery.db.grids import Grid from urban_meal_delivery.db.meta import Base from urban_meal_delivery.db.orders import Order diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py new file mode 100644 index 0000000..0052ee8 --- /dev/null +++ b/src/urban_meal_delivery/db/forecasts.py @@ -0,0 +1,66 @@ +"""Provide the ORM's `Forecast` model.""" + +import sqlalchemy as sa +from sqlalchemy import orm +from sqlalchemy.dialects import postgresql + +from urban_meal_delivery.db import meta + + +class Forecast(meta.Base): + """A demand forecast for a `.pixel` and `.time_step` pair. + + This table is denormalized on purpose to keep things simple. 
+ """ + + __tablename__ = 'forecasts' + + # Columns + id = sa.Column(sa.Integer, primary_key=True, autoincrement=True) # noqa:WPS125 + pixel_id = sa.Column(sa.Integer, nullable=False, index=True) + start_at = sa.Column(sa.DateTime, nullable=False) + time_step = sa.Column(sa.SmallInteger, nullable=False) + training_horizon = sa.Column(sa.SmallInteger, nullable=False) + method = sa.Column(sa.Unicode(length=20), nullable=False) # noqa:WPS432 + # Raw `.prediction`s are stored as `float`s (possibly negative). + # The rounding is then done on the fly if required. + prediction = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + + # Constraints + __table_args__ = ( + sa.ForeignKeyConstraint( + ['pixel_id'], ['pixels.id'], onupdate='RESTRICT', ondelete='RESTRICT', + ), + sa.CheckConstraint( + """ + NOT ( + EXTRACT(HOUR FROM start_at) < 11 + OR + EXTRACT(HOUR FROM start_at) > 22 + ) + """, + name='start_at_must_be_within_operating_hours', + ), + sa.CheckConstraint( + 'CAST(EXTRACT(MINUTES FROM start_at) AS INTEGER) % 15 = 0', + name='start_at_minutes_must_be_quarters_of_the_hour', + ), + sa.CheckConstraint( + 'EXTRACT(SECONDS FROM start_at) = 0', name='start_at_allows_no_seconds', + ), + sa.CheckConstraint( + 'CAST(EXTRACT(MICROSECONDS FROM start_at) AS INTEGER) % 1000000 = 0', + name='start_at_allows_no_microseconds', + ), + sa.CheckConstraint('time_step > 0', name='time_step_must_be_positive'), + sa.CheckConstraint( + 'training_horizon > 0', name='training_horizon_must_be_positive', + ), + # There can be only one prediction per forecasting setting. 
+ sa.UniqueConstraint( + 'pixel_id', 'start_at', 'time_step', 'training_horizon', 'method', + ), + ) + + # Relationships + pixel = orm.relationship('Pixel', back_populates='forecasts') diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index 5b3f4f3..26faf1c 100644 --- a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -39,6 +39,7 @@ class Pixel(meta.Base): # Relationships grid = orm.relationship('Grid', back_populates='pixels') addresses = orm.relationship('AddressPixelAssociation', back_populates='pixel') + forecasts = orm.relationship('Forecast', back_populates='pixel') def __repr__(self) -> str: """Non-literal text representation.""" diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py new file mode 100644 index 0000000..fa27854 --- /dev/null +++ b/tests/db/test_forecasts.py @@ -0,0 +1,147 @@ +"""Test the ORM's `Forecast` model.""" +# pylint:disable=no-self-use + +import datetime + +import pytest +import sqlalchemy as sqla +from sqlalchemy import exc as sa_exc + +from urban_meal_delivery import db + + +@pytest.fixture +def forecast(pixel): + """A `forecast` made in the `pixel`.""" + return db.Forecast( + pixel=pixel, + start_at=datetime.datetime(2020, 1, 1, 12, 0), + time_step=60, + training_horizon=8, + method='hets', + prediction=12.3, + ) + + +class TestSpecialMethods: + """Test special methods in `Forecast`.""" + + def test_create_forecast(self, forecast): + """Test instantiation of a new `Forecast` object.""" + assert forecast is not None + + +@pytest.mark.db +@pytest.mark.no_cover +class TestConstraints: + """Test the database constraints defined in `Forecast`.""" + + def test_insert_into_database(self, db_session, forecast): + """Insert an instance into the (empty) database.""" + assert db_session.query(db.Forecast).count() == 0 + + db_session.add(forecast) + db_session.commit() + + assert db_session.query(db.Forecast).count() == 1 + + def 
test_delete_a_referenced_pixel(self, db_session, forecast): + """Remove a record that is referenced with a FK.""" + db_session.add(forecast) + db_session.commit() + + # Must delete without ORM as otherwise an UPDATE statement is emitted. + stmt = sqla.delete(db.Pixel).where(db.Pixel.id == forecast.pixel.id) + + with pytest.raises( + sa_exc.IntegrityError, match='fk_forecasts_to_pixels_via_pixel_id', + ): + db_session.execute(stmt) + + @pytest.mark.parametrize('hour', [10, 23]) + def test_invalid_start_at_outside_operating_hours( + self, db_session, forecast, hour, + ): + """Insert an instance with invalid data.""" + forecast.start_at = datetime.datetime( + forecast.start_at.year, + forecast.start_at.month, + forecast.start_at.day, + hour, + ) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='within_operating_hours', + ): + db_session.commit() + + def test_invalid_start_at_not_quarter_of_hour(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.start_at += datetime.timedelta(minutes=1) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='must_be_quarters_of_the_hour', + ): + db_session.commit() + + def test_invalid_start_at_seconds_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.start_at += datetime.timedelta(seconds=1) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='no_seconds', + ): + db_session.commit() + + def test_invalid_start_at_microseconds_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.start_at += datetime.timedelta(microseconds=1) + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='no_microseconds', + ): + db_session.commit() + + @pytest.mark.parametrize('value', [-1, 0]) + def test_positive_time_step(self, db_session, forecast, value): + """Insert an instance with invalid data.""" + forecast.time_step = value + 
db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='time_step_must_be_positive', + ): + db_session.commit() + + @pytest.mark.parametrize('value', [-1, 0]) + def test_positive_training_horizon(self, db_session, forecast, value): + """Insert an instance with invalid data.""" + forecast.training_horizon = value + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='training_horizon_must_be_positive', + ): + db_session.commit() + + def test_two_predictions_for_same_forecasting_setting(self, db_session, forecast): + """Insert a record that violates a unique constraint.""" + db_session.add(forecast) + db_session.commit() + + another_forecast = db.Forecast( + pixel=forecast.pixel, + start_at=forecast.start_at, + time_step=forecast.time_step, + training_horizon=forecast.training_horizon, + method=forecast.method, + prediction=99.9, + ) + db_session.add(another_forecast) + + with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): + db_session.commit() From d5b3efbca1178ccd2b98b7fd82da2bd9d7790804 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 7 Jan 2021 23:18:40 +0100 Subject: [PATCH 31/72] Add `aggregate_orders()` function - the function queries the database and aggregates the ad-hoc orders by pixel and time steps into a demand time series - implement "heavy" integration tests for `aggregate_orders()` - make `pandas` a package dependency - streamline the `Config` --- poetry.lock | 2 +- pyproject.toml | 3 +- setup.cfg | 8 + src/urban_meal_delivery/configuration.py | 24 ++ src/urban_meal_delivery/db/grids.py | 4 +- src/urban_meal_delivery/forecasts/__init__.py | 3 + src/urban_meal_delivery/forecasts/timify.py | 114 +++++++ tests/db/test_grids.py | 2 +- tests/forecasts/__init__.py | 1 + tests/forecasts/test_timify.py | 305 ++++++++++++++++++ 10 files changed, 460 insertions(+), 6 deletions(-) create mode 100644 src/urban_meal_delivery/forecasts/__init__.py create mode 100644 
src/urban_meal_delivery/forecasts/timify.py create mode 100644 tests/forecasts/__init__.py create mode 100644 tests/forecasts/test_timify.py diff --git a/poetry.lock b/poetry.lock index e4862bf..b56f2af 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1120,7 +1120,7 @@ name = "pandas" version = "1.1.5" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" -optional = true +optional = false python-versions = ">=3.6.1" [package.dependencies] diff --git a/pyproject.toml b/pyproject.toml index dbddd95..573d282 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,6 +31,7 @@ python = "^3.8" Shapely = "^1.7.1" alembic = "^1.4.2" click = "^7.1.2" +pandas = "^1.1.0" psycopg2 = "^2.8.5" # adapter for PostgreSQL sqlalchemy = "^1.3.18" utm = "^0.7.0" @@ -40,7 +41,6 @@ utm = "^0.7.0" jupyterlab = { version="^2.2.2", optional=true } nb_black = { version="^1.0.7", optional=true } numpy = { version="^1.19.1", optional=true } -pandas = { version="^1.1.0", optional=true } pytz = { version="^2020.1", optional=true } [tool.poetry.extras] @@ -48,7 +48,6 @@ research = [ "jupyterlab", "nb_black", "numpy", - "pandas", "pytz", ] diff --git a/setup.cfg b/setup.cfg index f6b291f..76746f3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -138,6 +138,12 @@ per-file-ignores = src/urban_meal_delivery/db/utils/__init__.py: # Top-level of a sub-packages is intended to import a lot. F401, + src/urban_meal_delivery/forecasts/__init__.py: + # Top-level of a sub-packages is intended to import a lot. + F401, + src/urban_meal_delivery/forecasts/timify.py: + # No SQL injection as the inputs come from a safe source. + S608, tests/*.py: # Type annotations are not strictly enforced. 
ANN0, ANN2, @@ -245,6 +251,8 @@ cache_dir = .cache/mypy ignore_missing_imports = true [mypy-packaging] ignore_missing_imports = true +[mypy-pandas] +ignore_missing_imports = true [mypy-pytest] ignore_missing_imports = true [mypy-sqlalchemy.*] diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index e4cca50..2d36392 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -26,14 +26,38 @@ def random_schema_name() -> str: class Config: """Configuration that applies in all situations.""" + # Application-specific settings + # ----------------------------- + + # Date after which the real-life data is discarded. CUTOFF_DAY = datetime.datetime(2017, 2, 1) # If a scheduled pre-order is made within this # time horizon, we treat it as an ad-hoc order. QUASI_AD_HOC_LIMIT = datetime.timedelta(minutes=45) + # Operating hours of the platform. + SERVICE_START = 11 + SERVICE_END = 23 + + # Side lengths (in meters) for which pixel grids are created. + # They are the basis for the aggregated demand forecasts. GRID_SIDE_LENGTHS = [707, 1000, 1414] + # Time steps (in minutes) used to aggregate the + # individual orders into time series. + TIME_STEPS = [60] + + # Training horizons (in full weeks) used + # to train the forecasting models. + TRAINING_HORIZONS = [8] + + # The demand forecasting methods used in the simulations. + FORECASTING_METHODS = ['hets', 'rtarima'] + + # Implementation-specific settings + # -------------------------------- + DATABASE_URI = os.getenv('DATABASE_URI') # The PostgreSQL schema that holds the tables with the original data. 
diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index 5593892..c1d7dd2 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -43,7 +43,7 @@ class Grid(meta.Base): def __repr__(self) -> str: """Non-literal text representation.""" - return '<{cls}: {area}>'.format( + return '<{cls}: {area} sqr. km>'.format( cls=self.__class__.__name__, area=self.pixel_area, ) @@ -51,7 +51,7 @@ class Grid(meta.Base): @property def pixel_area(self) -> float: """The area of a `Pixel` on the grid in square kilometers.""" - return (self.side_length ** 2) / 1_000_000 # noqa:WPS432 + return round((self.side_length ** 2) / 1_000_000, 1) # noqa:WPS432 @classmethod def gridify(cls, city: db.City, side_length: int) -> db.Grid: diff --git a/src/urban_meal_delivery/forecasts/__init__.py b/src/urban_meal_delivery/forecasts/__init__.py new file mode 100644 index 0000000..be8843e --- /dev/null +++ b/src/urban_meal_delivery/forecasts/__init__.py @@ -0,0 +1,3 @@ +"""Demand forecasting utilities.""" + +from urban_meal_delivery.forecasts import timify diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py new file mode 100644 index 0000000..08cd1df --- /dev/null +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -0,0 +1,114 @@ +"""Obtain and work with time series data.""" + +import datetime + +import pandas as pd + +from urban_meal_delivery import config +from urban_meal_delivery import db + + +def aggregate_orders(grid: db.Grid, time_step: int) -> pd.DataFrame: # pragma: no cover + """Obtain a time series of the ad-hoc `Order` totals. 
+ + Args: + grid: pixel grid used to aggregate orders spatially + time_step: interval length (in minutes) into which orders are aggregated + + Returns: + order_totals: `DataFrame` with a `MultiIndex` of the "pixel_id"s and + beginnings of the intervals (i.e., "start_at"s); the sole column + with data is "total_orders" + """ + # `data` is probably missing "pixel_id"-"start_at" pairs. + # This happens whenever there is no demand in the `Pixel` in the given `time_step`. + data = pd.read_sql_query( + f"""-- # noqa:WPS221 + SELECT + pixel_id, + start_at, + COUNT(*) AS total_orders + FROM ( + SELECT + pixel_id, + placed_at_without_seconds - minutes_to_be_cut AS start_at + FROM ( + SELECT + pixels.pixel_id, + DATE_TRUNC('MINUTE', orders.placed_at) AS placed_at_without_seconds, + (( + EXTRACT(MINUTES FROM orders.placed_at)::INTEGER % {time_step} + )::TEXT || ' MINUTES')::INTERVAL + AS minutes_to_be_cut + FROM ( + SELECT + id, + placed_at, + pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + INNER JOIN ( + SELECT + id AS address_id + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {grid.city.id} + ) AS in_city + ON orders.pickup_address_id = in_city.address_id + WHERE + ad_hoc IS TRUE + ) AS + orders + INNER JOIN ( + SELECT + address_id, + pixel_id + FROM + {config.CLEAN_SCHEMA}.addresses_pixels + WHERE + grid_id = {grid.id} + AND + city_id = {grid.city.id} -- city_id is redundant -> sanity check + ) AS pixels + ON orders.pickup_address_id = pixels.address_id + ) AS placed_at_aggregated_into_start_at + ) AS pixel_start_at_combinations + GROUP BY + pixel_id, + start_at + ORDER BY + pixel_id, + start_at; + """, + con=db.connection, + index_col=['pixel_id', 'start_at'], + ) + + if data.empty: + return data + + # Calculate the first and last "start_at" value ... 
+ start_day = data.index.levels[1].min().date() + start = datetime.datetime( + start_day.year, start_day.month, start_day.day, config.SERVICE_START, + ) + end_day = data.index.levels[1].max().date() + end = datetime.datetime( + end_day.year, end_day.month, end_day.day, config.SERVICE_END, + ) + + # ... and all possible `tuple`s of "pixel_id"-"start_at" combinations. + # The "start_at" values must lie within the operating hours. + gen = ( + (pixel_id, start_at) + for pixel_id in sorted(data.index.levels[0]) + for start_at in pd.date_range(start, end, freq=f'{time_step}T') + if config.SERVICE_START <= start_at.time().hour < config.SERVICE_END + ) + + # Re-index `data` filling in `0`s where there is no demand. + index = pd.MultiIndex.from_tuples(gen) + index.names = ['pixel_id', 'start_at'] + + return data.reindex(index, fill_value=0) diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py index 8bdb0c5..4dd5beb 100644 --- a/tests/db/test_grids.py +++ b/tests/db/test_grids.py @@ -19,7 +19,7 @@ class TestSpecialMethods: """`Grid` has a non-literal text representation.""" result = repr(grid) - assert result == f'' + assert result == f'' @pytest.mark.db diff --git a/tests/forecasts/__init__.py b/tests/forecasts/__init__.py new file mode 100644 index 0000000..50eaeb3 --- /dev/null +++ b/tests/forecasts/__init__.py @@ -0,0 +1 @@ +"""Test the forecasting-related functionality.""" diff --git a/tests/forecasts/test_timify.py b/tests/forecasts/test_timify.py new file mode 100644 index 0000000..0cd4d21 --- /dev/null +++ b/tests/forecasts/test_timify.py @@ -0,0 +1,305 @@ +"""Test the time series related code.""" +# pylint:disable=no-self-use,unused-argument + +import datetime + +import pytest + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import timify + + +YEAR, MONTH, DAY = 2020, 1, 1 + + +@pytest.mark.db +class TestAggregateOrders: + """Test the `aggregate_orders()` function. 
+ + The test cases are all integration tests that model realistic scenarios. + """ + + @pytest.fixture + def one_pixel_grid(self, db_session, city, restaurant): + """A persisted `Grid` with one `Pixel`. + + `restaurant` must be a dependency as otherwise + its `.address` is not put into the database. + """ + # `+1` as otherwise there would be a second pixel in one direction. + side_length = max(city.total_x, city.total_y) + 1 + grid = db.Grid.gridify(city=city, side_length=side_length) + + db_session.add(grid) + + assert len(grid.pixels) == 1 # sanity check + + return grid + + def test_no_orders(self, db_session, one_pixel_grid, restaurant): + """Edge case that does not occur for real-life data.""" + db_session.commit() + + assert len(restaurant.orders) == 0 # noqa:WPS507 sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + + assert len(result) == 0 # noqa:WPS507 + + def test_evenly_distributed_ad_hoc_orders( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """12 ad-hoc orders, one per operating hour.""" + # Create one order per hour and 12 orders in total. + for hour in range(11, 23): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 12 # sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + + # The resulting `DataFrame` has 12 rows holding `1`s. + assert len(result) == 12 + assert result['total_orders'].min() == 1 + assert result['total_orders'].max() == 1 + assert result['total_orders'].sum() == 12 + + def test_evenly_distributed_ad_hoc_orders_with_no_demand_late( # noqa:WPS218 + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """10 ad-hoc orders, one per hour, no orders after 21.""" + # Create one order per hour and 10 orders in total. 
+ for hour in range(11, 21): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 10 # sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + + # Even though there are only 10 orders, there are 12 rows in the `DataFrame`. + # That is so as `0`s are filled in for hours without any demand at the end. + assert len(result) == 12 + assert result['total_orders'].min() == 0 + assert result['total_orders'].max() == 1 + assert result.iloc[:10]['total_orders'].sum() == 10 + assert result.iloc[10:]['total_orders'].sum() == 0 + + def test_one_ad_hoc_order_every_other_hour( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """6 ad-hoc orders, one every other hour.""" + # Create one order every other hour. + for hour in range(11, 23, 2): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 6 # sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + + # The resulting `DataFrame` has 12 rows, 6 holding `0`s, and 6 holding `1`s. + assert len(result) == 12 + assert result['total_orders'].min() == 0 + assert result['total_orders'].max() == 1 + assert result['total_orders'].sum() == 6 + + def test_one_ad_hoc_and_one_pre_order( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """1 ad-hoc and 1 scheduled order. + + The scheduled order is discarded. 
+ """ + ad_hoc_order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, 11, 11), + ) + db_session.add(ad_hoc_order) + + pre_order = make_order( + scheduled=True, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, 9, 0), + scheduled_delivery_at=datetime.datetime(YEAR, MONTH, DAY, 12, 0), + ) + db_session.add(pre_order) + + db_session.commit() + + assert len(restaurant.orders) == 2 # sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + + # The resulting `DataFrame` has 12 rows, 11 holding `0`s, and one holding a `1`. + assert len(result) == 12 + assert result['total_orders'].min() == 0 + assert result['total_orders'].max() == 1 + assert result['total_orders'].sum() == 1 + + def test_evenly_distributed_ad_hoc_orders_with_half_hour_time_steps( # noqa:WPS218 + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """12 ad-hoc orders, one per hour, with 30 minute time windows. + + In half the time steps, there is no demand. + """ + # Create one order per hour and 10 orders in total. + for hour in range(11, 23): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 12 # sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=30) + + # The resulting `DataFrame` has 24 rows for the 24 30-minute time steps. + # The rows' values are `0` and `1` alternating. + assert len(result) == 24 + assert result['total_orders'].min() == 0 + assert result['total_orders'].max() == 1 + assert result.iloc[::2]['total_orders'].sum() == 12 + assert result.iloc[1::2]['total_orders'].sum() == 0 + + def test_ad_hoc_orders_over_two_days( + self, db_session, one_pixel_grid, restaurant, make_order, + ): + """First day 12 ad-hoc orders, one per operating hour ... + + ... 
and 6 orders, one every other hour on the second day. + In total, there are 18 orders. + """ + # Create one order per hour and 12 orders in total. + for hour in range(11, 23): + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + ) + db_session.add(order) + + # Create one order every other hour and 6 orders in total. + for hour in range(11, 23, 2): # noqa:WPS440 + order = make_order( + scheduled=False, + restaurant=restaurant, + placed_at=datetime.datetime( + YEAR, MONTH, DAY + 1, hour, 11, # noqa:WPS441 + ), + ) + db_session.add(order) + + db_session.commit() + + assert len(restaurant.orders) == 18 # sanity check + + result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + + # The resulting `DataFrame` has 24 rows, 12 for each day. + assert len(result) == 24 + assert result['total_orders'].min() == 0 + assert result['total_orders'].max() == 1 + assert result['total_orders'].sum() == 18 + + @pytest.fixture + def two_pixel_grid(self, db_session, city, make_address, make_restaurant): + """A persisted `Grid` with two `Pixel` objects. + + `restaurant` must be a dependency as otherwise + its `.address` is not put into the database. + """ + # One `Address` in the lower-left `Pixel`, ... + address1 = make_address(latitude=48.8357377, longitude=2.2517412) + # ... and another one in the upper-right one. + address2 = make_address(latitude=48.8898312, longitude=2.4357622) + + # Create `Restaurant`s at the two addresses. + make_restaurant(address=address1) + make_restaurant(address=address2) + + # This creates four `Pixel`s, two of which have no `pickup_address`. 
+ side_length = max(city.total_x // 2, city.total_y // 2) + 1 + + grid = db.Grid.gridify(city=city, side_length=side_length) + + db_session.add(grid) + + assert len(grid.pixels) == 2 # sanity check + + return grid + + def test_two_pixels_with_shifted_orders( # noqa:WPS218 + self, db_session, two_pixel_grid, make_order, + ): + """One restaurant with one order every other hour ... + + ... and another restaurant with two orders per hour. + In total, there are 30 orders. + """ + address1, address2 = two_pixel_grid.city.addresses + restaurant1, restaurant2 = address1.restaurant, address2.restaurant + + # Create one order every other hour for `restaurant1`. + for hour in range(11, 23, 2): + order = make_order( + scheduled=False, + restaurant=restaurant1, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + ) + db_session.add(order) + + # Create two orders per hour for `restaurant2`. + for hour in range(11, 23): # noqa:WPS440 + order = make_order( + scheduled=False, + restaurant=restaurant2, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 13), # noqa:WPS441 + ) + db_session.add(order) + + order = make_order( + scheduled=False, + restaurant=restaurant2, + placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 14), # noqa:WPS441 + ) + db_session.add(order) + + db_session.commit() + + # sanity checks + assert len(restaurant1.orders) == 6 + assert len(restaurant2.orders) == 24 + + result = timify.aggregate_orders(grid=two_pixel_grid, time_step=60) + + # The resulting `DataFrame` has 24 rows, 12 for each pixel. 
+ assert len(result) == 24 + assert result['total_orders'].min() == 0 + assert result['total_orders'].max() == 2 + assert result['total_orders'].sum() == 30 From 65d1632e9856e4f23a54ee4e59fcd27633d709ee Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 9 Jan 2021 16:20:23 +0100 Subject: [PATCH 32/72] Add `OrderHistory` class - the main purpose of this class is to manage querying the order totals from the database and slice various kinds of time series out of the data - the class holds the former `aggregate_orders()` function as a method - modularize the corresponding tests - add `tests.config` with globals used when testing to provide a single source of truth for various settings --- src/urban_meal_delivery/forecasts/timify.py | 211 ++++++++++-------- tests/config.py | 10 + tests/db/fake_data/factories.py | 40 ++-- tests/forecasts/timify/__init__.py | 1 + .../test_aggregate_orders.py} | 117 +++++++--- tests/forecasts/timify/test_order_history.py | 39 ++++ 6 files changed, 289 insertions(+), 129 deletions(-) create mode 100644 tests/config.py create mode 100644 tests/forecasts/timify/__init__.py rename tests/forecasts/{test_timify.py => timify/test_aggregate_orders.py} (73%) create mode 100644 tests/forecasts/timify/test_order_history.py diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 08cd1df..87d26b5 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -1,6 +1,6 @@ """Obtain and work with time series data.""" -import datetime +import datetime as dt import pandas as pd @@ -8,107 +8,140 @@ from urban_meal_delivery import config from urban_meal_delivery import db -def aggregate_orders(grid: db.Grid, time_step: int) -> pd.DataFrame: # pragma: no cover - """Obtain a time series of the ad-hoc `Order` totals. +class OrderHistory: + """Generate time series from the `Order` model in the database. 
- Args: - grid: pixel grid used to aggregate orders spatially - time_step: interval length (in minutes) into which orders are aggregated - - Returns: - order_totals: `DataFrame` with a `MultiIndex` of the "pixel_id"s and - beginnings of the intervals (i.e., "start_at"s); the sole column - with data is "total_orders" + The purpose of this class is to abstract away the managing of the order data + in memory and the slicing the data into various kinds of time series. """ - # `data` is probably missing "pixel_id"-"start_at" pairs. - # This happens whenever there is no demand in the `Pixel` in the given `time_step`. - data = pd.read_sql_query( - f"""-- # noqa:WPS221 - SELECT - pixel_id, - start_at, - COUNT(*) AS total_orders - FROM ( + + def __init__(self, grid: db.Grid, time_step: int) -> None: + """Initialize a new `OrderHistory` object. + + Args: + grid: pixel grid used to aggregate orders spatially + time_step: interval length (in minutes) into which orders are aggregated + + # noqa:DAR401 RuntimeError + """ + self._grid = grid + self._time_step = time_step + + # Number of daily time steps must be a whole multiple of `time_step` length. + n_daily_time_steps = ( + 60 * (config.SERVICE_END - config.SERVICE_START) / time_step + ) + if n_daily_time_steps != int(n_daily_time_steps): # pragma: no cover + raise RuntimeError('Internal error: configuration has invalid TIME_STEPS') + self._n_daily_time_steps = int(n_daily_time_steps) + + # The `_data` are populated by `.aggregate_orders()`. + self._data = None + + @property + def totals(self) -> pd.DataFrame: + """The order totals by `Pixel` and `.time_step`. + + The returned object should not be mutated! 
+ + Returns: + order_totals: a one-column `DataFrame` with a `MultiIndex` of the + "pixel_id"s and "start_at"s (i.e., beginnings of the intervals); + the column with data is "total_orders" + """ + if self._data is None: + self._data = self.aggregate_orders() + + return self._data + + def aggregate_orders(self) -> pd.DataFrame: # pragma: no cover + """Generate and load all order totals from the database.""" + # `data` is probably missing "pixel_id"-"start_at" pairs. + # This happens when there is no demand in the `Pixel` in the given `time_step`. + data = pd.read_sql_query( + f"""-- # noqa:E501,WPS221 SELECT pixel_id, - placed_at_without_seconds - minutes_to_be_cut AS start_at + start_at, + COUNT(*) AS total_orders FROM ( SELECT - pixels.pixel_id, - DATE_TRUNC('MINUTE', orders.placed_at) AS placed_at_without_seconds, - (( - EXTRACT(MINUTES FROM orders.placed_at)::INTEGER % {time_step} - )::TEXT || ' MINUTES')::INTERVAL - AS minutes_to_be_cut + pixel_id, + placed_at_without_seconds - minutes_to_be_cut AS start_at FROM ( SELECT - id, - placed_at, - pickup_address_id - FROM - {config.CLEAN_SCHEMA}.orders + pixels.pixel_id, + DATE_TRUNC('MINUTE', orders.placed_at) AS placed_at_without_seconds, + (( + EXTRACT(MINUTES FROM orders.placed_at)::INTEGER % {self._time_step} + )::TEXT || ' MINUTES')::INTERVAL + AS minutes_to_be_cut + FROM ( + SELECT + id, + placed_at, + pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + INNER JOIN ( + SELECT + id AS address_id + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {self._grid.city.id} + ) AS in_city + ON orders.pickup_address_id = in_city.address_id + WHERE + ad_hoc IS TRUE + ) AS + orders INNER JOIN ( SELECT - id AS address_id + address_id, + pixel_id FROM - {config.CLEAN_SCHEMA}.addresses + {config.CLEAN_SCHEMA}.addresses_pixels WHERE - city_id = {grid.city.id} - ) AS in_city - ON orders.pickup_address_id = in_city.address_id - WHERE - ad_hoc IS TRUE - ) AS - orders - INNER JOIN ( - SELECT - address_id, - 
pixel_id - FROM - {config.CLEAN_SCHEMA}.addresses_pixels - WHERE - grid_id = {grid.id} - AND - city_id = {grid.city.id} -- city_id is redundant -> sanity check - ) AS pixels - ON orders.pickup_address_id = pixels.address_id - ) AS placed_at_aggregated_into_start_at - ) AS pixel_start_at_combinations - GROUP BY - pixel_id, - start_at - ORDER BY - pixel_id, - start_at; - """, - con=db.connection, - index_col=['pixel_id', 'start_at'], - ) + grid_id = {self._grid.id} + AND + city_id = {self._grid.city.id} -- redundant -> sanity check + ) AS pixels + ON orders.pickup_address_id = pixels.address_id + ) AS placed_at_aggregated_into_start_at + ) AS pixel_start_at_combinations + GROUP BY + pixel_id, + start_at + ORDER BY + pixel_id, + start_at; + """, + con=db.connection, + index_col=['pixel_id', 'start_at'], + ) - if data.empty: - return data + if data.empty: + return data - # Calculate the first and last "start_at" value ... - start_day = data.index.levels[1].min().date() - start = datetime.datetime( - start_day.year, start_day.month, start_day.day, config.SERVICE_START, - ) - end_day = data.index.levels[1].max().date() - end = datetime.datetime( - end_day.year, end_day.month, end_day.day, config.SERVICE_END, - ) + # Calculate the first and last "start_at" value ... + start_day = data.index.levels[1].min().date() + start = dt.datetime( + start_day.year, start_day.month, start_day.day, config.SERVICE_START, + ) + end_day = data.index.levels[1].max().date() + end = dt.datetime(end_day.year, end_day.month, end_day.day, config.SERVICE_END) + # ... and all possible `tuple`s of "pixel_id"-"start_at" combinations. + # The "start_at" values must lie within the operating hours. + gen = ( + (pixel_id, start_at) + for pixel_id in sorted(data.index.levels[0]) + for start_at in pd.date_range(start, end, freq=f'{self._time_step}T') + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) - # ... and all possible `tuple`s of "pixel_id"-"start_at" combinations. 
- # The "start_at" values must lie within the operating hours. - gen = ( - (pixel_id, start_at) - for pixel_id in sorted(data.index.levels[0]) - for start_at in pd.date_range(start, end, freq=f'{time_step}T') - if config.SERVICE_START <= start_at.time().hour < config.SERVICE_END - ) + # Re-index `data` filling in `0`s where there is no demand. + index = pd.MultiIndex.from_tuples(gen) + index.names = ['pixel_id', 'start_at'] - # Re-index `data` filling in `0`s where there is no demand. - index = pd.MultiIndex.from_tuples(gen) - index.names = ['pixel_id', 'start_at'] - - return data.reindex(index, fill_value=0) + return data.reindex(index, fill_value=0) diff --git a/tests/config.py b/tests/config.py new file mode 100644 index 0000000..288c471 --- /dev/null +++ b/tests/config.py @@ -0,0 +1,10 @@ +"""Globals used when testing.""" + + +# The day on which most test cases take place. +YEAR, MONTH, DAY = 2016, 7, 1 + +# Default time steps, for example, for `OrderHistory` objects. +LONG_TIME_STEP = 60 +SHORT_TIME_STEP = 30 +TIME_STEPS = (SHORT_TIME_STEP, LONG_TIME_STEP) diff --git a/tests/db/fake_data/factories.py b/tests/db/fake_data/factories.py index d9d23d4..50ee950 100644 --- a/tests/db/fake_data/factories.py +++ b/tests/db/fake_data/factories.py @@ -9,6 +9,7 @@ import faker from factory import alchemy from geopy import distance +from tests import config as test_config from urban_meal_delivery import db @@ -27,13 +28,10 @@ def _random_timespan( # noqa:WPS211 return dt.timedelta(seconds=random.randint(total_min_seconds, total_max_seconds)) -# The test day. 
-_YEAR, _MONTH, _DAY = 2020, 1, 1 - - def _early_in_the_morning(): """A randomized `datetime` object early in the morning.""" - return dt.datetime(_YEAR, _MONTH, _DAY, 3, 0) + _random_timespan(max_hours=2) + early = dt.datetime(test_config.YEAR, test_config.MONTH, test_config.DAY, 3, 0) + return early + _random_timespan(max_hours=2) class AddressFactory(alchemy.SQLAlchemyModelFactory): @@ -171,7 +169,9 @@ class AdHocOrderFactory(alchemy.SQLAlchemyModelFactory): # Attributes regarding the specialization of an `Order`: ad-hoc or scheduled. # Ad-hoc `Order`s are placed between 11.45 and 14.15. placed_at = factory.LazyFunction( - lambda: dt.datetime(_YEAR, _MONTH, _DAY, 11, 45) + lambda: dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 11, 45, + ) + _random_timespan(max_hours=2, max_minutes=30), ) ad_hoc = True @@ -337,13 +337,27 @@ class ScheduledOrderFactory(AdHocOrderFactory): scheduled_delivery_at = factory.LazyFunction( lambda: random.choice( [ - dt.datetime(_YEAR, _MONTH, _DAY, 12, 0), - dt.datetime(_YEAR, _MONTH, _DAY, 12, 15), - dt.datetime(_YEAR, _MONTH, _DAY, 12, 30), - dt.datetime(_YEAR, _MONTH, _DAY, 12, 45), - dt.datetime(_YEAR, _MONTH, _DAY, 13, 0), - dt.datetime(_YEAR, _MONTH, _DAY, 13, 15), - dt.datetime(_YEAR, _MONTH, _DAY, 13, 30), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 0, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 15, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 30, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 45, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 13, 0, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 13, 15, + ), + dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 13, 30, + ), ], ), ) diff --git a/tests/forecasts/timify/__init__.py b/tests/forecasts/timify/__init__.py new file mode 100644 index 
0000000..167675d --- /dev/null +++ b/tests/forecasts/timify/__init__.py @@ -0,0 +1 @@ +"""Tests for the `urban_meal_delivery.forecasts.timify` module.""" diff --git a/tests/forecasts/test_timify.py b/tests/forecasts/timify/test_aggregate_orders.py similarity index 73% rename from tests/forecasts/test_timify.py rename to tests/forecasts/timify/test_aggregate_orders.py index 0cd4d21..51a6dfa 100644 --- a/tests/forecasts/test_timify.py +++ b/tests/forecasts/timify/test_aggregate_orders.py @@ -1,22 +1,20 @@ -"""Test the time series related code.""" +"""Test the `OrderHistory.aggregate_orders()` method.""" # pylint:disable=no-self-use,unused-argument import datetime import pytest +from tests import config as test_config from urban_meal_delivery import db from urban_meal_delivery.forecasts import timify -YEAR, MONTH, DAY = 2020, 1, 1 - - @pytest.mark.db class TestAggregateOrders: - """Test the `aggregate_orders()` function. + """Test the `OrderHistory.aggregate_orders()` method. - The test cases are all integration tests that model realistic scenarios. + The test cases are integration tests that model realistic scenarios. 
""" @pytest.fixture @@ -39,10 +37,13 @@ class TestAggregateOrders: def test_no_orders(self, db_session, one_pixel_grid, restaurant): """Edge case that does not occur for real-life data.""" db_session.commit() - assert len(restaurant.orders) == 0 # noqa:WPS507 sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() assert len(result) == 0 # noqa:WPS507 @@ -55,7 +56,9 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), ) db_session.add(order) @@ -63,7 +66,11 @@ class TestAggregateOrders: assert len(restaurant.orders) == 12 # sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() # The resulting `DataFrame` has 12 rows holding `1`s. assert len(result) == 12 @@ -80,7 +87,9 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), ) db_session.add(order) @@ -88,7 +97,11 @@ class TestAggregateOrders: assert len(restaurant.orders) == 10 # sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() # Even though there are only 10 orders, there are 12 rows in the `DataFrame`. # That is so as `0`s are filled in for hours without any demand at the end. 
@@ -107,7 +120,9 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), ) db_session.add(order) @@ -115,7 +130,11 @@ class TestAggregateOrders: assert len(restaurant.orders) == 6 # sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() # The resulting `DataFrame` has 12 rows, 6 holding `0`s, and 6 holding `1`s. assert len(result) == 12 @@ -133,15 +152,21 @@ class TestAggregateOrders: ad_hoc_order = make_order( scheduled=False, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, 11, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 11, 11, + ), ) db_session.add(ad_hoc_order) pre_order = make_order( scheduled=True, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, 9, 0), - scheduled_delivery_at=datetime.datetime(YEAR, MONTH, DAY, 12, 0), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 9, 0, + ), + scheduled_delivery_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, 12, 0, + ), ) db_session.add(pre_order) @@ -149,7 +174,11 @@ class TestAggregateOrders: assert len(restaurant.orders) == 2 # sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() # The resulting `DataFrame` has 12 rows, 11 holding `0`s, and one holding a `1`. 
assert len(result) == 12 @@ -169,7 +198,9 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), ) db_session.add(order) @@ -177,7 +208,11 @@ class TestAggregateOrders: assert len(restaurant.orders) == 12 # sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=30) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.SHORT_TIME_STEP, + ) + + result = oh.aggregate_orders() # The resulting `DataFrame` has 24 rows for the 24 30-minute time steps. # The rows' values are `0` and `1` alternating. @@ -200,7 +235,9 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), ) db_session.add(order) @@ -210,7 +247,11 @@ class TestAggregateOrders: scheduled=False, restaurant=restaurant, placed_at=datetime.datetime( - YEAR, MONTH, DAY + 1, hour, 11, # noqa:WPS441 + test_config.YEAR, + test_config.MONTH, + test_config.DAY + 1, + hour, # noqa:WPS441 + 11, ), ) db_session.add(order) @@ -219,7 +260,11 @@ class TestAggregateOrders: assert len(restaurant.orders) == 18 # sanity check - result = timify.aggregate_orders(grid=one_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=one_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() # The resulting `DataFrame` has 24 rows, 12 for each day. 
assert len(result) == 24 @@ -270,7 +315,9 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant1, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 11), + placed_at=datetime.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, hour, 11, + ), ) db_session.add(order) @@ -279,14 +326,26 @@ class TestAggregateOrders: order = make_order( scheduled=False, restaurant=restaurant2, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 13), # noqa:WPS441 + placed_at=datetime.datetime( + test_config.YEAR, + test_config.MONTH, + test_config.DAY, + hour, # noqa:WPS441 + 13, + ), ) db_session.add(order) order = make_order( scheduled=False, restaurant=restaurant2, - placed_at=datetime.datetime(YEAR, MONTH, DAY, hour, 14), # noqa:WPS441 + placed_at=datetime.datetime( + test_config.YEAR, + test_config.MONTH, + test_config.DAY, + hour, # noqa:WPS441 + 14, + ), ) db_session.add(order) @@ -296,7 +355,11 @@ class TestAggregateOrders: assert len(restaurant1.orders) == 6 assert len(restaurant2.orders) == 24 - result = timify.aggregate_orders(grid=two_pixel_grid, time_step=60) + oh = timify.OrderHistory( + grid=two_pixel_grid, time_step=test_config.LONG_TIME_STEP, + ) + + result = oh.aggregate_orders() # The resulting `DataFrame` has 24 rows, 12 for each pixel. 
assert len(result) == 24 diff --git a/tests/forecasts/timify/test_order_history.py b/tests/forecasts/timify/test_order_history.py new file mode 100644 index 0000000..9d69d70 --- /dev/null +++ b/tests/forecasts/timify/test_order_history.py @@ -0,0 +1,39 @@ +"""Test the basic functionalities in the `OrderHistory` class.""" +# pylint:disable=no-self-use + +import pytest + +from tests import config as test_config +from urban_meal_delivery.forecasts import timify + + +class TestSpecialMethods: + """Test the special methods in `OrderHistory`.""" + + @pytest.mark.parametrize('time_step', test_config.TIME_STEPS) + def test_instantiate(self, grid, time_step): + """Test `OrderHistory.__init__()`.""" + oh = timify.OrderHistory(grid=grid, time_step=time_step) + + assert oh is not None + + +class TestProperties: + """Test the properties in `OrderHistory`.""" + + def test_totals_is_cached(self, grid, monkeypatch): + """Test `.totals` property. + + The result of the `OrderHistory.aggregate_orders()` method call + is cached in the `OrderHistory.totals` property. 
+ """ + oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) + + sentinel = object() + monkeypatch.setattr(oh, 'aggregate_orders', lambda: sentinel) + + result1 = oh.totals + result2 = oh.totals + + assert result1 is result2 + assert result1 is sentinel From b61db734b666521b32c0d3d5273b3752e9eb6bad Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 9 Jan 2021 16:34:42 +0100 Subject: [PATCH 33/72] Add `OrderHistory.make_horizontal_time_series()` - the method slices out a horizontal time series from the data within an `OrderHistory` object --- src/urban_meal_delivery/forecasts/timify.py | 67 +++++++ tests/config.py | 18 ++ .../forecasts/timify/test_make_time_series.py | 173 ++++++++++++++++++ 3 files changed, 258 insertions(+) create mode 100644 tests/forecasts/timify/test_make_time_series.py diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 87d26b5..d9e43fc 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -1,6 +1,7 @@ """Obtain and work with time series data.""" import datetime as dt +from typing import Tuple import pandas as pd @@ -145,3 +146,69 @@ class OrderHistory: index.names = ['pixel_id', 'start_at'] return data.reindex(index, fill_value=0) + + def make_horizontal_time_series( # noqa:WPS210 + self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, + ) -> Tuple[pd.DataFrame, int, int]: + """Slice a horizontal time series out of the `.totals`. + + Create a time series covering `train_horizon` weeks that can be used + for training a forecasting model to predict the demand at `predict_at`. 
+ + For explanation of the terms "horizontal", "vertical", and "real-time" + in the context of time series, see section 3.2 in the following paper: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + + Args: + pixel_id: pixel in which the time series is aggregated + predict_at: time step (i.e., "start_at") for which a prediction is made + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + training time series, frequency, actual order count at `predict_at` + + Raises: + LookupError: `pixel_id` is not in the `grid` + RuntimeError: desired time series slice is not entirely in `.totals` + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + if predict_at >= config.CUTOFF_DAY: # pragma: no cover + raise RuntimeError('Internal error: cannot predict beyond the given data') + + # The first and last training day are just before the `predict_at` day + # and span exactly `train_horizon` weeks covering only the times of the + # day equal to the hour/minute of `predict_at`. + first_train_day = predict_at.date() - dt.timedelta(weeks=train_horizon) + first_start_at = dt.datetime( + first_train_day.year, + first_train_day.month, + first_train_day.day, + predict_at.hour, + predict_at.minute, + ) + last_train_day = predict_at.date() - dt.timedelta(days=1) + last_start_at = dt.datetime( + last_train_day.year, + last_train_day.month, + last_train_day.day, + predict_at.hour, + predict_at.minute, + ) + + # The frequency is the number of weekdays. + frequency = 7 + + # Take only the counts at the `predict_at` time. 
+ training_df = intra_pixel.loc[ + first_start_at : last_start_at : self._n_daily_time_steps # type: ignore + ] + if len(training_df) != frequency * train_horizon: + raise RuntimeError('Not enough historic data for `predict_at`') + + actual_df = intra_pixel.loc[[predict_at]] + + return training_df, frequency, actual_df diff --git a/tests/config.py b/tests/config.py index 288c471..c2e3175 100644 --- a/tests/config.py +++ b/tests/config.py @@ -1,10 +1,28 @@ """Globals used when testing.""" +import datetime + +from urban_meal_delivery import config + # The day on which most test cases take place. YEAR, MONTH, DAY = 2016, 7, 1 +# `START` and `END` constitute a 15-day time span. +# That implies a maximum `train_horizon` of `2` as that needs full 7-day weeks. +START = datetime.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) +_end_day = (START + datetime.timedelta(weeks=2)).date() +END = datetime.datetime( + _end_day.year, _end_day.month, _end_day.day, config.SERVICE_END, 0, +) + # Default time steps, for example, for `OrderHistory` objects. LONG_TIME_STEP = 60 SHORT_TIME_STEP = 30 TIME_STEPS = (SHORT_TIME_STEP, LONG_TIME_STEP) + +# Default training horizons, for example, for +# `OrderHistory.make_horizontal_time_series()`. 
+LONG_TRAIN_HORIZON = 2 +SHORT_TRAIN_HORIZON = 1 +TRAIN_HORIZONS = (SHORT_TRAIN_HORIZON, LONG_TRAIN_HORIZON) diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py new file mode 100644 index 0000000..bb7f682 --- /dev/null +++ b/tests/forecasts/timify/test_make_time_series.py @@ -0,0 +1,173 @@ +"""Test the time series related code.""" +# pylint:disable=no-self-use,unused-argument + +import datetime + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import config +from urban_meal_delivery.forecasts import timify + + +@pytest.fixture +def good_pixel_id(): + """A `pixel_id` that is on the `grid`.""" + return 1 + + +@pytest.fixture +def order_totals(good_pixel_id): + """A mock for `OrderHistory.totals`. + + To be a bit more realistic, we sample two pixels on the `grid`. + """ + pixel_ids = [good_pixel_id, good_pixel_id + 1] + + gen = ( + (pixel_id, start_at) + for pixel_id in pixel_ids + for start_at in pd.date_range( + test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + # Re-index `data` filling in `0`s where there is no demand. + index = pd.MultiIndex.from_tuples(gen) + index.names = ['pixel_id', 'start_at'] + + df = pd.DataFrame(data={'total_orders': 0}, index=index) + + # Sanity check: n_pixels * n_time_steps_per_day * n_weekdays * n_weeks. + assert len(df) == 2 * 12 * (7 * 2 + 1) + + return df + + +@pytest.fixture +def order_history(order_totals, grid): + """An `OrderHistory` object that does not need the database.""" + oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) + oh._data = order_totals # pylint:disable=protected-access + + return oh + + +@pytest.fixture +def good_predict_at(): + """A `predict_at` within `START`-`END` and ... + + ... a long enough history so that either `train_horizon=1` + or `train_horizon=2` works. 
+ """ + return datetime.datetime( + test_config.END.year, test_config.END.month, test_config.END.day, 12, 0, + ) + + +@pytest.fixture +def bad_predict_at(): + """A `predict_at` within `START`-`END` but ... + + ... not a long enough history so that both `train_horizon=1` + and `train_horizon=2` do not work. + """ + predict_day = test_config.END - datetime.timedelta(weeks=1, days=1) + return datetime.datetime( + predict_day.year, predict_day.month, predict_day.day, 12, 0, + ) + + +class TestMakeHorizontalTimeSeries: + """Test the `OrderHistory.make_horizontal_time_series()` method.""" + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): + """A `pixel_id` that is not in the `grid`.""" + with pytest.raises(LookupError): + order_history.make_horizontal_time_series( + pixel_id=999_999, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_are_dataframes( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The time series come in a one-column `pd.DataFrame`.""" + result = order_history.make_horizontal_time_series( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + assert isinstance(training_df, pd.DataFrame) + assert training_df.columns == ['total_orders'] + assert isinstance(actual_df, pd.DataFrame) + assert actual_df.columns == ['total_orders'] + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series must be a multiple of `7` ... + + whereas the time series with the actual order counts always holds `1` value. 
+ """ + result = order_history.make_horizontal_time_series( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + assert len(training_df) == 7 * train_horizon + assert len(actual_df) == 1 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_frequency_is_number_of_weekdays( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The `frequency` must be `7`.""" + result = order_history.make_horizontal_time_series( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + _, frequency, _ = result # noqa:WPS434 + + assert frequency == 7 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_no_long_enough_history1( + self, order_history, good_pixel_id, bad_predict_at, train_horizon, + ): + """If the `predict_at` day is too early in the `START`-`END` horizon ... + + ... the history of order totals is not long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_horizontal_time_series( + pixel_id=good_pixel_id, + predict_at=bad_predict_at, + train_horizon=train_horizon, + ) + + def test_no_long_enough_history2( + self, order_history, good_pixel_id, good_predict_at, + ): + """If the `train_horizon` is longer than the `START`-`END` horizon ... + + ... the history of order totals can never be long enough. 
+ """ + with pytest.raises(RuntimeError): + order_history.make_horizontal_time_series( + pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, + ) From 5330ceb7710c4aa333d79532dee7c3e1bc6a5c4c Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 9 Jan 2021 17:00:10 +0100 Subject: [PATCH 34/72] Add `OrderHistory.make_vertical_time_series()` - the method slices out a vertical time series from the data within an `OrderHistory` object --- src/urban_meal_delivery/forecasts/timify.py | 82 +++++++++++++ .../forecasts/timify/test_make_time_series.py | 113 +++++++++++++++++- 2 files changed, 194 insertions(+), 1 deletion(-) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index d9e43fc..078c972 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -212,3 +212,85 @@ class OrderHistory: actual_df = intra_pixel.loc[[predict_at]] return training_df, frequency, actual_df + + def make_vertical_time_series( # noqa:WPS210 + self, pixel_id: int, predict_day: dt.date, train_horizon: int, + ) -> Tuple[pd.DataFrame, int, pd.DataFrame]: + """Slice a vertical time series out of the `.totals`. + + Create a time series covering `train_horizon` weeks that can be used + for training a forecasting model to predict the demand on the `predict_day`. 
+ + For explanation of the terms "horizontal", "vertical", and "real-time" + in the context of time series, see section 3.2 in the following paper: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + + Args: + pixel_id: pixel in which the time series is aggregated + predict_day: day for which predictions are made + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + training time series, frequency, actual order counts on `predict_day` + + Raises: + LookupError: `pixel_id` is not in the `grid` + RuntimeError: desired time series slice is not entirely in `.totals` + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + if predict_day >= config.CUTOFF_DAY.date(): # pragma: no cover + raise RuntimeError('Internal error: cannot predict beyond the given data') + + # The first and last training day are just before the `predict_day` + # and span exactly `train_horizon` weeks covering all times of the day. + first_train_day = predict_day - dt.timedelta(weeks=train_horizon) + first_start_at = dt.datetime( + first_train_day.year, + first_train_day.month, + first_train_day.day, + config.SERVICE_START, + 0, + ) + last_train_day = predict_day - dt.timedelta(days=1) + last_start_at = dt.datetime( + last_train_day.year, + last_train_day.month, + last_train_day.day, + config.SERVICE_END, # subtract one `time_step` below + 0, + ) - dt.timedelta(minutes=self._time_step) + + # The frequency is the number of weekdays times the number of daily time steps. + frequency = 7 * self._n_daily_time_steps + + # Take all the counts between `first_train_day` and `last_train_day`. 
+ training_df = intra_pixel.loc[ + first_start_at:last_start_at # type: ignore + ] + if len(training_df) != frequency * train_horizon: + raise RuntimeError('Not enough historic data for `predict_day`') + + first_prediction_at = dt.datetime( + predict_day.year, + predict_day.month, + predict_day.day, + config.SERVICE_START, + 0, + ) + last_prediction_at = dt.datetime( + predict_day.year, + predict_day.month, + predict_day.day, + config.SERVICE_END, # subtract one `time_step` below + 0, + ) - dt.timedelta(minutes=self._time_step) + + actuals_df = intra_pixel.loc[ + first_prediction_at:last_prediction_at # type: ignore + ] + + return training_df, frequency, actuals_df diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index bb7f682..521cd08 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -117,7 +117,7 @@ class TestMakeHorizontalTimeSeries: ): """The length of a training time series must be a multiple of `7` ... - whereas the time series with the actual order counts always holds `1` value. + ... whereas the time series with the actual order counts has only `1` value. 
""" result = order_history.make_horizontal_time_series( pixel_id=good_pixel_id, @@ -171,3 +171,114 @@ class TestMakeHorizontalTimeSeries: order_history.make_horizontal_time_series( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, ) + + +class TestMakeVerticalTimeSeries: + """Test the `OrderHistory.make_vertical_time_series()` method.""" + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): + """A `pixel_id` that is not in the `grid`.""" + with pytest.raises(LookupError): + order_history.make_vertical_time_series( + pixel_id=999_999, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_are_dataframes( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The time series come in a one-column `pd.DataFrame`.""" + result = order_history.make_vertical_time_series( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + assert isinstance(training_df, pd.DataFrame) + assert training_df.columns == ['total_orders'] + assert isinstance(actual_df, pd.DataFrame) + assert actual_df.columns == ['total_orders'] + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series is the product of the ... + + ... weekly time steps (i.e., product of `7` and the number of daily time steps) + and the `train_horizon` in weeks. + + The time series with the actual order counts always holds one observation + per time step of a day. 
+ """ + result = order_history.make_vertical_time_series( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert len(training_df) == 7 * n_daily_time_steps * train_horizon + assert len(actual_df) == n_daily_time_steps + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_frequency_is_number_number_of_weekly_time_steps( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The `frequency` is the number of weekly time steps.""" + result = order_history.make_vertical_time_series( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=train_horizon, + ) + + _, frequency, _ = result # noqa:WPS434 + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert frequency == 7 * n_daily_time_steps + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_no_long_enough_history1( + self, order_history, good_pixel_id, bad_predict_at, train_horizon, + ): + """If the `predict_at` day is too early in the `START`-`END` horizon ... + + ... the history of order totals is not long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_vertical_time_series( + pixel_id=good_pixel_id, + predict_day=bad_predict_at.date(), + train_horizon=train_horizon, + ) + + def test_no_long_enough_history2( + self, order_history, good_pixel_id, good_predict_at, + ): + """If the `train_horizon` is longer than the `START`-`END` horizon ... + + ... the history of order totals can never be long enough. 
+ """ + with pytest.raises(RuntimeError): + order_history.make_vertical_time_series( + pixel_id=good_pixel_id, + predict_day=good_predict_at.date(), + train_horizon=999, + ) From 100fac659ad663df11af7d12ad649fe56ebc6652 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 9 Jan 2021 17:30:00 +0100 Subject: [PATCH 35/72] Add `OrderHistory.make_real_time_time_series()` - the method slices out a real-time time series from the data within an `OrderHistory` object --- src/urban_meal_delivery/forecasts/timify.py | 90 ++++++++++ tests/config.py | 3 + .../forecasts/timify/test_make_time_series.py | 167 +++++++++++++++++- 3 files changed, 257 insertions(+), 3 deletions(-) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 078c972..6906d24 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -294,3 +294,93 @@ class OrderHistory: ] return training_df, frequency, actuals_df + + def make_real_time_time_series( # noqa:WPS210 + self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, + ) -> Tuple[pd.DataFrame, int, int]: + """Slice a vertical real-time time series out of the `.totals`. + + Create a time series covering `train_horizon` weeks that can be used + for training a forecasting model to predict the demand at `predict_at`. 
+ + For explanation of the terms "horizontal", "vertical", and "real-time" + in the context of time series, see section 3.2 in the following paper: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + + Args: + pixel_id: pixel in which the time series is aggregated + predict_at: time step (i.e., "start_at") for which a prediction is made + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + training time series, frequency, actual order count at `predict_at` + + Raises: + LookupError: `pixel_id` is not in the `grid` + RuntimeError: desired time series slice is not entirely in `.totals` + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + if predict_at >= config.CUTOFF_DAY: # pragma: no cover + raise RuntimeError('Internal error: cannot predict beyond the given data') + + # The first and last training day are just before the `predict_at` day + # and span exactly `train_horizon` weeks covering all times of the day, + # including times on the `predict_at` day that are earlier than `predict_at`. + first_train_day = predict_at.date() - dt.timedelta(weeks=train_horizon) + first_start_at = dt.datetime( + first_train_day.year, + first_train_day.month, + first_train_day.day, + config.SERVICE_START, + 0, + ) + # Predicting the first time step on the `predict_at` day is a corner case. + # Then, the previous day is indeed the `last_train_day`. Predicting any + # other time step implies that the `predict_at` day is the `last_train_day`. + # `last_train_time` is the last "start_at" before the one being predicted. 
+ if predict_at.hour == config.SERVICE_START: + last_train_day = predict_at.date() - dt.timedelta(days=1) + last_train_time = dt.time(config.SERVICE_END, 0) + else: + last_train_day = predict_at.date() + last_train_time = predict_at.time() + last_start_at = dt.datetime( + last_train_day.year, + last_train_day.month, + last_train_day.day, + last_train_time.hour, + last_train_time.minute, + ) - dt.timedelta(minutes=self._time_step) + + # The frequency is the number of weekdays times the number of daily time steps. + frequency = 7 * self._n_daily_time_steps + + # Take all the counts between `first_train_day` and `last_train_day`, + # including the ones on the `predict_at` day prior to `predict_at`. + training_df = intra_pixel.loc[ + first_start_at:last_start_at # type: ignore + ] + n_time_steps_on_predict_day = ( + ( + predict_at + - dt.datetime( + predict_at.year, + predict_at.month, + predict_at.day, + config.SERVICE_START, + 0, + ) + ).seconds + // 60 # -> minutes + // self._time_step + ) + if len(training_df) != frequency * train_horizon + n_time_steps_on_predict_day: + raise RuntimeError('Not enough historic data for `predict_day`') + + actual_df = intra_pixel.loc[[predict_at]] + + return training_df, frequency, actual_df diff --git a/tests/config.py b/tests/config.py index c2e3175..5c4c83c 100644 --- a/tests/config.py +++ b/tests/config.py @@ -8,6 +8,9 @@ from urban_meal_delivery import config # The day on which most test cases take place. YEAR, MONTH, DAY = 2016, 7, 1 +# The hour when most test cases take place. +NOON = 12 + # `START` and `END` constitute a 15-day time span. # That implies a maximum `train_horizon` of `2` as that needs full 7-day weeks. 
START = datetime.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index 521cd08..dc4eee9 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -1,4 +1,8 @@ -"""Test the time series related code.""" +"""Test the code generating time series with the order totals. + +Unless otherwise noted, each `time_step` is 60 minutes long implying +12 time steps per day (i.e., we use `LONG_TIME_STEP` by default). +""" # pylint:disable=no-self-use,unused-argument import datetime @@ -63,7 +67,11 @@ def good_predict_at(): or `train_horizon=2` works. """ return datetime.datetime( - test_config.END.year, test_config.END.month, test_config.END.day, 12, 0, + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + 0, ) @@ -76,7 +84,7 @@ def bad_predict_at(): """ predict_day = test_config.END - datetime.timedelta(weeks=1, days=1) return datetime.datetime( - predict_day.year, predict_day.month, predict_day.day, 12, 0, + predict_day.year, predict_day.month, predict_day.day, test_config.NOON, 0, ) @@ -282,3 +290,156 @@ class TestMakeVerticalTimeSeries: predict_day=good_predict_at.date(), train_horizon=999, ) + + +class TestMakeRealTimeTimeSeries: + """Test the `OrderHistory.make_real_time_time_series()` method.""" + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): + """A `pixel_id` that is not in the `grid`.""" + with pytest.raises(LookupError): + order_history.make_real_time_time_series( + pixel_id=999_999, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_are_dataframes( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The time series come in 
a one-column `pd.DataFrame`.""" + result = order_history.make_real_time_time_series( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + assert isinstance(training_df, pd.DataFrame) + assert training_df.columns == ['total_orders'] + assert isinstance(actual_df, pd.DataFrame) + assert actual_df.columns == ['total_orders'] + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length1( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series is the product of the ... + + ... weekly time steps (i.e., product of `7` and the number of daily time steps) + and the `train_horizon` in weeks; however, this assertion only holds if + we predict the first `time_step` of the day. + + The time series with the actual order counts always holds `1` value. + """ + predict_at = datetime.datetime( + good_predict_at.year, + good_predict_at.month, + good_predict_at.day, + config.SERVICE_START, + 0, + ) + result = order_history.make_real_time_time_series( + pixel_id=good_pixel_id, predict_at=predict_at, train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert len(training_df) == 7 * n_daily_time_steps * train_horizon + assert len(actual_df) == 1 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_time_series_have_correct_length2( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The length of a training time series is the product of the ... + + ... weekly time steps (i.e., product of `7` and the number of daily time steps) + and the `train_horizon` in weeks; however, this assertion only holds if + we predict the first `time_step` of the day. 
Predicting any other `time_step` + means that the training time series becomes longer by the number of time steps + before the one being predicted. + + The time series with the actual order counts always holds `1` value. + """ + assert good_predict_at.hour == test_config.NOON + + result = order_history.make_real_time_time_series( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + training_df, _, actual_df = result + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + n_time_steps_before = ( + 60 * (test_config.NOON - config.SERVICE_START) // test_config.LONG_TIME_STEP + ) + + assert ( + len(training_df) + == 7 * n_daily_time_steps * train_horizon + n_time_steps_before + ) + assert len(actual_df) == 1 + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_frequency_is_number_number_of_weekly_time_steps( + self, order_history, good_pixel_id, good_predict_at, train_horizon, + ): + """The `frequency` is the number of weekly time steps.""" + result = order_history.make_real_time_time_series( + pixel_id=good_pixel_id, + predict_at=good_predict_at, + train_horizon=train_horizon, + ) + + _, frequency, _ = result # noqa:WPS434 + + n_daily_time_steps = ( + 60 + * (config.SERVICE_END - config.SERVICE_START) + // test_config.LONG_TIME_STEP + ) + + assert frequency == 7 * n_daily_time_steps + + @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) + def test_no_long_enough_history1( + self, order_history, good_pixel_id, bad_predict_at, train_horizon, + ): + """If the `predict_at` day is too early in the `START`-`END` horizon ... + + ... the history of order totals is not long enough. 
+ """ + with pytest.raises(RuntimeError): + order_history.make_real_time_time_series( + pixel_id=good_pixel_id, + predict_at=bad_predict_at, + train_horizon=train_horizon, + ) + + def test_no_long_enough_history2( + self, order_history, good_pixel_id, good_predict_at, + ): + """If the `train_horizon` is longer than the `START`-`END` horizon ... + + ... the history of order totals can never be long enough. + """ + with pytest.raises(RuntimeError): + order_history.make_real_time_time_series( + pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, + ) From 9196c88ed444de9991b40758303fd448a5988fe5 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sat, 9 Jan 2021 17:47:45 +0100 Subject: [PATCH 36/72] Remove pylint from the project --- docs/conf.py | 2 +- noxfile.py | 17 +--- poetry.lock | 82 +------------------ pyproject.toml | 1 - setup.cfg | 39 --------- src/urban_meal_delivery/db/addresses.py | 3 +- src/urban_meal_delivery/db/cities.py | 4 +- src/urban_meal_delivery/db/utils/locations.py | 2 - tests/console/test_main.py | 4 - tests/db/fake_data/factories.py | 4 +- tests/db/test_addresses.py | 1 - tests/db/test_addresses_pixels.py | 1 - tests/db/test_cities.py | 1 - tests/db/test_couriers.py | 1 - tests/db/test_customer.py | 1 - tests/db/test_forecasts.py | 1 - tests/db/test_grids.py | 1 - tests/db/test_orders.py | 3 - tests/db/test_pixels.py | 1 - tests/db/test_restaurants.py | 1 - tests/db/utils/test_locations.py | 2 - .../forecasts/timify/test_aggregate_orders.py | 1 - .../forecasts/timify/test_make_time_series.py | 3 +- tests/forecasts/timify/test_order_history.py | 1 - tests/test_version.py | 4 - 25 files changed, 9 insertions(+), 172 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index 2ab87a7..40ef34b 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -5,7 +5,7 @@ import urban_meal_delivery as umd project = umd.__pkg_name__ author = umd.__author__ -copyright = f'2020, {author}' # pylint:disable=redefined-builtin +copyright = f'2020, 
{author}' version = release = umd.__version__ extensions = [ diff --git a/noxfile.py b/noxfile.py index 0567f22..617f4c6 100644 --- a/noxfile.py +++ b/noxfile.py @@ -17,7 +17,7 @@ as unified tasks to assure the quality of the source code: that are then interpreted as the paths the formatters and linters work on recursively -- "lint" (flake8, mypy, pylint): same as "format" +- "lint" (flake8, mypy): same as "format" - "test" (pytest, xdoctest): @@ -141,7 +141,7 @@ def format_(session): @nox.session(python=PYTHON) def lint(session): - """Lint source files with flake8, mypy, and pylint. + """Lint source files with flake8, and mypy. If no extra arguments are provided, all source files are linted. Otherwise, they are interpreted as paths the linters work on recursively. @@ -158,7 +158,6 @@ def lint(session): 'flake8-expression-complexity', 'flake8-pytest-style', 'mypy', - 'pylint', 'wemake-python-styleguide', ) @@ -182,18 +181,6 @@ def lint(session): else: session.log('No paths to be checked with mypy') - # Ignore errors where pylint cannot import a third-party package due its - # being run in an isolated environment. For the same reason, pylint is - # also not able to determine the correct order of imports. - # One way to fix this is to install all develop dependencies in this nox - # session, which we do not do. The whole point of static linting tools is - # to not rely on any package be importable at runtime. Instead, these - # imports are validated implicitly when the test suite is run. 
- session.run('pylint', '--version') - session.run( - 'pylint', '--disable=import-error', '--disable=wrong-import-order', *locations, - ) - @nox.session(python=PYTHON) def test(session): diff --git a/poetry.lock b/poetry.lock index b56f2af..7cf1b5a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -83,19 +83,6 @@ python-versions = ">=3.6.1" [package.extras] typed = ["typed-ast"] -[[package]] -name = "astroid" -version = "2.4.2" -description = "An abstract syntax tree for Python with inference support." -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -lazy-object-proxy = ">=1.4.0,<1.5.0" -six = ">=1.12,<2.0" -wrapt = ">=1.11,<2.0" - [[package]] name = "async-generator" version = "1.10" @@ -885,14 +872,6 @@ requests = "*" [package.extras] test = ["pytest", "requests"] -[[package]] -name = "lazy-object-proxy" -version = "1.4.3" -description = "A fast and thorough lazy object proxy." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "mako" version = "1.1.3" @@ -1312,21 +1291,6 @@ category = "main" optional = false python-versions = ">=3.5" -[[package]] -name = "pylint" -version = "2.6.0" -description = "python code static checker" -category = "dev" -optional = false -python-versions = ">=3.5.*" - -[package.dependencies] -astroid = ">=2.4.0,<=2.5" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.7" -toml = ">=0.7.1" - [[package]] name = "pyparsing" version = "2.4.7" @@ -1869,14 +1833,6 @@ pep8-naming = ">=0.9.1,<0.10.0" pygments = ">=2.4,<3.0" typing_extensions = ">=3.6,<4.0" -[[package]] -name = "wrapt" -version = "1.12.1" -description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" -optional = false -python-versions = "*" - [[package]] name = "xdoctest" version = "0.13.0" @@ -1896,12 +1852,12 @@ optional = ["pygments", "colorama"] tests = ["pytest", "pytest-cov", "codecov", "scikit-build", "cmake", "ninja", "pybind11"] [extras] -research = ["jupyterlab", "nb_black", "numpy", "pandas", "pytz"] +research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "b2671b1aecf282fa648190111c9ca79e7a40b13d0915f59a4f65e7986e737fb5" +content-hash = "5c0a4b37e73e0ed607cc2c46a9178f7b8e2a8364856a408a80f955a3c8b861a1" [metadata.files] alabaster = [ @@ -1952,10 +1908,6 @@ astpretty = [ {file = "astpretty-2.0.0-py2.py3-none-any.whl", hash = "sha256:7f27633ed885033da8b58666e7079ffff7e8e01869ec1aa66484cb5185ea3aa4"}, {file = "astpretty-2.0.0.tar.gz", hash = "sha256:e4724bfd753636ba4a84384702e9796e5356969f40af2596d846ce64addde086"}, ] -astroid = [ - {file = "astroid-2.4.2-py3-none-any.whl", hash = "sha256:bc58d83eb610252fd8de6363e39d4f1d0619c894b0ed24603b881c02e64c7386"}, - {file = "astroid-2.4.2.tar.gz", hash = "sha256:2f4078c2a41bf377eea06d71c9d2ba4eb8f6b1af2135bec27bbbb7d8f12bb703"}, -] async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, {file = "async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144"}, @@ -2282,29 +2234,6 @@ jupyterlab-server = [ {file = "jupyterlab_server-1.2.0-py3-none-any.whl", hash = "sha256:55d256077bf13e5bc9e8fbd5aac51bef82f6315111cec6b712b9a5ededbba924"}, {file = "jupyterlab_server-1.2.0.tar.gz", hash = "sha256:5431d9dde96659364b7cc877693d5d21e7b80cea7ae3959ecc2b87518e5f5d8c"}, ] -lazy-object-proxy = [ - {file = "lazy-object-proxy-1.4.3.tar.gz", hash = "sha256:f3900e8a5de27447acbf900b4750b0ddfd7ec1ea7fbaf11dfa911141bc522af0"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-macosx_10_13_x86_64.whl", hash 
= "sha256:a2238e9d1bb71a56cd710611a1614d1194dc10a175c1e08d75e1a7bcc250d442"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win32.whl", hash = "sha256:efa1909120ce98bbb3777e8b6f92237f5d5c8ea6758efea36a473e1d38f7d3e4"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27m-win_amd64.whl", hash = "sha256:4677f594e474c91da97f489fea5b7daa17b5517190899cf213697e48d3902f5a"}, - {file = "lazy_object_proxy-1.4.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:0c4b206227a8097f05c4dbdd323c50edf81f15db3b8dc064d08c62d37e1a504d"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:d945239a5639b3ff35b70a88c5f2f491913eb94871780ebfabb2568bd58afc5a"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win32.whl", hash = "sha256:9651375199045a358eb6741df3e02a651e0330be090b3bc79f6d0de31a80ec3e"}, - {file = "lazy_object_proxy-1.4.3-cp34-cp34m-win_amd64.whl", hash = "sha256:eba7011090323c1dadf18b3b689845fd96a61ba0a1dfbd7f24b921398affc357"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:48dab84ebd4831077b150572aec802f303117c8cc5c871e182447281ebf3ac50"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:ca0a928a3ddbc5725be2dd1cf895ec0a254798915fb3a36af0964a0a4149e3db"}, - {file = "lazy_object_proxy-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:194d092e6f246b906e8f70884e620e459fc54db3259e60cf69a4d66c3fda3449"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:97bb5884f6f1cdce0099f86b907aa41c970c3c672ac8b9c8352789e103cf3156"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:cb2c7c57005a6804ab66f106ceb8482da55f5314b7fcb06551db1edae4ad1531"}, - {file = "lazy_object_proxy-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:8d859b89baf8ef7f8bc6b00aa20316483d67f0b1cbf422f5b4dc56701c8f2ffb"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1be7e4c9f96948003609aa6c974ae59830a6baecc5376c25c92d7d697e684c08"}, - {file = 
"lazy_object_proxy-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d74bb8693bf9cf75ac3b47a54d716bbb1a92648d5f781fc799347cfc95952383"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:9b15f3f4c0f35727d3a0fba4b770b3c4ebbb1fa907dbcc046a1d2799f3edd142"}, - {file = "lazy_object_proxy-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9254f4358b9b541e3441b007a0ea0764b9d056afdeafc1a5569eee1cc6c1b9ea"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:a6ae12d08c0bf9909ce12385803a543bfe99b95fe01e752536a60af2b7797c62"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-win32.whl", hash = "sha256:5541cada25cd173702dbd99f8e22434105456314462326f06dba3e180f203dfd"}, - {file = "lazy_object_proxy-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:59f79fef100b09564bc2df42ea2d8d21a64fdcda64979c0fa3db7bdaabaf6239"}, -] mako = [ {file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"}, {file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"}, @@ -2557,10 +2486,6 @@ pygments = [ {file = "Pygments-2.7.3-py3-none-any.whl", hash = "sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08"}, {file = "Pygments-2.7.3.tar.gz", hash = "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716"}, ] -pylint = [ - {file = "pylint-2.6.0-py3-none-any.whl", hash = "sha256:bfe68f020f8a0fece830a22dd4d5dddb4ecc6137db04face4c3420a46a52239f"}, - {file = "pylint-2.6.0.tar.gz", hash = "sha256:bb4a908c9dadbc3aac18860550e870f58e1a02c9f2c204fdf5693d73be061210"}, -] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, @@ -2961,9 +2886,6 @@ wemake-python-styleguide = [ {file = 
"wemake-python-styleguide-0.14.1.tar.gz", hash = "sha256:e13dc580fa56b7b548de8da170bccb8ddff2d4ab026ca987db8a9893bf8a7b5b"}, {file = "wemake_python_styleguide-0.14.1-py3-none-any.whl", hash = "sha256:73a501e0547275287a2b926515c000cc25026a8bceb9dcc1bf73ef85a223a3c6"}, ] -wrapt = [ - {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, -] xdoctest = [ {file = "xdoctest-0.13.0-py2.py3-none-any.whl", hash = "sha256:de861fd5230a46bd26c054b4981169dd963f813768cb62b62e104e4d2644ac94"}, {file = "xdoctest-0.13.0.tar.gz", hash = "sha256:4f113a430076561a9d7f31af65b5d5acda62ee06b05cb6894264cb9efb8196ac"}, diff --git a/pyproject.toml b/pyproject.toml index 573d282..3545e95 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -68,7 +68,6 @@ flake8-black = "^0.2.1" flake8-expression-complexity = "^0.0.8" flake8-pytest-style = "^1.2.2" mypy = "^0.782" -pylint = "^2.5.3" wemake-python-styleguide = "^0.14.1" # flake8 plug-in # Test Suite diff --git a/setup.cfg b/setup.cfg index 76746f3..a7668a8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -261,45 +261,6 @@ ignore_missing_imports = true ignore_missing_imports = true -[pylint.FORMAT] -# Comply with black's style. -max-line-length = 88 - -[pylint.MESSAGES CONTROL] -disable = - # We use TODO's to indicate locations in the source base - # that must be worked on in the near future. - fixme, - # Too many false positives and cannot be disabled within a file. - # Source: https://github.com/PyCQA/pylint/issues/214 - duplicate-code, - # Comply with black's style. 
- bad-continuation, bad-whitespace, - # ===================== - # flake8 de-duplication - # Source: https://pylint.pycqa.org/en/latest/faq.html#i-am-using-another-popular-linter-alongside-pylint-which-messages-should-i-disable-to-avoid-duplicates - # ===================== - # mccabe - too-many-branches, - # pep8-naming - bad-classmethod-argument, bad-mcs-classmethod-argument, - invalid-name, no-self-argument, - # pycodestyle - bad-indentation, bare-except, line-too-long, missing-final-newline, - multiple-statements, trailing-whitespace, unnecessary-semicolon, unneeded-not, - # pydocstyle - missing-class-docstring, missing-function-docstring, missing-module-docstring, - # pyflakes - undefined-variable, unused-import, unused-variable, - # wemake-python-styleguide - redefined-outer-name, - # A lot of classes in the test suite and `Meta` classes cause complaints. - too-few-public-methods, - -[pylint.REPORTS] -score = no - - [tool:pytest] addopts = --strict-markers diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index d97b09c..d86518d 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -68,8 +68,7 @@ class Address(meta.Base): pixels = orm.relationship('AddressPixelAssociation', back_populates='address') # We do not implement a `.__init__()` method and leave that to SQLAlchemy. - # Instead, we use `hasattr()` to check for uninitialized attributes. - # grep:b1f68d24 pylint:disable=attribute-defined-outside-init + # Instead, we use `hasattr()` to check for uninitialized attributes. 
grep:b1f68d24 def __repr__(self) -> str: """Non-literal text representation.""" diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index dea5f5f..b2f0cc4 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,6 +1,5 @@ """Provide the ORM's `City` model.""" - import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql @@ -35,8 +34,7 @@ class City(meta.Base): grids = orm.relationship('Grid', back_populates='city') # We do not implement a `.__init__()` method and leave that to SQLAlchemy. - # Instead, we use `hasattr()` to check for uninitialized attributes. - # grep:d334120e pylint:disable=attribute-defined-outside-init + # Instead, we use `hasattr()` to check for uninitialized attributes. grep:d334120e def __repr__(self) -> str: """Non-literal text representation.""" diff --git a/src/urban_meal_delivery/db/utils/locations.py b/src/urban_meal_delivery/db/utils/locations.py index f789bc3..741edfe 100644 --- a/src/urban_meal_delivery/db/utils/locations.py +++ b/src/urban_meal_delivery/db/utils/locations.py @@ -24,8 +24,6 @@ class Location: https://en.wikipedia.org/wiki/Universal_Transverse_Mercator_coordinate_system """ - # pylint:disable=too-many-instance-attributes - def __init__(self, latitude: float, longitude: float) -> None: """Create a location from a WGS84-conforming `latitude`-`longitude` pair.""" # The SQLAlchemy columns come as `Decimal`s due to the `DOUBLE_PRECISION`. diff --git a/tests/console/test_main.py b/tests/console/test_main.py index 5a35dab..8832239 100644 --- a/tests/console/test_main.py +++ b/tests/console/test_main.py @@ -18,8 +18,6 @@ class TestShowVersion: " (development)" to the output. """ - # pylint:disable=no-self-use - @pytest.fixture def ctx(self) -> click.Context: """Context around the `main.entry_point` Command.""" @@ -65,8 +63,6 @@ class TestCLIWithoutCommand: Therefore, they are not considered for coverage reporting. 
""" - # pylint:disable=no-self-use - @pytest.mark.no_cover def test_no_options(self, cli): """Exit with 0 status code and no output if run without options.""" diff --git a/tests/db/fake_data/factories.py b/tests/db/fake_data/factories.py index 50ee950..61c27e9 100644 --- a/tests/db/fake_data/factories.py +++ b/tests/db/fake_data/factories.py @@ -121,8 +121,6 @@ class AdHocOrderFactory(alchemy.SQLAlchemyModelFactory): flags to adapt how the `Order` is created. """ - # pylint:disable=too-many-instance-attributes - class Meta: model = db.Order sqlalchemy_get_or_create = ('id',) @@ -282,7 +280,7 @@ class AdHocOrderFactory(alchemy.SQLAlchemyModelFactory): ) @factory.post_generation - def post( # noqa:C901,WPS23 pylint:disable=unused-argument + def post( # noqa:C901,WPS231 obj, create, extracted, **kwargs, # noqa:B902,N805 ): """Discard timestamps that occur after cancellation.""" diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index 2d45343..0b14ccc 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -1,5 +1,4 @@ """Test the ORM's `Address` model.""" -# pylint:disable=no-self-use,protected-access import pytest import sqlalchemy as sqla diff --git a/tests/db/test_addresses_pixels.py b/tests/db/test_addresses_pixels.py index 40e41f8..d5beadd 100644 --- a/tests/db/test_addresses_pixels.py +++ b/tests/db/test_addresses_pixels.py @@ -8,7 +8,6 @@ Implementation notes: code associated with it. Therefore, we test it here as non-e2e tests and do not measure its coverage. 
""" -# pylint:disable=no-self-use import pytest import sqlalchemy as sqla diff --git a/tests/db/test_cities.py b/tests/db/test_cities.py index 73a0cdb..840b2dd 100644 --- a/tests/db/test_cities.py +++ b/tests/db/test_cities.py @@ -1,5 +1,4 @@ """Test the ORM's `City` model.""" -# pylint:disable=no-self-use import pytest diff --git a/tests/db/test_couriers.py b/tests/db/test_couriers.py index 3db047e..5376cae 100644 --- a/tests/db/test_couriers.py +++ b/tests/db/test_couriers.py @@ -1,5 +1,4 @@ """Test the ORM's `Courier` model.""" -# pylint:disable=no-self-use import pytest from sqlalchemy import exc as sa_exc diff --git a/tests/db/test_customer.py b/tests/db/test_customer.py index 5c74f68..f9ef15a 100644 --- a/tests/db/test_customer.py +++ b/tests/db/test_customer.py @@ -1,5 +1,4 @@ """Test the ORM's `Customer` model.""" -# pylint:disable=no-self-use import pytest diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py index fa27854..23765db 100644 --- a/tests/db/test_forecasts.py +++ b/tests/db/test_forecasts.py @@ -1,5 +1,4 @@ """Test the ORM's `Forecast` model.""" -# pylint:disable=no-self-use import datetime diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py index 4dd5beb..3d8858d 100644 --- a/tests/db/test_grids.py +++ b/tests/db/test_grids.py @@ -1,5 +1,4 @@ """Test the ORM's `Grid` model.""" -# pylint:disable=no-self-use import pytest import sqlalchemy as sqla diff --git a/tests/db/test_orders.py b/tests/db/test_orders.py index 37eb96a..653038a 100644 --- a/tests/db/test_orders.py +++ b/tests/db/test_orders.py @@ -1,5 +1,4 @@ """Test the ORM's `Order` model.""" -# pylint:disable=no-self-use,protected-access import datetime import random @@ -49,8 +48,6 @@ class TestProperties: and provided by the `make_order` fixture. 
""" - # pylint:disable=no-self-use,too-many-public-methods - def test_is_ad_hoc(self, order): """Test `Order.scheduled` property.""" assert order.ad_hoc is True diff --git a/tests/db/test_pixels.py b/tests/db/test_pixels.py index 878d6cc..3ebfb26 100644 --- a/tests/db/test_pixels.py +++ b/tests/db/test_pixels.py @@ -1,5 +1,4 @@ """Test the ORM's `Pixel` model.""" -# pylint:disable=no-self-use import pytest import sqlalchemy as sqla diff --git a/tests/db/test_restaurants.py b/tests/db/test_restaurants.py index 536d6f0..a641bce 100644 --- a/tests/db/test_restaurants.py +++ b/tests/db/test_restaurants.py @@ -1,5 +1,4 @@ """Test the ORM's `Restaurant` model.""" -# pylint:disable=no-self-use import pytest import sqlalchemy as sqla diff --git a/tests/db/utils/test_locations.py b/tests/db/utils/test_locations.py index fff43d2..51750e2 100644 --- a/tests/db/utils/test_locations.py +++ b/tests/db/utils/test_locations.py @@ -1,5 +1,4 @@ """Test the `Location` class.""" -# pylint:disable=no-self-use import pytest @@ -97,7 +96,6 @@ class TestSpecialMethods: def test_compare_utm_coordinates_to_themselves(self, location): """Test `Location.__eq__()`.""" - # pylint:disable=comparison-with-itself result = location == location # noqa:WPS312 assert result is True diff --git a/tests/forecasts/timify/test_aggregate_orders.py b/tests/forecasts/timify/test_aggregate_orders.py index 51a6dfa..b3c4206 100644 --- a/tests/forecasts/timify/test_aggregate_orders.py +++ b/tests/forecasts/timify/test_aggregate_orders.py @@ -1,5 +1,4 @@ """Test the `OrderHistory.aggregate_orders()` method.""" -# pylint:disable=no-self-use,unused-argument import datetime diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index dc4eee9..98e2c34 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -3,7 +3,6 @@ Unless otherwise noted, each `time_step` is 60 minutes long implying 12 time steps 
per day (i.e., we use `LONG_TIME_STEP` by default). """ -# pylint:disable=no-self-use,unused-argument import datetime @@ -54,7 +53,7 @@ def order_totals(good_pixel_id): def order_history(order_totals, grid): """An `OrderHistory` object that does not need the database.""" oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) - oh._data = order_totals # pylint:disable=protected-access + oh._data = order_totals return oh diff --git a/tests/forecasts/timify/test_order_history.py b/tests/forecasts/timify/test_order_history.py index 9d69d70..eb6bbcc 100644 --- a/tests/forecasts/timify/test_order_history.py +++ b/tests/forecasts/timify/test_order_history.py @@ -1,5 +1,4 @@ """Test the basic functionalities in the `OrderHistory` class.""" -# pylint:disable=no-self-use import pytest diff --git a/tests/test_version.py b/tests/test_version.py index 474b7b1..4ee70b3 100644 --- a/tests/test_version.py +++ b/tests/test_version.py @@ -20,8 +20,6 @@ import urban_meal_delivery class TestPEP404Compliance: """Packaged version identifier is PEP440 compliant.""" - # pylint:disable=no-self-use - @pytest.fixture def parsed_version(self) -> str: """The packaged version.""" @@ -47,8 +45,6 @@ class TestPEP404Compliance: class TestSemanticVersioning: """Packaged version follows a strict subset of semantic versioning.""" - # pylint:disable=no-self-use - version_pattern = re.compile( r'^(0|([1-9]\d*))\.(0|([1-9]\d*))\.(0|([1-9]\d*))(\.dev(0|([1-9]\d*)))?$', ) From 84876047c12d4440e66f44f3e35b4d63f70df43b Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 10 Jan 2021 16:11:40 +0100 Subject: [PATCH 37/72] Return the resulting time series as `pd.Series` --- src/urban_meal_delivery/forecasts/timify.py | 42 ++++++------ .../forecasts/timify/test_make_time_series.py | 66 +++++++++---------- 2 files changed, 56 insertions(+), 52 deletions(-) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 6906d24..0220a58 100644 
--- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -149,7 +149,7 @@ class OrderHistory: def make_horizontal_time_series( # noqa:WPS210 self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, - ) -> Tuple[pd.DataFrame, int, int]: + ) -> Tuple[pd.Series, int, pd.Series]: """Slice a horizontal time series out of the `.totals`. Create a time series covering `train_horizon` weeks that can be used @@ -203,19 +203,20 @@ class OrderHistory: frequency = 7 # Take only the counts at the `predict_at` time. - training_df = intra_pixel.loc[ - first_start_at : last_start_at : self._n_daily_time_steps # type: ignore + training_ts = intra_pixel.loc[ + first_start_at : last_start_at : self._n_daily_time_steps, # type: ignore + 'total_orders', ] - if len(training_df) != frequency * train_horizon: + if len(training_ts) != frequency * train_horizon: raise RuntimeError('Not enough historic data for `predict_at`') - actual_df = intra_pixel.loc[[predict_at]] + actuals_ts = intra_pixel.loc[[predict_at], 'total_orders'] - return training_df, frequency, actual_df + return training_ts, frequency, actuals_ts def make_vertical_time_series( # noqa:WPS210 self, pixel_id: int, predict_day: dt.date, train_horizon: int, - ) -> Tuple[pd.DataFrame, int, pd.DataFrame]: + ) -> Tuple[pd.Series, int, pd.Series]: """Slice a vertical time series out of the `.totals`. Create a time series covering `train_horizon` weeks that can be used @@ -268,10 +269,11 @@ class OrderHistory: frequency = 7 * self._n_daily_time_steps # Take all the counts between `first_train_day` and `last_train_day`. 
- training_df = intra_pixel.loc[ - first_start_at:last_start_at # type: ignore + training_ts = intra_pixel.loc[ + first_start_at:last_start_at, # type: ignore + 'total_orders', ] - if len(training_df) != frequency * train_horizon: + if len(training_ts) != frequency * train_horizon: raise RuntimeError('Not enough historic data for `predict_day`') first_prediction_at = dt.datetime( @@ -289,15 +291,16 @@ class OrderHistory: 0, ) - dt.timedelta(minutes=self._time_step) - actuals_df = intra_pixel.loc[ - first_prediction_at:last_prediction_at # type: ignore + actuals_ts = intra_pixel.loc[ + first_prediction_at:last_prediction_at, # type: ignore + 'total_orders', ] - return training_df, frequency, actuals_df + return training_ts, frequency, actuals_ts def make_real_time_time_series( # noqa:WPS210 self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, - ) -> Tuple[pd.DataFrame, int, int]: + ) -> Tuple[pd.Series, int, pd.Series]: """Slice a vertical real-time time series out of the `.totals`. Create a time series covering `train_horizon` weeks that can be used @@ -361,8 +364,9 @@ class OrderHistory: # Take all the counts between `first_train_day` and `last_train_day`, # including the ones on the `predict_at` day prior to `predict_at`. 
- training_df = intra_pixel.loc[ - first_start_at:last_start_at # type: ignore + training_ts = intra_pixel.loc[ + first_start_at:last_start_at, # type: ignore + 'total_orders', ] n_time_steps_on_predict_day = ( ( @@ -378,9 +382,9 @@ class OrderHistory: // 60 # -> minutes // self._time_step ) - if len(training_df) != frequency * train_horizon + n_time_steps_on_predict_day: + if len(training_ts) != frequency * train_horizon + n_time_steps_on_predict_day: raise RuntimeError('Not enough historic data for `predict_day`') - actual_df = intra_pixel.loc[[predict_at]] + actuals_ts = intra_pixel.loc[[predict_at], 'total_orders'] - return training_df, frequency, actual_df + return training_ts, frequency, actuals_ts diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index 98e2c34..4dc187d 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -101,22 +101,22 @@ class TestMakeHorizontalTimeSeries: ) @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) - def test_time_series_are_dataframes( + def test_time_series_are_series( self, order_history, good_pixel_id, good_predict_at, train_horizon, ): - """The time series come in a one-column `pd.DataFrame`.""" + """The time series come as a `pd.Series`.""" result = order_history.make_horizontal_time_series( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, actuals_ts = result - assert isinstance(training_df, pd.DataFrame) - assert training_df.columns == ['total_orders'] - assert isinstance(actual_df, pd.DataFrame) - assert actual_df.columns == ['total_orders'] + assert isinstance(training_ts, pd.Series) + assert training_ts.name == 'total_orders' + assert isinstance(actuals_ts, pd.Series) + assert actuals_ts.name == 'total_orders' @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def 
test_time_series_have_correct_length( @@ -132,10 +132,10 @@ class TestMakeHorizontalTimeSeries: train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, actuals_ts = result - assert len(training_df) == 7 * train_horizon - assert len(actual_df) == 1 + assert len(training_ts) == 7 * train_horizon + assert len(actuals_ts) == 1 @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_frequency_is_number_of_weekdays( @@ -194,22 +194,22 @@ class TestMakeVerticalTimeSeries: ) @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) - def test_time_series_are_dataframes( + def test_time_series_are_series( self, order_history, good_pixel_id, good_predict_at, train_horizon, ): - """The time series come in a one-column `pd.DataFrame`.""" + """The time series come as `pd.Series`.""" result = order_history.make_vertical_time_series( pixel_id=good_pixel_id, predict_day=good_predict_at.date(), train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, actuals_ts = result - assert isinstance(training_df, pd.DataFrame) - assert training_df.columns == ['total_orders'] - assert isinstance(actual_df, pd.DataFrame) - assert actual_df.columns == ['total_orders'] + assert isinstance(training_ts, pd.Series) + assert training_ts.name == 'total_orders' + assert isinstance(actuals_ts, pd.Series) + assert actuals_ts.name == 'total_orders' @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_time_series_have_correct_length( @@ -229,7 +229,7 @@ class TestMakeVerticalTimeSeries: train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, actuals_ts = result n_daily_time_steps = ( 60 @@ -237,8 +237,8 @@ class TestMakeVerticalTimeSeries: // test_config.LONG_TIME_STEP ) - assert len(training_df) == 7 * n_daily_time_steps * train_horizon - assert len(actual_df) == n_daily_time_steps + assert len(training_ts) == 7 * n_daily_time_steps * train_horizon + assert 
len(actuals_ts) == n_daily_time_steps @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_frequency_is_number_number_of_weekly_time_steps( @@ -305,22 +305,22 @@ class TestMakeRealTimeTimeSeries: ) @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) - def test_time_series_are_dataframes( + def test_time_series_are_series( self, order_history, good_pixel_id, good_predict_at, train_horizon, ): - """The time series come in a one-column `pd.DataFrame`.""" + """The time series come as `pd.Series`.""" result = order_history.make_real_time_time_series( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, actuals_ts = result - assert isinstance(training_df, pd.DataFrame) - assert training_df.columns == ['total_orders'] - assert isinstance(actual_df, pd.DataFrame) - assert actual_df.columns == ['total_orders'] + assert isinstance(training_ts, pd.Series) + assert training_ts.name == 'total_orders' + assert isinstance(actuals_ts, pd.Series) + assert actuals_ts.name == 'total_orders' @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_time_series_have_correct_length1( @@ -345,7 +345,7 @@ class TestMakeRealTimeTimeSeries: pixel_id=good_pixel_id, predict_at=predict_at, train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, actuals_ts = result n_daily_time_steps = ( 60 @@ -353,8 +353,8 @@ class TestMakeRealTimeTimeSeries: // test_config.LONG_TIME_STEP ) - assert len(training_df) == 7 * n_daily_time_steps * train_horizon - assert len(actual_df) == 1 + assert len(training_ts) == 7 * n_daily_time_steps * train_horizon + assert len(actuals_ts) == 1 @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_time_series_have_correct_length2( @@ -378,7 +378,7 @@ class TestMakeRealTimeTimeSeries: train_horizon=train_horizon, ) - training_df, _, actual_df = result + training_ts, _, 
actuals_ts = result n_daily_time_steps = ( 60 @@ -390,10 +390,10 @@ class TestMakeRealTimeTimeSeries: ) assert ( - len(training_df) + len(training_ts) == 7 * n_daily_time_steps * train_horizon + n_time_steps_before ) - assert len(actual_df) == 1 + assert len(actuals_ts) == 1 @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_frequency_is_number_number_of_weekly_time_steps( From b0f2fdde101703cbb77a6476885b4c68055d731e Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 11 Jan 2021 12:24:15 +0100 Subject: [PATCH 38/72] Add rpy2 to the dependencies - add a Jupyter notebook that allows to install all project-external dependencies regarding R and R packages - adjust the GitHub Action workflow to also install R and the R packages used within the project - add a `init_r` module that initializes all R packages globally once the `urban_meal_delivery` package is imported --- .github/workflows/tests.yml | 24 +- noxfile.py | 135 +- poetry.lock | 54 +- pyproject.toml | 1 + research/r_dependencies.ipynb | 1868 ++++++++++++++++++++++ setup.cfg | 7 +- src/urban_meal_delivery/configuration.py | 8 + src/urban_meal_delivery/init_r.py | 28 + tests/test_config.py | 60 + tests/test_init_r.py | 19 + 10 files changed, 2152 insertions(+), 52 deletions(-) create mode 100644 research/r_dependencies.ipynb create mode 100644 src/urban_meal_delivery/init_r.py create mode 100644 tests/test_init_r.py diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 0724c09..bdf77e9 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -1,7 +1,8 @@ name: CI on: push jobs: - tests: + fast-tests: + name: fast (without R) runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 @@ -10,5 +11,22 @@ jobs: python-version: 3.8 architecture: x64 - run: pip install nox==2020.5.24 - - run: pip install poetry==1.0.10 - - run: nox + - run: pip install poetry==1.1.4 + - run: nox -s format lint ci-tests-fast safety docs + slow-tests: + name: 
slow (with R) + runs-on: ubuntu-latest + env: + R_LIBS: .r_libs + steps: + - uses: actions/checkout@v2 + - uses: actions/setup-python@v1 + with: + python-version: 3.8 + architecture: x64 + - run: mkdir .r_libs + - run: sudo apt-get install r-base r-base-dev libcurl4-openssl-dev libxml2-dev patchelf + - run: R -e "install.packages('forecast')" + - run: pip install nox==2020.5.24 + - run: pip install poetry==1.1.4 + - run: nox -s ci-tests-slow diff --git a/noxfile.py b/noxfile.py index 617f4c6..474bb65 100644 --- a/noxfile.py +++ b/noxfile.py @@ -25,26 +25,6 @@ as unified tasks to assure the quality of the source code: + accepts extra arguments, e.g., `poetry run nox -s test -- --no-cov`, that are passed on to `pytest` and `xdoctest` with no changes => may be paths or options - - -GitHub Actions implements the following CI workflow: - -- "format", "lint", and "test" as above - -- "safety": check if dependencies contain known security vulnerabilites - -- "docs": build the documentation with sphinx - - -The pre-commit framework invokes the following tasks: - -- before any commit: - - + "format" and "lint" as above - + "fix-branch-references": replace branch references with the current one - -- before merges: run the entire "test-suite" independent of the file changes - """ import contextlib @@ -92,7 +72,7 @@ nox.options.envdir = '.cache/nox' # Avoid accidental successes if the environment is not set up properly. nox.options.error_on_external_run = True -# Run only CI related checks by default. +# Run only local checks by default. nox.options.sessions = ( 'format', 'lint', @@ -220,24 +200,50 @@ def test(session): 'xdoctest[optional]', ) + session.run('pytest', '--version') + + # When the CI server runs the slow tests, we only execute the R related + # test cases that require the slow installation of R and some packages. 
+ if session.env.get('_slow_ci_tests'): + session.run( + 'pytest', '--randomly-seed=4287', '-m', 'r', PYTEST_LOCATION, + ) + + # In the "ci-tests-slow" session, we do not run any test tool + # other than pytest. So, xdoctest, for example, is only run + # locally or in the "ci-tests-fast" session. + return + + # When the CI server executes pytest, no database is available. + # Therefore, the CI server does not measure coverage. + elif session.env.get('_fast_ci_tests'): + pytest_args = ( + '--randomly-seed=4287', + '-m', + 'not (db or r)', + PYTEST_LOCATION, + ) + + # When pytest is executed in the local develop environment, + # both R and a database are available. + # Therefore, we require 100% coverage. + else: + pytest_args = ( + '--cov', + '--no-cov-on-fail', + '--cov-branch', + '--cov-fail-under=100', + '--cov-report=term-missing:skip-covered', + '--randomly-seed=4287', + PYTEST_LOCATION, + ) + # Interpret extra arguments as options for pytest. - # They are "dropped" by the hack in the pre_merge() function - # if this function is run within the "pre-merge" session. + # They are "dropped" by the hack in the test_suite() function + # if this function is run within the "test-suite" session. posargs = () if session.env.get('_drop_posargs') else session.posargs - args = posargs or ( - '--cov', - '--no-cov-on-fail', - '--cov-branch', - '--cov-fail-under=100', - '--cov-report=term-missing:skip-covered', - '--randomly-seed=4287', - '-m', - 'not (db or e2e)', - PYTEST_LOCATION, - ) - session.run('pytest', '--version') - session.run('pytest', *args) + session.run('pytest', *(posargs or pytest_args)) # For xdoctest, the default arguments are different from pytest. args = posargs or [PACKAGE_IMPORT_NAME] @@ -301,6 +307,60 @@ def docs(session): print(f'Docs are available at {os.getcwd()}/{DOCS_BUILD}index.html') # noqa:WPS421 +@nox.session(name='ci-tests-fast', python=PYTHON) +def fast_ci_tests(session): + """Fast tests run by the GitHub Actions CI server. 
+ + These regards all test cases NOT involving R via `rpy2`. + + Also, coverage is not measured as full coverage can only be + achieved by running the tests in the local develop environment + that has access to a database. + """ + # Re-using an old environment is not so easy here as the "test" session + # runs `poetry install --no-dev`, which removes previously installed packages. + if session.virtualenv.reuse_existing: + raise RuntimeError( + 'The "ci-tests-fast" session must be run without the "-r" option', + ) + + # Little hack to pass arguments to the "test" session. + session.env['_fast_ci_tests'] = 'true' + + # Cannot use session.notify() to trigger the "test" session + # as that would create a new Session object without the flag + # in the env(ironment). + test(session) + + +@nox.session(name='ci-tests-slow', python=PYTHON) +def slow_ci_tests(session): + """Slow tests run by the GitHub Actions CI server. + + These regards all test cases involving R via `rpy2`. + They are slow as the CI server needs to install R and some packages + first, which takes a couple of minutes. + + Also, coverage is not measured as full coverage can only be + achieved by running the tests in the local develop environment + that has access to a database. + """ + # Re-using an old environment is not so easy here as the "test" session + # runs `poetry install --no-dev`, which removes previously installed packages. + if session.virtualenv.reuse_existing: + raise RuntimeError( + 'The "ci-tests-slow" session must be run without the "-r" option', + ) + + # Little hack to pass arguments to the "test" session. + session.env['_slow_ci_tests'] = 'true' + + # Cannot use session.notify() to trigger the "test" session + # as that would create a new Session object without the flag + # in the env(ironment). + test(session) + + @nox.session(name='test-suite', python=PYTHON) def test_suite(session): """Run the entire test suite. 
@@ -324,8 +384,7 @@ def test_suite(session): # Cannot use session.notify() to trigger the "test" session # as that would create a new Session object without the flag - # in the env(ironment). Instead, run the test() function within - # the "pre-merge" session. + # in the env(ironment). test(session) diff --git a/poetry.lock b/poetry.lock index 7cf1b5a..9c12400 100644 --- a/poetry.lock +++ b/poetry.lock @@ -95,7 +95,7 @@ python-versions = ">=3.5" name = "atomicwrites" version = "1.4.0" description = "Atomic file writes." -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -204,7 +204,7 @@ name = "cffi" version = "1.14.4" description = "Foreign Function Interface for Python calling C code." category = "main" -optional = true +optional = false python-versions = "*" [package.dependencies] @@ -660,7 +660,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "iniconfig" version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" -category = "dev" +category = "main" optional = false python-versions = "*" @@ -1080,7 +1080,7 @@ name = "numpy" version = "1.19.4" description = "NumPy is the fundamental package for array computing with Python." 
category = "main" -optional = true +optional = false python-versions = ">=3.6" [[package]] @@ -1179,7 +1179,7 @@ python-versions = "*" name = "pluggy" version = "0.13.1" description = "plugin and hook calling mechanisms for python" -category = "dev" +category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -1261,7 +1261,7 @@ name = "pycparser" version = "2.20" description = "C parser in Python" category = "main" -optional = true +optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] @@ -1311,7 +1311,7 @@ python-versions = ">=3.5" name = "pytest" version = "6.2.1" description = "pytest: simple powerful testing with Python" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -1465,6 +1465,21 @@ python-versions = "*" [package.dependencies] docutils = ">=0.11,<1.0" +[[package]] +name = "rpy2" +version = "3.4.1" +description = "Python interface to the R language (embedded R)" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +cffi = ">=1.10.0" +jinja2 = "*" +pytest = "*" +pytz = "*" +tzlocal = "*" + [[package]] name = "send2trash" version = "1.5.0" @@ -1707,7 +1722,7 @@ python-versions = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "dev" +category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" @@ -1749,6 +1764,17 @@ category = "dev" optional = false python-versions = "*" +[[package]] +name = "tzlocal" +version = "2.1" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +pytz = "*" + [[package]] name = "urllib3" version = "1.26.2" @@ -1857,7 +1883,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = 
"5c0a4b37e73e0ed607cc2c46a9178f7b8e2a8364856a408a80f955a3c8b861a1" +content-hash = "9be7d168525c85958389c8edb4686567cbb4de0e8780168b91e387e1b0581ec3" [metadata.files] alabaster = [ @@ -2643,6 +2669,12 @@ requests = [ restructuredtext-lint = [ {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, ] +rpy2 = [ + {file = "rpy2-3.4.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3f2d56bc80c2af0fe8118c53da7fd29f1809bc159a88cb10f9e2869321a21deb"}, + {file = "rpy2-3.4.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:344ac89c966b2ec91bbf9e623b7ff9c121820b5e53da2ffc75fa10f158023cd7"}, + {file = "rpy2-3.4.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ebbd7fceef359279f56b481d7ea2dd60db91928abb3726010a88fbb3362213af"}, + {file = "rpy2-3.4.1.tar.gz", hash = "sha256:644360b569656700dfe13f59878ec1cf8c116c128d4f2f0bf96144031f95d2e2"}, +] send2trash = [ {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, @@ -2863,6 +2895,10 @@ typing-extensions = [ {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, ] +tzlocal = [ + {file = "tzlocal-2.1-py2.py3-none-any.whl", hash = "sha256:e2cb6c6b5b604af38597403e9852872d7f534962ae2954c7f35efcb1ccacf4a4"}, + {file = "tzlocal-2.1.tar.gz", hash = "sha256:643c97c5294aedc737780a49d9df30889321cbe1204eac2c2ec6134035a92e44"}, +] urllib3 = [ {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, {file = "urllib3-1.26.2.tar.gz", hash = 
"sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, diff --git a/pyproject.toml b/pyproject.toml index 3545e95..2cd8747 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ jupyterlab = { version="^2.2.2", optional=true } nb_black = { version="^1.0.7", optional=true } numpy = { version="^1.19.1", optional=true } pytz = { version="^2020.1", optional=true } +rpy2 = "^3.4.1" [tool.poetry.extras] research = [ diff --git a/research/r_dependencies.ipynb b/research/r_dependencies.ipynb new file mode 100644 index 0000000..e2e1dc6 --- /dev/null +++ b/research/r_dependencies.ipynb @@ -0,0 +1,1868 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## R Dependencies\n", + "\n", + "This notebook installs R and the R packages needed to perform all the calculations throughout this project into a project-local sub-folder.\n", + "\n", + "**Note:** This notebook requires sudo privileges to install R and assumes a Ubuntu/Debian based system!\n", + "\n", + "**Important:** Once any notebook first imports anything from the `rpy2` site package, a new R process is spawned off and used to run all calls to R via `rpy2`. In order for this process to use the project-local sub-folder for the R dependencies, an environment variable `R_LIBS` must be set (with the path to this sub-folder) **before** any interaction with `rpy2`." + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import getpass\n", + "import os\n", + "import shutil" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Obtain the sudo password." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdin", + "output_type": "stream", + "text": [ + " ············\n" + ] + } + ], + "source": [ + "password = getpass.getpass()" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Install the packages *r-base r-base-dev libcurl4-openssl-dev libxml2-dev patchelf*" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "os.system(\n", + " f\"echo {password} | sudo -S apt-get -y install\"\n", + " \" r-base r-base-dev libcurl4-openssl-dev libxml2-dev patchelf\"\n", + ");" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Create a (hidden) folder in the project's root directory to install the R libraries into." + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "/home/webartifex/repos/urban-meal-delivery\n" + ] + } + ], + "source": [ + "%cd .." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [], + "source": [ + "# Sanity check to see if the R_LIBS variable is set.\n", + "assert os.getenv(\"R_LIBS\")\n", + "# Expand the R_LIBS path to an absolute path.\n", + "r_libs_path = os.path.join(os.getcwd(), os.environ[\"R_LIBS\"])" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [], + "source": [ + "# Create a fresh folder for the R dependencies.\n", + "try:\n", + " shutil.rmtree(r_libs_path)\n", + "except FileNotFoundError:\n", + " pass\n", + "os.mkdir(r_libs_path)" + ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "scrolled": true + }, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "\n", + "R version 4.0.2 (2020-06-22) -- \"Taking Off Again\"\n", + "Copyright (C) 2020 The R Foundation for Statistical Computing\n", + "Platform: x86_64-pc-linux-gnu (64-bit)\n", + "\n", + "R is free software and comes with ABSOLUTELY NO WARRANTY.\n", + "You are welcome to redistribute it under certain conditions.\n", + "Type 'license()' or 'licence()' for distribution details.\n", + "\n", + " Natural language support but running in an English locale\n", + "\n", + "R is a collaborative project with many contributors.\n", + "Type 'contributors()' for more information and\n", + "'citation()' on how to cite R or R packages in publications.\n", + "\n", + "Type 'demo()' for some demos, 'help()' for on-line help, or\n", + "'help.start()' for an HTML browser interface to help.\n", + "Type 'q()' to quit R.\n", + "\n", + "> install.packages('forecast')\n", + "Installing package into ‘/home/webartifex/repos/urban-meal-delivery/.cache/r_libs’\n", + "(as ‘lib’ is unspecified)\n", + "also installing the dependencies ‘prettyunits’, ‘rprojroot’, ‘pkgbuild’, ‘rstudioapi’, ‘diffobj’, ‘rematch2’, ‘brio’, ‘callr’, ‘desc’, ‘evaluate’, ‘jsonlite’, ‘pkgload’, ‘praise’, ‘processx’, ‘ps’, ‘waldo’, ‘assertthat’, 
‘utf8’, ‘testthat’, ‘farver’, ‘labeling’, ‘lifecycle’, ‘munsell’, ‘R6’, ‘RColorBrewer’, ‘viridisLite’, ‘cli’, ‘crayon’, ‘ellipsis’, ‘fansi’, ‘pillar’, ‘pkgconfig’, ‘vctrs’, ‘xts’, ‘TTR’, ‘curl’, ‘digest’, ‘glue’, ‘gtable’, ‘isoband’, ‘rlang’, ‘scales’, ‘tibble’, ‘withr’, ‘quadprog’, ‘quantmod’, ‘colorspace’, ‘fracdiff’, ‘ggplot2’, ‘lmtest’, ‘magrittr’, ‘Rcpp’, ‘timeDate’, ‘tseries’, ‘urca’, ‘zoo’, ‘RcppArmadillo’\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/prettyunits_1.1.1.tar.gz'\n", + "Content type 'application/x-gzip' length 10366 bytes (10 KB)\n", + "==================================================\n", + "downloaded 10 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rprojroot_2.0.2.tar.gz'\n", + "Content type 'application/x-gzip' length 59967 bytes (58 KB)\n", + "==================================================\n", + "downloaded 58 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pkgbuild_1.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 30383 bytes (29 KB)\n", + "==================================================\n", + "downloaded 29 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rstudioapi_0.13.tar.gz'\n", + "Content type 'application/x-gzip' length 110472 bytes (107 KB)\n", + "==================================================\n", + "downloaded 107 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/diffobj_0.3.3.tar.gz'\n", + "Content type 'application/x-gzip' length 472233 bytes (461 KB)\n", + "==================================================\n", + "downloaded 461 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rematch2_2.1.2.tar.gz'\n", + "Content type 'application/x-gzip' length 13366 bytes (13 KB)\n", + "==================================================\n", + "downloaded 13 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/brio_1.1.0.tar.gz'\n", + "Content type 'application/x-gzip' 
length 11610 bytes (11 KB)\n", + "==================================================\n", + "downloaded 11 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/callr_3.5.1.tar.gz'\n", + "Content type 'application/x-gzip' length 77905 bytes (76 KB)\n", + "==================================================\n", + "downloaded 76 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/desc_1.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 65612 bytes (64 KB)\n", + "==================================================\n", + "downloaded 64 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/evaluate_0.14.tar.gz'\n", + "Content type 'application/x-gzip' length 24206 bytes (23 KB)\n", + "==================================================\n", + "downloaded 23 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/jsonlite_1.7.2.tar.gz'\n", + "Content type 'application/x-gzip' length 421716 bytes (411 KB)\n", + "==================================================\n", + "downloaded 411 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pkgload_1.1.0.tar.gz'\n", + "Content type 'application/x-gzip' length 58046 bytes (56 KB)\n", + "==================================================\n", + "downloaded 56 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/praise_1.0.0.tar.gz'\n", + "Content type 'application/x-gzip' length 6100 bytes\n", + "==================================================\n", + "downloaded 6100 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/processx_3.4.5.tar.gz'\n", + "Content type 'application/x-gzip' length 135121 bytes (131 KB)\n", + "==================================================\n", + "downloaded 131 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/ps_1.5.0.tar.gz'\n", + "Content type 'application/x-gzip' length 115131 bytes (112 KB)\n", + "==================================================\n", + 
"downloaded 112 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/waldo_0.2.3.tar.gz'\n", + "Content type 'application/x-gzip' length 25726 bytes (25 KB)\n", + "==================================================\n", + "downloaded 25 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/assertthat_0.2.1.tar.gz'\n", + "Content type 'application/x-gzip' length 12742 bytes (12 KB)\n", + "==================================================\n", + "downloaded 12 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/utf8_1.1.4.tar.gz'\n", + "Content type 'application/x-gzip' length 218882 bytes (213 KB)\n", + "==================================================\n", + "downloaded 213 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/testthat_3.0.1.tar.gz'\n", + "Content type 'application/x-gzip' length 678199 bytes (662 KB)\n", + "==================================================\n", + "downloaded 662 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/farver_2.0.3.tar.gz'\n", + "Content type 'application/x-gzip' length 1279579 bytes (1.2 MB)\n", + "==================================================\n", + "downloaded 1.2 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/labeling_0.4.2.tar.gz'\n", + "Content type 'application/x-gzip' length 10156 bytes\n", + "==================================================\n", + "downloaded 10156 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/lifecycle_0.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 164455 bytes (160 KB)\n", + "==================================================\n", + "downloaded 160 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/munsell_0.5.0.tar.gz'\n", + "Content type 'application/x-gzip' length 182653 bytes (178 KB)\n", + "==================================================\n", + "downloaded 178 KB\n", + "\n", + "trying URL 
'https://cloud.r-project.org/src/contrib/R6_2.5.0.tar.gz'\n", + "Content type 'application/x-gzip' length 63361 bytes (61 KB)\n", + "==================================================\n", + "downloaded 61 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/RColorBrewer_1.1-2.tar.gz'\n", + "Content type 'application/x-gzip' length 11532 bytes (11 KB)\n", + "==================================================\n", + "downloaded 11 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/viridisLite_0.3.0.tar.gz'\n", + "Content type 'application/x-gzip' length 44019 bytes (42 KB)\n", + "==================================================\n", + "downloaded 42 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/cli_2.2.0.tar.gz'\n", + "Content type 'application/x-gzip' length 120676 bytes (117 KB)\n", + "==================================================\n", + "downloaded 117 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/crayon_1.3.4.tar.gz'\n", + "Content type 'application/x-gzip' length 658694 bytes (643 KB)\n", + "==================================================\n", + "downloaded 643 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/ellipsis_0.3.1.tar.gz'\n", + "Content type 'application/x-gzip' length 7582 bytes\n", + "==================================================\n", + "downloaded 7582 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/fansi_0.4.1.tar.gz'\n", + "Content type 'application/x-gzip' length 270906 bytes (264 KB)\n", + "==================================================\n", + "downloaded 264 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pillar_1.4.7.tar.gz'\n", + "Content type 'application/x-gzip' length 113345 bytes (110 KB)\n", + "==================================================\n", + "downloaded 110 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/pkgconfig_2.0.3.tar.gz'\n", + "Content 
type 'application/x-gzip' length 6080 bytes\n", + "==================================================\n", + "downloaded 6080 bytes\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/vctrs_0.3.6.tar.gz'\n", + "Content type 'application/x-gzip' length 778016 bytes (759 KB)\n", + "==================================================\n", + "downloaded 759 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/xts_0.12.1.tar.gz'\n", + "Content type 'application/x-gzip' length 517875 bytes (505 KB)\n", + "==================================================\n", + "downloaded 505 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/TTR_0.24.2.tar.gz'\n", + "Content type 'application/x-gzip' length 314035 bytes (306 KB)\n", + "==================================================\n", + "downloaded 306 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/curl_4.3.tar.gz'\n", + "Content type 'application/x-gzip' length 673779 bytes (657 KB)\n", + "==================================================\n", + "downloaded 657 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/digest_0.6.27.tar.gz'\n", + "Content type 'application/x-gzip' length 164373 bytes (160 KB)\n", + "==================================================\n", + "downloaded 160 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/glue_1.4.2.tar.gz'\n", + "Content type 'application/x-gzip' length 99049 bytes (96 KB)\n", + "==================================================\n", + "downloaded 96 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/gtable_0.3.0.tar.gz'\n", + "Content type 'application/x-gzip' length 368081 bytes (359 KB)\n", + "==================================================\n", + "downloaded 359 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/isoband_0.2.3.tar.gz'\n", + "Content type 'application/x-gzip' length 1902568 bytes (1.8 MB)\n", + 
"==================================================\n", + "downloaded 1.8 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/rlang_0.4.10.tar.gz'\n", + "Content type 'application/x-gzip' length 915685 bytes (894 KB)\n", + "==================================================\n", + "downloaded 894 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/scales_1.1.1.tar.gz'\n", + "Content type 'application/x-gzip' length 515201 bytes (503 KB)\n", + "==================================================\n", + "downloaded 503 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/tibble_3.0.4.tar.gz'\n", + "Content type 'application/x-gzip' length 255457 bytes (249 KB)\n", + "==================================================\n", + "downloaded 249 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/withr_2.3.0.tar.gz'\n", + "Content type 'application/x-gzip' length 91443 bytes (89 KB)\n", + "==================================================\n", + "downloaded 89 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/quadprog_1.5-8.tar.gz'\n", + "Content type 'application/x-gzip' length 36141 bytes (35 KB)\n", + "==================================================\n", + "downloaded 35 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/quantmod_0.4.18.tar.gz'\n", + "Content type 'application/x-gzip' length 154512 bytes (150 KB)\n", + "==================================================\n", + "downloaded 150 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/colorspace_2.0-0.tar.gz'\n", + "Content type 'application/x-gzip' length 2203295 bytes (2.1 MB)\n", + "==================================================\n", + "downloaded 2.1 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/fracdiff_1.5-1.tar.gz'\n", + "Content type 'application/x-gzip' length 63764 bytes (62 KB)\n", + "==================================================\n", + 
"downloaded 62 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/ggplot2_3.3.3.tar.gz'\n", + "Content type 'application/x-gzip' length 3058840 bytes (2.9 MB)\n", + "==================================================\n", + "downloaded 2.9 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/lmtest_0.9-38.tar.gz'\n", + "Content type 'application/x-gzip' length 227052 bytes (221 KB)\n", + "==================================================\n", + "downloaded 221 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/magrittr_2.0.1.tar.gz'\n", + "Content type 'application/x-gzip' length 265580 bytes (259 KB)\n", + "==================================================\n", + "downloaded 259 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/Rcpp_1.0.5.tar.gz'\n", + "Content type 'application/x-gzip' length 2950521 bytes (2.8 MB)\n", + "==================================================\n", + "downloaded 2.8 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/timeDate_3043.102.tar.gz'\n", + "Content type 'application/x-gzip' length 314656 bytes (307 KB)\n", + "==================================================\n", + "downloaded 307 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/tseries_0.10-48.tar.gz'\n", + "Content type 'application/x-gzip' length 170342 bytes (166 KB)\n", + "==================================================\n", + "downloaded 166 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/urca_1.3-0.tar.gz'\n", + "Content type 'application/x-gzip' length 682935 bytes (666 KB)\n", + "==================================================\n", + "downloaded 666 KB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/zoo_1.8-8.tar.gz'\n", + "Content type 'application/x-gzip' length 849487 bytes (829 KB)\n", + "==================================================\n", + "downloaded 829 KB\n", + "\n", + "trying URL 
'https://cloud.r-project.org/src/contrib/RcppArmadillo_0.10.1.2.2.tar.gz'\n", + "Content type 'application/x-gzip' length 1647570 bytes (1.6 MB)\n", + "==================================================\n", + "downloaded 1.6 MB\n", + "\n", + "trying URL 'https://cloud.r-project.org/src/contrib/forecast_8.13.tar.gz'\n", + "Content type 'application/x-gzip' length 796389 bytes (777 KB)\n", + "==================================================\n", + "downloaded 777 KB\n", + "\n", + "* installing *source* package ‘prettyunits’ ...\n", + "** package ‘prettyunits’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (prettyunits)\n", + "* installing *source* package ‘rprojroot’ ...\n", + "** package ‘rprojroot’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (rprojroot)\n", + "* installing *source* package ‘rstudioapi’ ...\n", + "** package ‘rstudioapi’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** 
installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (rstudioapi)\n", + "* installing *source* package ‘brio’ ...\n", + "** package ‘brio’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c brio.c -o brio.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c file_line_endings.c -o file_line_endings.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read_file.c -o read_file.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read_file_raw.c -o read_file_raw.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read_lines.c -o read_lines.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c write_file.c -o write_file.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c write_lines.c -o write_lines.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o brio.so brio.o file_line_endings.o init.o read_file.o read_file_raw.o read_lines.o write_file.o write_lines.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-brio/00new/brio/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (brio)\n", + "* installing *source* package ‘evaluate’ ...\n", + "** package ‘evaluate’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + 
"* DONE (evaluate)\n", + "* installing *source* package ‘jsonlite’ ...\n", + "** package ‘jsonlite’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c base64.c -o base64.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c collapse_array.c -o collapse_array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c collapse_object.c -o collapse_object.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c collapse_pretty.c -o collapse_pretty.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c escape_chars.c -o escape_chars.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c integer64_to_na.c -o integer64_to_na.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c is_datelist.c -o is_datelist.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c is_recordlist.c -o is_recordlist.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c is_scalarlist.c -o is_scalarlist.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c modp_numtoa.c -o modp_numtoa.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c null_to_na.c -o null_to_na.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c num_to_char.c -o num_to_char.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c parse.c -o parse.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c prettify.c -o prettify.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c push_parser.c -o push_parser.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c r-base64.c -o r-base64.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c register.c -o register.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c row_collapse.c -o row_collapse.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c transpose_list.c -o transpose_list.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c validate.c -o validate.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl.c -o yajl/yajl.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_alloc.c -o yajl/yajl_alloc.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_buf.c -o yajl/yajl_buf.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_encode.c -o yajl/yajl_encode.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_gen.c -o yajl/yajl_gen.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_lex.c -o yajl/yajl_lex.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_parser.c -o yajl/yajl_parser.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iyajl/api -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c yajl/yajl_tree.c -o yajl/yajl_tree.o\n", + "ar rcs yajl/libstatyajl.a yajl/yajl.o yajl/yajl_alloc.o yajl/yajl_buf.o yajl/yajl_encode.o yajl/yajl_gen.o yajl/yajl_lex.o yajl/yajl_parser.o yajl/yajl_tree.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o jsonlite.so base64.o collapse_array.o collapse_object.o collapse_pretty.o escape_chars.o integer64_to_na.o is_datelist.o is_recordlist.o is_scalarlist.o modp_numtoa.o null_to_na.o num_to_char.o parse.o prettify.o push_parser.o r-base64.o register.o row_collapse.o transpose_list.o validate.o -Lyajl -lstatyajl -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-jsonlite/00new/jsonlite/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "in method for ‘asJSON’ with signature ‘\"blob\"’: no definition for class “blob”\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + 
"** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (jsonlite)\n", + "* installing *source* package ‘praise’ ...\n", + "** package ‘praise’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (praise)\n", + "* installing *source* package ‘ps’ ...\n", + "** package ‘ps’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -Wall px.c -o px\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c api-common.c -o api-common.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c common.c -o common.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c extra.c -o extra.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dummy.c -o dummy.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c error-codes.c -o error-codes.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c posix.c -o posix.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c api-posix.c -o api-posix.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c linux.c -o linux.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c api-linux.c -o api-linux.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o ps.so init.o api-common.o common.o extra.o dummy.o error-codes.o posix.o api-posix.o linux.o api-linux.o -L/usr/lib/R/lib -lR\n", + "installing via 'install.libs.R' to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-ps/00new/ps\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (ps)\n", + "* installing *source* package ‘assertthat’ ...\n", + "** package ‘assertthat’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (assertthat)\n", + "* installing *source* package ‘utf8’ ...\n", + "** package ‘utf8’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c as_utf8.c -o as_utf8.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c bytes.c -o bytes.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c context.c -o context.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c render.c -o render.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c render_table.c -o render_table.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c string.c -o string.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c text.c -o text.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8_encode.c -o utf8_encode.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8_format.c -o utf8_format.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8_normalize.c -o utf8_normalize.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8_valid.c -o utf8_valid.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8_width.c -o utf8_width.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c util.c -o util.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/array.c -o utf8lite/src/array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/char.c -o utf8lite/src/char.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/encode.c -o utf8lite/src/encode.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/error.c -o utf8lite/src/error.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/escape.c -o utf8lite/src/escape.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/graph.c -o utf8lite/src/graph.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/graphscan.c -o utf8lite/src/graphscan.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/normalize.c -o utf8lite/src/normalize.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/render.c -o utf8lite/src/render.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/text.c -o utf8lite/src/text.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/textassign.c -o utf8lite/src/textassign.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/textiter.c -o utf8lite/src/textiter.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -Iutf8lite/src -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8lite/src/textmap.c -o utf8lite/src/textmap.o\n", + "ar rcs libcutf8lite.a utf8lite/src/array.o utf8lite/src/char.o utf8lite/src/encode.o utf8lite/src/error.o utf8lite/src/escape.o utf8lite/src/graph.o utf8lite/src/graphscan.o utf8lite/src/normalize.o utf8lite/src/render.o utf8lite/src/text.o utf8lite/src/textassign.o utf8lite/src/textiter.o utf8lite/src/textmap.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o utf8.so as_utf8.o bytes.o context.o init.o render.o render_table.o string.o text.o utf8_encode.o utf8_format.o utf8_normalize.o utf8_valid.o utf8_width.o util.o -L. -lcutf8lite -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-utf8/00new/utf8/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (utf8)\n", + "* installing *source* package ‘farver’ ...\n", + "** package ‘farver’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c ColorSpace.cpp -o ColorSpace.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c Comparison.cpp -o Comparison.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c Conversion.cpp -o Conversion.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c encode.cpp -o encode.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c farver.cpp -o farver.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.cpp -o init.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o farver.so ColorSpace.o Comparison.o Conversion.o encode.o farver.o init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-farver/00new/farver/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (farver)\n", + "* installing *source* package ‘labeling’ ...\n", + "** package ‘labeling’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (labeling)\n", + "* installing *source* package ‘R6’ ...\n", + "** package ‘R6’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** 
testing if installed package keeps a record of temporary installation path\n", + "* DONE (R6)\n", + "* installing *source* package ‘RColorBrewer’ ...\n", + "** package ‘RColorBrewer’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (RColorBrewer)\n", + "* installing *source* package ‘viridisLite’ ...\n", + "** package ‘viridisLite’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (viridisLite)\n", + "* installing *source* package ‘crayon’ ...\n", + "** package ‘crayon’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (crayon)\n", + "* installing *source* package ‘fansi’ 
...\n", + "** package ‘fansi’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c assumptions.c -o assumptions.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c has.c -o has.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c nchar.c -o nchar.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c read.c -o read.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rnchar.c -o rnchar.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c state.c -o state.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c strip.c -o strip.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c strsplit.c -o strsplit.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c tabs.c -o tabs.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c tohtml.c -o tohtml.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unhandled.c -o unhandled.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unique.c -o unique.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utf8.c -o utf8.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils.c -o utils.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c wrap.c -o wrap.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o fansi.so assumptions.o has.o init.o nchar.o read.o rnchar.o state.o strip.o strsplit.o tabs.o tohtml.o unhandled.o unique.o utf8.o utils.o wrap.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-fansi/00new/fansi/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (fansi)\n", + "* installing *source* package ‘pkgconfig’ ...\n", + "** package ‘pkgconfig’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (pkgconfig)\n", + "* installing *source* package ‘curl’ ...\n", + "** package ‘curl’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "Found pkg-config cflags and libs!\n", + "Using PKG_CFLAGS=-I/usr/include/x86_64-linux-gnu\n", + "Using PKG_LIBS=-lcurl\n", + "** libs\n", + "rm -f curl.so callbacks.o curl.o download.o escape.o fetch.o form.o getdate.o handle.o 
ieproxy.o init.o interrupt.o multi.o nslookup.o reflist.o split.o ssl.o typechecking.o utils.o version.o winidn.o writer.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c callbacks.c -o callbacks.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c curl.c -o curl.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c download.c -o download.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c escape.c -o escape.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fetch.c -o fetch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c form.c -o form.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c getdate.c -o getdate.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c handle.c -o handle.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c ieproxy.c -o ieproxy.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c interrupt.c -o interrupt.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c multi.c -o multi.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c nslookup.c -o nslookup.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c reflist.c -o reflist.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c split.c -o split.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c ssl.c -o ssl.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c typechecking.c -o typechecking.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils.c -o utils.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c version.c -o version.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c winidn.c -o winidn.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I/usr/include/x86_64-linux-gnu -DSTRICT_R_HEADERS -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c writer.c -o writer.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o curl.so callbacks.o curl.o download.o escape.o fetch.o form.o getdate.o handle.o ieproxy.o init.o interrupt.o multi.o nslookup.o reflist.o split.o ssl.o typechecking.o utils.o version.o winidn.o writer.o -lcurl -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-curl/00new/curl/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation 
path\n", + "* DONE (curl)\n", + "* installing *source* package ‘digest’ ...\n", + "** package ‘digest’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c SpookyV2.cpp -o SpookyV2.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c aes.c -o aes.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c blake3.c -o blake3.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c blake3_dispatch.c -o blake3_dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c blake3_portable.c -o blake3_portable.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c crc32.c -o crc32.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c digest.c -o digest.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c digest2int.c -o digest2int.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c md5.c -o md5.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c pmurhash.c -o pmurhash.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c raes.c -o raes.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c sha1.c -o sha1.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c sha2.c -o sha2.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c sha256.c -o sha256.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c spooky_serialize.cpp -o spooky_serialize.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c xxhash.c -o xxhash.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o digest.so SpookyV2.o aes.o blake3.o blake3_dispatch.o blake3_portable.o crc32.o digest.o digest2int.o init.o md5.o pmurhash.o raes.o sha1.o sha2.o sha256.o spooky_serialize.o xxhash.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-digest/00new/digest/libs\n", + "** R\n", + "** demo\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (digest)\n", + "* installing *source* package ‘glue’ ...\n", + "** package ‘glue’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c glue.c -o glue.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c trim.c -o trim.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o glue.so glue.o init.o trim.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-glue/00new/glue/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (glue)\n", + "* installing *source* package ‘gtable’ ...\n", + "** package ‘gtable’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary 
installation path\n", + "* DONE (gtable)\n", + "* installing *source* package ‘rlang’ ...\n", + "** package ‘rlang’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I./lib/ -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c capture.c -o capture.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I./lib/ -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c export.c -o export.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I./lib/ -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c internal.c -o internal.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I./lib/ -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c lib.c -o lib.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I./lib/ -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c version.c -o version.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o rlang.so capture.o export.o internal.o lib.o version.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-rlang/00new/rlang/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (rlang)\n", + "* installing *source* package ‘withr’ ...\n", + "** package ‘withr’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (withr)\n", + "* installing *source* package ‘quadprog’ ...\n", + "** package ‘quadprog’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gfortran -fno-optimize-sibling-calls -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -msse2 -mfpmath=sse -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -c aind.f -o aind.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gfortran -fno-optimize-sibling-calls -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -msse2 -mfpmath=sse -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -c solve.QP.compact.f -o solve.QP.compact.o\n", + "gfortran -fno-optimize-sibling-calls -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -msse2 -mfpmath=sse -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -c solve.QP.f -o solve.QP.o\n", + "gfortran -fno-optimize-sibling-calls -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -msse2 -mfpmath=sse -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -c util.f -o util.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o quadprog.so aind.o init.o solve.QP.compact.o solve.QP.o util.o -lblas -lgfortran -lm -lquadmath -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-quadprog/00new/quadprog/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (quadprog)\n", + "* installing *source* package ‘colorspace’ ...\n", + "** package ‘colorspace’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c colorspace.c -o colorspace.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o colorspace.so colorspace.o init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-colorspace/00new/colorspace/libs\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** demo\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (colorspace)\n", + "* installing *source* package ‘fracdiff’ ...\n", + "** package ‘fracdiff’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fdcore.c -o fdcore.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fdgam.c -o fdgam.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fdhess.c -o fdhess.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fdmin.c -o fdmin.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fdsim.c -o fdsim.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c pmult.c -o pmult.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o fracdiff.so fdcore.o fdgam.o fdhess.o fdmin.o fdsim.o init.o pmult.o -lblas -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-fracdiff/00new/fracdiff/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (fracdiff)\n", + "* installing *source* package ‘magrittr’ ...\n", + "** package ‘magrittr’ successfully unpacked 
and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c pipe.c -o pipe.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils.c -o utils.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o magrittr.so pipe.o utils.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-magrittr/00new/magrittr/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (magrittr)\n", + "* installing *source* package ‘Rcpp’ ...\n", + "** package ‘Rcpp’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include/ -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c api.cpp -o api.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include/ -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c attributes.cpp -o attributes.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include/ -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c barrier.cpp -o barrier.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include/ -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c date.cpp -o date.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include/ -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c module.cpp -o module.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include/ -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rcpp_init.cpp -o rcpp_init.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o Rcpp.so api.o attributes.o barrier.o date.o module.o rcpp_init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-Rcpp/00new/Rcpp/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (Rcpp)\n", + "* installing *source* package ‘timeDate’ ...\n", + "** package ‘timeDate’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "Creating a generic function for ‘sample’ from package ‘base’ in package ‘timeDate’\n", + "Creating a generic function for ‘getDataPart’ from package ‘methods’ in package ‘timeDate’\n", + "Creating a generic function for ‘abline’ from package ‘graphics’ in package ‘timeDate’\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (timeDate)\n", + "* installing *source* package ‘urca’ ...\n", + "** package ‘urca’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gfortran 
-fno-optimize-sibling-calls -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -c UnitRootMacKinnon.f -o UnitRootMacKinnon.o\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:502:72:\u001b[m\u001b[K\n", + "\n", + " 502 | do 21 k=1,nobs\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: Shared DO termination label 21 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:505:72:\u001b[m\u001b[K\n", + "\n", + " 505 | do 24 l=j,nvar\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: Shared DO termination label 24 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:524:72:\u001b[m\u001b[K\n", + "\n", + " 524 | do 5 j=1,nvar\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: Shared DO termination label 5 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:570:72:\u001b[m\u001b[K\n", + "\n", + " 570 | 3 amat(i,j) = amat(i,j) - amat(k,i)*amat(k,j)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 3 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:595:72:\u001b[m\u001b[K\n", + "\n", + " 595 | 11 t = t - amat(i,k)*amat(k,j)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 11 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:596:72:\u001b[m\u001b[K\n", + "\n", + " 596 | 12 amat(i,j) = t*ooa\n", + " | 
\u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 12 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[KUnitRootMacKinnon.f:602:72:\u001b[m\u001b[K\n", + "\n", + " 602 | 14 t = t + amat(i,k)*amat(j,k)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 14 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o urca.so UnitRootMacKinnon.o -llapack -lblas -lgfortran -lm -lquadmath -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-urca/00new/urca/libs\n", + "** R\n", + "** data\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (urca)\n", + "* installing *source* package ‘zoo’ ...\n", + "** package ‘zoo’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c coredata.c -o coredata.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c lag.c -o lag.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o zoo.so coredata.o init.o lag.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-zoo/00new/zoo/libs\n", + "** R\n", + "** demo\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (zoo)\n", + "* installing *source* package ‘diffobj’ ...\n", + "** package ‘diffobj’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c diff.c -o diff.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c diffobj.c -o diffobj.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o diffobj.so diff.o diffobj.o init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-diffobj/00new/diffobj/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (diffobj)\n", + "* installing *source* package ‘desc’ ...\n", + "** package ‘desc’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (desc)\n", + "* installing *source* package ‘processx’ ...\n", + "** package ‘processx’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -Wall tools/px.c -o tools/px\n", + "gcc -std=gnu99 -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g supervisor/supervisor.c supervisor/utils.c \\\n", + " -o supervisor/supervisor\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c base64.c -o base64.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c client.c -o client.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c errors.c -o errors.o\n", + "gcc -std=gnu99 -shared -L\"/usr/lib/R/lib\" -Wl,-Bsymbolic-functions -Wl,-z,relro -o client.so base64.o client.o errors.o -L\"/usr/lib/R/lib\" -lR\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c poll.c -o poll.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c processx-connection.c -o processx-connection.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c processx-vector.c -o processx-vector.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c create-time.c -o create-time.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/childlist.c -o unix/childlist.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/connection.c -o unix/connection.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/processx.c -o unix/processx.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/sigchld.c -o unix/sigchld.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/utils.c -o unix/utils.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unix/named_pipe.c -o unix/named_pipe.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cleancall.c -o cleancall.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o processx.so init.o poll.o errors.o processx-connection.o processx-vector.o create-time.o base64.o unix/childlist.o unix/connection.o unix/processx.o unix/sigchld.o unix/utils.o unix/named_pipe.o cleancall.o -L/usr/lib/R/lib -lR\n", + "installing via 'install.libs.R' to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-processx/00new/processx\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (processx)\n", + "* installing *source* package ‘lifecycle’ ...\n", + "** package ‘lifecycle’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if 
installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (lifecycle)\n", + "* installing *source* package ‘munsell’ ...\n", + "** package ‘munsell’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (munsell)\n", + "* installing *source* package ‘cli’ ...\n", + "** package ‘cli’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (cli)\n", + "* installing *source* package ‘ellipsis’ ...\n", + "** package ‘ellipsis’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dots.c -o dots.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o ellipsis.so dots.o init.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-ellipsis/00new/ellipsis/libs\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (ellipsis)\n", + "* installing *source* package ‘xts’ ...\n", + "** package ‘xts’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c add_class.c -o add_class.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c any.c -o any.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c attr.c -o attr.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c binsearch.c -o binsearch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c coredata.c -o coredata.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c diff.c -o diff.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dimnames.c -o dimnames.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c endpoints.c -o endpoints.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c extract_col.c -o extract_col.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c isOrdered.c -o isOrdered.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c isXts.c -o isXts.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c merge.c -o merge.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c na.c -o na.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c period_apply.c -o period_apply.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c period_arithmetic.c -o period_arithmetic.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c period_quantile.c -o period_quantile.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rbind.c -o rbind.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rollfun.c -o rollfun.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c runSum.c -o runSum.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c startofyear.c -o startofyear.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subset.c -o subset.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subset.old.c -o subset.old.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c toperiod.c -o toperiod.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c totalcols.c -o totalcols.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c tryXts.c -o tryXts.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/zoo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unique.time.c -o unique.time.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o xts.so add_class.o any.o attr.o binsearch.o coredata.o diff.o dimnames.o endpoints.o extract_col.o init.o isOrdered.o isXts.o merge.o na.o period_apply.o period_arithmetic.o period_quantile.o rbind.o rollfun.o runSum.o startofyear.o subset.o subset.old.o toperiod.o totalcols.o tryXts.o unique.time.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-xts/00new/xts/libs\n", + "** R\n", + "** data\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (xts)\n", + "* installing *source* package ‘lmtest’ ...\n", + "** package ‘lmtest’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gfortran -fno-optimize-sibling-calls -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -c pan.f -o pan.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o lmtest.so init.o pan.o -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-lmtest/00new/lmtest/libs\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (lmtest)\n", + "* installing *source* package ‘RcppArmadillo’ ...\n", + "** package ‘RcppArmadillo’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "checking whether the C++ compiler works... yes\n", + "checking for C++ compiler default output file name... a.out\n", + "checking for suffix of executables... \n", + "checking whether we are cross compiling... no\n", + "checking for suffix of object files... o\n", + "checking whether we are using the GNU C++ compiler... yes\n", + "checking whether g++ -std=gnu++11 accepts -g... yes\n", + "checking how to run the C++ preprocessor... g++ -std=gnu++11 -E\n", + "checking whether we are using the GNU C++ compiler... (cached) yes\n", + "checking whether g++ -std=gnu++11 accepts -g... (cached) yes\n", + "checking whether we have a suitable tempdir... /tmp/user/1000\n", + "checking whether R CMD SHLIB can already compile programs using OpenMP... yes\n", + "checking LAPACK_LIBS... 
system LAPACK found\n", + "configure: creating ./config.status\n", + "config.status: creating inst/include/RcppArmadilloConfigGenerated.h\n", + "config.status: creating src/Makevars\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c RcppArmadillo.cpp -o RcppArmadillo.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c RcppExports.cpp -o RcppExports.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I../inst/include -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fastLm.cpp -o fastLm.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o RcppArmadillo.so RcppArmadillo.o RcppExports.o fastLm.o -llapack -lblas -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-RcppArmadillo/00new/RcppArmadillo/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (RcppArmadillo)\n", + "* installing *source* package ‘callr’ ...\n", + "** package ‘callr’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (callr)\n", + "* installing *source* package ‘vctrs’ ...\n", + "** package ‘vctrs’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c altrep-rle.c -o altrep-rle.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c arg-counter.c -o arg-counter.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c arg.c -o arg.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c bind.c -o bind.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c c-unchop.c -o c-unchop.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c c.c -o c.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c callables.c -o callables.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cast-bare.c -o cast-bare.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cast-dispatch.c -o cast-dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c cast.c -o cast.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c compare.c -o compare.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c complete.c -o complete.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c conditions.c -o conditions.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dictionary.c -o dictionary.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c dim.c -o dim.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c equal.c -o equal.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fields.c -o fields.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c fill.c -o fill.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c group.c -o group.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c growable.c -o growable.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c hash.c -o hash.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c names.c -o names.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-groups.c -o order-groups.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-radix.c -o order-radix.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-sortedness.c -o order-sortedness.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c order-truelength.c -o order-truelength.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c poly-op.c -o poly-op.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c proxy-restore.c -o proxy-restore.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c proxy.c -o proxy.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c ptype2-dispatch.c -o ptype2-dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c rep.c -o rep.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c runs.c -o runs.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c shape.c -o shape.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c size-common.c -o size-common.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c size.c -o size.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-array.c -o slice-array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-assign-array.c -o slice-assign-array.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-assign.c -o slice-assign.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice-chop.c -o slice-chop.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c slice.c -o slice.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c split.c -o split.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subscript-loc.c -o subscript-loc.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c subscript.c -o subscript.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c translate.c -o translate.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-data-frame.c -o type-data-frame.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-date-time.c -o type-date-time.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-factor.c -o type-factor.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-info.c -o type-info.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type-tibble.c -o type-tibble.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type.c -o type.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c type2.c -o type2.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c typeof2-s3.c -o typeof2-s3.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c typeof2.c -o typeof2.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unspecified.c -o unspecified.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils-dispatch.c -o utils-dispatch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils-rlang.c -o utils-rlang.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c utils.c -o utils.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fvisibility=hidden -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c version.c -o version.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o vctrs.so altrep-rle.o arg-counter.o arg.o bind.o c-unchop.o c.o callables.o cast-bare.o cast-dispatch.o cast.o compare.o complete.o conditions.o dictionary.o dim.o equal.o fields.o fill.o group.o growable.o hash.o init.o names.o order-groups.o order-radix.o order-sortedness.o order-truelength.o poly-op.o proxy-restore.o proxy.o ptype2-dispatch.o rep.o runs.o shape.o size-common.o size.o slice-array.o slice-assign-array.o slice-assign.o slice-chop.o slice.o split.o subscript-loc.o subscript.o translate.o type-data-frame.o type-date-time.o type-factor.o type-info.o type-tibble.o type.o type2.o typeof2-s3.o typeof2.o unspecified.o utils-dispatch.o utils-rlang.o utils.o version.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-vctrs/00new/vctrs/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final 
location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (vctrs)\n", + "* installing *source* package ‘TTR’ ...\n", + "** package ‘TTR’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c adjRatios.c -o adjRatios.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c aroon.c -o aroon.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c moving_averages.c -o moving_averages.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c percent_rank.c -o percent_rank.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c runfun.c -o runfun.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c sar.c -o sar.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c wilderSum.c -o wilderSum.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/xts/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c zigzag.c -o zigzag.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o TTR.so adjRatios.o aroon.o init.o moving_averages.o percent_rank.o runfun.o sar.o wilderSum.o zigzag.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-TTR/00new/TTR/libs\n", + "** R\n", + "** data\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (TTR)\n", + "* installing *source* package ‘scales’ ...\n", + "** package ‘scales’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (scales)\n", + "* installing *source* package ‘pkgbuild’ ...\n", + "** package ‘pkgbuild’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from 
final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (pkgbuild)\n", + "* installing *source* package ‘pillar’ ...\n", + "** package ‘pillar’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (pillar)\n", + "* installing *source* package ‘quantmod’ ...\n", + "** package ‘quantmod’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** demo\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (quantmod)\n", + "* installing *source* package ‘pkgload’ ...\n", + "** package ‘pkgload’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c unlock.c -o unlock.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o pkgload.so unlock.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-pkgload/00new/pkgload/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (pkgload)\n", + "* installing *source* package ‘tibble’ ...\n", + "** package ‘tibble’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c attributes.c -o attributes.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c coerce.c -o coerce.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c matrixToDataFrame.c -o matrixToDataFrame.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o tibble.so attributes.o coerce.o init.o matrixToDataFrame.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-tibble/00new/tibble/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (tibble)\n", + "* installing *source* package ‘tseries’ ...\n", + "** package ‘tseries’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c arma.c -o arma.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c bdstest.c -o bdstest.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c boot.c -o boot.o\n", + "gfortran -fno-optimize-sibling-calls -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -c cfuncs.f90 -o cfuncs.o\n", + "gfortran -fno-optimize-sibling-calls -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -c dsumsl.f -o dsumsl.o\n", + "\u001b[01m\u001b[Kdsumsl.f:1004:72:\u001b[m\u001b[K\n", + "\n", + " 1004 | 10 W(I) = A*X(I) + Y(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 10 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1123:72:\u001b[m\u001b[K\n", + "\n", + " 1123 | 10 Y(I) = S\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 10 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1137:72:\u001b[m\u001b[K\n", + "\n", + " 1137 | 10 X(I) = Y(I) / Z(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 10 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1141:72:\u001b[m\u001b[K\n", + "\n", + " 1141 | 30 X(I) = Y(I) * Z(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 30 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1224:72:\u001b[m\u001b[K\n", + "\n", + " 1224 | 30 Z(I) = CY * Z(I) - CS * W(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 30 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + 
"\u001b[01m\u001b[Kdsumsl.f:1908:72:\u001b[m\u001b[K\n", + "\n", + " 1908 | 10 STEP(I) = G(I) / GNORM\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 10 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1923:72:\u001b[m\u001b[K\n", + "\n", + " 1923 | 20 STEP(I) = -NWTSTP(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 20 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1941:72:\u001b[m\u001b[K\n", + "\n", + " 1941 | 40 STEP(I) = T * NWTSTP(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 40 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1955:72:\u001b[m\u001b[K\n", + "\n", + " 1955 | 60 STEP(I) = T * DIG(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 60 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:1982:72:\u001b[m\u001b[K\n", + "\n", + " 1982 | 80 STEP(I) = T1*DIG(I) + T2*NWTSTP(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted feature: DO termination statement which is not END DO or CONTINUE with label 80 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "\u001b[01m\u001b[Kdsumsl.f:2226:72:\u001b[m\u001b[K\n", + "\n", + " 2226 | 10 X(I) = Y(I)\n", + " | \u001b[01;35m\u001b[K1\u001b[m\u001b[K\n", + "\u001b[01;35m\u001b[KWarning:\u001b[m\u001b[K Fortran 2018 deleted 
feature: DO termination statement which is not END DO or CONTINUE with label 10 at \u001b[01;35m\u001b[K(1)\u001b[m\u001b[K\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c formats.c -o formats.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c garch.c -o garch.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c ppsum.c -o ppsum.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c tsutils.c -o tsutils.o\n", + "gcc -std=gnu99 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o tseries.so arma.o bdstest.o boot.o cfuncs.o dsumsl.o formats.o garch.o init.o ppsum.o tsutils.o -lblas -lgfortran -lm -lquadmath -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-tseries/00new/tseries/libs\n", + "** R\n", + "** data\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (tseries)\n", + "* installing *source* package ‘rematch2’ ...\n", + "** package ‘rematch2’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (rematch2)\n", + "* installing *source* package ‘waldo’ ...\n", + "** package ‘waldo’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** testing if installed package can be loaded from temporary 
location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (waldo)\n", + "* installing *source* package ‘testthat’ ...\n", + "** package ‘testthat’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -DCOMPILING_TESTTHAT -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.c -o init.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -DCOMPILING_TESTTHAT -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c reassign.c -o reassign.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -DCOMPILING_TESTTHAT -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c test-catch.cpp -o test-catch.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -DCOMPILING_TESTTHAT -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c test-example.cpp -o test-example.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I../inst/include -DCOMPILING_TESTTHAT -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c test-runner.cpp -o test-runner.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o testthat.so init.o reassign.o test-catch.o test-example.o test-runner.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-testthat/00new/testthat/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (testthat)\n", + "* installing *source* package ‘isoband’ ...\n", + "** package ‘isoband’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c clip-lines.cpp -o clip-lines.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c init.cpp -o init.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c isoband.cpp -o isoband.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c polygon.cpp -o polygon.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c separate-polygons.cpp -o separate-polygons.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c test-clip-lines.cpp -o test-clip-lines.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c test-runner.cpp -o test-runner.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/testthat/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c test-separate-polygons.cpp -o test-separate-polygons.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o isoband.so clip-lines.o init.o isoband.o polygon.o separate-polygons.o test-clip-lines.o test-runner.o test-separate-polygons.o -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-isoband/00new/isoband/libs\n", + "** R\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (isoband)\n", + "* installing *source* package ‘ggplot2’ ...\n", + "** package ‘ggplot2’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** testing if installed package can be loaded from final location\n", + "** testing if installed package keeps a record of temporary installation path\n", + "* DONE (ggplot2)\n", + "* installing *source* package ‘forecast’ ...\n", + "** package ‘forecast’ successfully unpacked and MD5 sums checked\n", + "** using staged installation\n", + "** libs\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" 
-DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c calcBATS.cpp -o calcBATS.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c calcTBATS.cpp -o calcTBATS.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c etsTargetFunction.cpp -o etsTargetFunction.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c etsTargetFunctionWrapper.cpp -o etsTargetFunctionWrapper.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c etscalc.c -o etscalc.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c etspolyroot.c -o etspolyroot.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c makeBATSMatrices.cpp -o makeBATSMatrices.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c makeTBATSMatrices.cpp -o makeTBATSMatrices.o\n", + "gcc -std=gnu99 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. 
-fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c registerDynamicSymbol.c -o registerDynamicSymbol.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c updateMatrices.cpp -o updateMatrices.o\n", + "g++ -std=gnu++11 -I\"/usr/share/R/include\" -DNDEBUG -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/Rcpp/include' -I'/home/webartifex/repos/urban-meal-delivery/.cache/r_libs/RcppArmadillo/include' -fpic -g -O2 -fdebug-prefix-map=/build/r-base-PsrVor/r-base-4.0.2=. -fstack-protector-strong -Wformat -Werror=format-security -Wdate-time -D_FORTIFY_SOURCE=2 -g -c updateTBATSMatrices.cpp -o updateTBATSMatrices.o\n", + "g++ -std=gnu++11 -shared -L/usr/lib/R/lib -Wl,-Bsymbolic-functions -Wl,-z,relro -o forecast.so calcBATS.o calcTBATS.o etsTargetFunction.o etsTargetFunctionWrapper.o etscalc.o etspolyroot.o makeBATSMatrices.o makeTBATSMatrices.o registerDynamicSymbol.o updateMatrices.o updateTBATSMatrices.o -llapack -lblas -lgfortran -lm -lquadmath -L/usr/lib/R/lib -lR\n", + "installing to /home/webartifex/repos/urban-meal-delivery/.cache/r_libs/00LOCK-forecast/00new/forecast/libs\n", + "** R\n", + "** data\n", + "*** moving datasets to lazyload DB\n", + "** inst\n", + "** byte-compile and prepare package for lazy loading\n", + "** help\n", + "*** installing help indices\n", + "*** copying figures\n", + "** building package indices\n", + "** installing vignettes\n", + "** testing if installed package can be loaded from temporary location\n", + "** checking absolute paths in shared objects and dynamic libraries\n", + "** testing if installed package can be loaded from final location\n", + "** 
testing if installed package keeps a record of temporary installation path\n", + "* DONE (forecast)\n", + "\n", + "The downloaded source packages are in\n", + "\t‘/tmp/user/1000/RtmpzOhFwP/downloaded_packages’\n", + "> \n", + "> \n" + ] + } + ], + "source": [ + "!R -e \"install.packages('forecast')\"" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "Lastly, check if the `urban_meal_delivery.init_r` module can be imported (works only if all R dependencies can be loaded)." + ] + }, + { + "cell_type": "code", + "execution_count": 8, + "metadata": {}, + "outputs": [], + "source": [ + "from urban_meal_delivery import init_r" + ] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.8.6" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/setup.cfg b/setup.cfg index a7668a8..3e27df5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -255,6 +255,8 @@ ignore_missing_imports = true ignore_missing_imports = true [mypy-pytest] ignore_missing_imports = true +[mypy-rpy2.*] +ignore_missing_imports = true [mypy-sqlalchemy.*] ignore_missing_imports = true [mypy-utm.*] @@ -269,5 +271,6 @@ console_output_style = count env = TESTING=true markers = - db: tests touching the database - e2e: non-db integration tests + db: (integration) tests touching the database + e2e: non-db and non-r integration tests + r: (integration) tests using rpy2 diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 2d36392..267d579 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -69,6 +69,8 @@ class Config: ALEMBIC_TABLE = 'alembic_version' ALEMBIC_TABLE_SCHEMA = 'public' 
+ R_LIBS_PATH = os.getenv('R_LIBS') + def __repr__(self) -> str: """Non-literal text representation.""" return '' @@ -117,6 +119,12 @@ def make_config(env: str = 'production') -> Config: if config.DATABASE_URI is None and not os.getenv('TESTING'): warnings.warn('Bad configurartion: no DATABASE_URI set in the environment') + # Some functionalities require R and some packages installed. + # To ensure isolation and reproducibility, the projects keeps the R dependencies + # in a project-local folder that must be set in the environment. + if config.R_LIBS_PATH is None and not os.getenv('TESTING'): + warnings.warn('Bad configuration: no R_LIBS set in the environment') + return config diff --git a/src/urban_meal_delivery/init_r.py b/src/urban_meal_delivery/init_r.py new file mode 100644 index 0000000..189a0dc --- /dev/null +++ b/src/urban_meal_delivery/init_r.py @@ -0,0 +1,28 @@ +"""Initialize the R dependencies. + +The purpose of this module is to import all the R packages that are installed +into a sub-folder (see `config.R_LIBS_PATH`) in the project's root directory. + +The Jupyter notebook "research/r_dependencies.ipynb" can be used to install all +R dependencies on a Ubuntu/Debian based system. +""" + +from rpy2.rinterface_lib import callbacks as rcallbacks +from rpy2.robjects import packages as rpackages + + +# Suppress R's messages to stdout and stderr. +# Source: https://stackoverflow.com/a/63220287 +rcallbacks.consolewrite_print = lambda msg: None # pragma: no cover +rcallbacks.consolewrite_warnerror = lambda msg: None # pragma: no cover + + +# For clarity and convenience, re-raise the error that results from missing R +# dependencies with clearer instructions as to how to deal with it. +try: # noqa:WPS229 + rpackages.importr('forecast') + rpackages.importr('zoo') + +except rpackages.PackageNotInstalledError: # pragma: no cover + msg = 'See the "research/r_dependencies.ipynb" notebook!' 
+ raise rpackages.PackageNotInstalledError(msg) from None diff --git a/tests/test_config.py b/tests/test_config.py index 9251d48..db15321 100644 --- a/tests/test_config.py +++ b/tests/test_config.py @@ -29,6 +29,9 @@ def test_database_uri_set(env, monkeypatch): monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri) monkeypatch.setattr(configuration.TestingConfig, 'DATABASE_URI', uri) + # Prevent that a warning is emitted for a missing R_LIBS_PATH. + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs') + with pytest.warns(None) as record: configuration.make_config(env) @@ -43,6 +46,9 @@ def test_no_database_uri_set_with_testing_env_var(env, monkeypatch): monkeypatch.setenv('TESTING', 'true') + # Prevent that a warning is emitted for a missing R_LIBS_PATH. + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs') + with pytest.warns(None) as record: configuration.make_config(env) @@ -57,10 +63,64 @@ def test_no_database_uri_set_without_testing_env_var(env, monkeypatch): monkeypatch.delenv('TESTING', raising=False) + # Prevent that a warning is emitted for a missing R_LIBS_PATH. + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs') + with pytest.warns(UserWarning, match='no DATABASE_URI'): configuration.make_config(env) +@pytest.mark.parametrize('env', envs) +def test_r_libs_path_set(env, monkeypatch): + """Package does NOT emit a warning if R_LIBS is set in the environment.""" + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', '.cache/r_libs') + + # Prevent that a warning is emitted for a missing DATABASE_URI. 
+ uri = 'postgresql://user:password@localhost/db' + monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri) + + with pytest.warns(None) as record: + configuration.make_config(env) + + assert len(record) == 0 # noqa:WPS441,WPS507 + + +@pytest.mark.parametrize('env', envs) +def test_no_r_libs_path_set_with_testing_env_var(env, monkeypatch): + """Package emits a warning if no R_LIBS is set in the environment ... + + ... when not testing. + """ + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', None) + monkeypatch.setenv('TESTING', 'true') + + # Prevent that a warning is emitted for a missing DATABASE_URI. + uri = 'postgresql://user:password@localhost/db' + monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri) + + with pytest.warns(None) as record: + configuration.make_config(env) + + assert len(record) == 0 # noqa:WPS441,WPS507 + + +@pytest.mark.parametrize('env', envs) +def test_no_r_libs_path_set_without_testing_env_var(env, monkeypatch): + """Package emits a warning if no R_LIBS is set in the environment ... + + ... when not testing. + """ + monkeypatch.setattr(configuration.Config, 'R_LIBS_PATH', None) + monkeypatch.delenv('TESTING', raising=False) + + # Prevent that a warning is emitted for a missing DATABASE_URI. + uri = 'postgresql://user:password@localhost/db' + monkeypatch.setattr(configuration.ProductionConfig, 'DATABASE_URI', uri) + + with pytest.warns(UserWarning, match='no R_LIBS'): + configuration.make_config(env) + + def test_random_testing_schema(): """CLEAN_SCHEMA is randomized if not set explicitly.""" result = configuration.random_schema_name() diff --git a/tests/test_init_r.py b/tests/test_init_r.py new file mode 100644 index 0000000..be673d6 --- /dev/null +++ b/tests/test_init_r.py @@ -0,0 +1,19 @@ +"""Verify that the R packages are installed correctly.""" + +import pytest + + +@pytest.mark.r +def test_r_packages_installed(): + """Import the `urban_meal_delivery.init_r` module. 
+ + Doing this raises a `PackageNotInstalledError` if the + mentioned R packages are not importable. + + They must be installed externally. That happens either + in the "research/r_dependencies.ipynb" notebook or + in the GitHub Actions CI. + """ + from urban_meal_delivery import init_r # noqa:WPS433 + + assert init_r is not None From 98b6830b4616458c50dc82ddd196aee73cf4a644 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 11 Jan 2021 16:10:45 +0100 Subject: [PATCH 39/72] Add `stl()` function - `stl()` wraps R's "stl" function in Python - STL is a decomposition method for time series --- setup.cfg | 22 +- src/urban_meal_delivery/__init__.py | 5 +- src/urban_meal_delivery/forecasts/__init__.py | 1 + .../forecasts/decomposition.py | 174 +++++++++++++++ tests/forecasts/test_decomposition.py | 200 ++++++++++++++++++ 5 files changed, 388 insertions(+), 14 deletions(-) create mode 100644 src/urban_meal_delivery/forecasts/decomposition.py create mode 100644 tests/forecasts/test_decomposition.py diff --git a/setup.cfg b/setup.cfg index 3e27df5..8c3817b 100644 --- a/setup.cfg +++ b/setup.cfg @@ -89,6 +89,10 @@ extend-ignore = # Comply with black's style. # Source: https://github.com/psf/black/blob/master/docs/compatible_configs.md#flake8 E203, W503, WPS348, + # Google's Python Style Guide is not reStructuredText + # until after being processed by Sphinx Napoleon. + # Source: https://github.com/peterjc/flake8-rst-docstrings/issues/17 + RST201,RST203,RST301, # String constant over-use is checked visually by the programmer. WPS226, # Allow underscores in numbers. @@ -103,6 +107,9 @@ extend-ignore = WPS429, per-file-ignores = + # Top-levels of a sub-packages are intended to import a lot. + **/__init__.py: + F401,WPS201, docs/conf.py: # Allow shadowing built-ins and reading __*__ variables. WPS125,WPS609, @@ -132,15 +139,9 @@ per-file-ignores = WPS115, # Numbers are normal in config files. 
WPS432, - src/urban_meal_delivery/db/__init__.py: - # Top-level of a sub-packages is intended to import a lot. - F401,WPS201, - src/urban_meal_delivery/db/utils/__init__.py: - # Top-level of a sub-packages is intended to import a lot. - F401, - src/urban_meal_delivery/forecasts/__init__.py: - # Top-level of a sub-packages is intended to import a lot. - F401, + src/urban_meal_delivery/forecasts/decomposition.py: + # The module does not have a high cognitive complexity. + WPS232, src/urban_meal_delivery/forecasts/timify.py: # No SQL injection as the inputs come from a safe source. S608, @@ -169,9 +170,6 @@ per-file-ignores = WPS432, # When testing, it is normal to use implementation details. WPS437, - tests/db/fake_data/__init__.py: - # Top-level of a sub-packages is intended to import a lot. - F401,WPS201, # Explicitly set mccabe's maximum complexity to 10 as recommended by # Thomas McCabe, the inventor of the McCabe complexity, and the NIST. diff --git a/src/urban_meal_delivery/__init__.py b/src/urban_meal_delivery/__init__.py index ad34978..b2f39fe 100644 --- a/src/urban_meal_delivery/__init__.py +++ b/src/urban_meal_delivery/__init__.py @@ -6,11 +6,12 @@ Example: True """ # The config object must come before all other project-internal imports. 
-from urban_meal_delivery.configuration import config # noqa:F401 isort:skip +from urban_meal_delivery.configuration import config # isort:skip from importlib import metadata as _metadata -from urban_meal_delivery import db # noqa:F401 +from urban_meal_delivery import db +from urban_meal_delivery import forecasts try: diff --git a/src/urban_meal_delivery/forecasts/__init__.py b/src/urban_meal_delivery/forecasts/__init__.py index be8843e..0db50ff 100644 --- a/src/urban_meal_delivery/forecasts/__init__.py +++ b/src/urban_meal_delivery/forecasts/__init__.py @@ -1,3 +1,4 @@ """Demand forecasting utilities.""" +from urban_meal_delivery.forecasts import decomposition from urban_meal_delivery.forecasts import timify diff --git a/src/urban_meal_delivery/forecasts/decomposition.py b/src/urban_meal_delivery/forecasts/decomposition.py new file mode 100644 index 0000000..ac61b68 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/decomposition.py @@ -0,0 +1,174 @@ +"""Seasonal-trend decomposition procedure based on LOESS (STL). + +This module defines a `stl()` function that wraps R's STL decomposition function +using the `rpy2` library. +""" + +import math + +import pandas as pd +from rpy2 import robjects +from rpy2.robjects import pandas2ri + + +def stl( # noqa:C901,WPS210,WPS211,WPS231 + time_series: pd.Series, + *, + frequency: int, + ns: int, + nt: int = None, + nl: int = None, + ds: int = 0, + dt: int = 1, + dl: int = 1, + js: int = None, + jt: int = None, + jl: int = None, + ni: int = 2, + no: int = 0, # noqa:WPS110 +) -> pd.DataFrame: + """Decompose a time series into seasonal, trend, and residual components. + + This is a Python wrapper around the corresponding R function. 
+ + Further info on the STL method: + https://www.nniiem.ru/file/news/2016/stl-statistical-model.pdf + https://otexts.com/fpp2/stl.html + + Further info on the R's "stl" function: + https://www.rdocumentation.org/packages/stats/versions/3.6.2/topics/stl + + Args: + time_series: time series with a `DateTime` based index; + must not contain `NaN` values + frequency: frequency of the observations in the `time_series` + ns: smoothing parameter for the seasonal component + (= window size of the seasonal smoother); + must be odd and `>= 7` so that the seasonal component is smooth; + the greater `ns`, the smoother the seasonal component; + so, this is a hyper-parameter optimized in accordance with the application + nt: smoothing parameter for the trend component + (= window size of the trend smoother); + must be odd and `>= (1.5 * frequency) / [1 - (1.5 / ns)]`; + the latter threshold is the default value; + the greater `nt`, the smoother the trend component + nl: smoothing parameter for the low-pass filter; + must be odd and `>= frequency`; + the least odd number `>= frequency` is the default + ds: degree of locally fitted polynomial in seasonal smoothing; + must be `0` or `1` + dt: degree of locally fitted polynomial in trend smoothing; + must be `0` or `1` + dl: degree of locally fitted polynomial in low-pass smoothing; + must be `0` or `1` + js: number of steps by which the seasonal smoother skips ahead + and then linearly interpolates between observations; + if set to `1`, the smoother is evaluated at all points; + to make the STL decomposition faster, increase this value; + by default, `js` is the smallest integer `>= 0.1 * ns` + jt: number of steps by which the trend smoother skips ahead + and then linearly interpolates between observations; + if set to `1`, the smoother is evaluated at all points; + to make the STL decomposition faster, increase this value; + by default, `jt` is the smallest integer `>= 0.1 * nt` + jl: number of steps by which the low-pass 
smoother skips ahead + and then linearly interpolates between observations; + if set to `1`, the smoother is evaluated at all points; + to make the STL decomposition faster, increase this value; + by default, `jl` is the smallest integer `>= 0.1 * nl` + ni: number of iterations of the inner loop that updates the + seasonal and trend components; + usually, a low value (e.g., `2`) suffices + no: number of iterations of the outer loop that handles outliers; + also known as the "robustness" loop; + if no outliers need to be handled, set `no=0`; + otherwise, `no=5` or `no=10` combined with `ni=1` is a good choice + + Returns: + result: a DataFrame with three columns ("seasonal", "trend", and "residual") + providing time series of the individual components + + Raises: + ValueError: some argument does not adhere to the specifications above + """ + # Re-seed R every time the process does something. + robjects.r('set.seed(42)') + + # Validate all arguments and set default values. + + if time_series.isnull().any(): + raise ValueError('`time_series` must not contain `NaN` values') + + if ns % 2 == 0 or ns < 7: + raise ValueError('`ns` must be odd and `>= 7`') + + default_nt = math.ceil((1.5 * frequency) / (1 - (1.5 / ns))) # noqa:WPS432 + if nt is not None: + if nt % 2 == 0 or nt < default_nt: + raise ValueError( + '`nt` must be odd and `>= (1.5 * frequency) / [1 - (1.5 / ns)]`, ' + + 'which is {0}'.format(default_nt), + ) + else: + nt = default_nt + if nt % 2 == 0: # pragma: no cover => hard to construct edge case + nt += 1 + + if nl is not None: + if nl % 2 == 0 or nl < frequency: + raise ValueError('`nl` must be odd and `>= frequency`') + elif frequency % 2 == 0: + nl = frequency + 1 + else: # pragma: no cover => hard to construct edge case + nl = frequency + + if ds not in {0, 1}: + raise ValueError('`ds` must be either `0` or `1`') + if dt not in {0, 1}: + raise ValueError('`dt` must be either `0` or `1`') + if dl not in {0, 1}: + raise ValueError('`dl` must be either 
`0` or `1`') + + if js is not None: + if js <= 0: + raise ValueError('`js` must be positive') + else: + js = math.ceil(ns / 10) + + if jt is not None: + if jt <= 0: + raise ValueError('`jt` must be positive') + else: + jt = math.ceil(nt / 10) + + if jl is not None: + if jl <= 0: + raise ValueError('`jl` must be positive') + else: + jl = math.ceil(nl / 10) + + if ni <= 0: + raise ValueError('`ni` must be positive') + + if no < 0: + raise ValueError('`no` must be non-negative') + elif no > 0: + robust = True + else: + robust = False + + # Call the STL function in R. + ts = robjects.r['ts'](pandas2ri.py2rpy(time_series), frequency=frequency) + result = robjects.r['stl']( + ts, ns, ds, nt, dt, nl, dl, js, jt, jl, robust, ni, no, # noqa:WPS221 + ) + + # Unpack the result to a `pd.DataFrame`. + result = pandas2ri.rpy2py(result[0]) + result = { + 'seasonal': pd.Series(result[:, 0], index=time_series.index), + 'trend': pd.Series(result[:, 1], index=time_series.index), + 'residual': pd.Series(result[:, 2], index=time_series.index), + } + + return pd.DataFrame(result) diff --git a/tests/forecasts/test_decomposition.py b/tests/forecasts/test_decomposition.py new file mode 100644 index 0000000..6c33d3e --- /dev/null +++ b/tests/forecasts/test_decomposition.py @@ -0,0 +1,200 @@ +"""Test the `stl()` function.""" + +import math + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import config +from urban_meal_delivery.forecasts import decomposition + + +# See remarks in `datetime_index` fixture. +FREQUENCY = 7 * 12 + +# The default `ns` suggested for the STL method. +NS = 7 + + +@pytest.fixture +def datetime_index(): + """A `pd.Index` with `DateTime` values. + + The times resemble a vertical time series with a + `frequency` of `7` times the number of daily time steps, + which is `12` for `LONG_TIME_STEP` values. 
+ """ + gen = ( + start_at + for start_at in pd.date_range( + test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + index = pd.Index(gen) + index.name = 'start_at' + + return index + + +@pytest.fixture +def no_demand(datetime_index): + """A time series of order totals when there was no demand.""" + return pd.Series(0, index=datetime_index, name='order_totals') + + +class TestInvalidArguments: + """Test `stl()` with invalid arguments.""" + + def test_no_nans_in_time_series(self, datetime_index): + """`stl()` requires a `time_series` without `NaN` values.""" + time_series = pd.Series(dtype=float, index=datetime_index) + + with pytest.raises(ValueError, match='`NaN` values'): + decomposition.stl(time_series, frequency=FREQUENCY, ns=99) + + def test_ns_not_odd(self, no_demand): + """`ns` must be odd and `>= 7`.""" + with pytest.raises(ValueError, match='`ns`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=8) + + @pytest.mark.parametrize('ns', [-99, -1, 1, 5]) + def test_ns_smaller_than_seven(self, no_demand, ns): + """`ns` must be odd and `>= 7`.""" + with pytest.raises(ValueError, match='`ns`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=ns) + + def test_nt_not_odd(self, no_demand): + """`nt` must be odd and `>= default_nt`.""" + nt = 200 + default_nt = math.ceil((1.5 * FREQUENCY) / (1 - (1.5 / NS))) + + assert nt > default_nt # sanity check + + with pytest.raises(ValueError, match='`nt`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nt=nt) + + @pytest.mark.parametrize('nt', [-99, -1, 0, 1, 99, 159]) + def test_nt_not_at_least_the_default(self, no_demand, nt): + """`nt` must be odd and `>= default_nt`.""" + # `default_nt` becomes 161. 
+ default_nt = math.ceil((1.5 * FREQUENCY) / (1 - (1.5 / NS))) + + assert nt < default_nt # sanity check + + with pytest.raises(ValueError, match='`nt`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nt=nt) + + def test_nl_not_odd(self, no_demand): + """`nl` must be odd and `>= frequency`.""" + nl = 200 + + assert nl > FREQUENCY # sanity check + + with pytest.raises(ValueError, match='`nl`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nl=nl) + + def test_nl_at_least_the_frequency(self, no_demand): + """`nl` must be odd and `>= frequency`.""" + nl = 77 + + assert nl < FREQUENCY # sanity check + + with pytest.raises(ValueError, match='`nl`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nl=nl) + + def test_ds_not_zero_or_one(self, no_demand): + """`ds` must be `0` or `1`.""" + with pytest.raises(ValueError, match='`ds`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, ds=2) + + def test_dt_not_zero_or_one(self, no_demand): + """`dt` must be `0` or `1`.""" + with pytest.raises(ValueError, match='`dt`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, dt=2) + + def test_dl_not_zero_or_one(self, no_demand): + """`dl` must be `0` or `1`.""" + with pytest.raises(ValueError, match='`dl`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, dl=2) + + @pytest.mark.parametrize('js', [-1, 0]) + def test_js_not_positive(self, no_demand, js): + """`js` must be positive.""" + with pytest.raises(ValueError, match='`js`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, js=js) + + @pytest.mark.parametrize('jt', [-1, 0]) + def test_jt_not_positive(self, no_demand, jt): + """`jt` must be positive.""" + with pytest.raises(ValueError, match='`jt`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, jt=jt) + + @pytest.mark.parametrize('jl', [-1, 0]) + def test_jl_not_positive(self, no_demand, jl): + """`jl` must be positive.""" + with pytest.raises(ValueError, match='`jl`'): + 
decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, jl=jl) + + @pytest.mark.parametrize('ni', [-1, 0]) + def test_ni_not_positive(self, no_demand, ni): + """`ni` must be positive.""" + with pytest.raises(ValueError, match='`ni`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, ni=ni) + + def test_no_not_non_negative(self, no_demand): + """`no` must be non-negative.""" + with pytest.raises(ValueError, match='`no`'): + decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, no=-1) + + +class TestValidArguments: + """Test `stl()` with valid arguments.""" + + def test_structure_of_returned_dataframe(self, no_demand): + """`stl()` returns a `pd.DataFrame` with three columns.""" + result = decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS) + + assert isinstance(result, pd.DataFrame) + assert list(result.columns) == ['seasonal', 'trend', 'residual'] + + # Run the `stl()` function with all possible combinations of arguments, + # including default ones and explicitly set non-default ones. + @pytest.mark.parametrize('nt', [None, 163]) + @pytest.mark.parametrize('nl', [None, 777]) + @pytest.mark.parametrize('ds', [0, 1]) + @pytest.mark.parametrize('dt', [0, 1]) + @pytest.mark.parametrize('dl', [0, 1]) + @pytest.mark.parametrize('js', [None, 1]) + @pytest.mark.parametrize('jt', [None, 1]) + @pytest.mark.parametrize('jl', [None, 1]) + @pytest.mark.parametrize('ni', [2, 3]) + @pytest.mark.parametrize('no', [0, 1]) + def test_decompose_time_series_with_no_demand( # noqa:WPS211,WPS216 + self, no_demand, nt, nl, ds, dt, dl, js, jt, jl, ni, no, # noqa:WPS110 + ): + """Decomposing a time series with no demand ... + + ... returns a `pd.DataFrame` with three columns holding only `0.0` values. 
+ """ + decomposed = decomposition.stl( + no_demand, + frequency=FREQUENCY, + ns=NS, + nt=nt, + nl=nl, + ds=ds, + dt=dt, + dl=dl, + js=js, + jt=jt, + jl=jl, + ni=ni, + no=no, # noqa:WPS110 + ) + + result = decomposed.sum().sum() + + assert result == 0 From 64482f48d005ba4e763e5416bebae966ff2703ec Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 11 Jan 2021 20:17:00 +0100 Subject: [PATCH 40/72] Add wrappers for R's "arima" and "ets" functions --- noxfile.py | 6 +- src/urban_meal_delivery/forecasts/__init__.py | 1 + .../forecasts/decomposition.py | 10 +- .../forecasts/methods/__init__.py | 4 + .../forecasts/methods/arima.py | 76 +++++++++ .../forecasts/methods/ets.py | 77 +++++++++ tests/config.py | 5 +- tests/forecasts/conftest.py | 76 +++++++++ tests/forecasts/test_decomposition.py | 146 +++++++++--------- tests/forecasts/test_methods.py | 128 +++++++++++++++ 10 files changed, 441 insertions(+), 88 deletions(-) create mode 100644 src/urban_meal_delivery/forecasts/methods/__init__.py create mode 100644 src/urban_meal_delivery/forecasts/methods/arima.py create mode 100644 src/urban_meal_delivery/forecasts/methods/ets.py create mode 100644 tests/forecasts/conftest.py create mode 100644 tests/forecasts/test_methods.py diff --git a/noxfile.py b/noxfile.py index 474bb65..f5e9967 100644 --- a/noxfile.py +++ b/noxfile.py @@ -121,7 +121,7 @@ def format_(session): @nox.session(python=PYTHON) def lint(session): - """Lint source files with flake8, and mypy. + """Lint source files with flake8 and mypy. If no extra arguments are provided, all source files are linted. Otherwise, they are interpreted as paths the linters work on recursively. @@ -363,9 +363,7 @@ def slow_ci_tests(session): @nox.session(name='test-suite', python=PYTHON) def test_suite(session): - """Run the entire test suite. - - Intended to be run as a pre-commit hook. + """Run the entire test suite as a pre-commit hook. Ignores the paths passed in by the pre-commit framework and runs the entire test suite. 
diff --git a/src/urban_meal_delivery/forecasts/__init__.py b/src/urban_meal_delivery/forecasts/__init__.py index 0db50ff..86dcac6 100644 --- a/src/urban_meal_delivery/forecasts/__init__.py +++ b/src/urban_meal_delivery/forecasts/__init__.py @@ -1,4 +1,5 @@ """Demand forecasting utilities.""" from urban_meal_delivery.forecasts import decomposition +from urban_meal_delivery.forecasts import methods from urban_meal_delivery.forecasts import timify diff --git a/src/urban_meal_delivery/forecasts/decomposition.py b/src/urban_meal_delivery/forecasts/decomposition.py index ac61b68..bf4466b 100644 --- a/src/urban_meal_delivery/forecasts/decomposition.py +++ b/src/urban_meal_delivery/forecasts/decomposition.py @@ -91,9 +91,6 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231 Raises: ValueError: some argument does not adhere to the specifications above """ - # Re-seed R every time the process does something. - robjects.r('set.seed(42)') - # Validate all arguments and set default values. if time_series.isnull().any(): @@ -157,6 +154,13 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231 else: robust = False + # Initialize R only if necessary as it is tested only in nox's + # "ci-tests-slow" session and "ci-tests-fast" should not fail. + from urban_meal_delivery import init_r # noqa:F401,WPS433 + + # Re-seed R every time it is used to ensure reproducibility. + robjects.r('set.seed(42)') + # Call the STL function in R. 
ts = robjects.r['ts'](pandas2ri.py2rpy(time_series), frequency=frequency) result = robjects.r['stl']( diff --git a/src/urban_meal_delivery/forecasts/methods/__init__.py b/src/urban_meal_delivery/forecasts/methods/__init__.py new file mode 100644 index 0000000..316ae69 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/__init__.py @@ -0,0 +1,4 @@ +"""Various forecasting methods implemented as functions.""" + +from urban_meal_delivery.forecasts.methods import arima +from urban_meal_delivery.forecasts.methods import ets diff --git a/src/urban_meal_delivery/forecasts/methods/arima.py b/src/urban_meal_delivery/forecasts/methods/arima.py new file mode 100644 index 0000000..18965b3 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/arima.py @@ -0,0 +1,76 @@ +"""A wrapper around R's "auto.arima" function.""" + +import pandas as pd +from rpy2 import robjects +from rpy2.robjects import pandas2ri + + +def predict( + training_ts: pd.Series, + forecast_interval: pd.DatetimeIndex, + *, + frequency: int, + seasonal_fit: bool = False, +) -> pd.DataFrame: + """Predict with an automatically chosen ARIMA model. + + Note: The function does not check if the `forecast` interval + extends the `training_ts`'s interval without a gap! + + Args: + training_ts: past observations to be fitted + forecast_interval: interval into which the `training_ts` is forecast; + its length becomes the step size `h` in the forecasting model in R + frequency: frequency of the observations in the `training_ts` + seasonal_fit: if a seasonal ARIMA model should be fitted + + Returns: + predictions: point forecasts (i.e., the "predictions" column) and + confidence intervals (i.e, the four "low/high_80/95" columns) + + Raises: + ValueError: if `training_ts` contains `NaN` values + """ + # Initialize R only if necessary as it is tested only in nox's + # "ci-tests-slow" session and "ci-tests-fast" should not fail. 
+ from urban_meal_delivery import init_r # noqa:F401,WPS433 + + # Re-seed R every time it is used to ensure reproducibility. + robjects.r('set.seed(42)') + + if training_ts.isnull().any(): + raise ValueError('`training_ts` must not contain `NaN` values') + + # Copy the data from Python to R. + robjects.globalenv['data'] = robjects.r['ts']( + pandas2ri.py2rpy(training_ts), frequency=frequency, + ) + + seasonal = 'TRUE' if bool(seasonal_fit) else 'FALSE' + n_steps_ahead = len(forecast_interval) + + # Make the predictions in R. + result = robjects.r( + f""" + as.data.frame( + forecast( + auto.arima(data, approximation = TRUE, seasonal = {seasonal:s}), + h = {n_steps_ahead:d} + ) + ) + """, + ) + + # Convert the results into a nice `pd.DataFrame` with the right `.index`. + forecasts = pandas2ri.rpy2py(result) + forecasts.index = forecast_interval + + return forecasts.rename( + columns={ + 'Point Forecast': 'predictions', + 'Lo 80': 'low_80', + 'Hi 80': 'high_80', + 'Lo 95': 'low_95', + 'Hi 95': 'high_95', + }, + ) diff --git a/src/urban_meal_delivery/forecasts/methods/ets.py b/src/urban_meal_delivery/forecasts/methods/ets.py new file mode 100644 index 0000000..d7af157 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/ets.py @@ -0,0 +1,77 @@ +"""A wrapper around R's "ets" function.""" + +import pandas as pd +from rpy2 import robjects +from rpy2.robjects import pandas2ri + + +def predict( + training_ts: pd.Series, + forecast_interval: pd.DatetimeIndex, + *, + frequency: int, + seasonal_fit: bool = False, +) -> pd.DataFrame: + """Predict with an automatically calibrated ETS model. + + Note: The function does not check if the `forecast` interval + extends the `training_ts`'s interval without a gap! 
+ + Args: + training_ts: past observations to be fitted + forecast_interval: interval into which the `training_ts` is forecast; + its length becomes the step size `h` in the forecasting model in R + frequency: frequency of the observations in the `training_ts` + seasonal_fit: if a "ZZZ" (seasonal) or a "ZZN" (non-seasonal) + type ETS model should be fitted + + Returns: + predictions: point forecasts (i.e., the "predictions" column) and + confidence intervals (i.e, the four "low/high_80/95" columns) + + Raises: + ValueError: if `training_ts` contains `NaN` values + """ + # Initialize R only if necessary as it is tested only in nox's + # "ci-tests-slow" session and "ci-tests-fast" should not fail. + from urban_meal_delivery import init_r # noqa:F401,WPS433 + + # Re-seed R every time it is used to ensure reproducibility. + robjects.r('set.seed(42)') + + if training_ts.isnull().any(): + raise ValueError('`training_ts` must not contain `NaN` values') + + # Copy the data from Python to R. + robjects.globalenv['data'] = robjects.r['ts']( + pandas2ri.py2rpy(training_ts), frequency=frequency, + ) + + model = 'ZZZ' if bool(seasonal_fit) else 'ZZN' + n_steps_ahead = len(forecast_interval) + + # Make the predictions in R. + result = robjects.r( + f""" + as.data.frame( + forecast( + ets(data, model = "{model:s}"), + h = {n_steps_ahead:d} + ) + ) + """, + ) + + # Convert the results into a nice `pd.DataFrame` with the right `.index`. + forecasts = pandas2ri.rpy2py(result) + forecasts.index = forecast_interval + + return forecasts.rename( + columns={ + 'Point Forecast': 'predictions', + 'Lo 80': 'low_80', + 'Hi 80': 'high_80', + 'Lo 95': 'low_95', + 'Hi 95': 'high_95', + }, + ) diff --git a/tests/config.py b/tests/config.py index 5c4c83c..fd3e115 100644 --- a/tests/config.py +++ b/tests/config.py @@ -14,10 +14,7 @@ NOON = 12 # `START` and `END` constitute a 15-day time span. # That implies a maximum `train_horizon` of `2` as that needs full 7-day weeks. 
START = datetime.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) -_end_day = (START + datetime.timedelta(weeks=2)).date() -END = datetime.datetime( - _end_day.year, _end_day.month, _end_day.day, config.SERVICE_END, 0, -) +END = datetime.datetime(YEAR, MONTH, 15, config.SERVICE_END, 0) # Default time steps, for example, for `OrderHistory` objects. LONG_TIME_STEP = 60 diff --git a/tests/forecasts/conftest.py b/tests/forecasts/conftest.py new file mode 100644 index 0000000..08c2439 --- /dev/null +++ b/tests/forecasts/conftest.py @@ -0,0 +1,76 @@ +"""Fixtures and globals for testing `urban_meal_delivery.forecasts`.""" + +import datetime as dt + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import config + + +# See remarks in `vertical_datetime_index` fixture. +VERTICAL_FREQUENCY = 7 * 12 + +# The default `ns` suggested for the STL method. +NS = 7 + + +@pytest.fixture +def horizontal_datetime_index(): + """A `pd.Index` with `DateTime` values. + + The times resemble a horizontal time series with a `frequency` of `7`. + All observations take place at `NOON`. + """ + first_start_at = dt.datetime( + test_config.YEAR, test_config.MONTH, test_config.DAY, test_config.NOON, 0, + ) + + gen = ( + start_at + for start_at in pd.date_range(first_start_at, test_config.END, freq='D') + ) + + index = pd.Index(gen) + index.name = 'start_at' + + assert len(index) == 15 # sanity check + + return index + + +@pytest.fixture +def horizontal_no_demand(horizontal_datetime_index): + """A horizontal time series of order totals when there was no demand.""" + return pd.Series(0, index=horizontal_datetime_index, name='order_totals') + + +@pytest.fixture +def vertical_datetime_index(): + """A `pd.Index` with `DateTime` values. + + The times resemble a vertical time series with a + `frequency` of `7` times the number of daily time steps, + which is `12` for `LONG_TIME_STEP` values. 
+ """ + gen = ( + start_at + for start_at in pd.date_range( + test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + index = pd.Index(gen) + index.name = 'start_at' + + assert len(index) == 15 * 12 # sanity check + + return index + + +@pytest.fixture +def vertical_no_demand(vertical_datetime_index): + """A vertical time series of order totals when there was no demand.""" + return pd.Series(0, index=vertical_datetime_index, name='order_totals') diff --git a/tests/forecasts/test_decomposition.py b/tests/forecasts/test_decomposition.py index 6c33d3e..1f20535 100644 --- a/tests/forecasts/test_decomposition.py +++ b/tests/forecasts/test_decomposition.py @@ -5,157 +5,149 @@ import math import pandas as pd import pytest -from tests import config as test_config -from urban_meal_delivery import config +from tests.forecasts.conftest import NS +from tests.forecasts.conftest import VERTICAL_FREQUENCY from urban_meal_delivery.forecasts import decomposition -# See remarks in `datetime_index` fixture. -FREQUENCY = 7 * 12 - -# The default `ns` suggested for the STL method. -NS = 7 - - -@pytest.fixture -def datetime_index(): - """A `pd.Index` with `DateTime` values. - - The times resemble a vertical time series with a - `frequency` of `7` times the number of daily time steps, - which is `12` for `LONG_TIME_STEP` values. 
- """ - gen = ( - start_at - for start_at in pd.date_range( - test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', - ) - if config.SERVICE_START <= start_at.hour < config.SERVICE_END - ) - - index = pd.Index(gen) - index.name = 'start_at' - - return index - - -@pytest.fixture -def no_demand(datetime_index): - """A time series of order totals when there was no demand.""" - return pd.Series(0, index=datetime_index, name='order_totals') - - class TestInvalidArguments: """Test `stl()` with invalid arguments.""" - def test_no_nans_in_time_series(self, datetime_index): + def test_no_nans_in_time_series(self, vertical_datetime_index): """`stl()` requires a `time_series` without `NaN` values.""" - time_series = pd.Series(dtype=float, index=datetime_index) + time_series = pd.Series(dtype=float, index=vertical_datetime_index) with pytest.raises(ValueError, match='`NaN` values'): - decomposition.stl(time_series, frequency=FREQUENCY, ns=99) + decomposition.stl(time_series, frequency=VERTICAL_FREQUENCY, ns=99) - def test_ns_not_odd(self, no_demand): + def test_ns_not_odd(self, vertical_no_demand): """`ns` must be odd and `>= 7`.""" with pytest.raises(ValueError, match='`ns`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=8) + decomposition.stl(vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=8) @pytest.mark.parametrize('ns', [-99, -1, 1, 5]) - def test_ns_smaller_than_seven(self, no_demand, ns): + def test_ns_smaller_than_seven(self, vertical_no_demand, ns): """`ns` must be odd and `>= 7`.""" with pytest.raises(ValueError, match='`ns`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=ns) + decomposition.stl(vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=ns) - def test_nt_not_odd(self, no_demand): + def test_nt_not_odd(self, vertical_no_demand): """`nt` must be odd and `>= default_nt`.""" nt = 200 - default_nt = math.ceil((1.5 * FREQUENCY) / (1 - (1.5 / NS))) + default_nt = math.ceil((1.5 * VERTICAL_FREQUENCY) / (1 - (1.5 / NS))) 
assert nt > default_nt # sanity check with pytest.raises(ValueError, match='`nt`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nt=nt) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nt=nt, + ) @pytest.mark.parametrize('nt', [-99, -1, 0, 1, 99, 159]) - def test_nt_not_at_least_the_default(self, no_demand, nt): + def test_nt_not_at_least_the_default(self, vertical_no_demand, nt): """`nt` must be odd and `>= default_nt`.""" # `default_nt` becomes 161. - default_nt = math.ceil((1.5 * FREQUENCY) / (1 - (1.5 / NS))) + default_nt = math.ceil((1.5 * VERTICAL_FREQUENCY) / (1 - (1.5 / NS))) assert nt < default_nt # sanity check with pytest.raises(ValueError, match='`nt`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nt=nt) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nt=nt, + ) - def test_nl_not_odd(self, no_demand): + def test_nl_not_odd(self, vertical_no_demand): """`nl` must be odd and `>= frequency`.""" nl = 200 - assert nl > FREQUENCY # sanity check + assert nl > VERTICAL_FREQUENCY # sanity check with pytest.raises(ValueError, match='`nl`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nl=nl) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nl=nl, + ) - def test_nl_at_least_the_frequency(self, no_demand): + def test_nl_at_least_the_frequency(self, vertical_no_demand): """`nl` must be odd and `>= frequency`.""" nl = 77 - assert nl < FREQUENCY # sanity check + assert nl < VERTICAL_FREQUENCY # sanity check with pytest.raises(ValueError, match='`nl`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, nl=nl) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nl=nl, + ) - def test_ds_not_zero_or_one(self, no_demand): + def test_ds_not_zero_or_one(self, vertical_no_demand): """`ds` must be `0` or `1`.""" with pytest.raises(ValueError, match='`ds`'): - decomposition.stl(no_demand, 
frequency=FREQUENCY, ns=NS, ds=2) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, ds=2, + ) - def test_dt_not_zero_or_one(self, no_demand): + def test_dt_not_zero_or_one(self, vertical_no_demand): """`dt` must be `0` or `1`.""" with pytest.raises(ValueError, match='`dt`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, dt=2) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, dt=2, + ) - def test_dl_not_zero_or_one(self, no_demand): + def test_dl_not_zero_or_one(self, vertical_no_demand): """`dl` must be `0` or `1`.""" with pytest.raises(ValueError, match='`dl`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, dl=2) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, dl=2, + ) @pytest.mark.parametrize('js', [-1, 0]) - def test_js_not_positive(self, no_demand, js): + def test_js_not_positive(self, vertical_no_demand, js): """`js` must be positive.""" with pytest.raises(ValueError, match='`js`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, js=js) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, js=js, + ) @pytest.mark.parametrize('jt', [-1, 0]) - def test_jt_not_positive(self, no_demand, jt): + def test_jt_not_positive(self, vertical_no_demand, jt): """`jt` must be positive.""" with pytest.raises(ValueError, match='`jt`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, jt=jt) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, jt=jt, + ) @pytest.mark.parametrize('jl', [-1, 0]) - def test_jl_not_positive(self, no_demand, jl): + def test_jl_not_positive(self, vertical_no_demand, jl): """`jl` must be positive.""" with pytest.raises(ValueError, match='`jl`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, jl=jl) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, jl=jl, + ) @pytest.mark.parametrize('ni', [-1, 0]) - def 
test_ni_not_positive(self, no_demand, ni): + def test_ni_not_positive(self, vertical_no_demand, ni): """`ni` must be positive.""" with pytest.raises(ValueError, match='`ni`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, ni=ni) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, ni=ni, + ) - def test_no_not_non_negative(self, no_demand): + def test_no_not_non_negative(self, vertical_no_demand): """`no` must be non-negative.""" with pytest.raises(ValueError, match='`no`'): - decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS, no=-1) + decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, no=-1, + ) +@pytest.mark.r class TestValidArguments: """Test `stl()` with valid arguments.""" - def test_structure_of_returned_dataframe(self, no_demand): + def test_structure_of_returned_dataframe(self, vertical_no_demand): """`stl()` returns a `pd.DataFrame` with three columns.""" - result = decomposition.stl(no_demand, frequency=FREQUENCY, ns=NS) + result = decomposition.stl( + vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, + ) assert isinstance(result, pd.DataFrame) assert list(result.columns) == ['seasonal', 'trend', 'residual'] @@ -173,15 +165,15 @@ class TestValidArguments: @pytest.mark.parametrize('ni', [2, 3]) @pytest.mark.parametrize('no', [0, 1]) def test_decompose_time_series_with_no_demand( # noqa:WPS211,WPS216 - self, no_demand, nt, nl, ds, dt, dl, js, jt, jl, ni, no, # noqa:WPS110 + self, vertical_no_demand, nt, nl, ds, dt, dl, js, jt, jl, ni, no, # noqa:WPS110 ): """Decomposing a time series with no demand ... ... returns a `pd.DataFrame` with three columns holding only `0.0` values. 
""" decomposed = decomposition.stl( - no_demand, - frequency=FREQUENCY, + vertical_no_demand, + frequency=VERTICAL_FREQUENCY, ns=NS, nt=nt, nl=nl, diff --git a/tests/forecasts/test_methods.py b/tests/forecasts/test_methods.py new file mode 100644 index 0000000..43fdcaf --- /dev/null +++ b/tests/forecasts/test_methods.py @@ -0,0 +1,128 @@ +"""Test the `arima.predict()` and `ets.predict()` functions.""" + +import datetime as dt + +import pandas as pd +import pytest + +from tests import config as test_config +from tests.forecasts.conftest import VERTICAL_FREQUENCY +from urban_meal_delivery import config +from urban_meal_delivery.forecasts.methods import arima +from urban_meal_delivery.forecasts.methods import ets + + +@pytest.fixture +def forecast_interval(): + """A `pd.Index` with `DateTime` values ... + + ... that takes place one day after the `START`-`END` horizon and + resembles an entire day (`12` "start_at" values as we use `LONG_TIME_STEP`). + """ + future_day = test_config.END.date() + dt.timedelta(days=1) + first_start_at = dt.datetime( + future_day.year, future_day.month, future_day.day, config.SERVICE_START, 0, + ) + end_of_day = dt.datetime( + future_day.year, future_day.month, future_day.day, config.SERVICE_END, 0, + ) + + gen = ( + start_at + for start_at in pd.date_range( + first_start_at, end_of_day, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + index = pd.Index(gen) + index.name = 'start_at' + + return index + + +@pytest.fixture +def forecast_time_step(): + """A `pd.Index` with one `DateTime` value, resembling `NOON`.""" + future_day = test_config.END.date() + dt.timedelta(days=1) + + start_at = dt.datetime( + future_day.year, future_day.month, future_day.day, test_config.NOON, 0, + ) + + index = pd.Index([start_at]) + index.name = 'start_at' + + return index + + +@pytest.mark.r +@pytest.mark.parametrize('func', [arima.predict, ets.predict]) +class TestMakePredictions: + """Make 
predictions with `arima.predict()` and `ets.predict()`.""" + + def test_training_data_contains_nan_values( + self, func, vertical_no_demand, forecast_interval, + ): + """`training_ts` must not contain `NaN` values.""" + vertical_no_demand.iloc[0] = pd.NA + + with pytest.raises(ValueError, match='must not contain `NaN`'): + func( + training_ts=vertical_no_demand, + forecast_interval=forecast_interval, + frequency=VERTICAL_FREQUENCY, + ) + + def test_structure_of_returned_dataframe( + self, func, vertical_no_demand, forecast_interval, + ): + """Both `.predict()` return a `pd.DataFrame` with five columns.""" + result = func( + training_ts=vertical_no_demand, + forecast_interval=forecast_interval, + frequency=VERTICAL_FREQUENCY, + ) + + assert isinstance(result, pd.DataFrame) + assert list(result.columns) == [ + 'predictions', + 'low_80', + 'high_80', + 'low_95', + 'high_95', + ] + + def test_predict_horizontal_time_series_with_no_demand( + self, func, horizontal_no_demand, forecast_time_step, + ): + """Predicting a horizontal time series with no demand ... + + ... returns a `pd.DataFrame` with five columns holding only `0.0` values. + """ + predictions = func( + training_ts=horizontal_no_demand, + forecast_interval=forecast_time_step, + frequency=7, + ) + + result = predictions.sum().sum() + + assert result == 0 + + def test_predict_vertical_time_series_with_no_demand( + self, func, vertical_no_demand, forecast_interval, + ): + """Predicting a vertical time series with no demand ... + + ... returns a `pd.DataFrame` with five columns holding only `0.0` values. 
+ """ + predictions = func( + training_ts=vertical_no_demand, + forecast_interval=forecast_interval, + frequency=VERTICAL_FREQUENCY, + ) + + result = predictions.sum().sum() + + assert result == 0 From f37d8adb9deb3a9ee9978a0be261a2477b8d1ce4 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Wed, 20 Jan 2021 16:57:39 +0100 Subject: [PATCH 41/72] Add confidence intervals to `Forecast` model - add `.low80`, `.high80`, `.low95`, and `.high95` columns - add check contraints for the confidence intervals - rename the `.method` column into `.model` for consistency --- migrations/env.py | 6 +- ...9_add_confidence_intervals_to_forecasts.py | 124 +++++++++ src/urban_meal_delivery/db/forecasts.py | 58 +++- .../forecasts/methods/arima.py | 14 +- .../forecasts/methods/ets.py | 14 +- tests/db/test_forecasts.py | 260 +++++++++++++++++- tests/forecasts/test_methods.py | 10 +- 7 files changed, 461 insertions(+), 25 deletions(-) create mode 100644 migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py diff --git a/migrations/env.py b/migrations/env.py index 4c62bc9..1669e2d 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -21,7 +21,11 @@ log_config.fileConfig(context.config.config_file_name) def include_object(obj, _name, type_, _reflected, _compare_to): """Only include the clean schema into --autogenerate migrations.""" - if type_ in {'table', 'column'} and obj.schema != umd_config.CLEAN_SCHEMA: + if ( # noqa:WPS337 + type_ in {'table', 'column'} + and hasattr(obj, 'schema') # noqa:WPS421 => fix for rare edge case + and obj.schema != umd_config.CLEAN_SCHEMA + ): return False return True diff --git a/migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py b/migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py new file mode 100644 index 0000000..ab352c1 --- /dev/null +++ b/migrations/versions/rev_20210120_16_26711cd3f9b9_add_confidence_intervals_to_forecasts.py @@ -0,0 
+1,124 @@ +"""Add confidence intervals to forecasts. + +Revision: #26711cd3f9b9 at 2021-01-20 16:08:21 +Revises: #e40623e10405 +""" + +import os + +import sqlalchemy as sa +from alembic import op +from sqlalchemy.dialects import postgresql + +from urban_meal_delivery import configuration + + +revision = '26711cd3f9b9' +down_revision = 'e40623e10405' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision 26711cd3f9b9.""" + op.alter_column( + 'forecasts', 'method', new_column_name='model', schema=config.CLEAN_SCHEMA, + ) + op.add_column( + 'forecasts', + sa.Column('low80', postgresql.DOUBLE_PRECISION(), nullable=True), + schema=config.CLEAN_SCHEMA, + ) + op.add_column( + 'forecasts', + sa.Column('high80', postgresql.DOUBLE_PRECISION(), nullable=True), + schema=config.CLEAN_SCHEMA, + ) + op.add_column( + 'forecasts', + sa.Column('low95', postgresql.DOUBLE_PRECISION(), nullable=True), + schema=config.CLEAN_SCHEMA, + ) + op.add_column( + 'forecasts', + sa.Column('high95', postgresql.DOUBLE_PRECISION(), nullable=True), + schema=config.CLEAN_SCHEMA, + ) + op.create_check_constraint( + op.f('ck_forecasts_on_ci_upper_and_lower_bounds'), + 'forecasts', + """ + NOT ( + low80 IS NULL AND high80 IS NOT NULL + OR + low80 IS NOT NULL AND high80 IS NULL + OR + low95 IS NULL AND high95 IS NOT NULL + OR + low95 IS NOT NULL AND high95 IS NULL + ) + """, + schema=config.CLEAN_SCHEMA, + ) + op.create_check_constraint( + op.f('prediction_must_be_within_ci'), + 'forecasts', + """ + NOT ( + prediction < low80 + OR + prediction < low95 + OR + prediction > high80 + OR + prediction > high95 + ) + """, + schema=config.CLEAN_SCHEMA, + ) + op.create_check_constraint( + op.f('ci_upper_bound_greater_than_lower_bound'), + 'forecasts', + """ + NOT ( + low80 > high80 + OR + low95 > high95 + ) + """, + schema=config.CLEAN_SCHEMA, + ) + op.create_check_constraint( + 
op.f('ci95_must_be_wider_than_ci80'), + 'forecasts', + """ + NOT ( + low80 < low95 + OR + high80 > high95 + ) + """, + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision e40623e10405.""" + op.alter_column( + 'forecasts', 'model', new_column_name='method', schema=config.CLEAN_SCHEMA, + ) + op.drop_column( + 'forecasts', 'low80', schema=config.CLEAN_SCHEMA, + ) + op.drop_column( + 'forecasts', 'high80', schema=config.CLEAN_SCHEMA, + ) + op.drop_column( + 'forecasts', 'low95', schema=config.CLEAN_SCHEMA, + ) + op.drop_column( + 'forecasts', 'high95', schema=config.CLEAN_SCHEMA, + ) diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index 0052ee8..65f12b5 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -21,10 +21,16 @@ class Forecast(meta.Base): start_at = sa.Column(sa.DateTime, nullable=False) time_step = sa.Column(sa.SmallInteger, nullable=False) training_horizon = sa.Column(sa.SmallInteger, nullable=False) - method = sa.Column(sa.Unicode(length=20), nullable=False) # noqa:WPS432 + model = sa.Column(sa.Unicode(length=20), nullable=False) # noqa:WPS432 # Raw `.prediction`s are stored as `float`s (possibly negative). # The rounding is then done on the fly if required. prediction = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) + # The confidence intervals are treated like the `.prediction`s + # but they may be nullable as some methods do not calculate them. 
+ low80 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True) + high80 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True) + low95 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True) + high95 = sa.Column(postgresql.DOUBLE_PRECISION, nullable=True) # Constraints __table_args__ = ( @@ -56,9 +62,57 @@ class Forecast(meta.Base): sa.CheckConstraint( 'training_horizon > 0', name='training_horizon_must_be_positive', ), + sa.CheckConstraint( + """ + NOT ( + low80 IS NULL AND high80 IS NOT NULL + OR + low80 IS NOT NULL AND high80 IS NULL + OR + low95 IS NULL AND high95 IS NOT NULL + OR + low95 IS NOT NULL AND high95 IS NULL + ) + """, + name='ci_upper_and_lower_bounds', + ), + sa.CheckConstraint( + """ + NOT ( + prediction < low80 + OR + prediction < low95 + OR + prediction > high80 + OR + prediction > high95 + ) + """, + name='prediction_must_be_within_ci', + ), + sa.CheckConstraint( + """ + NOT ( + low80 > high80 + OR + low95 > high95 + ) + """, + name='ci_upper_bound_greater_than_lower_bound', + ), + sa.CheckConstraint( + """ + NOT ( + low80 < low95 + OR + high80 > high95 + ) + """, + name='ci95_must_be_wider_than_ci80', + ), # There can be only one prediction per forecasting setting. 
sa.UniqueConstraint( - 'pixel_id', 'start_at', 'time_step', 'training_horizon', 'method', + 'pixel_id', 'start_at', 'time_step', 'training_horizon', 'model', ), ) diff --git a/src/urban_meal_delivery/forecasts/methods/arima.py b/src/urban_meal_delivery/forecasts/methods/arima.py index 18965b3..976df3e 100644 --- a/src/urban_meal_delivery/forecasts/methods/arima.py +++ b/src/urban_meal_delivery/forecasts/methods/arima.py @@ -25,8 +25,8 @@ def predict( seasonal_fit: if a seasonal ARIMA model should be fitted Returns: - predictions: point forecasts (i.e., the "predictions" column) and - confidence intervals (i.e, the four "low/high_80/95" columns) + predictions: point forecasts (i.e., the "prediction" column) and + confidence intervals (i.e, the four "low/high80/95" columns) Raises: ValueError: if `training_ts` contains `NaN` values @@ -67,10 +67,10 @@ def predict( return forecasts.rename( columns={ - 'Point Forecast': 'predictions', - 'Lo 80': 'low_80', - 'Hi 80': 'high_80', - 'Lo 95': 'low_95', - 'Hi 95': 'high_95', + 'Point Forecast': 'prediction', + 'Lo 80': 'low80', + 'Hi 80': 'high80', + 'Lo 95': 'low95', + 'Hi 95': 'high95', }, ) diff --git a/src/urban_meal_delivery/forecasts/methods/ets.py b/src/urban_meal_delivery/forecasts/methods/ets.py index d7af157..020e4a4 100644 --- a/src/urban_meal_delivery/forecasts/methods/ets.py +++ b/src/urban_meal_delivery/forecasts/methods/ets.py @@ -26,8 +26,8 @@ def predict( type ETS model should be fitted Returns: - predictions: point forecasts (i.e., the "predictions" column) and - confidence intervals (i.e, the four "low/high_80/95" columns) + predictions: point forecasts (i.e., the "prediction" column) and + confidence intervals (i.e, the four "low/high80/95" columns) Raises: ValueError: if `training_ts` contains `NaN` values @@ -68,10 +68,10 @@ def predict( return forecasts.rename( columns={ - 'Point Forecast': 'predictions', - 'Lo 80': 'low_80', - 'Hi 80': 'high_80', - 'Lo 95': 'low_95', - 'Hi 95': 'high_95', + 'Point 
Forecast': 'prediction', + 'Lo 80': 'low80', + 'Hi 80': 'high80', + 'Lo 95': 'low95', + 'Hi 95': 'high95', }, ) diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py index 23765db..426de7b 100644 --- a/tests/db/test_forecasts.py +++ b/tests/db/test_forecasts.py @@ -17,8 +17,12 @@ def forecast(pixel): start_at=datetime.datetime(2020, 1, 1, 12, 0), time_step=60, training_horizon=8, - method='hets', + model='hets', prediction=12.3, + low80=1.23, + high80=123.4, + low95=0.123, + high95=1234.5, ) @@ -127,6 +131,252 @@ class TestConstraints: ): db_session.commit() + def test_set_prediction_without_ci(self, db_session, forecast): + """Sanity check to see that the check constraint ... + + ... "prediction_must_be_within_ci" is not triggered. + """ + forecast.low80 = None + forecast.high80 = None + forecast.low95 = None + forecast.high95 = None + + db_session.add(forecast) + db_session.commit() + + def test_ci80_with_missing_low(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.high80 is not None + + forecast.low80 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_ci95_with_missing_low(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.high95 is not None + + forecast.low95 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_ci80_with_missing_high(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + + forecast.high80 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_ci95_with_missing_high(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + + 
forecast.high95 = None + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_and_lower_bounds', + ): + db_session.commit() + + def test_prediction_smaller_than_low80_with_ci95_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + assert forecast.high95 is not None + + forecast.prediction = forecast.low80 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_smaller_than_low80_without_ci95_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low95 = None + forecast.high95 = None + + forecast.prediction = forecast.low80 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_smaller_than_low95_with_ci80_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.high80 is not None + + forecast.prediction = forecast.low95 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_smaller_than_low95_without_ci80_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low80 = None + forecast.high80 = None + + forecast.prediction = forecast.low95 - 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high80_with_ci95_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + assert forecast.high95 is not None + + forecast.prediction = forecast.high80 + 0.001 + db_session.add(forecast) + + 
with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high80_without_ci95_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low95 = None + forecast.high95 = None + + forecast.prediction = forecast.high80 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high95_with_ci80_set(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.high80 is not None + + forecast.prediction = forecast.high95 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_prediction_greater_than_high95_without_ci80_set( + self, db_session, forecast, + ): + """Insert an instance with invalid data.""" + forecast.low80 = None + forecast.high80 = None + + forecast.prediction = forecast.high95 + 0.001 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='prediction_must_be_within_ci', + ): + db_session.commit() + + def test_ci80_upper_bound_greater_than_lower_bound(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.high80 is not None + + # Do not trigger the "ci95_must_be_wider_than_ci80" constraint. 
+ forecast.low95 = None + forecast.high95 = None + + forecast.low80, forecast.high80 = ( # noqa:WPS414 + forecast.high80, + forecast.low80, + ) + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_bound_greater_than_lower_bound', + ): + db_session.commit() + + def test_ci95_upper_bound_greater_than_lower_bound(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low95 is not None + assert forecast.high95 is not None + + # Do not trigger the "ci95_must_be_wider_than_ci80" constraint. + forecast.low80 = None + forecast.high80 = None + + forecast.low95, forecast.high95 = ( # noqa:WPS414 + forecast.high95, + forecast.low95, + ) + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci_upper_bound_greater_than_lower_bound', + ): + db_session.commit() + + def test_ci95_is_wider_than_ci80_at_low_end(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.low80 is not None + assert forecast.low95 is not None + + forecast.low80, forecast.low95 = (forecast.low95, forecast.low80) # noqa:WPS414 + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci95_must_be_wider_than_ci80', + ): + db_session.commit() + + def test_ci95_is_wider_than_ci80_at_high_end(self, db_session, forecast): + """Insert an instance with invalid data.""" + assert forecast.high80 is not None + assert forecast.high95 is not None + + forecast.high80, forecast.high95 = ( # noqa:WPS414 + forecast.high95, + forecast.high80, + ) + + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='ci95_must_be_wider_than_ci80', + ): + db_session.commit() + def test_two_predictions_for_same_forecasting_setting(self, db_session, forecast): """Insert a record that violates a unique constraint.""" db_session.add(forecast) @@ -137,8 +387,12 @@ class TestConstraints: start_at=forecast.start_at, time_step=forecast.time_step, 
training_horizon=forecast.training_horizon, - method=forecast.method, - prediction=99.9, + model=forecast.model, + prediction=2, + low80=1, + high80=3, + low95=0, + high95=4, ) db_session.add(another_forecast) diff --git a/tests/forecasts/test_methods.py b/tests/forecasts/test_methods.py index 43fdcaf..9b2f0f8 100644 --- a/tests/forecasts/test_methods.py +++ b/tests/forecasts/test_methods.py @@ -86,11 +86,11 @@ class TestMakePredictions: assert isinstance(result, pd.DataFrame) assert list(result.columns) == [ - 'predictions', - 'low_80', - 'high_80', - 'low_95', - 'high_95', + 'prediction', + 'low80', + 'high80', + 'low95', + 'high95', ] def test_predict_horizontal_time_series_with_no_demand( From 23391003718193836bce05d227d5d68bbcc557db Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 21 Jan 2021 11:47:22 +0100 Subject: [PATCH 42/72] Add folium to the dependencies --- poetry.lock | 37 ++++++++++++++++++++++++++++++++++++- pyproject.toml | 3 ++- 2 files changed, 38 insertions(+), 2 deletions(-) diff --git a/poetry.lock b/poetry.lock index 9c12400..f594c91 100644 --- a/poetry.lock +++ b/poetry.lock @@ -191,6 +191,17 @@ packaging = "*" six = ">=1.9.0" webencodings = "*" +[[package]] +name = "branca" +version = "0.4.2" +description = "Generate complex HTML+JS pages with Python" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +jinja2 = "*" + [[package]] name = "certifi" version = "2020.12.5" @@ -579,6 +590,23 @@ python-versions = "*" [package.dependencies] flake8 = "*" +[[package]] +name = "folium" +version = "0.12.1" +description = "Make beautiful maps with Leaflet.js & Python" +category = "main" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +branca = ">=0.3.0" +jinja2 = ">=2.9" +numpy = "*" +requests = "*" + +[package.extras] +testing = ["pytest"] + [[package]] name = "geographiclib" version = "1.50" @@ -1883,7 +1911,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] 
lock-version = "1.1" python-versions = "^3.8" -content-hash = "9be7d168525c85958389c8edb4686567cbb4de0e8780168b91e387e1b0581ec3" +content-hash = "8a85f19d497aeccaf90e3a30e30a9a92dc09f6ee7fad010972e6e7a76b08209e" [metadata.files] alabaster = [ @@ -1969,6 +1997,10 @@ bleach = [ {file = "bleach-3.2.1-py2.py3-none-any.whl", hash = "sha256:9f8ccbeb6183c6e6cddea37592dfb0167485c1e3b13b3363bc325aa8bda3adbd"}, {file = "bleach-3.2.1.tar.gz", hash = "sha256:52b5919b81842b1854196eaae5ca29679a2f2e378905c346d3ca8227c2c66080"}, ] +branca = [ + {file = "branca-0.4.2-py3-none-any.whl", hash = "sha256:62c2e777f074fc1830cd40ba9e650beb941861075980babafead8d97856b1a4b"}, + {file = "branca-0.4.2.tar.gz", hash = "sha256:c111453617b17ab2bda60a4cd71787d6f2b59c85cdf71ab160a737606ac66c31"}, +] certifi = [ {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, @@ -2176,6 +2208,9 @@ flake8-string-format = [ {file = "flake8-string-format-0.2.3.tar.gz", hash = "sha256:774d56103d9242ed968897455ef49b7d6de272000cfa83de5814273a868832f1"}, {file = "flake8_string_format-0.2.3-py2.py3-none-any.whl", hash = "sha256:68ea72a1a5b75e7018cae44d14f32473c798cf73d75cbaed86c6a9a907b770b2"}, ] +folium = [ + {file = "folium-0.12.1-py2.py3-none-any.whl", hash = "sha256:3d2c48dd6ffe5327975bbfd718468c4e81db9f2844c26e574f878adf4c08b644"}, +] geographiclib = [ {file = "geographiclib-1.50-py3-none-any.whl", hash = "sha256:51cfa698e7183792bce27d8fb63ac8e83689cd8170a730bf35e1a5c5bf8849b9"}, {file = "geographiclib-1.50.tar.gz", hash = "sha256:12bd46ee7ec25b291ea139b17aa991e7ef373e21abd053949b75c0e9ca55c632"}, diff --git a/pyproject.toml b/pyproject.toml index 2cd8747..84eabe2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -31,8 +31,10 @@ python = "^3.8" Shapely = "^1.7.1" alembic = "^1.4.2" click = "^7.1.2" +folium = 
"^0.12.1" pandas = "^1.1.0" psycopg2 = "^2.8.5" # adapter for PostgreSQL +rpy2 = "^3.4.1" sqlalchemy = "^1.3.18" utm = "^0.7.0" @@ -42,7 +44,6 @@ jupyterlab = { version="^2.2.2", optional=true } nb_black = { version="^1.0.7", optional=true } numpy = { version="^1.19.1", optional=true } pytz = { version="^2020.1", optional=true } -rpy2 = "^3.4.1" [tool.poetry.extras] research = [ From a1da1e9af814733581be194ff82b9da1eadda536 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 21 Jan 2021 17:15:39 +0100 Subject: [PATCH 43/72] Add matplotlib to the dependencies --- poetry.lock | 144 ++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 144 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index f594c91..469cd87 100644 --- a/poetry.lock +++ b/poetry.lock @@ -275,6 +275,17 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] toml = ["toml"] +[[package]] +name = "cycler" +version = "0.10.0" +description = "Composable style cycles" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +six = "*" + [[package]] name = "darglint" version = "1.5.8" @@ -900,6 +911,14 @@ requests = "*" [package.extras] test = ["pytest", "requests"] +[[package]] +name = "kiwisolver" +version = "1.3.1" +description = "A fast implementation of the Cassowary constraint solver" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "mako" version = "1.1.3" @@ -923,6 +942,22 @@ category = "main" optional = false python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +[[package]] +name = "matplotlib" +version = "3.3.3" +description = "Python plotting package" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +cycler = ">=0.10" +kiwisolver = ">=1.0.1" +numpy = ">=1.15" +pillow = ">=6.2.0" +pyparsing = ">=2.0.3,<2.0.4 || >2.0.4,<2.1.2 || >2.1.2,<2.1.6 || >2.1.6" +python-dateutil = ">=2.1" + [[package]] 
name = "mccabe" version = "0.6.1" @@ -1203,6 +1238,14 @@ category = "main" optional = true python-versions = "*" +[[package]] +name = "pillow" +version = "8.1.0" +description = "Python Imaging Library (Fork)" +category = "main" +optional = false +python-versions = ">=3.6" + [[package]] name = "pluggy" version = "0.13.1" @@ -1911,7 +1954,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "8a85f19d497aeccaf90e3a30e30a9a92dc09f6ee7fad010972e6e7a76b08209e" +content-hash = "5493dc22f056cf2845ac6bd480c55092b1ceca8038312a551cad9fb76761b77a" [metadata.files] alabaster = [ @@ -2099,6 +2142,10 @@ coverage = [ {file = "coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, ] +cycler = [ + {file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"}, + {file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"}, +] darglint = [ {file = "darglint-1.5.8-py3-none-any.whl", hash = "sha256:2e1012945a09d19a15cc87f9d15e7b14c18473ec9cf7769c641951b348de1353"}, {file = "darglint-1.5.8.tar.gz", hash = "sha256:529f4969029d5ff5f74bfec48adc14b6f003409141f722b6cc4b787dddc8a4dd"}, @@ -2295,6 +2342,40 @@ jupyterlab-server = [ {file = "jupyterlab_server-1.2.0-py3-none-any.whl", hash = "sha256:55d256077bf13e5bc9e8fbd5aac51bef82f6315111cec6b712b9a5ededbba924"}, {file = "jupyterlab_server-1.2.0.tar.gz", hash = "sha256:5431d9dde96659364b7cc877693d5d21e7b80cea7ae3959ecc2b87518e5f5d8c"}, ] +kiwisolver = [ + {file = "kiwisolver-1.3.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:fd34fbbfbc40628200730bc1febe30631347103fc8d3d4fa012c21ab9c11eca9"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:d3155d828dec1d43283bd24d3d3e0d9c7c350cdfcc0bd06c0ad1209c1bbc36d0"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5a7a7dbff17e66fac9142ae2ecafb719393aaee6a3768c9de2fd425c63b53e21"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:f8d6f8db88049a699817fd9178782867bf22283e3813064302ac59f61d95be05"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-manylinux2014_ppc64le.whl", hash = "sha256:5f6ccd3dd0b9739edcf407514016108e2280769c73a85b9e59aa390046dbf08b"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-win32.whl", hash = "sha256:225e2e18f271e0ed8157d7f4518ffbf99b9450fca398d561eb5c4a87d0986dd9"}, + {file = "kiwisolver-1.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:cf8b574c7b9aa060c62116d4181f3a1a4e821b2ec5cbfe3775809474113748d4"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:232c9e11fd7ac3a470d65cd67e4359eee155ec57e822e5220322d7b2ac84fbf0"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b38694dcdac990a743aa654037ff1188c7a9801ac3ccc548d3341014bc5ca278"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ca3820eb7f7faf7f0aa88de0e54681bddcb46e485beb844fcecbcd1c8bd01689"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c8fd0f1ae9d92b42854b2979024d7597685ce4ada367172ed7c09edf2cef9cb8"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-manylinux2014_ppc64le.whl", hash = "sha256:1e1bc12fb773a7b2ffdeb8380609f4f8064777877b2225dec3da711b421fda31"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-win32.whl", hash = "sha256:72c99e39d005b793fb7d3d4e660aed6b6281b502e8c1eaf8ee8346023c8e03bc"}, + {file = "kiwisolver-1.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:8be8d84b7d4f2ba4ffff3665bcd0211318aa632395a1a41553250484a871d454"}, + {file = "kiwisolver-1.3.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:31dfd2ac56edc0ff9ac295193eeaea1c0c923c0355bf948fbd99ed6018010b72"}, + {file = 
"kiwisolver-1.3.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:563c649cfdef27d081c84e72a03b48ea9408c16657500c312575ae9d9f7bc1c3"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:78751b33595f7f9511952e7e60ce858c6d64db2e062afb325985ddbd34b5c131"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a357fd4f15ee49b4a98b44ec23a34a95f1e00292a139d6015c11f55774ef10de"}, + {file = "kiwisolver-1.3.1-cp38-cp38-manylinux2014_ppc64le.whl", hash = "sha256:5989db3b3b34b76c09253deeaf7fbc2707616f130e166996606c284395da3f18"}, + {file = "kiwisolver-1.3.1-cp38-cp38-win32.whl", hash = "sha256:c08e95114951dc2090c4a630c2385bef681cacf12636fb0241accdc6b303fd81"}, + {file = "kiwisolver-1.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:44a62e24d9b01ba94ae7a4a6c3fb215dc4af1dde817e7498d901e229aaf50e4e"}, + {file = "kiwisolver-1.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:50af681a36b2a1dee1d3c169ade9fdc59207d3c31e522519181e12f1b3ba7000"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a53d27d0c2a0ebd07e395e56a1fbdf75ffedc4a05943daf472af163413ce9598"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:834ee27348c4aefc20b479335fd422a2c69db55f7d9ab61721ac8cd83eb78882"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5c3e6455341008a054cccee8c5d24481bcfe1acdbc9add30aa95798e95c65621"}, + {file = "kiwisolver-1.3.1-cp39-cp39-manylinux2014_ppc64le.whl", hash = "sha256:acef3d59d47dd85ecf909c359d0fd2c81ed33bdff70216d3956b463e12c38a54"}, + {file = "kiwisolver-1.3.1-cp39-cp39-win32.whl", hash = "sha256:c5518d51a0735b1e6cee1fdce66359f8d2b59c3ca85dc2b0813a8aa86818a030"}, + {file = "kiwisolver-1.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:b9edd0110a77fc321ab090aaa1cfcaba1d8499850a12848b81be2222eab648f6"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:0cd53f403202159b44528498de18f9285b04482bab2a6fc3f5dd8dbb9352e30d"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:33449715e0101e4d34f64990352bce4095c8bf13bed1b390773fc0a7295967b3"}, + {file = "kiwisolver-1.3.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:401a2e9afa8588589775fe34fc22d918ae839aaaf0c0e96441c0fdbce6d8ebe6"}, + {file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"}, +] mako = [ {file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"}, {file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"}, @@ -2334,6 +2415,33 @@ markupsafe = [ {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] +matplotlib = [ + {file = "matplotlib-3.3.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b2a5e1f637a92bb6f3526cc54cc8af0401112e81ce5cba6368a1b7908f9e18bc"}, + {file = "matplotlib-3.3.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c586ac1d64432f92857c3cf4478cfb0ece1ae18b740593f8a39f2f0b27c7fda5"}, + {file = "matplotlib-3.3.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9b03722c89a43a61d4d148acfc89ec5bb54cd0fd1539df25b10eb9c5fa6c393a"}, + {file = "matplotlib-3.3.3-cp36-cp36m-win32.whl", hash = "sha256:2c2c5041608cb75c39cbd0ed05256f8a563e144234a524c59d091abbfa7a868f"}, + {file = "matplotlib-3.3.3-cp36-cp36m-win_amd64.whl", hash = "sha256:c092fc4673260b1446b8578015321081d5db73b94533fe4bf9b69f44e948d174"}, + {file = "matplotlib-3.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:27c9393fada62bd0ad7c730562a0fecbd3d5aaa8d9ed80ba7d3ebb8abc4f0453"}, + {file = "matplotlib-3.3.3-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:b8ba2a1dbb4660cb469fe8e1febb5119506059e675180c51396e1723ff9b79d9"}, + {file = "matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0caa687fce6174fef9b27d45f8cc57cbc572e04e98c81db8e628b12b563d59a2"}, + {file = "matplotlib-3.3.3-cp37-cp37m-win32.whl", hash = "sha256:b7b09c61a91b742cb5460b72efd1fe26ef83c1c704f666e0af0df156b046aada"}, + {file = "matplotlib-3.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6ffd2d80d76df2e5f9f0c0140b5af97e3b87dd29852dcdb103ec177d853ec06b"}, + {file = "matplotlib-3.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5111d6d47a0f5b8f3e10af7a79d5e7eb7e73a22825391834734274c4f312a8a0"}, + {file = "matplotlib-3.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a4fe54eab2c7129add75154823e6543b10261f9b65b2abe692d68743a4999f8c"}, + {file = "matplotlib-3.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:83e6c895d93fdf93eeff1a21ee96778ba65ef258e5d284160f7c628fee40c38f"}, + {file = "matplotlib-3.3.3-cp38-cp38-win32.whl", hash = "sha256:b26c472847911f5a7eb49e1c888c31c77c4ddf8023c1545e0e8e0367ba74fb15"}, + {file = "matplotlib-3.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:09225edca87a79815822eb7d3be63a83ebd4d9d98d5aa3a15a94f4eee2435954"}, + {file = "matplotlib-3.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eb6b6700ea454bb88333d98601e74928e06f9669c1ea231b4c4c666c1d7701b4"}, + {file = "matplotlib-3.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2d31aff0c8184b05006ad756b9a4dc2a0805e94d28f3abc3187e881b6673b302"}, + {file = "matplotlib-3.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d082f77b4ed876ae94a9373f0db96bf8768a7cca6c58fc3038f94e30ffde1880"}, + {file = "matplotlib-3.3.3-cp39-cp39-win32.whl", hash = "sha256:e71cdd402047e657c1662073e9361106c6981e9621ab8c249388dfc3ec1de07b"}, + {file = "matplotlib-3.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:756ee498b9ba35460e4cbbd73f09018e906daa8537fff61da5b5bf8d5e9de5c7"}, + {file = "matplotlib-3.3.3-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:7ad44f2c74c50567c694ee91c6fa16d67e7c8af6f22c656b80469ad927688457"}, + {file = "matplotlib-3.3.3-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:3a4c3e9be63adf8e9b305aa58fb3ec40ecc61fd0f8fd3328ce55bc30e7a2aeb0"}, + {file = "matplotlib-3.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:746897fbd72bd462b888c74ed35d812ca76006b04f717cd44698cdfc99aca70d"}, + {file = "matplotlib-3.3.3-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:5ed3d3342698c2b1f3651f8ea6c099b0f196d16ee00e33dc3a6fee8cb01d530a"}, + {file = "matplotlib-3.3.3.tar.gz", hash = "sha256:b1b60c6476c4cfe9e5cf8ab0d3127476fd3d5f05de0f343a452badaad0e4bdec"}, +] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, @@ -2486,6 +2594,40 @@ pickleshare = [ {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, ] +pillow = [ + {file = "Pillow-8.1.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:d355502dce85ade85a2511b40b4c61a128902f246504f7de29bbeec1ae27933a"}, + {file = "Pillow-8.1.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:93a473b53cc6e0b3ce6bf51b1b95b7b1e7e6084be3a07e40f79b42e83503fbf2"}, + {file = "Pillow-8.1.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2353834b2c49b95e1313fb34edf18fca4d57446675d05298bb694bca4b194174"}, + {file = "Pillow-8.1.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:1d208e670abfeb41b6143537a681299ef86e92d2a3dac299d3cd6830d5c7bded"}, + {file = "Pillow-8.1.0-cp36-cp36m-win32.whl", hash = "sha256:dd9eef866c70d2cbbea1ae58134eaffda0d4bfea403025f4db6859724b18ab3d"}, + {file = "Pillow-8.1.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:b09e10ec453de97f9a23a5aa5e30b334195e8d2ddd1ce76cc32e52ba63c8b31d"}, + {file = "Pillow-8.1.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:b02a0b9f332086657852b1f7cb380f6a42403a6d9c42a4c34a561aa4530d5234"}, + {file = "Pillow-8.1.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:ca20739e303254287138234485579b28cb0d524401f83d5129b5ff9d606cb0a8"}, + {file = "Pillow-8.1.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:604815c55fd92e735f9738f65dabf4edc3e79f88541c221d292faec1904a4b17"}, + {file = "Pillow-8.1.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:cf6e33d92b1526190a1de904df21663c46a456758c0424e4f947ae9aa6088bf7"}, + {file = "Pillow-8.1.0-cp37-cp37m-win32.whl", hash = "sha256:47c0d93ee9c8b181f353dbead6530b26980fe4f5485aa18be8f1fd3c3cbc685e"}, + {file = "Pillow-8.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:96d4dc103d1a0fa6d47c6c55a47de5f5dafd5ef0114fa10c85a1fd8e0216284b"}, + {file = "Pillow-8.1.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:7916cbc94f1c6b1301ac04510d0881b9e9feb20ae34094d3615a8a7c3db0dcc0"}, + {file = "Pillow-8.1.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:3de6b2ee4f78c6b3d89d184ade5d8fa68af0848f9b6b6da2b9ab7943ec46971a"}, + {file = "Pillow-8.1.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cdbbe7dff4a677fb555a54f9bc0450f2a21a93c5ba2b44e09e54fcb72d2bd13d"}, + {file = "Pillow-8.1.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:f50e7a98b0453f39000619d845be8b06e611e56ee6e8186f7f60c3b1e2f0feae"}, + {file = "Pillow-8.1.0-cp38-cp38-win32.whl", hash = "sha256:cb192176b477d49b0a327b2a5a4979552b7a58cd42037034316b8018ac3ebb59"}, + {file = "Pillow-8.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:6c5275bd82711cd3dcd0af8ce0bb99113ae8911fc2952805f1d012de7d600a4c"}, + {file = "Pillow-8.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:165c88bc9d8dba670110c689e3cc5c71dbe4bfb984ffa7cbebf1fac9554071d6"}, + {file = "Pillow-8.1.0-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:5e2fe3bb2363b862671eba632537cd3a823847db4d98be95690b7e382f3d6378"}, + {file = "Pillow-8.1.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:7612520e5e1a371d77e1d1ca3a3ee6227eef00d0a9cddb4ef7ecb0b7396eddf7"}, + {file = "Pillow-8.1.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:d673c4990acd016229a5c1c4ee8a9e6d8f481b27ade5fc3d95938697fa443ce0"}, + {file = "Pillow-8.1.0-cp39-cp39-win32.whl", hash = "sha256:dc577f4cfdda354db3ae37a572428a90ffdbe4e51eda7849bf442fb803f09c9b"}, + {file = "Pillow-8.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:22d070ca2e60c99929ef274cfced04294d2368193e935c5d6febfd8b601bf865"}, + {file = "Pillow-8.1.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:a3d3e086474ef12ef13d42e5f9b7bbf09d39cf6bd4940f982263d6954b13f6a9"}, + {file = "Pillow-8.1.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = "sha256:731ca5aabe9085160cf68b2dbef95fc1991015bc0a3a6ea46a371ab88f3d0913"}, + {file = "Pillow-8.1.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:bba80df38cfc17f490ec651c73bb37cd896bc2400cfba27d078c2135223c1206"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:c3d911614b008e8a576b8e5303e3db29224b455d3d66d1b2848ba6ca83f9ece9"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:39725acf2d2e9c17356e6835dccebe7a697db55f25a09207e38b835d5e1bc032"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:81c3fa9a75d9f1afafdb916d5995633f319db09bd773cb56b8e39f1e98d90820"}, + {file = "Pillow-8.1.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:b6f00ad5ebe846cc91763b1d0c6d30a8042e02b2316e27b05de04fa6ec831ec5"}, + {file = "Pillow-8.1.0.tar.gz", hash = "sha256:887668e792b7edbfb1d3c9d8b5d8c859269a0f0eba4dda562adb95500f60dbba"}, +] pluggy = [ {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, {file = "pluggy-0.13.1.tar.gz", hash = 
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, diff --git a/pyproject.toml b/pyproject.toml index 84eabe2..991dc06 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -32,6 +32,7 @@ Shapely = "^1.7.1" alembic = "^1.4.2" click = "^7.1.2" folium = "^0.12.1" +matplotlib = "^3.3.3" pandas = "^1.1.0" psycopg2 = "^2.8.5" # adapter for PostgreSQL rpy2 = "^3.4.1" From de3e489b39d6661124fec9d74363ffe6dce2fd9e Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 24 Jan 2021 18:31:02 +0100 Subject: [PATCH 44/72] Adjust flake8 to not consider constants magic --- .../rev_20200806_23_f11cd76d2f45_create_the_database.py | 8 ++++---- ...rev_20210106_19_e40623e10405_add_demand_forecasting.py | 2 +- setup.cfg | 6 ++---- src/urban_meal_delivery/db/addresses.py | 8 +++----- src/urban_meal_delivery/db/forecasts.py | 2 +- src/urban_meal_delivery/db/grids.py | 2 +- src/urban_meal_delivery/db/restaurants.py | 2 +- 7 files changed, 13 insertions(+), 17 deletions(-) diff --git a/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py b/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py index a03e1dc..5f02843 100644 --- a/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py +++ b/migrations/versions/rev_20200806_23_f11cd76d2f45_create_the_database.py @@ -107,13 +107,13 @@ def upgrade(): sa.Column('id', sa.Integer(), autoincrement=False, nullable=False), sa.Column('primary_id', sa.Integer(), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('place_id', sa.Unicode(length=120), nullable=False), # noqa:WPS432 + sa.Column('place_id', sa.Unicode(length=120), nullable=False), sa.Column('latitude', postgresql.DOUBLE_PRECISION(), nullable=False), sa.Column('longitude', postgresql.DOUBLE_PRECISION(), nullable=False), sa.Column('city_id', sa.SmallInteger(), nullable=False), - sa.Column('city', sa.Unicode(length=25), nullable=False), # noqa:WPS432 + sa.Column('city', 
sa.Unicode(length=25), nullable=False), sa.Column('zip_code', sa.Integer(), nullable=False), - sa.Column('street', sa.Unicode(length=80), nullable=False), # noqa:WPS432 + sa.Column('street', sa.Unicode(length=80), nullable=False), sa.Column('floor', sa.SmallInteger(), nullable=True), sa.CheckConstraint( '-180 <= longitude AND longitude <= 180', @@ -192,7 +192,7 @@ def upgrade(): 'restaurants', sa.Column('id', sa.SmallInteger(), autoincrement=False, nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), - sa.Column('name', sa.Unicode(length=45), nullable=False), # noqa:WPS432 + sa.Column('name', sa.Unicode(length=45), nullable=False), sa.Column('address_id', sa.Integer(), nullable=False), sa.Column('estimated_prep_duration', sa.SmallInteger(), nullable=False), sa.CheckConstraint( diff --git a/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py b/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py index 1579190..e624259 100644 --- a/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py +++ b/migrations/versions/rev_20210106_19_e40623e10405_add_demand_forecasting.py @@ -31,7 +31,7 @@ def upgrade(): sa.Column('start_at', sa.DateTime(), nullable=False), sa.Column('time_step', sa.SmallInteger(), nullable=False), sa.Column('training_horizon', sa.SmallInteger(), nullable=False), - sa.Column('method', sa.Unicode(length=20), nullable=False), # noqa:WPS432 + sa.Column('method', sa.Unicode(length=20), nullable=False), sa.Column('prediction', postgresql.DOUBLE_PRECISION(), nullable=False), sa.PrimaryKeyConstraint('id', name=op.f('pk_forecasts')), sa.ForeignKeyConstraint( diff --git a/setup.cfg b/setup.cfg index 8c3817b..800ade7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -105,6 +105,8 @@ extend-ignore = WPS412, # Allow multiple assignment, e.g., x = y = 123 WPS429, + # There are no magic numbers. + WPS432, per-file-ignores = # Top-levels of a sub-packages are intended to import a lot. 
@@ -137,8 +139,6 @@ per-file-ignores = src/urban_meal_delivery/configuration.py: # Allow upper case class variables within classes. WPS115, - # Numbers are normal in config files. - WPS432, src/urban_meal_delivery/forecasts/decomposition.py: # The module does not have a high cognitive complexity. WPS232, @@ -166,8 +166,6 @@ per-file-ignores = WPS402, # Allow closures. WPS430, - # Numbers are normal in test cases as expected results. - WPS432, # When testing, it is normal to use implementation details. WPS437, diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index d86518d..dad5b72 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -18,15 +18,13 @@ class Address(meta.Base): id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) # noqa:WPS125 _primary_id = sa.Column('primary_id', sa.Integer, nullable=False, index=True) created_at = sa.Column(sa.DateTime, nullable=False) - place_id = sa.Column( - sa.Unicode(length=120), nullable=False, index=True, # noqa:WPS432 - ) + place_id = sa.Column(sa.Unicode(length=120), nullable=False, index=True) latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) longitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) city_id = sa.Column(sa.SmallInteger, nullable=False, index=True) - city_name = sa.Column('city', sa.Unicode(length=25), nullable=False) # noqa:WPS432 + city_name = sa.Column('city', sa.Unicode(length=25), nullable=False) zip_code = sa.Column(sa.Integer, nullable=False, index=True) - street = sa.Column(sa.Unicode(length=80), nullable=False) # noqa:WPS432 + street = sa.Column(sa.Unicode(length=80), nullable=False) floor = sa.Column(sa.SmallInteger) # Constraints diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index 65f12b5..2edb695 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -21,7 +21,7 @@ class 
Forecast(meta.Base): start_at = sa.Column(sa.DateTime, nullable=False) time_step = sa.Column(sa.SmallInteger, nullable=False) training_horizon = sa.Column(sa.SmallInteger, nullable=False) - model = sa.Column(sa.Unicode(length=20), nullable=False) # noqa:WPS432 + model = sa.Column(sa.Unicode(length=20), nullable=False) # Raw `.prediction`s are stored as `float`s (possibly negative). # The rounding is then done on the fly if required. prediction = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index c1d7dd2..db03215 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -51,7 +51,7 @@ class Grid(meta.Base): @property def pixel_area(self) -> float: """The area of a `Pixel` on the grid in square kilometers.""" - return round((self.side_length ** 2) / 1_000_000, 1) # noqa:WPS432 + return round((self.side_length ** 2) / 1_000_000, 1) @classmethod def gridify(cls, city: db.City, side_length: int) -> db.Grid: diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index d427540..b17cae7 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -21,7 +21,7 @@ class Restaurant(meta.Base): sa.SmallInteger, primary_key=True, autoincrement=False, ) created_at = sa.Column(sa.DateTime, nullable=False) - name = sa.Column(sa.Unicode(length=45), nullable=False) # noqa:WPS432 + name = sa.Column(sa.Unicode(length=45), nullable=False) address_id = sa.Column(sa.Integer, nullable=False, index=True) estimated_prep_duration = sa.Column(sa.SmallInteger, nullable=False) From f36fffdd4d33c4da7f9fc2958c8fa09583008bad Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 24 Jan 2021 18:32:07 +0100 Subject: [PATCH 45/72] Add pytest-mock to the dev dependencies --- poetry.lock | 20 +++++++++++++++++++- pyproject.toml | 1 + 2 files changed, 20 insertions(+), 1 deletion(-) 
diff --git a/poetry.lock b/poetry.lock index 469cd87..01b57f1 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1425,6 +1425,20 @@ python-versions = "*" [package.dependencies] pytest = ">=2.6.0" +[[package]] +name = "pytest-mock" +version = "3.5.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +category = "dev" +optional = false +python-versions = ">=3.5" + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "tox", "pytest-asyncio"] + [[package]] name = "pytest-randomly" version = "3.5.0" @@ -1954,7 +1968,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "5493dc22f056cf2845ac6bd480c55092b1ceca8038312a551cad9fb76761b77a" +content-hash = "1a67cb850c9d8b35104d7429caf8f54c3fa0a2888ab4d8f54dbc3901afb14717" [metadata.files] alabaster = [ @@ -2707,6 +2721,10 @@ pytest-cov = [ pytest-env = [ {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, ] +pytest-mock = [ + {file = "pytest-mock-3.5.1.tar.gz", hash = "sha256:a1e2aba6af9560d313c642dae7e00a2a12b022b80301d9d7fc8ec6858e1dd9fc"}, + {file = "pytest_mock-3.5.1-py3-none-any.whl", hash = "sha256:379b391cfad22422ea2e252bdfc008edd08509029bcde3c25b2c0bd741e0424e"}, +] pytest-randomly = [ {file = "pytest-randomly-3.5.0.tar.gz", hash = "sha256:440cec143fd9b0adeb072006c71e0294402a2bc2ccd08079c2341087ba4cf2d1"}, {file = "pytest_randomly-3.5.0-py3-none-any.whl", hash = "sha256:9db10d160237f3f8ee60cef72e4cb9ea88d2893c9dd5c8aa334b060cdeb67c3a"}, diff --git a/pyproject.toml b/pyproject.toml index 991dc06..7c02ff1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -81,6 +81,7 @@ packaging = "^20.4" # used to test the packaged version pytest = "^6.0.1" pytest-cov = "^2.10.0" pytest-env = "^0.6.2" +pytest-mock = "^3.5.1" pytest-randomly = "^3.5.0" xdoctest = { version="^0.13.0", extras=["optional"] } From 
0c1ff5338da2f277609b7e4393435d7d33637d96 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 24 Jan 2021 18:35:14 +0100 Subject: [PATCH 46/72] Check if `predict_at/day` is in `.totals` - this is a minor sanity check --- setup.cfg | 2 ++ src/urban_meal_delivery/forecasts/decomposition.py | 2 +- src/urban_meal_delivery/forecasts/timify.py | 12 +++++++++--- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git a/setup.cfg b/setup.cfg index 800ade7..37987fd 100644 --- a/setup.cfg +++ b/setup.cfg @@ -145,6 +145,8 @@ per-file-ignores = src/urban_meal_delivery/forecasts/timify.py: # No SQL injection as the inputs come from a safe source. S608, + # The many noqa's are ok. + WPS403, tests/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, diff --git a/src/urban_meal_delivery/forecasts/decomposition.py b/src/urban_meal_delivery/forecasts/decomposition.py index bf4466b..a0762d5 100644 --- a/src/urban_meal_delivery/forecasts/decomposition.py +++ b/src/urban_meal_delivery/forecasts/decomposition.py @@ -99,7 +99,7 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231 if ns % 2 == 0 or ns < 7: raise ValueError('`ns` must be odd and `>= 7`') - default_nt = math.ceil((1.5 * frequency) / (1 - (1.5 / ns))) # noqa:WPS432 + default_nt = math.ceil((1.5 * frequency) / (1 - (1.5 / ns))) if nt is not None: if nt % 2 == 0 or nt < default_nt: raise ValueError( diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 0220a58..cacee23 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -168,7 +168,7 @@ class OrderHistory: training time series, frequency, actual order count at `predict_at` Raises: - LookupError: `pixel_id` is not in the `grid` + LookupError: `pixel_id` not in `grid` or `predict_at` not in `.totals` RuntimeError: desired time series slice is not entirely in `.totals` """ try: @@ -211,6 +211,8 @@ class OrderHistory: raise RuntimeError('Not enough historic 
data for `predict_at`') actuals_ts = intra_pixel.loc[[predict_at], 'total_orders'] + if not len(actuals_ts): # pragma: no cover + raise LookupError('`predict_at` is not in the order history') return training_ts, frequency, actuals_ts @@ -235,7 +237,7 @@ class OrderHistory: training time series, frequency, actual order counts on `predict_day` Raises: - LookupError: `pixel_id` is not in the `grid` + LookupError: `pixel_id` not in `grid` or `predict_day` not in `.totals` RuntimeError: desired time series slice is not entirely in `.totals` """ try: @@ -295,6 +297,8 @@ class OrderHistory: first_prediction_at:last_prediction_at, # type: ignore 'total_orders', ] + if not len(actuals_ts): # pragma: no cover + raise LookupError('`predict_day` is not in the order history') return training_ts, frequency, actuals_ts @@ -319,7 +323,7 @@ class OrderHistory: training time series, frequency, actual order count at `predict_at` Raises: - LookupError: `pixel_id` is not in the `grid` + LookupError: `pixel_id` not in `grid` or `predict_at` not in `.totals` RuntimeError: desired time series slice is not entirely in `.totals` """ try: @@ -386,5 +390,7 @@ class OrderHistory: raise RuntimeError('Not enough historic data for `predict_day`') actuals_ts = intra_pixel.loc[[predict_at], 'total_orders'] + if not len(actuals_ts): # pragma: no cover + raise LookupError('`predict_at` is not in the order history') return training_ts, frequency, actuals_ts From 1bfc7db916d0ae21ca58bccba3f0bd9d799ada7f Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 24 Jan 2021 18:57:44 +0100 Subject: [PATCH 47/72] Make `Grid.gridify()` use only pickup addresses - ensure a `Restaurant` only has one unique `Order.pickup_address` - rework `Grid.gridify()` so that only pickup addresses are assigned into `Pixel`s - include database migrations to ensure the data adhere to these tighter constraints --- ...05e_remove_orders_from_restaurants_with.py | 398 ++++++++++++++++++ setup.cfg | 2 + 
src/urban_meal_delivery/console/gridify.py | 2 +- src/urban_meal_delivery/db/addresses.py | 2 +- src/urban_meal_delivery/db/grids.py | 16 +- src/urban_meal_delivery/db/orders.py | 20 +- src/urban_meal_delivery/db/pixels.py | 2 +- src/urban_meal_delivery/db/restaurants.py | 4 +- tests/console/test_gridify.py | 27 +- tests/db/test_grids.py | 66 +-- .../forecasts/timify/test_aggregate_orders.py | 41 +- 11 files changed, 519 insertions(+), 61 deletions(-) create mode 100644 migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py diff --git a/migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py b/migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py new file mode 100644 index 0000000..19c9223 --- /dev/null +++ b/migrations/versions/rev_20210123_15_e86290e7305e_remove_orders_from_restaurants_with.py @@ -0,0 +1,398 @@ +"""Remove orders from restaurants with invalid location ... + +... and also de-duplicate a couple of redundant addresses. + +Revision: #e86290e7305e at 2021-01-23 15:56:59 +Revises: #26711cd3f9b9 + +1) Remove orders + +Some restaurants have orders to be picked up at an address that +not their primary address. That is ok if that address is the location +of a second franchise. However, for a small number of restaurants +there is only exactly one order at that other address that often is +located far away from the restaurant's primary location. It looks +like a restaurant signed up with some invalid location that was then +corrected into the primary one. 
+ +Use the following SQL statement to obtain a list of these locations +before this migration is run: + +SELECT + orders.pickup_address_id, + COUNT(*) AS n_orders, + MIN(placed_at) as first_order_at, + MAX(placed_at) as last_order_at +FROM + {config.CLEAN_SCHEMA}.orders +LEFT OUTER JOIN + {config.CLEAN_SCHEMA}.restaurants + ON orders.restaurant_id = restaurants.id +WHERE + orders.pickup_address_id <> restaurants.address_id +GROUP BY + pickup_address_id; + +50 orders with such weird pickup addresses are removed with this migration. + + +2) De-duplicate addresses + +Five restaurants have two pickup addresses that are actually the same location. + +The following SQL statement shows them before this migration is run: + +SELECT + orders.restaurant_id, + restaurants.name, + restaurants.address_id AS primary_address_id, + addresses.id AS address_id, + addresses.street, + COUNT(*) AS n_orders +FROM + {config.CLEAN_SCHEMA}.orders +LEFT OUTER JOIN + {config.CLEAN_SCHEMA}.addresses ON orders.pickup_address_id = addresses.id +LEFT OUTER JOIN + {config.CLEAN_SCHEMA}.restaurants ON orders.restaurant_id = restaurants.id +WHERE + orders.restaurant_id IN ( + SELECT + restaurant_id + FROM ( + SELECT DISTINCT + restaurant_id, + pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + ) AS restaurant_locations + GROUP BY + restaurant_id + HAVING + COUNT(pickup_address_id) > 1 +) +GROUP BY + orders.restaurant_id, + restaurants.name, + restaurants.address_id, + addresses.id, + addresses.street +ORDER BY + orders.restaurant_id, + restaurants.name, + restaurants.address_id, + addresses.id, + addresses.street; + + +3) Remove addresses without any association + +After steps 1) and 2) some addresses are not associated with a restaurant any more. 
+ +The following SQL statement lists them before this migration is run: + +SELECT + id, + street, + zip_code, + city +FROM + {config.CLEAN_SCHEMA}.addresses +WHERE + id NOT IN ( + SELECT DISTINCT + pickup_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + delivery_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + address_id AS id + FROM + {config.CLEAN_SCHEMA}.restaurants +); + +4) Ensure every `Restaurant` has exactly one `Address`. + +Replace the current `ForeignKeyConstraint` to from `Order` to `Restaurant` +with one that also includes the `Order.pickup_address_id`. +""" + +import os + +from alembic import op + +from urban_meal_delivery import configuration + + +revision = 'e86290e7305e' +down_revision = '26711cd3f9b9' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision e86290e7305e.""" + # 1) Remove orders + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.orders + WHERE pickup_address_id IN ( + SELECT + orders.pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + LEFT OUTER JOIN + {config.CLEAN_SCHEMA}.restaurants + ON orders.restaurant_id = restaurants.id + WHERE + orders.pickup_address_id <> restaurants.address_id + GROUP BY + orders.pickup_address_id + HAVING + COUNT(*) = 1 + ); + """, + ) + + # 2) De-duplicate addresses + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 353 + WHERE + pickup_address_id = 548916; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 4850 + WHERE + pickup_address_id = 6415; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 16227 + WHERE + pickup_address_id = 44627; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 44458 + WHERE + 
pickup_address_id = 534543; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 289997 + WHERE + pickup_address_id = 309525; + """, + ) + + # 3) Remove addresses + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.addresses_pixels + WHERE + address_id NOT IN ( + SELECT DISTINCT + pickup_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + delivery_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + address_id AS id + FROM + {config.CLEAN_SCHEMA}.restaurants + ); + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 302883 + WHERE + primary_id = 43526; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 47597 + WHERE + primary_id = 43728; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 159631 + WHERE + primary_id = 43942; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 275651 + WHERE + primary_id = 44759; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 156685 + WHERE + primary_id = 50599; + """, + ) + op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.addresses + SET + primary_id = 480206 + WHERE + primary_id = 51774; + """, + ) + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + id NOT IN ( + SELECT DISTINCT + pickup_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + delivery_address_id AS id + FROM + {config.CLEAN_SCHEMA}.orders + UNION + SELECT DISTINCT + address_id AS id + FROM + {config.CLEAN_SCHEMA}.restaurants + ); + """, + ) + + # 4) Ensure every `Restaurant` has only one `Order.pickup_address`. 
+ op.execute( + f""" + UPDATE + {config.CLEAN_SCHEMA}.orders + SET + pickup_address_id = 53733 + WHERE + pickup_address_id = 54892; + """, + ) + op.execute( + f""" + DELETE + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + id = 54892; + """, + ) + op.create_unique_constraint( + 'uq_restaurants_on_id_address_id', + 'restaurants', + ['id', 'address_id'], + schema=config.CLEAN_SCHEMA, + ) + op.create_foreign_key( + op.f('fk_orders_to_restaurants_via_restaurant_id_pickup_address_id'), + 'orders', + 'restaurants', + ['restaurant_id', 'pickup_address_id'], + ['id', 'address_id'], + source_schema=config.CLEAN_SCHEMA, + referent_schema=config.CLEAN_SCHEMA, + onupdate='RESTRICT', + ondelete='RESTRICT', + ) + op.drop_constraint( + 'fk_orders_to_restaurants_via_restaurant_id', + 'orders', + type_='foreignkey', + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision 26711cd3f9b9.""" + op.create_foreign_key( + op.f('fk_orders_to_restaurants_via_restaurant_id'), + 'orders', + 'restaurants', + ['restaurant_id'], + ['id'], + source_schema=config.CLEAN_SCHEMA, + referent_schema=config.CLEAN_SCHEMA, + onupdate='RESTRICT', + ondelete='RESTRICT', + ) + op.drop_constraint( + 'fk_orders_to_restaurants_via_restaurant_id_pickup_address_id', + 'orders', + type_='foreignkey', + schema=config.CLEAN_SCHEMA, + ) + op.drop_constraint( + 'uq_restaurants_on_id_address_id', + 'restaurants', + type_='unique', + schema=config.CLEAN_SCHEMA, + ) diff --git a/setup.cfg b/setup.cfg index 37987fd..b7efe8f 100644 --- a/setup.cfg +++ b/setup.cfg @@ -121,6 +121,8 @@ per-file-ignores = migrations/versions/*.py: # Type annotations are not strictly enforced. ANN0, ANN2, + # Do not worry about SQL injection here. + S608, # File names of revisions are ok. WPS114,WPS118, # Revisions may have too many expressions. 
diff --git a/src/urban_meal_delivery/console/gridify.py b/src/urban_meal_delivery/console/gridify.py index 44f2fc3..3024f14 100644 --- a/src/urban_meal_delivery/console/gridify.py +++ b/src/urban_meal_delivery/console/gridify.py @@ -8,7 +8,7 @@ from urban_meal_delivery.console import decorators @click.command() -@decorators.db_revision('888e352d7526') +@decorators.db_revision('e86290e7305e') def gridify() -> None: # pragma: no cover note:b1f68d24 """Create grids for all cities. diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index dad5b72..5b61d41 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -52,7 +52,7 @@ class Address(meta.Base): # Relationships city = orm.relationship('City', back_populates='addresses') - restaurant = orm.relationship('Restaurant', back_populates='address', uselist=False) + restaurants = orm.relationship('Restaurant', back_populates='address') orders_picked_up = orm.relationship( 'Order', back_populates='pickup_address', diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index db03215..d0b6629 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -54,11 +54,12 @@ class Grid(meta.Base): return round((self.side_length ** 2) / 1_000_000, 1) @classmethod - def gridify(cls, city: db.City, side_length: int) -> db.Grid: + def gridify(cls, city: db.City, side_length: int) -> db.Grid: # noqa:WPS210 """Create a fully populated `Grid` for a `city`. - The `Grid` contains only `Pixel`s that have at least one `Address`. - `Address` objects outside the `city`'s viewport are discarded. + The `Grid` contains only `Pixel`s that have at least one + `Order.pickup_address`. `Address` objects outside the `.city`'s + viewport are discarded. Args: city: city for which the grid is created @@ -72,7 +73,14 @@ class Grid(meta.Base): # `Pixel`s grouped by `.n_x`-`.n_y` coordinates. 
pixels = {} - for address in city.addresses: + pickup_addresses = ( # noqa:ECE:001 + db.session.query(db.Address) + .join(db.Order, db.Address.id == db.Order.pickup_address_id) + .filter(db.Address.city == city) + .all() + ) + + for address in pickup_addresses: # Check if an `address` is not within the `city`'s viewport, ... not_within_city_viewport = ( address.x < 0 diff --git a/src/urban_meal_delivery/db/orders.py b/src/urban_meal_delivery/db/orders.py index 244e4c1..0b4550b 100644 --- a/src/urban_meal_delivery/db/orders.py +++ b/src/urban_meal_delivery/db/orders.py @@ -79,12 +79,6 @@ class Order(meta.Base): # noqa:WPS214 sa.ForeignKeyConstraint( ['customer_id'], ['customers.id'], onupdate='RESTRICT', ondelete='RESTRICT', ), - sa.ForeignKeyConstraint( - ['restaurant_id'], - ['restaurants.id'], - onupdate='RESTRICT', - ondelete='RESTRICT', - ), sa.ForeignKeyConstraint( ['courier_id'], ['couriers.id'], onupdate='RESTRICT', ondelete='RESTRICT', ), @@ -94,6 +88,14 @@ class Order(meta.Base): # noqa:WPS214 onupdate='RESTRICT', ondelete='RESTRICT', ), + sa.ForeignKeyConstraint( + # This foreign key ensures that there is only + # one `.pickup_address` per `.restaurant` + ['restaurant_id', 'pickup_address_id'], + ['restaurants.id', 'restaurants.address_id'], + onupdate='RESTRICT', + ondelete='RESTRICT', + ), sa.ForeignKeyConstraint( ['delivery_address_id'], ['addresses.id'], @@ -302,7 +304,11 @@ class Order(meta.Base): # noqa:WPS214 # Relationships customer = orm.relationship('Customer', back_populates='orders') - restaurant = orm.relationship('Restaurant', back_populates='orders') + restaurant = orm.relationship( + 'Restaurant', + back_populates='orders', + primaryjoin='Restaurant.id == Order.restaurant_id', + ) courier = orm.relationship('Courier', back_populates='orders') pickup_address = orm.relationship( 'Address', diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index 26faf1c..f75e9e3 100644 --- 
a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -12,7 +12,7 @@ class Pixel(meta.Base): Square pixels aggregate `Address` objects within a `City`. Every `Address` belongs to exactly one `Pixel` in a `Grid`. - Every `Pixel` has a unique "coordinate" within the `Grid`. + Every `Pixel` has a unique `n_x`-`n_y` coordinate within the `Grid`. """ __tablename__ = 'pixels' diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index b17cae7..23fa896 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -34,10 +34,12 @@ class Restaurant(meta.Base): '0 <= estimated_prep_duration AND estimated_prep_duration <= 2400', name='realistic_estimated_prep_duration', ), + # Needed by a `ForeignKeyConstraint` in `Order`. + sa.UniqueConstraint('id', 'address_id'), ) # Relationships - address = orm.relationship('Address', back_populates='restaurant') + address = orm.relationship('Address', back_populates='restaurants') orders = orm.relationship('Order', back_populates='restaurant') def __repr__(self) -> str: diff --git a/tests/console/test_gridify.py b/tests/console/test_gridify.py index 2911a0e..515d153 100644 --- a/tests/console/test_gridify.py +++ b/tests/console/test_gridify.py @@ -8,24 +8,31 @@ from urban_meal_delivery.console import gridify @pytest.mark.db -def test_four_pixels_with_two_addresses( - cli, db_session, monkeypatch, city, make_address, +def test_two_pixels_with_two_addresses( # noqa:WPS211 + cli, db_session, monkeypatch, city, make_address, make_restaurant, make_order, ): """Two `Address` objects in distinct `Pixel` objects. This is roughly the same test case as - `tests.db.test_grids.test_four_pixels_with_two_addresses`. + `tests.db.test_grids.test_two_pixels_with_two_addresses`. The difference is that the result is written to the database. """ # Create two `Address` objects in distinct `Pixel`s. 
- city.addresses = [ - # One `Address` in the lower-left `Pixel`, ... - make_address(latitude=48.8357377, longitude=2.2517412), - # ... and another one in the upper-right one. - make_address(latitude=48.8898312, longitude=2.4357622), - ] + # One `Address` in the lower-left `Pixel`, ... + address1 = make_address(latitude=48.8357377, longitude=2.2517412) + # ... and another one in the upper-right one. + address2 = make_address(latitude=48.8898312, longitude=2.4357622) - db_session.add(city) + # Locate a `Restaurant` at the two `Address` objects and + # place one `Order` for each of them so that the `Address` + # objects are used as `Order.pickup_address`s. + restaurant1 = make_restaurant(address=address1) + restaurant2 = make_restaurant(address=address2) + order1 = make_order(restaurant=restaurant1) + order2 = make_order(restaurant=restaurant2) + + db_session.add(order1) + db_session.add(order2) db_session.commit() side_length = max(city.total_x // 2, city.total_y // 2) + 1 diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py index 3d8858d..e28baf2 100644 --- a/tests/db/test_grids.py +++ b/tests/db/test_grids.py @@ -74,18 +74,30 @@ class TestProperties: class TestGridification: """Test the `Grid.gridify()` constructor.""" - @pytest.mark.no_cover - def test_one_pixel_without_addresses(self, city): - """At the very least, there must be one `Pixel` ... + @pytest.fixture + def addresses_mock(self, mocker, monkeypatch): + """A `Mock` whose `.return_value` are to be set ... - ... if the `side_length` is greater than both the - horizontal and vertical distances of the viewport. + ... to the addresses that are gridified. The addresses are + all considered `Order.pickup_address` attributes for some orders. 
+ """ + mock = mocker.Mock() + query = ( # noqa:ECE001 + mock.query.return_value.join.return_value.filter.return_value.all # noqa:E501,WPS219 + ) + monkeypatch.setattr(db, 'session', mock) + + return query + + @pytest.mark.no_cover + def test_no_pixel_without_addresses(self, city, addresses_mock): + """Without orders, there are no `Pixel` objects on the `grid`. This test case skips the `for`-loop inside `Grid.gridify()`. - Interestingly, coverage.py does not see this. """ - city.addresses = [] + addresses_mock.return_value = [] + # The chosen `side_length` would result in one `Pixel` if there were orders. # `+1` as otherwise there would be a second pixel in one direction. side_length = max(city.total_x, city.total_y) + 1 @@ -94,13 +106,13 @@ class TestGridification: assert isinstance(result, db.Grid) assert len(result.pixels) == 0 # noqa:WPS507 - def test_one_pixel_with_one_address(self, city, address): + def test_one_pixel_with_one_address(self, city, order, addresses_mock): """At the very least, there must be one `Pixel` ... ... if the `side_length` is greater than both the horizontal and vertical distances of the viewport. """ - city.addresses = [address] + addresses_mock.return_value = [order.pickup_address] # `+1` as otherwise there would be a second pixel in one direction. side_length = max(city.total_x, city.total_y) + 1 @@ -110,7 +122,7 @@ class TestGridification: assert isinstance(result, db.Grid) assert len(result.pixels) == 1 - def test_one_pixel_with_two_addresses(self, city, make_address): + def test_one_pixel_with_two_addresses(self, city, make_order, addresses_mock): """At the very least, there must be one `Pixel` ... ... if the `side_length` is greater than both the @@ -119,7 +131,8 @@ class TestGridification: This test case is necessary as `test_one_pixel_with_one_address` does not have to re-use an already created `Pixel` object internally. 
""" - city.addresses = [make_address(), make_address()] + orders = [make_order(), make_order()] + addresses_mock.return_value = [order.pickup_address for order in orders] # `+1` as otherwise there would be a second pixel in one direction. side_length = max(city.total_x, city.total_y) + 1 @@ -129,12 +142,11 @@ class TestGridification: assert isinstance(result, db.Grid) assert len(result.pixels) == 1 - def test_one_pixel_with_address_too_far_south(self, city, address): + def test_no_pixel_with_one_address_too_far_south(self, city, order, addresses_mock): """An `address` outside the `city`'s viewport is discarded.""" # Move the `address` just below `city.southwest`. - address.latitude = city.southwest.latitude - 0.1 - - city.addresses = [address] + order.pickup_address.latitude = city.southwest.latitude - 0.1 + addresses_mock.return_value = [order.pickup_address] # `+1` as otherwise there would be a second pixel in one direction. side_length = max(city.total_x, city.total_y) + 1 @@ -145,16 +157,15 @@ class TestGridification: assert len(result.pixels) == 0 # noqa:WPS507 @pytest.mark.no_cover - def test_one_pixel_with_address_too_far_west(self, city, address): + def test_no_pixel_with_one_address_too_far_west(self, city, order, addresses_mock): """An `address` outside the `city`'s viewport is discarded. - This test is a logical sibling to `test_one_pixel_with_address_too_far_south` - and therefore redundant. + This test is a logical sibling to + `test_no_pixel_with_one_address_too_far_south` and therefore redundant. """ # Move the `address` just left to `city.southwest`. - address.longitude = city.southwest.longitude - 0.1 - - city.addresses = [address] + order.pickup_address.longitude = city.southwest.longitude - 0.1 + addresses_mock.return_value = [order.pickup_address] # `+1` as otherwise there would be a second pixel in one direction. 
side_length = max(city.total_x, city.total_y) + 1 @@ -165,13 +176,13 @@ class TestGridification: assert len(result.pixels) == 0 # noqa:WPS507 @pytest.mark.no_cover - def test_four_pixels_with_two_addresses(self, city, make_address): + def test_two_pixels_with_two_addresses(self, city, make_address, addresses_mock): """Two `Address` objects in distinct `Pixel` objects. This test is more of a sanity check. """ # Create two `Address` objects in distinct `Pixel`s. - city.addresses = [ + addresses_mock.return_value = [ # One `Address` in the lower-left `Pixel`, ... make_address(latitude=48.8357377, longitude=2.2517412), # ... and another one in the upper-right one. @@ -194,7 +205,9 @@ class TestGridification: @pytest.mark.db @pytest.mark.no_cover @pytest.mark.parametrize('side_length', [250, 500, 1_000, 2_000, 4_000, 8_000]) - def test_make_random_grids(self, db_session, city, make_address, side_length): + def test_make_random_grids( # noqa:WPS211 + self, db_session, city, make_address, make_restaurant, make_order, side_length, + ): """With 100 random `Address` objects, a grid must have ... ... between 1 and a deterministic upper bound of `Pixel` objects. @@ -202,7 +215,10 @@ class TestGridification: This test creates confidence that the created `Grid` objects adhere to the database constraints. 
""" - city.addresses = [make_address() for _ in range(100)] + addresses = [make_address() for _ in range(100)] + restaurants = [make_restaurant(address=address) for address in addresses] + orders = [make_order(restaurant=restaurant) for restaurant in restaurants] + db_session.add_all(orders) n_pixels_x = (city.total_x // side_length) + 1 n_pixels_y = (city.total_y // side_length) + 1 diff --git a/tests/forecasts/timify/test_aggregate_orders.py b/tests/forecasts/timify/test_aggregate_orders.py index b3c4206..75f0531 100644 --- a/tests/forecasts/timify/test_aggregate_orders.py +++ b/tests/forecasts/timify/test_aggregate_orders.py @@ -17,16 +17,34 @@ class TestAggregateOrders: """ @pytest.fixture - def one_pixel_grid(self, db_session, city, restaurant): + def addresses_mock(self, mocker, monkeypatch): + """A `Mock` whose `.return_value` are to be set ... + + ... to the addresses that are gridified. The addresses are + all considered `Order.pickup_address` attributes for some orders. + + Note: This fixture also exists in `tests.db.test_grids`. + """ + mock = mocker.Mock() + query = ( # noqa:ECE001 + mock.query.return_value.join.return_value.filter.return_value.all # noqa:E501,WPS219 + ) + monkeypatch.setattr(db, 'session', mock) + + return query + + @pytest.fixture + def one_pixel_grid(self, db_session, city, restaurant, addresses_mock): """A persisted `Grid` with one `Pixel`. - `restaurant` must be a dependency as otherwise - its `.address` is not put into the database. + `restaurant` must be a dependency as otherwise the `restaurant.address` + is not put into the database as an `Order.pickup_address`. """ + addresses_mock.return_value = [restaurant.address] + # `+1` as otherwise there would be a second pixel in one direction. 
side_length = max(city.total_x, city.total_y) + 1 grid = db.Grid.gridify(city=city, side_length=side_length) - db_session.add(grid) assert len(grid.pixels) == 1 # sanity check @@ -272,17 +290,17 @@ class TestAggregateOrders: assert result['total_orders'].sum() == 18 @pytest.fixture - def two_pixel_grid(self, db_session, city, make_address, make_restaurant): - """A persisted `Grid` with two `Pixel` objects. - - `restaurant` must be a dependency as otherwise - its `.address` is not put into the database. - """ + def two_pixel_grid( # noqa:WPS211 + self, db_session, city, make_address, make_restaurant, addresses_mock, + ): + """A persisted `Grid` with two `Pixel` objects.""" # One `Address` in the lower-left `Pixel`, ... address1 = make_address(latitude=48.8357377, longitude=2.2517412) # ... and another one in the upper-right one. address2 = make_address(latitude=48.8898312, longitude=2.4357622) + addresses_mock.return_value = [address1, address2] + # Create `Restaurant`s at the two addresses. make_restaurant(address=address1) make_restaurant(address=address2) @@ -307,7 +325,8 @@ class TestAggregateOrders: In total, there are 30 orders. """ address1, address2 = two_pixel_grid.city.addresses - restaurant1, restaurant2 = address1.restaurant, address2.restaurant + # Rarely, an `Address` may have several `Restaurant`s in the real dataset. + restaurant1, restaurant2 = address1.restaurants[0], address2.restaurants[0] # Create one order every other hour for `restaurant1`. 
for hour in range(11, 23, 2): From ca2ba0c9d5e90d293b9a631ab921078a7d2bd0cc Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 24 Jan 2021 19:18:09 +0100 Subject: [PATCH 48/72] Fix missing dependencies in test session --- noxfile.py | 1 + 1 file changed, 1 insertion(+) diff --git a/noxfile.py b/noxfile.py index f5e9967..e43d2ba 100644 --- a/noxfile.py +++ b/noxfile.py @@ -196,6 +196,7 @@ def test(session): 'pytest', 'pytest-cov', 'pytest-env', + 'pytest-mock', 'pytest-randomly', 'xdoctest[optional]', ) From 605ade4078e352c75ccd62ae5ad4b8f1f8feb5d7 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 26 Jan 2021 17:02:51 +0100 Subject: [PATCH 49/72] Add `Pixel.northeast/southwest` properties - the properties are needed for the drawing functionalitites --- src/urban_meal_delivery/db/pixels.py | 47 +++++++++++++++++++ src/urban_meal_delivery/db/utils/locations.py | 7 ++- tests/db/test_grids.py | 9 +++- tests/db/test_pixels.py | 28 +++++++++++ tests/db/utils/test_locations.py | 7 +++ 5 files changed, 96 insertions(+), 2 deletions(-) diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index f75e9e3..c182206 100644 --- a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -1,9 +1,11 @@ """Provide the ORM's `Pixel` model.""" import sqlalchemy as sa +import utm from sqlalchemy import orm from urban_meal_delivery.db import meta +from urban_meal_delivery.db import utils class Pixel(meta.Base): @@ -58,3 +60,48 @@ class Pixel(meta.Base): def area(self) -> float: """The area of a pixel in square kilometers.""" return self.grid.pixel_area + + @property + def northeast(self) -> utils.Location: + """The pixel's northeast corner, relative to `.grid.city.southwest`. + + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. 
+ """ + if not hasattr(self, '_northeast'): # noqa:WPS421 note:d334120e + # The origin is the southwest corner of the `.grid.city`'s viewport. + easting_origin = self.grid.city.southwest.easting + northing_origin = self.grid.city.southwest.northing + + # `+1` as otherwise we get the pixel's `.southwest` corner. + easting = easting_origin + ((self.n_x + 1) * self.side_length) + northing = northing_origin + ((self.n_y + 1) * self.side_length) + zone, band = self.grid.city.southwest.zone_details + latitude, longitude = utm.to_latlon(easting, northing, zone, band) + + self._northeast = utils.Location(latitude, longitude) + self._northeast.relate_to(self.grid.city.southwest) + + return self._northeast + + @property + def southwest(self) -> utils.Location: + """The pixel's northeast corner, relative to `.grid.city.southwest`. + + Implementation detail: This property is cached as none of the + underlying attributes to calculate the value are to be changed. + """ + if not hasattr(self, '_southwest'): # noqa:WPS421 note:d334120e + # The origin is the southwest corner of the `.grid.city`'s viewport. 
+ easting_origin = self.grid.city.southwest.easting + northing_origin = self.grid.city.southwest.northing + + easting = easting_origin + (self.n_x * self.side_length) + northing = northing_origin + (self.n_y * self.side_length) + zone, band = self.grid.city.southwest.zone_details + latitude, longitude = utm.to_latlon(easting, northing, zone, band) + + self._southwest = utils.Location(latitude, longitude) + self._southwest.relate_to(self.grid.city.southwest) + + return self._southwest diff --git a/src/urban_meal_delivery/db/utils/locations.py b/src/urban_meal_delivery/db/utils/locations.py index 741edfe..b6ef41e 100644 --- a/src/urban_meal_delivery/db/utils/locations.py +++ b/src/urban_meal_delivery/db/utils/locations.py @@ -2,7 +2,7 @@ from __future__ import annotations -from typing import Optional +from typing import Optional, Tuple import utm @@ -82,6 +82,11 @@ class Location: """The UTM zone of the location.""" return f'{self._zone}{self._band}' + @property + def zone_details(self) -> Tuple[int, str]: + """The UTM zone of the location as the zone number and the band.""" + return (self._zone, self._band) + def __eq__(self, other: object) -> bool: """Check if two `Location` objects are the same location.""" if not isinstance(other, Location): diff --git a/tests/db/test_grids.py b/tests/db/test_grids.py index e28baf2..2babf25 100644 --- a/tests/db/test_grids.py +++ b/tests/db/test_grids.py @@ -205,7 +205,7 @@ class TestGridification: @pytest.mark.db @pytest.mark.no_cover @pytest.mark.parametrize('side_length', [250, 500, 1_000, 2_000, 4_000, 8_000]) - def test_make_random_grids( # noqa:WPS211 + def test_make_random_grids( # noqa:WPS211,WPS218 self, db_session, city, make_address, make_restaurant, make_order, side_length, ): """With 100 random `Address` objects, a grid must have ... 
@@ -228,5 +228,12 @@ class TestGridification: assert isinstance(result, db.Grid) assert 1 <= len(result.pixels) <= n_pixels_x * n_pixels_y + # Sanity checks for `Pixel.southwest` and `Pixel.northeast`. + for pixel in result.pixels: + assert abs(pixel.southwest.x - pixel.n_x * side_length) < 2 + assert abs(pixel.southwest.y - pixel.n_y * side_length) < 2 + assert abs(pixel.northeast.x - (pixel.n_x + 1) * side_length) < 2 + assert abs(pixel.northeast.y - (pixel.n_y + 1) * side_length) < 2 + db_session.add(result) db_session.commit() diff --git a/tests/db/test_pixels.py b/tests/db/test_pixels.py index 3ebfb26..d5acc4a 100644 --- a/tests/db/test_pixels.py +++ b/tests/db/test_pixels.py @@ -87,3 +87,31 @@ class TestProperties: result = pixel.area assert result == 1.0 + + def test_northeast(self, pixel, city): + """Test `Pixel.northeast` property.""" + result = pixel.northeast + + assert abs(result.x - pixel.side_length) < 2 + assert abs(result.y - pixel.side_length) < 2 + + def test_northeast_is_cached(self, pixel): + """Test `Pixel.northeast` property.""" + result1 = pixel.northeast + result2 = pixel.northeast + + assert result1 is result2 + + def test_southwest(self, pixel, city): + """Test `Pixel.southwest` property.""" + result = pixel.southwest + + assert abs(result.x) < 2 + assert abs(result.y) < 2 + + def test_southwest_is_cached(self, pixel): + """Test `Pixel.southwest` property.""" + result1 = pixel.southwest + result2 = pixel.southwest + + assert result1 is result2 diff --git a/tests/db/utils/test_locations.py b/tests/db/utils/test_locations.py index 51750e2..8eb0263 100644 --- a/tests/db/utils/test_locations.py +++ b/tests/db/utils/test_locations.py @@ -140,6 +140,13 @@ class TestProperties: assert result == ZONE + def test_zone_details(self, location): + """Test `Location.zone_details` property.""" + result = location.zone_details + + zone, band = result + assert ZONE == f'{zone}{band}' + class TestRelateTo: """Test the `Location.relate_to()` method and the 
`.x` and `.y` properties.""" From 4b6d92958d756cd450c8418453977e1dd5d3186b Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 26 Jan 2021 17:07:50 +0100 Subject: [PATCH 50/72] Add functionality for drawing `folium.Map`s - this code is not unit-tested due to the complexity involving interactive `folium.Map`s => visual checks give high confidence --- setup.cfg | 24 ++- src/urban_meal_delivery/configuration.py | 4 + src/urban_meal_delivery/db/addresses.py | 46 +++++- src/urban_meal_delivery/db/cities.py | 154 ++++++++++++++++++ src/urban_meal_delivery/db/customers.py | 157 +++++++++++++++++++ src/urban_meal_delivery/db/grids.py | 31 ++++ src/urban_meal_delivery/db/pixels.py | 136 ++++++++++++++++ src/urban_meal_delivery/db/restaurants.py | 94 +++++++++++ src/urban_meal_delivery/db/utils/__init__.py | 2 + src/urban_meal_delivery/db/utils/colors.py | 69 ++++++++ tests/db/fake_data/factories.py | 2 +- tests/db/test_addresses.py | 8 +- 12 files changed, 714 insertions(+), 13 deletions(-) create mode 100644 src/urban_meal_delivery/db/utils/colors.py diff --git a/setup.cfg b/setup.cfg index b7efe8f..00d589d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -92,7 +92,7 @@ extend-ignore = # Google's Python Style Guide is not reStructuredText # until after being processed by Sphinx Napoleon. # Source: https://github.com/peterjc/flake8-rst-docstrings/issues/17 - RST201,RST203,RST301, + RST201,RST203,RST210,RST213,RST301, # String constant over-use is checked visually by the programmer. WPS226, # Allow underscores in numbers. @@ -101,6 +101,10 @@ extend-ignore = WPS305, # Classes should not have to specify a base class. WPS306, + # Let's be modern: The Walrus is ok. + WPS332, + # Let's not worry about the number of noqa's. + WPS402, # Putting logic into __init__.py files may be justified. WPS412, # Allow multiple assignment, e.g., x = y = 123 @@ -127,8 +131,6 @@ per-file-ignores = WPS114,WPS118, # Revisions may have too many expressions. WPS204,WPS213, - # Too many noqa's are ok. 
- WPS402, noxfile.py: # Type annotations are not strictly enforced. ANN0, ANN2, @@ -136,13 +138,17 @@ per-file-ignores = WPS202, # TODO (isort): Remove after simplifying the nox session "lint". WPS213, - # The noxfile is rather long => allow many noqa's. - WPS402, src/urban_meal_delivery/configuration.py: # Allow upper case class variables within classes. WPS115, + src/urban_meal_delivery/db/customers.py: + # The module is not too complex. + WPS232, + src/urban_meal_delivery/db/restaurants.py: + # The module is not too complex. + WPS232, src/urban_meal_delivery/forecasts/decomposition.py: - # The module does not have a high cognitive complexity. + # The module is not too complex. WPS232, src/urban_meal_delivery/forecasts/timify.py: # No SQL injection as the inputs come from a safe source. @@ -247,8 +253,14 @@ single_line_exclusions = typing [mypy] cache_dir = .cache/mypy +[mypy-folium.*] +ignore_missing_imports = true +[mypy-matplotlib.*] +ignore_missing_imports = true [mypy-nox.*] ignore_missing_imports = true +[mypy-numpy.*] +ignore_missing_imports = true [mypy-packaging] ignore_missing_imports = true [mypy-pandas] diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 267d579..ad813b7 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -55,6 +55,10 @@ class Config: # The demand forecasting methods used in the simulations. FORECASTING_METHODS = ['hets', 'rtarima'] + # Colors for the visualizations ins `folium`. 
+ RESTAURANT_COLOR = 'red' + CUSTOMER_COLOR = 'blue' + # Implementation-specific settings # -------------------------------- diff --git a/src/urban_meal_delivery/db/addresses.py b/src/urban_meal_delivery/db/addresses.py index 5b61d41..8ce7193 100644 --- a/src/urban_meal_delivery/db/addresses.py +++ b/src/urban_meal_delivery/db/addresses.py @@ -1,5 +1,10 @@ """Provide the ORM's `Address` model.""" +from __future__ import annotations + +from typing import Any + +import folium import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql @@ -16,7 +21,7 @@ class Address(meta.Base): # Columns id = sa.Column(sa.Integer, primary_key=True, autoincrement=False) # noqa:WPS125 - _primary_id = sa.Column('primary_id', sa.Integer, nullable=False, index=True) + primary_id = sa.Column(sa.Integer, nullable=False, index=True) created_at = sa.Column(sa.DateTime, nullable=False) place_id = sa.Column(sa.Unicode(length=120), nullable=False, index=True) latitude = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) @@ -83,7 +88,7 @@ class Address(meta.Base): `.is_primary` indicates the first in a group of `Address` objects. """ - return self.id == self._primary_id + return self.id == self.primary_id @property def location(self) -> utils.Location: @@ -121,3 +126,40 @@ class Address(meta.Base): Shortcut for `.location.y`. """ return self.location.y + + def clear_map(self) -> Address: # pragma: no cover + """Shortcut to the `.city.clear_map()` method. + + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.city.map` object.""" + return self.city.map + + def draw(self, **kwargs: Any) -> folium.Map: # pragma: no cover + """Draw the address on the `.city.map`. + + By default, addresses are shown as black dots. + Use `**kwargs` to overwrite that. 
+ + Args: + **kwargs: passed on to `folium.Circle()`; overwrite default settings + + Returns: + `.city.map` for convenience in interactive usage + """ + defaults = { + 'color': 'black', + 'popup': f'{self.street}, {self.zip_code} {self.city_name}', + } + defaults.update(kwargs) + + marker = folium.Circle((self.latitude, self.longitude), **defaults) + marker.add_to(self.city.map) + + return self.map diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index b2f0cc4..bd5932f 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -1,9 +1,14 @@ """Provide the ORM's `City` model.""" +from __future__ import annotations + +import folium import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta from urban_meal_delivery.db import utils @@ -94,3 +99,152 @@ class City(meta.Base): The city borders refer to the Google Maps viewport. """ return self.northeast.northing - self.southwest.northing + + def clear_map(self) -> City: # pragma: no cover + """Create a new `folium.Map` object aligned with the city's viewport. + + The map is available via the `.map` property. Note that it is a + mutable objects that is changed from various locations in the code base. + + Returns: + self: enabling method chaining + """ # noqa:DAR203 + self._map = folium.Map( + location=[self.center_latitude, self.center_longitude], + zoom_start=self.initial_zoom, + ) + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """A `folium.Map` object aligned with the city's viewport. + + See docstring for `.clear_map()` for further info. 
+ """ + if not hasattr(self, '_map'): # noqa:WPS421 note:d334120e + self.clear_map() + + return self._map + + def draw_restaurants( # noqa:WPS231 + self, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw all restaurants on the`.map`. + + Args: + order_counts: show the number of orders + + Returns: + `.map` for convenience in interactive usage + """ + # Obtain all primary `Address`es in the city that host `Restaurant`s. + addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Restaurant, db.Address.id == db.Restaurant.address_id) + .filter(db.Address.city == self) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in addresses: + # Show the restaurant's name if there is only one. + # Otherwise, list all the restaurants' ID's. + restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join(db.Address, db.Restaurant.address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .all() + ) + if len(restaurants) == 1: + tooltip = f'{restaurants[0].name} (#{restaurants[0].id})' # noqa:WPS221 + else: + tooltip = 'Restaurants ' + ', '.join( # noqa:WPS336 + f'#{restaurant.id}' for restaurant in restaurants + ) + + if order_counts: + # Calculate the number of orders for ALL restaurants ... + n_orders = ( # noqa:ECE001 + db.session.query(db.Order.id) + .join(db.Address, db.Order.pickup_address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + # ... and adjust the size of the red dot on the `.map`. 
+ if n_orders >= 1000: + radius = 20 # noqa:WPS220 + elif n_orders >= 500: + radius = 15 # noqa:WPS220 + elif n_orders >= 100: + radius = 10 # noqa:WPS220 + elif n_orders >= 10: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + tooltip += f' | n_orders={n_orders}' # noqa:WPS336 + + address.draw( + radius=radius, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=tooltip, + ) + + else: + address.draw( + radius=1, color=config.RESTAURANT_COLOR, tooltip=tooltip, + ) + + return self.map + + def draw_zip_codes(self) -> folium.Map: # pragma: no cover + """Draw all addresses on the `.map`, colorized by their `.zip_code`. + + This does not make a distinction between restaurant and customer addresses. + Also, due to the high memory usage, the number of orders is not calculated. + + Returns: + `.map` for convenience in interactive usage + """ + # First, create a color map with distinct colors for each zip code. + all_zip_codes = sorted( + row[0] + for row in db.session.execute( + f""" -- # noqa:S608 + SELECT DISTINCT + zip_code + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {self.id}; + """, + ) + ) + cmap = utils.make_random_cmap(len(all_zip_codes), bright=False) + colors = { + code: utils.rgb_to_hex(*cmap(index)) + for index, code in enumerate(all_zip_codes) + } + + # Second, draw every address on the `.map. + for address in self.addresses: + # Non-primary addresses are covered by primary ones anyway. 
+ if not address.is_primary: + continue + + marker = folium.Circle( # noqa:WPS317 + (address.latitude, address.longitude), + color=colors[address.zip_code], + radius=1, + ) + marker.add_to(self.map) + + return self.map diff --git a/src/urban_meal_delivery/db/customers.py b/src/urban_meal_delivery/db/customers.py index 2a96d9a..f6d59c2 100644 --- a/src/urban_meal_delivery/db/customers.py +++ b/src/urban_meal_delivery/db/customers.py @@ -1,8 +1,13 @@ """Provide the ORM's `Customer` model.""" +from __future__ import annotations + +import folium import sqlalchemy as sa from sqlalchemy import orm +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta @@ -22,3 +27,155 @@ class Customer(meta.Base): # Relationships orders = orm.relationship('Order', back_populates='customer') + + def clear_map(self) -> Customer: # pragma: no cover + """Shortcut to the `...city.clear_map()` method. + + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.orders[0].pickup_address.city.clear_map() # noqa:WPS219 + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `...city.map` object.""" + return self.orders[0].pickup_address.city.map # noqa:WPS219 + + def draw( # noqa:C901,WPS210,WPS231 + self, restaurants: bool = True, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw all the customer's delivery addresses on the `...city.map`. + + By default, the pickup locations (= restaurants) are also shown. + + Args: + restaurants: show the pickup locations + order_counts: show both the number of pickups at the restaurants + and the number of deliveries at the customer's delivery addresses; + the former is only shown if `restaurants=True` + + Returns: + `...city.map` for convenience in interactive usage + """ + # Note: a `Customer` may have more than one delivery `Address`es. + # That is not true for `Restaurant`s after the data cleaning. 
+ + # Obtain all primary `Address`es where + # at least one delivery was made to `self`. + delivery_addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Order, db.Address.id == db.Order.delivery_address_id) + .filter(db.Order.customer_id == self.id) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in delivery_addresses: + if order_counts: + n_orders = ( # noqa:ECE001 + db.session.query(db.Order) + .join(db.Address, db.Order.delivery_address_id == db.Address.id) + .filter(db.Order.customer_id == self.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + if n_orders >= 25: + radius = 20 # noqa:WPS220 + elif n_orders >= 10: + radius = 15 # noqa:WPS220 + elif n_orders >= 5: + radius = 10 # noqa:WPS220 + elif n_orders > 1: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + address.draw( + radius=radius, + color=config.CUSTOMER_COLOR, + fill_color=config.CUSTOMER_COLOR, + fill_opacity=0.3, + tooltip=f'n_orders={n_orders}', + ) + + else: + address.draw( + radius=1, color=config.CUSTOMER_COLOR, + ) + + if restaurants: + pickup_addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Order, db.Address.id == db.Order.pickup_address_id) + .filter(db.Order.customer_id == self.id) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in pickup_addresses: # noqa:WPS440 + # Show the restaurant's name if there is only one. + # Otherwise, list all the restaurants' ID's. + # We cannot show the `Order.restaurant.name` due to the aggregation. 
+ restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join(db.Address, db.Restaurant.address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) # noqa:WPS441 + .all() + ) + if len(restaurants) == 1: # type:ignore + tooltip = ( + f'{restaurants[0].name} (#{restaurants[0].id})' # type:ignore + ) + else: + tooltip = 'Restaurants ' + ', '.join( # noqa:WPS336 + f'#{restaurant.id}' for restaurant in restaurants # type:ignore + ) + + if order_counts: + n_orders = ( # noqa:ECE001 + db.session.query(db.Order) + .join(db.Address, db.Order.pickup_address_id == db.Address.id) + .filter(db.Order.customer_id == self.id) + .filter(db.Address.primary_id == address.id) # noqa:WPS441 + .count() + ) + if n_orders >= 25: + radius = 20 # noqa:WPS220 + elif n_orders >= 10: + radius = 15 # noqa:WPS220 + elif n_orders >= 5: + radius = 10 # noqa:WPS220 + elif n_orders > 1: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + tooltip += f' | n_orders={n_orders}' # noqa:WPS336 + + address.draw( # noqa:WPS441 + radius=radius, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=tooltip, + ) + + else: + address.draw( # noqa:WPS441 + radius=1, color=config.RESTAURANT_COLOR, tooltip=tooltip, + ) + + return self.map diff --git a/src/urban_meal_delivery/db/grids.py b/src/urban_meal_delivery/db/grids.py index d0b6629..dac6e48 100644 --- a/src/urban_meal_delivery/db/grids.py +++ b/src/urban_meal_delivery/db/grids.py @@ -2,6 +2,9 @@ from __future__ import annotations +from typing import Any + +import folium import sqlalchemy as sa from sqlalchemy import orm @@ -104,3 +107,31 @@ class Grid(meta.Base): pixel.addresses.append(assoc) return grid + + def clear_map(self) -> Grid: # pragma: no cover + """Shortcut to the `.city.clear_map()` method. 
+ + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.city.map` object.""" + return self.city.map + + def draw(self, **kwargs: Any) -> folium.Map: # pragma: no cover + """Draw all pixels in the grid. + + Args: + **kwargs: passed on to `Pixel.draw()` + + Returns: + `.city.map` for convenience in interactive usage + """ + for pixel in self.pixels: + pixel.draw(**kwargs) + + return self.map diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index c182206..f5ca091 100644 --- a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -1,9 +1,14 @@ """Provide the ORM's `Pixel` model.""" +from __future__ import annotations + +import folium import sqlalchemy as sa import utm from sqlalchemy import orm +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta from urban_meal_delivery.db import utils @@ -105,3 +110,134 @@ class Pixel(meta.Base): self._southwest.relate_to(self.grid.city.southwest) return self._southwest + + def clear_map(self) -> Pixel: # pragma: no cover + """Shortcut to the `.city.clear_map()` method. + + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.grid.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.city.map` object.""" + return self.grid.city.map + + def draw( # noqa:C901,WPS210,WPS231 + self, restaurants: bool = True, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw the pixel on the `.grid.city.map`. 
+ + Args: + restaurants: include the restaurants + order_counts: show the number of orders at a restaurant + + Returns: + `.grid.city.map` for convenience in interactive usage + """ + bounds = ( + (self.southwest.latitude, self.southwest.longitude), + (self.northeast.latitude, self.northeast.longitude), + ) + info_text = f'Pixel({self.n_x}, {self.n_y})' + + # Make the `Pixel`s look like a checkerboard. + if (self.n_x + self.n_y) % 2: + color = '#808000' + else: + color = '#ff8c00' + + marker = folium.Rectangle( + bounds=bounds, + color='gray', + opacity=0.2, + weight=5, + fill_color=color, + fill_opacity=0.2, + popup=info_text, + tooltip=info_text, + ) + marker.add_to(self.grid.city.map) + + if restaurants: + # Obtain all primary `Address`es in the city that host `Restaurant`s + # and are in the `self` `Pixel`. + addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + ( + db.session.query(db.Address.primary_id) + .join( + db.Restaurant, + db.Address.id == db.Restaurant.address_id, + ) + .join( + db.AddressPixelAssociation, + db.Address.id == db.AddressPixelAssociation.address_id, + ) + .filter(db.AddressPixelAssociation.pixel_id == self.id) + ) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in addresses: + # Show the restaurant's name if there is only one. + # Otherwise, list all the restaurants' ID's. + restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join(db.Address, db.Restaurant.address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .all() + ) + if len(restaurants) == 1: # type:ignore + tooltip = ( + f'{restaurants[0].name} (#{restaurants[0].id})' # type:ignore + ) + else: + tooltip = 'Restaurants ' + ', '.join( # noqa:WPS336 + f'#{restaurant.id}' for restaurant in restaurants # type:ignore + ) + + if order_counts: + # Calculate the number of orders for ALL restaurants ... 
+ n_orders = ( # noqa:ECE001 + db.session.query(db.Order.id) + .join(db.Address, db.Order.pickup_address_id == db.Address.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + # ... and adjust the size of the red dot on the `.map`. + if n_orders >= 1000: + radius = 20 # noqa:WPS220 + elif n_orders >= 500: + radius = 15 # noqa:WPS220 + elif n_orders >= 100: + radius = 10 # noqa:WPS220 + elif n_orders >= 10: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + tooltip += f' | n_orders={n_orders}' # noqa:WPS336 + + address.draw( + radius=radius, + color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=tooltip, + ) + + else: + address.draw( + radius=1, color=config.RESTAURANT_COLOR, tooltip=tooltip, + ) + + return self.map diff --git a/src/urban_meal_delivery/db/restaurants.py b/src/urban_meal_delivery/db/restaurants.py index 23fa896..cf02e53 100644 --- a/src/urban_meal_delivery/db/restaurants.py +++ b/src/urban_meal_delivery/db/restaurants.py @@ -1,8 +1,13 @@ """Provide the ORM's `Restaurant` model.""" +from __future__ import annotations + +import folium import sqlalchemy as sa from sqlalchemy import orm +from urban_meal_delivery import config +from urban_meal_delivery import db from urban_meal_delivery.db import meta @@ -45,3 +50,92 @@ class Restaurant(meta.Base): def __repr__(self) -> str: """Non-literal text representation.""" return '<{cls}({name})>'.format(cls=self.__class__.__name__, name=self.name) + + def clear_map(self) -> Restaurant: # pragma: no cover + """Shortcut to the `.address.city.clear_map()` method. 
+ + Returns: + self: enabling method chaining + """ # noqa:D402,DAR203 + self.address.city.clear_map() + return self + + @property # pragma: no cover + def map(self) -> folium.Map: # noqa:WPS125 + """Shortcut to the `.address.city.map` object.""" + return self.address.city.map + + def draw( # noqa:WPS231 + self, customers: bool = True, order_counts: bool = False, # pragma: no cover + ) -> folium.Map: + """Draw the restaurant on the `.address.city.map`. + + By default, the restaurant's delivery locations are also shown. + + Args: + customers: show the restaurant's delivery locations + order_counts: show the number of orders at the delivery locations; + only useful if `customers=True` + + Returns: + `.address.city.map` for convenience in interactive usage + """ + if customers: + # Obtain all primary `Address`es in the city that + # received at least one delivery from `self`. + delivery_addresses = ( # noqa:ECE001 + db.session.query(db.Address) + .filter( + db.Address.id.in_( + db.session.query(db.Address.primary_id) # noqa:WPS221 + .join(db.Order, db.Address.id == db.Order.delivery_address_id) + .filter(db.Order.restaurant_id == self.id) + .distinct() + .all(), + ), + ) + .all() + ) + + for address in delivery_addresses: + if order_counts: + n_orders = ( # noqa:ECE001 + db.session.query(db.Order) + .join(db.Address, db.Order.delivery_address_id == db.Address.id) + .filter(db.Order.restaurant_id == self.id) + .filter(db.Address.primary_id == address.id) + .count() + ) + if n_orders >= 25: + radius = 20 # noqa:WPS220 + elif n_orders >= 10: + radius = 15 # noqa:WPS220 + elif n_orders >= 5: + radius = 10 # noqa:WPS220 + elif n_orders > 1: + radius = 5 # noqa:WPS220 + else: + radius = 1 # noqa:WPS220 + + address.draw( + radius=radius, + color=config.CUSTOMER_COLOR, + fill_color=config.CUSTOMER_COLOR, + fill_opacity=0.3, + tooltip=f'n_orders={n_orders}', + ) + + else: + address.draw( + radius=1, color=config.CUSTOMER_COLOR, + ) + + self.address.draw( + radius=20, + 
color=config.RESTAURANT_COLOR, + fill_color=config.RESTAURANT_COLOR, + fill_opacity=0.3, + tooltip=f'{self.name} (#{self.id}) | n_orders={len(self.orders)}', + ) + + return self.map diff --git a/src/urban_meal_delivery/db/utils/__init__.py b/src/urban_meal_delivery/db/utils/__init__.py index 59ade94..5d6f8b6 100644 --- a/src/urban_meal_delivery/db/utils/__init__.py +++ b/src/urban_meal_delivery/db/utils/__init__.py @@ -1,3 +1,5 @@ """Utilities used by the ORM models.""" +from urban_meal_delivery.db.utils.colors import make_random_cmap +from urban_meal_delivery.db.utils.colors import rgb_to_hex from urban_meal_delivery.db.utils.locations import Location diff --git a/src/urban_meal_delivery/db/utils/colors.py b/src/urban_meal_delivery/db/utils/colors.py new file mode 100644 index 0000000..ad45327 --- /dev/null +++ b/src/urban_meal_delivery/db/utils/colors.py @@ -0,0 +1,69 @@ +"""Utilities for drawing maps with `folium`.""" + +import colorsys + +import numpy as np +from matplotlib import colors + + +def make_random_cmap( + n_colors: int, bright: bool = True, # pragma: no cover +) -> colors.LinearSegmentedColormap: + """Create a random `Colormap` with `n_colors` different colors. 
+ + Args: + n_colors: number of of different colors; size of `Colormap` + bright: `True` for strong colors, `False` for pastel colors + + Returns: + colormap + """ + np.random.seed(42) + + if bright: + hsv_colors = [ + ( + np.random.uniform(low=0.0, high=1), + np.random.uniform(low=0.2, high=1), + np.random.uniform(low=0.9, high=1), + ) + for _ in range(n_colors) + ] + + rgb_colors = [] + for color in hsv_colors: + rgb_colors.append(colorsys.hsv_to_rgb(*color)) + + else: + low = 0.0 + high = 0.66 + + rgb_colors = [ + ( + np.random.uniform(low=low, high=high), + np.random.uniform(low=low, high=high), + np.random.uniform(low=low, high=high), + ) + for _ in range(n_colors) + ] + + return colors.LinearSegmentedColormap.from_list( + 'random_color_map', rgb_colors, N=n_colors, + ) + + +def rgb_to_hex(*args: float) -> str: # pragma: no cover + """Convert RGB colors into hexadecimal notation. + + Args: + *args: percentages (0% - 100%) for the RGB channels + + Returns: + hexadecimal_representation + """ + red, green, blue = ( + int(255 * args[0]), + int(255 * args[1]), + int(255 * args[2]), + ) + return f'#{red:02x}{green:02x}{blue:02x}' # noqa:WPS221 diff --git a/tests/db/fake_data/factories.py b/tests/db/fake_data/factories.py index 61c27e9..46f2ff3 100644 --- a/tests/db/fake_data/factories.py +++ b/tests/db/fake_data/factories.py @@ -48,7 +48,7 @@ class AddressFactory(alchemy.SQLAlchemyModelFactory): # As non-primary addresses have no different behavior and # the property is only kept from the original dataset for # completeness sake, that is ok to do. - _primary_id = factory.LazyAttribute(lambda obj: obj.id) + primary_id = factory.LazyAttribute(lambda obj: obj.id) # Mimic a Google Maps Place ID with just random characters. 
place_id = factory.LazyFunction( diff --git a/tests/db/test_addresses.py b/tests/db/test_addresses.py index 0b14ccc..ab49855 100644 --- a/tests/db/test_addresses.py +++ b/tests/db/test_addresses.py @@ -39,8 +39,8 @@ class TestConstraints: def test_delete_a_referenced_address(self, db_session, address, make_address): """Remove a record that is referenced with a FK.""" db_session.add(address) - # Fake another_address that has the same `._primary_id` as `address`. - db_session.add(make_address(_primary_id=address.id)) + # Fake another_address that has the same `.primary_id` as `address`. + db_session.add(make_address(primary_id=address.id)) db_session.commit() db_session.delete(address) @@ -109,7 +109,7 @@ class TestProperties: def test_is_primary(self, address): """Test `Address.is_primary` property.""" - assert address.id == address._primary_id + assert address.id == address.primary_id result = address.is_primary @@ -117,7 +117,7 @@ class TestProperties: def test_is_not_primary(self, address): """Test `Address.is_primary` property.""" - address._primary_id = 999 + address.primary_id = 999 result = address.is_primary From 6429165aaf5b12dbeb0ddbe16843908274d5ee74 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 18:24:03 +0100 Subject: [PATCH 51/72] Add statsmodels to the dependencies --- poetry.lock | 91 +++++++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 1 + setup.cfg | 4 +++ 3 files changed, 95 insertions(+), 1 deletion(-) diff --git a/poetry.lock b/poetry.lock index 01b57f1..6c1d5a9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1200,6 +1200,18 @@ category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +[[package]] +name = "patsy" +version = "0.5.1" +description = "A Python package for describing statistical models and for building design matrices." 
+category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +numpy = ">=1.4" +six = "*" + [[package]] name = "pbr" version = "5.5.1" @@ -1565,6 +1577,17 @@ pytest = "*" pytz = "*" tzlocal = "*" +[[package]] +name = "scipy" +version = "1.6.0" +description = "SciPy: Scientific Library for Python" +category = "main" +optional = false +python-versions = ">=3.7" + +[package.dependencies] +numpy = ">=1.16.5" + [[package]] name = "send2trash" version = "1.5.0" @@ -1747,6 +1770,25 @@ postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] pymysql = ["pymysql"] +[[package]] +name = "statsmodels" +version = "0.12.1" +description = "Statistical computations and models for Python" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +numpy = ">=1.15" +pandas = ">=0.21" +patsy = ">=0.5" +scipy = ">=1.1" + +[package.extras] +build = ["cython (>=0.29)"] +develop = ["cython (>=0.29)"] +docs = ["sphinx", "nbconvert", "jupyter-client", "ipykernel", "matplotlib", "nbformat", "numpydoc", "pandas-datareader"] + [[package]] name = "stevedore" version = "3.3.0" @@ -1968,7 +2010,7 @@ research = ["jupyterlab", "nb_black", "numpy", "pytz"] [metadata] lock-version = "1.1" python-versions = "^3.8" -content-hash = "1a67cb850c9d8b35104d7429caf8f54c3fa0a2888ab4d8f54dbc3901afb14717" +content-hash = "9a2253e699e28998cb3ef8d8dadd8bf15a891c5e0cec4709671afe22159d5d86" [metadata.files] alabaster = [ @@ -2592,6 +2634,10 @@ pathspec = [ {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, {file = "pathspec-0.8.1.tar.gz", hash = "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd"}, ] +patsy = [ + {file = "patsy-0.5.1-py2.py3-none-any.whl", hash = "sha256:5465be1c0e670c3a965355ec09e9a502bf2c4cbe4875e8528b0221190a8a5d40"}, + {file = "patsy-0.5.1.tar.gz", hash = 
"sha256:f115cec4201e1465cd58b9866b0b0e7b941caafec129869057405bfe5b5e3991"}, +] pbr = [ {file = "pbr-5.5.1-py2.py3-none-any.whl", hash = "sha256:b236cde0ac9a6aedd5e3c34517b423cd4fd97ef723849da6b0d2231142d89c00"}, {file = "pbr-5.5.1.tar.gz", hash = "sha256:5fad80b613c402d5b7df7bd84812548b2a61e9977387a80a5fc5c396492b13c9"}, @@ -2870,6 +2916,27 @@ rpy2 = [ {file = "rpy2-3.4.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ebbd7fceef359279f56b481d7ea2dd60db91928abb3726010a88fbb3362213af"}, {file = "rpy2-3.4.1.tar.gz", hash = "sha256:644360b569656700dfe13f59878ec1cf8c116c128d4f2f0bf96144031f95d2e2"}, ] +scipy = [ + {file = "scipy-1.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d4303e3e21d07d9557b26a1707bb9fc065510ee8501c9bf22a0157249a82fd0"}, + {file = "scipy-1.6.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1bc5b446600c4ff7ab36bade47180673141322f0febaa555f1c433fe04f2a0e3"}, + {file = "scipy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:8840a9adb4ede3751f49761653d3ebf664f25195fdd42ada394ffea8903dd51d"}, + {file = "scipy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:8629135ee00cc2182ac8be8e75643b9f02235942443732c2ed69ab48edcb6614"}, + {file = "scipy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:58731bbe0103e96b89b2f41516699db9b63066e4317e31b8402891571f6d358f"}, + {file = "scipy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:876badc33eec20709d4e042a09834f5953ebdac4088d45a4f3a1f18b56885718"}, + {file = "scipy-1.6.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c0911f3180de343643f369dc5cfedad6ba9f939c2d516bddea4a6871eb000722"}, + {file = "scipy-1.6.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:b8af26839ae343655f3ca377a5d5e5466f1d3b3ac7432a43449154fe958ae0e0"}, + {file = "scipy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:4f1d9cc977ac6a4a63c124045c1e8bf67ec37098f67c699887a93736961a00ae"}, + {file = "scipy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:eb7928275f3560d47e5538e15e9f32b3d64cd30ea8f85f3e82987425476f53f6"}, + {file = "scipy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:31ab217b5c27ab429d07428a76002b33662f98986095bbce5d55e0788f7e8b15"}, + {file = "scipy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:2f1c2ebca6fd867160e70102200b1bd07b3b2d31a3e6af3c58d688c15d0d07b7"}, + {file = "scipy-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:155225621df90fcd151e25d51c50217e412de717475999ebb76e17e310176981"}, + {file = "scipy-1.6.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:f68d5761a2d2376e2b194c8e9192bbf7c51306ca176f1a0889990a52ef0d551f"}, + {file = "scipy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d902d3a5ad7f28874c0a82db95246d24ca07ad932741df668595fe00a4819870"}, + {file = "scipy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:aef3a2dbc436bbe8f6e0b635f0b5fe5ed024b522eee4637dbbe0b974129ca734"}, + {file = "scipy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:cdbc47628184a0ebeb5c08f1892614e1bd4a51f6e0d609c6eed253823a960f5b"}, + {file = "scipy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:313785c4dab65060f9648112d025f6d2fec69a8a889c714328882d678a95f053"}, + {file = "scipy-1.6.0.tar.gz", hash = "sha256:cb6dc9f82dfd95f6b9032a8d7ea70efeeb15d5b5fd6ed4e8537bb3c673580566"}, +] send2trash = [ {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, @@ -2982,6 +3049,28 @@ sqlalchemy = [ {file = "SQLAlchemy-1.3.20-cp39-cp39-win_amd64.whl", hash = "sha256:d05cef4a164b44ffda58200efcb22355350979e000828479971ebca49b82ddb1"}, {file = "SQLAlchemy-1.3.20.tar.gz", hash = "sha256:d2f25c7f410338d31666d7ddedfa67570900e248b940d186b48461bd4e5569a1"}, ] +statsmodels = [ + {file = "statsmodels-0.12.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:3b482ab9759b89cc1c4777b71c1ccf272e868a7551fc6b74da300557407d8379"}, 
+ {file = "statsmodels-0.12.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:33c6cbed74f075b8816cec37e5c7853ed31dcacebfdbbc3af898b4907911544e"}, + {file = "statsmodels-0.12.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:142eacd5a1bd8728358ff48101ee0e51ca3d42a93f6e5cb61fcfacf613977bcf"}, + {file = "statsmodels-0.12.1-cp36-none-win32.whl", hash = "sha256:ef3a54b3594f4c49c295388de1fdd840a8c63a857a5252125aaf92a03ea1e3a6"}, + {file = "statsmodels-0.12.1-cp36-none-win_amd64.whl", hash = "sha256:830d59d94841332429edf735430180031ad5dc660de26728d723e347f414c59d"}, + {file = "statsmodels-0.12.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:aa964ca1d65f066b9b096c94fe298aab1441e11731ce6b154ffb5f8d4a4e9ccf"}, + {file = "statsmodels-0.12.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:7614ef58ebb96cc0d4c45150116f5252a2f1e0bd15e809700776163e5a246b8c"}, + {file = "statsmodels-0.12.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:588c0f7e29403161ca952dcdad3d67970583742e9f11f66c7c5b08ac97a0408c"}, + {file = "statsmodels-0.12.1-cp37-none-win32.whl", hash = "sha256:7be4c6d43f1f3a6b28614a4b18fdcf202bd305faf15f4c558e901cbe099ca9ea"}, + {file = "statsmodels-0.12.1-cp37-none-win_amd64.whl", hash = "sha256:e5e426fb962f41d58a07a7d2f7daf32f83e911ff578368caddbcdd1886887ed1"}, + {file = "statsmodels-0.12.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:3582c0a497a9cda473470b4dd59ecd103739e3cfef1eb2e20d48dd1a2239f2e4"}, + {file = "statsmodels-0.12.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:02679bf39d35a2aceb2d9f6d332b4e1cda1797157df792fe867b45f2a14d20d3"}, + {file = "statsmodels-0.12.1-cp38-none-win32.whl", hash = "sha256:a652d8bfb4ec430b706a69e3fcbdac1cdf930823e3f9b8468e3e179d47097bbb"}, + {file = "statsmodels-0.12.1-cp38-none-win_amd64.whl", hash = "sha256:74c6c863d6f8a1f021d42f965b1b97eeea05293d3b18e3690c46eac0cf6d64d9"}, + {file = "statsmodels-0.12.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = 
"sha256:62be4dd5b4a254d59b7feb8093623ba6158080aa6758c2eb19105609da4b40fb"}, + {file = "statsmodels-0.12.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:630b9d938b0388488c66394597500dfba877e3b53da536369393a9a840b8f2a0"}, + {file = "statsmodels-0.12.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f585c02b716c161f00e6a2d10f9f3497f57191183dbd6ae7eaa988707023b1ee"}, + {file = "statsmodels-0.12.1-cp39-none-win32.whl", hash = "sha256:3dd59b7cd35843f4764b8a1476be20cf959d3da700327975f7cd2bf2a1b630b2"}, + {file = "statsmodels-0.12.1-cp39-none-win_amd64.whl", hash = "sha256:78813784f5fa612b4399c4963414799fbbb031188f1ad630a501c6b2af7e94e0"}, + {file = "statsmodels-0.12.1.tar.gz", hash = "sha256:a271b4ccec190148dccda25f0cbdcbf871f408fc1394a10a7dc1af4a62b91c8e"}, +] stevedore = [ {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, diff --git a/pyproject.toml b/pyproject.toml index 7c02ff1..d07df8f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -37,6 +37,7 @@ pandas = "^1.1.0" psycopg2 = "^2.8.5" # adapter for PostgreSQL rpy2 = "^3.4.1" sqlalchemy = "^1.3.18" +statsmodels = "^0.12.1" utm = "^0.7.0" # Jupyter Lab => notebooks with analyses using the developed package diff --git a/setup.cfg b/setup.cfg index 00d589d..1e46f21 100644 --- a/setup.cfg +++ b/setup.cfg @@ -271,6 +271,8 @@ ignore_missing_imports = true ignore_missing_imports = true [mypy-sqlalchemy.*] ignore_missing_imports = true +[mypy-statsmodels.*] +ignore_missing_imports = true [mypy-utm.*] ignore_missing_imports = true @@ -282,6 +284,8 @@ cache_dir = .cache/pytest console_output_style = count env = TESTING=true +filterwarnings = + ignore:::patsy.* markers = db: (integration) tests touching the database e2e: non-db and non-r integration tests From a5b590b24c6e78c9f1c07355282a6d88db5854e7 Mon Sep 17 00:00:00 2001 From: 
Alexander Hess Date: Sun, 31 Jan 2021 18:29:53 +0100 Subject: [PATCH 52/72] Add `Forecast.actual` column --- ...2af85bada01_store_actuals_with_forecast.py | 41 +++++++++++++++++++ src/urban_meal_delivery/db/forecasts.py | 5 +++ tests/db/test_forecasts.py | 12 ++++++ 3 files changed, 58 insertions(+) create mode 100644 migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py diff --git a/migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py b/migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py new file mode 100644 index 0000000..810fbb5 --- /dev/null +++ b/migrations/versions/rev_20210129_11_c2af85bada01_store_actuals_with_forecast.py @@ -0,0 +1,41 @@ +"""Store actuals with forecast. + +Revision: #c2af85bada01 at 2021-01-29 11:13:15 +Revises: #e86290e7305e +""" + +import os + +import sqlalchemy as sa +from alembic import op + +from urban_meal_delivery import configuration + + +revision = 'c2af85bada01' +down_revision = 'e86290e7305e' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision c2af85bada01.""" + op.add_column( + 'forecasts', + sa.Column('actual', sa.SmallInteger(), nullable=False), + schema=config.CLEAN_SCHEMA, + ) + op.create_check_constraint( + op.f('ck_forecasts_on_actuals_must_be_non_negative'), + 'forecasts', + 'actual >= 0', + schema=config.CLEAN_SCHEMA, + ) + + +def downgrade(): + """Downgrade to revision e86290e7305e.""" + op.drop_column('forecasts', 'actual', schema=config.CLEAN_SCHEMA) diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index 2edb695..f9ebc44 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -22,6 +22,10 @@ class Forecast(meta.Base): time_step = sa.Column(sa.SmallInteger, nullable=False) training_horizon = sa.Column(sa.SmallInteger, 
nullable=False) model = sa.Column(sa.Unicode(length=20), nullable=False) + # We also store the actual order counts for convenient retrieval. + # A `UniqueConstraint` below ensures that redundant values that + # are to be expected are consistend across rows. + actual = sa.Column(sa.SmallInteger, nullable=False) # Raw `.prediction`s are stored as `float`s (possibly negative). # The rounding is then done on the fly if required. prediction = sa.Column(postgresql.DOUBLE_PRECISION, nullable=False) @@ -62,6 +66,7 @@ class Forecast(meta.Base): sa.CheckConstraint( 'training_horizon > 0', name='training_horizon_must_be_positive', ), + sa.CheckConstraint('actual >= 0', name='actuals_must_be_non_negative'), sa.CheckConstraint( """ NOT ( diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py index 426de7b..d6780df 100644 --- a/tests/db/test_forecasts.py +++ b/tests/db/test_forecasts.py @@ -18,6 +18,7 @@ def forecast(pixel): time_step=60, training_horizon=8, model='hets', + actual=12, prediction=12.3, low80=1.23, high80=123.4, @@ -131,6 +132,16 @@ class TestConstraints: ): db_session.commit() + def test_non_negative_actuals(self, db_session, forecast): + """Insert an instance with invalid data.""" + forecast.actual = -1 + db_session.add(forecast) + + with pytest.raises( + sa_exc.IntegrityError, match='actuals_must_be_non_negative', + ): + db_session.commit() + def test_set_prediction_without_ci(self, db_session, forecast): """Sanity check to see that the check constraint ... 
@@ -388,6 +399,7 @@ class TestConstraints: time_step=forecast.time_step, training_horizon=forecast.training_horizon, model=forecast.model, + actual=forecast.actual, prediction=2, low80=1, high80=3, From 08b748c867381b2dc5bf2447da1195194dd2d707 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 18:50:24 +0100 Subject: [PATCH 53/72] Move `decomposition` module into `methods` sub-package - move the module - unify the corresponding tests in `tests.forecasts.methods` sub-package - make all `predict()` and the `stl()` function round results - streamline documentation --- setup.cfg | 2 +- src/urban_meal_delivery/forecasts/__init__.py | 1 - .../forecasts/methods/__init__.py | 1 + .../forecasts/methods/arima.py | 4 ++-- .../forecasts/{ => methods}/decomposition.py | 15 +++++++++------ src/urban_meal_delivery/forecasts/methods/ets.py | 4 ++-- tests/forecasts/methods/__init__.py | 1 + .../forecasts/{ => methods}/test_decomposition.py | 2 +- .../test_ts_methods.py} | 5 ++++- 9 files changed, 21 insertions(+), 14 deletions(-) rename src/urban_meal_delivery/forecasts/{ => methods}/decomposition.py (96%) create mode 100644 tests/forecasts/methods/__init__.py rename tests/forecasts/{ => methods}/test_decomposition.py (99%) rename tests/forecasts/{test_methods.py => methods/test_ts_methods.py} (96%) diff --git a/setup.cfg b/setup.cfg index 1e46f21..46e2db8 100644 --- a/setup.cfg +++ b/setup.cfg @@ -147,7 +147,7 @@ per-file-ignores = src/urban_meal_delivery/db/restaurants.py: # The module is not too complex. WPS232, - src/urban_meal_delivery/forecasts/decomposition.py: + src/urban_meal_delivery/forecasts/methods/decomposition.py: # The module is not too complex. 
WPS232, src/urban_meal_delivery/forecasts/timify.py: diff --git a/src/urban_meal_delivery/forecasts/__init__.py b/src/urban_meal_delivery/forecasts/__init__.py index 86dcac6..5ecdd1e 100644 --- a/src/urban_meal_delivery/forecasts/__init__.py +++ b/src/urban_meal_delivery/forecasts/__init__.py @@ -1,5 +1,4 @@ """Demand forecasting utilities.""" -from urban_meal_delivery.forecasts import decomposition from urban_meal_delivery.forecasts import methods from urban_meal_delivery.forecasts import timify diff --git a/src/urban_meal_delivery/forecasts/methods/__init__.py b/src/urban_meal_delivery/forecasts/methods/__init__.py index 316ae69..9b88926 100644 --- a/src/urban_meal_delivery/forecasts/methods/__init__.py +++ b/src/urban_meal_delivery/forecasts/methods/__init__.py @@ -1,4 +1,5 @@ """Various forecasting methods implemented as functions.""" from urban_meal_delivery.forecasts.methods import arima +from urban_meal_delivery.forecasts.methods import decomposition from urban_meal_delivery.forecasts.methods import ets diff --git a/src/urban_meal_delivery/forecasts/methods/arima.py b/src/urban_meal_delivery/forecasts/methods/arima.py index 976df3e..3abd60e 100644 --- a/src/urban_meal_delivery/forecasts/methods/arima.py +++ b/src/urban_meal_delivery/forecasts/methods/arima.py @@ -14,7 +14,7 @@ def predict( ) -> pd.DataFrame: """Predict with an automatically chosen ARIMA model. - Note: The function does not check if the `forecast` interval + Note: The function does not check if the `forecast_interval` extends the `training_ts`'s interval without a gap! 
Args: @@ -65,7 +65,7 @@ def predict( forecasts = pandas2ri.rpy2py(result) forecasts.index = forecast_interval - return forecasts.rename( + return forecasts.round(5).rename( columns={ 'Point Forecast': 'prediction', 'Lo 80': 'low80', diff --git a/src/urban_meal_delivery/forecasts/decomposition.py b/src/urban_meal_delivery/forecasts/methods/decomposition.py similarity index 96% rename from src/urban_meal_delivery/forecasts/decomposition.py rename to src/urban_meal_delivery/forecasts/methods/decomposition.py index a0762d5..3be8582 100644 --- a/src/urban_meal_delivery/forecasts/decomposition.py +++ b/src/urban_meal_delivery/forecasts/methods/decomposition.py @@ -169,10 +169,13 @@ def stl( # noqa:C901,WPS210,WPS211,WPS231 # Unpack the result to a `pd.DataFrame`. result = pandas2ri.rpy2py(result[0]) - result = { - 'seasonal': pd.Series(result[:, 0], index=time_series.index), - 'trend': pd.Series(result[:, 1], index=time_series.index), - 'residual': pd.Series(result[:, 2], index=time_series.index), - } + result = pd.DataFrame( + data={ + 'seasonal': result[:, 0], + 'trend': result[:, 1], + 'residual': result[:, 2], + }, + index=time_series.index, + ) - return pd.DataFrame(result) + return result.round(5) diff --git a/src/urban_meal_delivery/forecasts/methods/ets.py b/src/urban_meal_delivery/forecasts/methods/ets.py index 020e4a4..5b70aef 100644 --- a/src/urban_meal_delivery/forecasts/methods/ets.py +++ b/src/urban_meal_delivery/forecasts/methods/ets.py @@ -14,7 +14,7 @@ def predict( ) -> pd.DataFrame: """Predict with an automatically calibrated ETS model. - Note: The function does not check if the `forecast` interval + Note: The function does not check if the `forecast_interval` extends the `training_ts`'s interval without a gap! 
Args: @@ -66,7 +66,7 @@ def predict( forecasts = pandas2ri.rpy2py(result) forecasts.index = forecast_interval - return forecasts.rename( + return forecasts.round(5).rename( columns={ 'Point Forecast': 'prediction', 'Lo 80': 'low80', diff --git a/tests/forecasts/methods/__init__.py b/tests/forecasts/methods/__init__.py new file mode 100644 index 0000000..e767595 --- /dev/null +++ b/tests/forecasts/methods/__init__.py @@ -0,0 +1 @@ +"""Tests for the `urban_meal_delivery.forecasts.methods` sub-package.""" diff --git a/tests/forecasts/test_decomposition.py b/tests/forecasts/methods/test_decomposition.py similarity index 99% rename from tests/forecasts/test_decomposition.py rename to tests/forecasts/methods/test_decomposition.py index 1f20535..0687d9c 100644 --- a/tests/forecasts/test_decomposition.py +++ b/tests/forecasts/methods/test_decomposition.py @@ -7,7 +7,7 @@ import pytest from tests.forecasts.conftest import NS from tests.forecasts.conftest import VERTICAL_FREQUENCY -from urban_meal_delivery.forecasts import decomposition +from urban_meal_delivery.forecasts.methods import decomposition class TestInvalidArguments: diff --git a/tests/forecasts/test_methods.py b/tests/forecasts/methods/test_ts_methods.py similarity index 96% rename from tests/forecasts/test_methods.py rename to tests/forecasts/methods/test_ts_methods.py index 9b2f0f8..11691c7 100644 --- a/tests/forecasts/test_methods.py +++ b/tests/forecasts/methods/test_ts_methods.py @@ -1,4 +1,7 @@ -"""Test the `arima.predict()` and `ets.predict()` functions.""" +"""Test the `arima.predict()` and `ets.predict()` functions. + +We consider both "classical" time series prediction models. 
+""" import datetime as dt From 63e8e94145a1f5ef9da0c3cad5918dc04caa1acd Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 19:20:17 +0100 Subject: [PATCH 54/72] Add `Pixel.restaurants` property - the property loads all `Restaurant`s from the database that are within the `Pixel` --- setup.cfg | 3 +++ src/urban_meal_delivery/db/pixels.py | 18 +++++++++++++ tests/db/test_pixels.py | 39 ++++++++++++++++++++++++++-- 3 files changed, 58 insertions(+), 2 deletions(-) diff --git a/setup.cfg b/setup.cfg index 46e2db8..83d92d5 100644 --- a/setup.cfg +++ b/setup.cfg @@ -172,6 +172,9 @@ per-file-ignores = WPS202,WPS204,WPS214, # Do not check for Jones complexity in the test suite. WPS221, + # "Private" methods are really just a convention for + # fixtures without a return value. + WPS338, # We do not care about the number of "# noqa"s in the test suite. WPS402, # Allow closures. diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index f5ca091..5876f19 100644 --- a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -2,6 +2,8 @@ from __future__ import annotations +from typing import List + import folium import sqlalchemy as sa import utm @@ -111,6 +113,22 @@ class Pixel(meta.Base): return self._southwest + @property + def restaurants(self) -> List[db.Restaurant]: # pragma: no cover + """Obtain all `Restaurant`s in `self`.""" + if not hasattr(self, '_restaurants'): # noqa:WPS421 note:d334120e + self._restaurants = ( # noqa:ECE001 + db.session.query(db.Restaurant) + .join( + db.AddressPixelAssociation, + db.Restaurant.address_id == db.AddressPixelAssociation.address_id, + ) + .filter(db.AddressPixelAssociation.pixel_id == self.id) + .all() + ) + + return self._restaurants + def clear_map(self) -> Pixel: # pragma: no cover """Shortcut to the `.city.clear_map()` method. 
diff --git a/tests/db/test_pixels.py b/tests/db/test_pixels.py index d5acc4a..ed7bbec 100644 --- a/tests/db/test_pixels.py +++ b/tests/db/test_pixels.py @@ -88,7 +88,7 @@ class TestProperties: assert result == 1.0 - def test_northeast(self, pixel, city): + def test_northeast(self, pixel): """Test `Pixel.northeast` property.""" result = pixel.northeast @@ -102,7 +102,7 @@ class TestProperties: assert result1 is result2 - def test_southwest(self, pixel, city): + def test_southwest(self, pixel): """Test `Pixel.southwest` property.""" result = pixel.southwest @@ -115,3 +115,38 @@ class TestProperties: result2 = pixel.southwest assert result1 is result2 + + @pytest.fixture + def _restaurants_mock(self, mocker, monkeypatch, restaurant): + """A `Mock` whose `.return_value` is `[restaurant]`.""" + mock = mocker.Mock() + query = ( # noqa:ECE001 + mock.query.return_value.join.return_value.filter.return_value.all # noqa:E501,WPS219 + ) + query.return_value = [restaurant] + monkeypatch.setattr(db, 'session', mock) + + @pytest.mark.usefixtures('_restaurants_mock') + def test_restaurants(self, pixel, restaurant): + """Test `Pixel.restaurants` property.""" + result = pixel.restaurants + + assert result == [restaurant] + + @pytest.mark.usefixtures('_restaurants_mock') + def test_restaurants_is_cached(self, pixel): + """Test `Pixel.restaurants` property.""" + result1 = pixel.restaurants + result2 = pixel.restaurants + + assert result1 is result2 + + @pytest.mark.db + def test_restaurants_with_db(self, pixel): + """Test `Pixel.restaurants` property. + + This is a trivial integration test. 
+ """ + result = pixel.restaurants + + assert not result # = empty `list` From fd404e2b89cf305b853a79fc338aa295cf76f271 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 19:34:05 +0100 Subject: [PATCH 55/72] Adjust `Pixel.__repr__()` a tiny bit --- src/urban_meal_delivery/db/pixels.py | 4 ++-- tests/db/test_pixels.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/src/urban_meal_delivery/db/pixels.py b/src/urban_meal_delivery/db/pixels.py index 5876f19..9461d4d 100644 --- a/src/urban_meal_delivery/db/pixels.py +++ b/src/urban_meal_delivery/db/pixels.py @@ -52,7 +52,7 @@ class Pixel(meta.Base): def __repr__(self) -> str: """Non-literal text representation.""" - return '<{cls}: ({x}, {y})>'.format( + return '<{cls}: ({x}|{y})>'.format( cls=self.__class__.__name__, x=self.n_x, y=self.n_y, ) @@ -159,7 +159,7 @@ class Pixel(meta.Base): (self.southwest.latitude, self.southwest.longitude), (self.northeast.latitude, self.northeast.longitude), ) - info_text = f'Pixel({self.n_x}, {self.n_y})' + info_text = f'Pixel({self.n_x}|{self.n_y})' # Make the `Pixel`s look like a checkerboard. 
if (self.n_x + self.n_y) % 2: diff --git a/tests/db/test_pixels.py b/tests/db/test_pixels.py index ed7bbec..317ce56 100644 --- a/tests/db/test_pixels.py +++ b/tests/db/test_pixels.py @@ -18,7 +18,7 @@ class TestSpecialMethods: """`Pixel` has a non-literal text representation.""" result = repr(pixel) - assert result == f'' + assert result == f'' @pytest.mark.db From d45c60b7640fc2c629aa1dcadf107efa01d3717c Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 20:06:23 +0100 Subject: [PATCH 56/72] Add `OrderHistory.time_step` property --- src/urban_meal_delivery/forecasts/timify.py | 5 +++ tests/forecasts/timify/test_order_history.py | 34 +++++++++++++------- 2 files changed, 27 insertions(+), 12 deletions(-) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index cacee23..c52aef3 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -39,6 +39,11 @@ class OrderHistory: # The `_data` are populated by `.aggregate_orders()`. self._data = None + @property + def time_step(self) -> int: + """The length of one time step.""" + return self._time_step + @property def totals(self) -> pd.DataFrame: """The order totals by `Pixel` and `.time_step`. 
diff --git a/tests/forecasts/timify/test_order_history.py b/tests/forecasts/timify/test_order_history.py index eb6bbcc..cbf1530 100644 --- a/tests/forecasts/timify/test_order_history.py +++ b/tests/forecasts/timify/test_order_history.py @@ -6,33 +6,43 @@ from tests import config as test_config from urban_meal_delivery.forecasts import timify +@pytest.fixture +def order_history(grid): + """An `OrderHistory` object.""" + return timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) + + class TestSpecialMethods: """Test the special methods in `OrderHistory`.""" - @pytest.mark.parametrize('time_step', test_config.TIME_STEPS) - def test_instantiate(self, grid, time_step): + def test_instantiate(self, order_history): """Test `OrderHistory.__init__()`.""" - oh = timify.OrderHistory(grid=grid, time_step=time_step) - - assert oh is not None + assert order_history is not None class TestProperties: """Test the properties in `OrderHistory`.""" - def test_totals_is_cached(self, grid, monkeypatch): - """Test `.totals` property. + @pytest.mark.parametrize('time_step', test_config.TIME_STEPS) + def test_time_step(self, grid, time_step): + """Test `OrderHistory.time_step` property.""" + order_history = timify.OrderHistory(grid=grid, time_step=time_step) + + result = order_history.time_step + + assert result == time_step + + def test_totals_is_cached(self, order_history, monkeypatch): + """Test `OrderHistory.totals` property. The result of the `OrderHistory.aggregate_orders()` method call is cached in the `OrderHistory.totals` property. 
""" - oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) - sentinel = object() - monkeypatch.setattr(oh, 'aggregate_orders', lambda: sentinel) + monkeypatch.setattr(order_history, 'aggregate_orders', lambda: sentinel) - result1 = oh.totals - result2 = oh.totals + result1 = order_history.totals + result2 = order_history.totals assert result1 is result2 assert result1 is sentinel From 7b824a4a1288a9bd561c157229e9a8ceba0814ef Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 20:19:12 +0100 Subject: [PATCH 57/72] Shorten a couple of names - rename "total_orders" columns into "n_orders" - rename `.make_*_time_series()` methods into `.make_*_ts()` --- src/urban_meal_delivery/forecasts/timify.py | 30 +++++----- tests/forecasts/conftest.py | 8 +-- .../forecasts/timify/test_aggregate_orders.py | 46 +++++++-------- .../forecasts/timify/test_make_time_series.py | 58 +++++++++---------- 4 files changed, 71 insertions(+), 71 deletions(-) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index c52aef3..48d1732 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -53,7 +53,7 @@ class OrderHistory: Returns: order_totals: a one-column `DataFrame` with a `MultiIndex` of the "pixel_id"s and "start_at"s (i.e., beginnings of the intervals); - the column with data is "total_orders" + the column with data is "n_orders" """ if self._data is None: self._data = self.aggregate_orders() @@ -69,7 +69,7 @@ class OrderHistory: SELECT pixel_id, start_at, - COUNT(*) AS total_orders + COUNT(*) AS n_orders FROM ( SELECT pixel_id, @@ -152,7 +152,7 @@ class OrderHistory: return data.reindex(index, fill_value=0) - def make_horizontal_time_series( # noqa:WPS210 + def make_horizontal_ts( # noqa:WPS210 self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, ) -> Tuple[pd.Series, int, pd.Series]: """Slice a horizontal time series out of the 
`.totals`. @@ -209,19 +209,19 @@ class OrderHistory: # Take only the counts at the `predict_at` time. training_ts = intra_pixel.loc[ - first_start_at : last_start_at : self._n_daily_time_steps, # type: ignore - 'total_orders', + first_start_at : last_start_at : self._n_daily_time_steps, # type:ignore + 'n_orders', ] if len(training_ts) != frequency * train_horizon: raise RuntimeError('Not enough historic data for `predict_at`') - actuals_ts = intra_pixel.loc[[predict_at], 'total_orders'] + actuals_ts = intra_pixel.loc[[predict_at], 'n_orders'] if not len(actuals_ts): # pragma: no cover raise LookupError('`predict_at` is not in the order history') return training_ts, frequency, actuals_ts - def make_vertical_time_series( # noqa:WPS210 + def make_vertical_ts( # noqa:WPS210 self, pixel_id: int, predict_day: dt.date, train_horizon: int, ) -> Tuple[pd.Series, int, pd.Series]: """Slice a vertical time series out of the `.totals`. @@ -277,8 +277,8 @@ class OrderHistory: # Take all the counts between `first_train_day` and `last_train_day`. training_ts = intra_pixel.loc[ - first_start_at:last_start_at, # type: ignore - 'total_orders', + first_start_at:last_start_at, # type:ignore + 'n_orders', ] if len(training_ts) != frequency * train_horizon: raise RuntimeError('Not enough historic data for `predict_day`') @@ -299,15 +299,15 @@ class OrderHistory: ) - dt.timedelta(minutes=self._time_step) actuals_ts = intra_pixel.loc[ - first_prediction_at:last_prediction_at, # type: ignore - 'total_orders', + first_prediction_at:last_prediction_at, # type:ignore + 'n_orders', ] if not len(actuals_ts): # pragma: no cover raise LookupError('`predict_day` is not in the order history') return training_ts, frequency, actuals_ts - def make_real_time_time_series( # noqa:WPS210 + def make_realtime_ts( # noqa:WPS210 self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, ) -> Tuple[pd.Series, int, pd.Series]: """Slice a vertical real-time time series out of the `.totals`. 
@@ -374,8 +374,8 @@ class OrderHistory: # Take all the counts between `first_train_day` and `last_train_day`, # including the ones on the `predict_at` day prior to `predict_at`. training_ts = intra_pixel.loc[ - first_start_at:last_start_at, # type: ignore - 'total_orders', + first_start_at:last_start_at, # type:ignore + 'n_orders', ] n_time_steps_on_predict_day = ( ( @@ -394,7 +394,7 @@ class OrderHistory: if len(training_ts) != frequency * train_horizon + n_time_steps_on_predict_day: raise RuntimeError('Not enough historic data for `predict_day`') - actuals_ts = intra_pixel.loc[[predict_at], 'total_orders'] + actuals_ts = intra_pixel.loc[[predict_at], 'n_orders'] if not len(actuals_ts): # pragma: no cover raise LookupError('`predict_at` is not in the order history') diff --git a/tests/forecasts/conftest.py b/tests/forecasts/conftest.py index 08c2439..c7fcf3b 100644 --- a/tests/forecasts/conftest.py +++ b/tests/forecasts/conftest.py @@ -42,8 +42,8 @@ def horizontal_datetime_index(): @pytest.fixture def horizontal_no_demand(horizontal_datetime_index): - """A horizontal time series of order totals when there was no demand.""" - return pd.Series(0, index=horizontal_datetime_index, name='order_totals') + """A horizontal time series with order totals: no demand.""" + return pd.Series(0, index=horizontal_datetime_index, name='n_orders') @pytest.fixture @@ -72,5 +72,5 @@ def vertical_datetime_index(): @pytest.fixture def vertical_no_demand(vertical_datetime_index): - """A vertical time series of order totals when there was no demand.""" - return pd.Series(0, index=vertical_datetime_index, name='order_totals') + """A vertical time series with order totals: no demand.""" + return pd.Series(0, index=vertical_datetime_index, name='n_orders') diff --git a/tests/forecasts/timify/test_aggregate_orders.py b/tests/forecasts/timify/test_aggregate_orders.py index 75f0531..325db74 100644 --- a/tests/forecasts/timify/test_aggregate_orders.py +++ 
b/tests/forecasts/timify/test_aggregate_orders.py @@ -91,9 +91,9 @@ class TestAggregateOrders: # The resulting `DataFrame` has 12 rows holding `1`s. assert len(result) == 12 - assert result['total_orders'].min() == 1 - assert result['total_orders'].max() == 1 - assert result['total_orders'].sum() == 12 + assert result['n_orders'].min() == 1 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 12 def test_evenly_distributed_ad_hoc_orders_with_no_demand_late( # noqa:WPS218 self, db_session, one_pixel_grid, restaurant, make_order, @@ -123,10 +123,10 @@ class TestAggregateOrders: # Even though there are only 10 orders, there are 12 rows in the `DataFrame`. # That is so as `0`s are filled in for hours without any demand at the end. assert len(result) == 12 - assert result['total_orders'].min() == 0 - assert result['total_orders'].max() == 1 - assert result.iloc[:10]['total_orders'].sum() == 10 - assert result.iloc[10:]['total_orders'].sum() == 0 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result.iloc[:10]['n_orders'].sum() == 10 + assert result.iloc[10:]['n_orders'].sum() == 0 def test_one_ad_hoc_order_every_other_hour( self, db_session, one_pixel_grid, restaurant, make_order, @@ -155,9 +155,9 @@ class TestAggregateOrders: # The resulting `DataFrame` has 12 rows, 6 holding `0`s, and 6 holding `1`s. assert len(result) == 12 - assert result['total_orders'].min() == 0 - assert result['total_orders'].max() == 1 - assert result['total_orders'].sum() == 6 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 6 def test_one_ad_hoc_and_one_pre_order( self, db_session, one_pixel_grid, restaurant, make_order, @@ -199,9 +199,9 @@ class TestAggregateOrders: # The resulting `DataFrame` has 12 rows, 11 holding `0`s, and one holding a `1`. 
assert len(result) == 12 - assert result['total_orders'].min() == 0 - assert result['total_orders'].max() == 1 - assert result['total_orders'].sum() == 1 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 1 def test_evenly_distributed_ad_hoc_orders_with_half_hour_time_steps( # noqa:WPS218 self, db_session, one_pixel_grid, restaurant, make_order, @@ -234,10 +234,10 @@ class TestAggregateOrders: # The resulting `DataFrame` has 24 rows for the 24 30-minute time steps. # The rows' values are `0` and `1` alternating. assert len(result) == 24 - assert result['total_orders'].min() == 0 - assert result['total_orders'].max() == 1 - assert result.iloc[::2]['total_orders'].sum() == 12 - assert result.iloc[1::2]['total_orders'].sum() == 0 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result.iloc[::2]['n_orders'].sum() == 12 + assert result.iloc[1::2]['n_orders'].sum() == 0 def test_ad_hoc_orders_over_two_days( self, db_session, one_pixel_grid, restaurant, make_order, @@ -285,9 +285,9 @@ class TestAggregateOrders: # The resulting `DataFrame` has 24 rows, 12 for each day. assert len(result) == 24 - assert result['total_orders'].min() == 0 - assert result['total_orders'].max() == 1 - assert result['total_orders'].sum() == 18 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 1 + assert result['n_orders'].sum() == 18 @pytest.fixture def two_pixel_grid( # noqa:WPS211 @@ -381,6 +381,6 @@ class TestAggregateOrders: # The resulting `DataFrame` has 24 rows, 12 for each pixel. 
assert len(result) == 24 - assert result['total_orders'].min() == 0 - assert result['total_orders'].max() == 2 - assert result['total_orders'].sum() == 30 + assert result['n_orders'].min() == 0 + assert result['n_orders'].max() == 2 + assert result['n_orders'].sum() == 30 diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index 4dc187d..d828a9a 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -41,7 +41,7 @@ def order_totals(good_pixel_id): index = pd.MultiIndex.from_tuples(gen) index.names = ['pixel_id', 'start_at'] - df = pd.DataFrame(data={'total_orders': 0}, index=index) + df = pd.DataFrame(data={'n_orders': 0}, index=index) # Sanity check: n_pixels * n_time_steps_per_day * n_weekdays * n_weeks. assert len(df) == 2 * 12 * (7 * 2 + 1) @@ -88,13 +88,13 @@ def bad_predict_at(): class TestMakeHorizontalTimeSeries: - """Test the `OrderHistory.make_horizontal_time_series()` method.""" + """Test the `OrderHistory.make_horizontal_ts()` method.""" @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): """A `pixel_id` that is not in the `grid`.""" with pytest.raises(LookupError): - order_history.make_horizontal_time_series( + order_history.make_horizontal_ts( pixel_id=999_999, predict_at=good_predict_at, train_horizon=train_horizon, @@ -105,7 +105,7 @@ class TestMakeHorizontalTimeSeries: self, order_history, good_pixel_id, good_predict_at, train_horizon, ): """The time series come as a `pd.Series`.""" - result = order_history.make_horizontal_time_series( + result = order_history.make_horizontal_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, @@ -114,9 +114,9 @@ class TestMakeHorizontalTimeSeries: training_ts, _, actuals_ts = result assert isinstance(training_ts, pd.Series) - assert training_ts.name == 'total_orders' + 
assert training_ts.name == 'n_orders' assert isinstance(actuals_ts, pd.Series) - assert actuals_ts.name == 'total_orders' + assert actuals_ts.name == 'n_orders' @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_time_series_have_correct_length( @@ -126,7 +126,7 @@ class TestMakeHorizontalTimeSeries: ... whereas the time series with the actual order counts has only `1` value. """ - result = order_history.make_horizontal_time_series( + result = order_history.make_horizontal_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, @@ -142,7 +142,7 @@ class TestMakeHorizontalTimeSeries: self, order_history, good_pixel_id, good_predict_at, train_horizon, ): """The `frequency` must be `7`.""" - result = order_history.make_horizontal_time_series( + result = order_history.make_horizontal_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, @@ -161,7 +161,7 @@ class TestMakeHorizontalTimeSeries: ... the history of order totals is not long enough. """ with pytest.raises(RuntimeError): - order_history.make_horizontal_time_series( + order_history.make_horizontal_ts( pixel_id=good_pixel_id, predict_at=bad_predict_at, train_horizon=train_horizon, @@ -175,19 +175,19 @@ class TestMakeHorizontalTimeSeries: ... the history of order totals can never be long enough. 
""" with pytest.raises(RuntimeError): - order_history.make_horizontal_time_series( + order_history.make_horizontal_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, ) class TestMakeVerticalTimeSeries: - """Test the `OrderHistory.make_vertical_time_series()` method.""" + """Test the `OrderHistory.make_vertical_ts()` method.""" @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): """A `pixel_id` that is not in the `grid`.""" with pytest.raises(LookupError): - order_history.make_vertical_time_series( + order_history.make_vertical_ts( pixel_id=999_999, predict_day=good_predict_at.date(), train_horizon=train_horizon, @@ -198,7 +198,7 @@ class TestMakeVerticalTimeSeries: self, order_history, good_pixel_id, good_predict_at, train_horizon, ): """The time series come as `pd.Series`.""" - result = order_history.make_vertical_time_series( + result = order_history.make_vertical_ts( pixel_id=good_pixel_id, predict_day=good_predict_at.date(), train_horizon=train_horizon, @@ -207,9 +207,9 @@ class TestMakeVerticalTimeSeries: training_ts, _, actuals_ts = result assert isinstance(training_ts, pd.Series) - assert training_ts.name == 'total_orders' + assert training_ts.name == 'n_orders' assert isinstance(actuals_ts, pd.Series) - assert actuals_ts.name == 'total_orders' + assert actuals_ts.name == 'n_orders' @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_time_series_have_correct_length( @@ -223,7 +223,7 @@ class TestMakeVerticalTimeSeries: The time series with the actual order counts always holds one observation per time step of a day. 
""" - result = order_history.make_vertical_time_series( + result = order_history.make_vertical_ts( pixel_id=good_pixel_id, predict_day=good_predict_at.date(), train_horizon=train_horizon, @@ -245,7 +245,7 @@ class TestMakeVerticalTimeSeries: self, order_history, good_pixel_id, good_predict_at, train_horizon, ): """The `frequency` is the number of weekly time steps.""" - result = order_history.make_vertical_time_series( + result = order_history.make_vertical_ts( pixel_id=good_pixel_id, predict_day=good_predict_at.date(), train_horizon=train_horizon, @@ -270,7 +270,7 @@ class TestMakeVerticalTimeSeries: ... the history of order totals is not long enough. """ with pytest.raises(RuntimeError): - order_history.make_vertical_time_series( + order_history.make_vertical_ts( pixel_id=good_pixel_id, predict_day=bad_predict_at.date(), train_horizon=train_horizon, @@ -284,7 +284,7 @@ class TestMakeVerticalTimeSeries: ... the history of order totals can never be long enough. """ with pytest.raises(RuntimeError): - order_history.make_vertical_time_series( + order_history.make_vertical_ts( pixel_id=good_pixel_id, predict_day=good_predict_at.date(), train_horizon=999, @@ -292,13 +292,13 @@ class TestMakeVerticalTimeSeries: class TestMakeRealTimeTimeSeries: - """Test the `OrderHistory.make_real_time_time_series()` method.""" + """Test the `OrderHistory.make_realtime_ts()` method.""" @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_wrong_pixel(self, order_history, good_predict_at, train_horizon): """A `pixel_id` that is not in the `grid`.""" with pytest.raises(LookupError): - order_history.make_real_time_time_series( + order_history.make_realtime_ts( pixel_id=999_999, predict_at=good_predict_at, train_horizon=train_horizon, @@ -309,7 +309,7 @@ class TestMakeRealTimeTimeSeries: self, order_history, good_pixel_id, good_predict_at, train_horizon, ): """The time series come as `pd.Series`.""" - result = order_history.make_real_time_time_series( + result = 
order_history.make_realtime_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, @@ -318,9 +318,9 @@ class TestMakeRealTimeTimeSeries: training_ts, _, actuals_ts = result assert isinstance(training_ts, pd.Series) - assert training_ts.name == 'total_orders' + assert training_ts.name == 'n_orders' assert isinstance(actuals_ts, pd.Series) - assert actuals_ts.name == 'total_orders' + assert actuals_ts.name == 'n_orders' @pytest.mark.parametrize('train_horizon', test_config.TRAIN_HORIZONS) def test_time_series_have_correct_length1( @@ -341,7 +341,7 @@ class TestMakeRealTimeTimeSeries: config.SERVICE_START, 0, ) - result = order_history.make_real_time_time_series( + result = order_history.make_realtime_ts( pixel_id=good_pixel_id, predict_at=predict_at, train_horizon=train_horizon, ) @@ -372,7 +372,7 @@ class TestMakeRealTimeTimeSeries: """ assert good_predict_at.hour == test_config.NOON - result = order_history.make_real_time_time_series( + result = order_history.make_realtime_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, @@ -400,7 +400,7 @@ class TestMakeRealTimeTimeSeries: self, order_history, good_pixel_id, good_predict_at, train_horizon, ): """The `frequency` is the number of weekly time steps.""" - result = order_history.make_real_time_time_series( + result = order_history.make_realtime_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=train_horizon, @@ -425,7 +425,7 @@ class TestMakeRealTimeTimeSeries: ... the history of order totals is not long enough. """ with pytest.raises(RuntimeError): - order_history.make_real_time_time_series( + order_history.make_realtime_ts( pixel_id=good_pixel_id, predict_at=bad_predict_at, train_horizon=train_horizon, @@ -439,6 +439,6 @@ class TestMakeRealTimeTimeSeries: ... the history of order totals can never be long enough. 
""" with pytest.raises(RuntimeError): - order_history.make_real_time_time_series( + order_history.make_realtime_ts( pixel_id=good_pixel_id, predict_at=good_predict_at, train_horizon=999, ) From 47ef1f875961958a112b5d27ea8ae99ec1041e4c Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 21:24:48 +0100 Subject: [PATCH 58/72] Add `OrderHistory.first/last_order()` methods - get the `datetime` of the first or last order within a pixel - unify some fixtures in `tests.forecasts.timify.conftest` --- src/urban_meal_delivery/forecasts/timify.py | 68 +++++++++++++++++++ tests/db/fake_data/static_fixtures.py | 2 +- tests/forecasts/timify/conftest.py | 54 +++++++++++++++ .../forecasts/timify/test_make_time_series.py | 45 ------------ tests/forecasts/timify/test_order_history.py | 56 +++++++++++++-- 5 files changed, 173 insertions(+), 52 deletions(-) create mode 100644 tests/forecasts/timify/conftest.py diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 48d1732..4f85dfe 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -152,6 +152,74 @@ class OrderHistory: return data.reindex(index, fill_value=0) + def first_order_at(self, pixel_id: int) -> dt.datetime: + """Get the time step with the first order in a pixel. + + Args: + pixel_id: pixel for which to get the first order + + Returns: + minimum "start_at" from when orders take place + + Raises: + LookupError: `pixel_id` not in `grid` + + # noqa:DAR401 RuntimeError + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + first_order = intra_pixel[intra_pixel['n_orders'] > 0].index.min() + + # Sanity check: without an `Order`, the `Pixel` should not exist. + if first_order is pd.NaT: # pragma: no cover + raise RuntimeError('no orders in the pixel') + + # Return a proper `datetime.datetime` object. 
+ return dt.datetime( + first_order.year, + first_order.month, + first_order.day, + first_order.hour, + first_order.minute, + ) + + def last_order_at(self, pixel_id: int) -> dt.datetime: + """Get the time step with the last order in a pixel. + + Args: + pixel_id: pixel for which to get the last order + + Returns: + maximum "start_at" from when orders take place + + Raises: + LookupError: `pixel_id` not in `grid` + + # noqa:DAR401 RuntimeError + """ + try: + intra_pixel = self.totals.loc[pixel_id] + except KeyError: + raise LookupError('The `pixel_id` is not in the `grid`') from None + + last_order = intra_pixel[intra_pixel['n_orders'] > 0].index.max() + + # Sanity check: without an `Order`, the `Pixel` should not exist. + if last_order is pd.NaT: # pragma: no cover + raise RuntimeError('no orders in the pixel') + + # Return a proper `datetime.datetime` object. + return dt.datetime( + last_order.year, + last_order.month, + last_order.day, + last_order.hour, + last_order.minute, + ) + def make_horizontal_ts( # noqa:WPS210 self, pixel_id: int, predict_at: dt.datetime, train_horizon: int, ) -> Tuple[pd.Series, int, pd.Series]: diff --git a/tests/db/fake_data/static_fixtures.py b/tests/db/fake_data/static_fixtures.py index 6a386de..60d4181 100644 --- a/tests/db/fake_data/static_fixtures.py +++ b/tests/db/fake_data/static_fixtures.py @@ -67,4 +67,4 @@ def grid(city): @pytest.fixture def pixel(grid): """The `Pixel` in the lower-left corner of the `grid`.""" - return db.Pixel(grid=grid, n_x=0, n_y=0) + return db.Pixel(id=1, grid=grid, n_x=0, n_y=0) diff --git a/tests/forecasts/timify/conftest.py b/tests/forecasts/timify/conftest.py new file mode 100644 index 0000000..6143cfe --- /dev/null +++ b/tests/forecasts/timify/conftest.py @@ -0,0 +1,54 @@ +"""Fixture for testing the `urban_meal_delivery.forecast.timify` module.""" + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import config +from 
urban_meal_delivery.forecasts import timify + + +@pytest.fixture +def good_pixel_id(pixel): + """A `pixel_id` that is on the `grid`.""" + return pixel.id # `== 1` + + +@pytest.fixture +def order_totals(good_pixel_id): + """A mock for `OrderHistory.totals`. + + To be a bit more realistic, we sample two pixels on the `grid`. + + Uses the LONG_TIME_STEP as the length of a time step. + """ + pixel_ids = [good_pixel_id, good_pixel_id + 1] + + gen = ( + (pixel_id, start_at) + for pixel_id in pixel_ids + for start_at in pd.date_range( + test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + # Re-index `data` filling in `0`s where there is no demand. + index = pd.MultiIndex.from_tuples(gen) + index.names = ['pixel_id', 'start_at'] + + df = pd.DataFrame(data={'n_orders': 1}, index=index) + + # Sanity check: n_pixels * n_time_steps_per_day * n_weekdays * n_weeks. + assert len(df) == 2 * 12 * (7 * 2 + 1) + + return df + + +@pytest.fixture +def order_history(order_totals, grid): + """An `OrderHistory` object that does not need the database.""" + oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) + oh._data = order_totals + + return oh diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index d828a9a..78189c7 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -11,51 +11,6 @@ import pytest from tests import config as test_config from urban_meal_delivery import config -from urban_meal_delivery.forecasts import timify - - -@pytest.fixture -def good_pixel_id(): - """A `pixel_id` that is on the `grid`.""" - return 1 - - -@pytest.fixture -def order_totals(good_pixel_id): - """A mock for `OrderHistory.totals`. - - To be a bit more realistic, we sample two pixels on the `grid`. 
- """ - pixel_ids = [good_pixel_id, good_pixel_id + 1] - - gen = ( - (pixel_id, start_at) - for pixel_id in pixel_ids - for start_at in pd.date_range( - test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', - ) - if config.SERVICE_START <= start_at.hour < config.SERVICE_END - ) - - # Re-index `data` filling in `0`s where there is no demand. - index = pd.MultiIndex.from_tuples(gen) - index.names = ['pixel_id', 'start_at'] - - df = pd.DataFrame(data={'n_orders': 0}, index=index) - - # Sanity check: n_pixels * n_time_steps_per_day * n_weekdays * n_weeks. - assert len(df) == 2 * 12 * (7 * 2 + 1) - - return df - - -@pytest.fixture -def order_history(order_totals, grid): - """An `OrderHistory` object that does not need the database.""" - oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) - oh._data = order_totals - - return oh @pytest.fixture diff --git a/tests/forecasts/timify/test_order_history.py b/tests/forecasts/timify/test_order_history.py index cbf1530..657e615 100644 --- a/tests/forecasts/timify/test_order_history.py +++ b/tests/forecasts/timify/test_order_history.py @@ -1,17 +1,13 @@ """Test the basic functionalities in the `OrderHistory` class.""" +import datetime as dt + import pytest from tests import config as test_config from urban_meal_delivery.forecasts import timify -@pytest.fixture -def order_history(grid): - """An `OrderHistory` object.""" - return timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) - - class TestSpecialMethods: """Test the special methods in `OrderHistory`.""" @@ -32,12 +28,29 @@ class TestProperties: assert result == time_step + def test_totals(self, order_history, order_totals): + """Test `OrderHistory.totals` property. + + The result of the `OrderHistory.aggregate_orders()` method call + is cached in the `OrderHistory.totals` property. + + Note: `OrderHistory.aggregate_orders()` is not called as + `OrderHistory._data` is already set in the `order_history` fixture. 
+ """ + result = order_history.totals + + assert result is order_totals + def test_totals_is_cached(self, order_history, monkeypatch): """Test `OrderHistory.totals` property. The result of the `OrderHistory.aggregate_orders()` method call is cached in the `OrderHistory.totals` property. + + Note: We make `OrderHistory.aggregate_orders()` return a `sentinel` + that is cached into `OrderHistory._data`, which must be unset first. """ + monkeypatch.setattr(order_history, '_data', None) sentinel = object() monkeypatch.setattr(order_history, 'aggregate_orders', lambda: sentinel) @@ -46,3 +59,34 @@ class TestProperties: assert result1 is result2 assert result1 is sentinel + + +class TestMethods: + """Test various methods in `OrderHistory`.""" + + def test_first_order_at_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.first_order_at()` with good input.""" + result = order_history.first_order_at(good_pixel_id) + + assert result == test_config.START + + def test_first_order_at_non_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.first_order_at()` with bad input.""" + with pytest.raises( + LookupError, match='`pixel_id` is not in the `grid`', + ): + order_history.first_order_at(-1) + + def test_last_order_at_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.last_order_at()` with good input.""" + result = order_history.last_order_at(good_pixel_id) + + one_time_step = dt.timedelta(minutes=test_config.LONG_TIME_STEP) + assert result == test_config.END - one_time_step + + def test_last_order_at_non_existing_pixel(self, order_history, good_pixel_id): + """Test `OrderHistory.last_order_at()` with bad input.""" + with pytest.raises( + LookupError, match='`pixel_id` is not in the `grid`', + ): + order_history.last_order_at(-1) From 1d63623dfca96e924345cbe6cf477d9b093e1489 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Sun, 31 Jan 2021 21:55:32 +0100 Subject: [PATCH 59/72] Add `Forecast.__repr__()` 
--- src/urban_meal_delivery/db/forecasts.py | 10 ++++++++++ tests/db/test_forecasts.py | 9 +++++++++ 2 files changed, 19 insertions(+) diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index f9ebc44..352320e 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -123,3 +123,13 @@ class Forecast(meta.Base): # Relationships pixel = orm.relationship('Pixel', back_populates='forecasts') + + def __repr__(self) -> str: + """Non-literal text representation.""" + return '<{cls}: {prediction} for pixel ({n_x}|{n_y}) at {start_at}>'.format( + cls=self.__class__.__name__, + prediction=self.prediction, + n_x=self.pixel.n_x, + n_y=self.pixel.n_y, + start_at=self.start_at, + ) diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py index d6780df..8cf9703 100644 --- a/tests/db/test_forecasts.py +++ b/tests/db/test_forecasts.py @@ -34,6 +34,15 @@ class TestSpecialMethods: """Test instantiation of a new `Forecast` object.""" assert forecast is not None + def test_text_representation(self, forecast): + """`Forecast` has a non-literal text representation.""" + result = repr(forecast) + + assert ( + result + == f'' # noqa:E501 + ) + @pytest.mark.db @pytest.mark.no_cover From b8952213d8ba46edbdcad2b21053a9635c1c4462 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 1 Feb 2021 11:32:10 +0100 Subject: [PATCH 60/72] Add `extrapolate_season.predict()` function - the function implements a forecasting "method" similar to the seasonal naive method => instead of simply taking the last observation given a seasonal lag, it linearly extrapolates all observations of the same seasonal lag from the past into the future; conceptually, it is like the seasonal naive method with built-in smoothing - the function is tested just like the `arima.predict()` and `ets.predict()` functions + rename the `tests.forecasts.methods.test_ts_methods` module into `tests.forecasts.methods.test_predictions` - 
re-organize some constants in the `tests` package - streamline some docstrings --- setup.cfg | 3 + .../forecasts/methods/__init__.py | 1 + .../forecasts/methods/extrapolate_season.py | 72 ++++++++++++++ tests/config.py | 7 +- tests/forecasts/__init__.py | 2 +- tests/forecasts/conftest.py | 7 -- tests/forecasts/methods/test_decomposition.py | 99 ++++++++++++++----- ...test_ts_methods.py => test_predictions.py} | 17 ++-- tests/forecasts/timify/conftest.py | 5 +- 9 files changed, 170 insertions(+), 43 deletions(-) create mode 100644 src/urban_meal_delivery/forecasts/methods/extrapolate_season.py rename tests/forecasts/methods/{test_ts_methods.py => test_predictions.py} (89%) diff --git a/setup.cfg b/setup.cfg index 83d92d5..86a0f8d 100644 --- a/setup.cfg +++ b/setup.cfg @@ -150,6 +150,9 @@ per-file-ignores = src/urban_meal_delivery/forecasts/methods/decomposition.py: # The module is not too complex. WPS232, + src/urban_meal_delivery/forecasts/methods/extrapolate_season.py: + # The module is not too complex. + WPS232, src/urban_meal_delivery/forecasts/timify.py: # No SQL injection as the inputs come from a safe source. 
S608, diff --git a/src/urban_meal_delivery/forecasts/methods/__init__.py b/src/urban_meal_delivery/forecasts/methods/__init__.py index 9b88926..5690e79 100644 --- a/src/urban_meal_delivery/forecasts/methods/__init__.py +++ b/src/urban_meal_delivery/forecasts/methods/__init__.py @@ -3,3 +3,4 @@ from urban_meal_delivery.forecasts.methods import arima from urban_meal_delivery.forecasts.methods import decomposition from urban_meal_delivery.forecasts.methods import ets +from urban_meal_delivery.forecasts.methods import extrapolate_season diff --git a/src/urban_meal_delivery/forecasts/methods/extrapolate_season.py b/src/urban_meal_delivery/forecasts/methods/extrapolate_season.py new file mode 100644 index 0000000..dfbc9c4 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/methods/extrapolate_season.py @@ -0,0 +1,72 @@ +"""Forecast by linear extrapolation of a seasonal component.""" + +import pandas as pd +from statsmodels.tsa import api as ts_stats + + +def predict( + training_ts: pd.Series, forecast_interval: pd.DatetimeIndex, *, frequency: int, +) -> pd.DataFrame: + """Extrapolate a seasonal component with a linear model. + + A naive forecast for each time unit of the day is calculated by linear + extrapolation from all observations of the same time of day and on the same + day of the week (i.e., same seasonal lag). + + Note: The function does not check if the `forecast_interval` + extends the `training_ts`'s interval without a gap! 
+ + Args: + training_ts: past observations to be fitted; + assumed to be a seasonal component after time series decomposition + forecast_interval: interval into which the `training_ts` is forecast; + its length becomes the numbers of time steps to be forecast + frequency: frequency of the observations in the `training_ts` + + Returns: + predictions: point forecasts (i.e., the "prediction" column); + includes the four "low/high80/95" columns for the confidence intervals + that only contain `NaN` values as this method does not make + any statistical assumptions about the time series process + + Raises: + ValueError: if `training_ts` contains `NaN` values or some predictions + could not be made for time steps in the `forecast_interval` + """ + if training_ts.isnull().any(): + raise ValueError('`training_ts` must not contain `NaN` values') + + extrapolated_ts = pd.Series(index=forecast_interval, dtype=float) + seasonal_lag = frequency * (training_ts.index[1] - training_ts.index[0]) + + for lag in range(frequency): + # Obtain all `observations` of the same seasonal lag and + # fit a straight line through them (= `trend`). + observations = training_ts[slice(lag, 999_999_999, frequency)] + trend = observations - ts_stats.detrend(observations) + + # Create a point forecast by linear extrapolation + # for one or even more time steps ahead. + slope = trend[-1] - trend[-2] + prediction = trend[-1] + slope + idx = observations.index.max() + seasonal_lag + while idx <= forecast_interval.max(): + if idx in forecast_interval: + extrapolated_ts.loc[idx] = prediction + prediction += slope + idx += seasonal_lag + + # Sanity check. 
+ if extrapolated_ts.isnull().any(): # pragma: no cover + raise ValueError('missing predictions in the `forecast_interval`') + + return pd.DataFrame( + data={ + 'prediction': extrapolated_ts.round(5), + 'low80': float('NaN'), + 'high80': float('NaN'), + 'low95': float('NaN'), + 'high95': float('NaN'), + }, + index=forecast_interval, + ) diff --git a/tests/config.py b/tests/config.py index fd3e115..13fec36 100644 --- a/tests/config.py +++ b/tests/config.py @@ -16,10 +16,15 @@ NOON = 12 START = datetime.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) END = datetime.datetime(YEAR, MONTH, 15, config.SERVICE_END, 0) -# Default time steps, for example, for `OrderHistory` objects. +# Default time steps (in minutes), for example, for `OrderHistory` objects. LONG_TIME_STEP = 60 SHORT_TIME_STEP = 30 TIME_STEPS = (SHORT_TIME_STEP, LONG_TIME_STEP) +# The `frequency` of vertical time series is the number of days in a week, 7, +# times the number of time steps per day. With 12 operating hours (11 am - 11 pm) +# the `frequency`s are 84 and 168 for the `LONG/SHORT_TIME_STEP`s. +VERTICAL_FREQUENCY_LONG = 7 * 12 +VERTICAL_FREQUENCY_SHORT = 7 * 24 # Default training horizons, for example, for # `OrderHistory.make_horizontal_time_series()`. diff --git a/tests/forecasts/__init__.py b/tests/forecasts/__init__.py index 50eaeb3..5d46e8b 100644 --- a/tests/forecasts/__init__.py +++ b/tests/forecasts/__init__.py @@ -1 +1 @@ -"""Test the forecasting-related functionality.""" +"""Tests for the `urban_meal_delivery.forecasts` sub-package.""" diff --git a/tests/forecasts/conftest.py b/tests/forecasts/conftest.py index c7fcf3b..ede73ba 100644 --- a/tests/forecasts/conftest.py +++ b/tests/forecasts/conftest.py @@ -9,13 +9,6 @@ from tests import config as test_config from urban_meal_delivery import config -# See remarks in `vertical_datetime_index` fixture. -VERTICAL_FREQUENCY = 7 * 12 - -# The default `ns` suggested for the STL method. 
-NS = 7 - - @pytest.fixture def horizontal_datetime_index(): """A `pd.Index` with `DateTime` values. diff --git a/tests/forecasts/methods/test_decomposition.py b/tests/forecasts/methods/test_decomposition.py index 0687d9c..c103c3f 100644 --- a/tests/forecasts/methods/test_decomposition.py +++ b/tests/forecasts/methods/test_decomposition.py @@ -5,11 +5,14 @@ import math import pandas as pd import pytest -from tests.forecasts.conftest import NS -from tests.forecasts.conftest import VERTICAL_FREQUENCY +from tests import config as test_config from urban_meal_delivery.forecasts.methods import decomposition +# The "periodic" `ns` suggested for the STL method. +NS = 999 + + class TestInvalidArguments: """Test `stl()` with invalid arguments.""" @@ -18,85 +21,118 @@ class TestInvalidArguments: time_series = pd.Series(dtype=float, index=vertical_datetime_index) with pytest.raises(ValueError, match='`NaN` values'): - decomposition.stl(time_series, frequency=VERTICAL_FREQUENCY, ns=99) + decomposition.stl( + time_series, frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=NS, + ) def test_ns_not_odd(self, vertical_no_demand): """`ns` must be odd and `>= 7`.""" with pytest.raises(ValueError, match='`ns`'): - decomposition.stl(vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=8) + decomposition.stl( + vertical_no_demand, frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=8, + ) @pytest.mark.parametrize('ns', [-99, -1, 1, 5]) def test_ns_smaller_than_seven(self, vertical_no_demand, ns): """`ns` must be odd and `>= 7`.""" with pytest.raises(ValueError, match='`ns`'): - decomposition.stl(vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=ns) + decomposition.stl( + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=ns, + ) def test_nt_not_odd(self, vertical_no_demand): """`nt` must be odd and `>= default_nt`.""" nt = 200 - default_nt = math.ceil((1.5 * VERTICAL_FREQUENCY) / (1 - (1.5 / NS))) + default_nt = math.ceil( + (1.5 * 
test_config.VERTICAL_FREQUENCY_LONG) / (1 - (1.5 / NS)), + ) assert nt > default_nt # sanity check with pytest.raises(ValueError, match='`nt`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nt=nt, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + nt=nt, ) - @pytest.mark.parametrize('nt', [-99, -1, 0, 1, 99, 159]) + @pytest.mark.parametrize('nt', [-99, -1, 0, 1, 99, 125]) def test_nt_not_at_least_the_default(self, vertical_no_demand, nt): """`nt` must be odd and `>= default_nt`.""" # `default_nt` becomes 161. - default_nt = math.ceil((1.5 * VERTICAL_FREQUENCY) / (1 - (1.5 / NS))) + default_nt = math.ceil( + (1.5 * test_config.VERTICAL_FREQUENCY_LONG) / (1 - (1.5 / NS)), + ) assert nt < default_nt # sanity check with pytest.raises(ValueError, match='`nt`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nt=nt, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + nt=nt, ) def test_nl_not_odd(self, vertical_no_demand): """`nl` must be odd and `>= frequency`.""" nl = 200 - assert nl > VERTICAL_FREQUENCY # sanity check + assert nl > test_config.VERTICAL_FREQUENCY_LONG # sanity check with pytest.raises(ValueError, match='`nl`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nl=nl, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + nl=nl, ) def test_nl_at_least_the_frequency(self, vertical_no_demand): """`nl` must be odd and `>= frequency`.""" nl = 77 - assert nl < VERTICAL_FREQUENCY # sanity check + assert nl < test_config.VERTICAL_FREQUENCY_LONG # sanity check with pytest.raises(ValueError, match='`nl`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, nl=nl, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + nl=nl, ) def test_ds_not_zero_or_one(self, vertical_no_demand): """`ds` must be `0` or `1`.""" with 
pytest.raises(ValueError, match='`ds`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, ds=2, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + ds=2, ) def test_dt_not_zero_or_one(self, vertical_no_demand): """`dt` must be `0` or `1`.""" with pytest.raises(ValueError, match='`dt`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, dt=2, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + dt=2, ) def test_dl_not_zero_or_one(self, vertical_no_demand): """`dl` must be `0` or `1`.""" with pytest.raises(ValueError, match='`dl`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, dl=2, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + dl=2, ) @pytest.mark.parametrize('js', [-1, 0]) @@ -104,7 +140,10 @@ class TestInvalidArguments: """`js` must be positive.""" with pytest.raises(ValueError, match='`js`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, js=js, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + js=js, ) @pytest.mark.parametrize('jt', [-1, 0]) @@ -112,7 +151,10 @@ class TestInvalidArguments: """`jt` must be positive.""" with pytest.raises(ValueError, match='`jt`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, jt=jt, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + jt=jt, ) @pytest.mark.parametrize('jl', [-1, 0]) @@ -120,7 +162,10 @@ class TestInvalidArguments: """`jl` must be positive.""" with pytest.raises(ValueError, match='`jl`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, jl=jl, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + jl=jl, ) @pytest.mark.parametrize('ni', [-1, 0]) @@ -128,14 +173,20 @@ class TestInvalidArguments: """`ni` must be positive.""" with pytest.raises(ValueError, 
match='`ni`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, ni=ni, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + ni=ni, ) def test_no_not_non_negative(self, vertical_no_demand): """`no` must be non-negative.""" with pytest.raises(ValueError, match='`no`'): decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, no=-1, + vertical_no_demand, + frequency=test_config.VERTICAL_FREQUENCY_LONG, + ns=NS, + no=-1, ) @@ -146,7 +197,7 @@ class TestValidArguments: def test_structure_of_returned_dataframe(self, vertical_no_demand): """`stl()` returns a `pd.DataFrame` with three columns.""" result = decomposition.stl( - vertical_no_demand, frequency=VERTICAL_FREQUENCY, ns=NS, + vertical_no_demand, frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=NS, ) assert isinstance(result, pd.DataFrame) @@ -173,7 +224,7 @@ class TestValidArguments: """ decomposed = decomposition.stl( vertical_no_demand, - frequency=VERTICAL_FREQUENCY, + frequency=test_config.VERTICAL_FREQUENCY_LONG, ns=NS, nt=nt, nl=nl, diff --git a/tests/forecasts/methods/test_ts_methods.py b/tests/forecasts/methods/test_predictions.py similarity index 89% rename from tests/forecasts/methods/test_ts_methods.py rename to tests/forecasts/methods/test_predictions.py index 11691c7..8155d62 100644 --- a/tests/forecasts/methods/test_ts_methods.py +++ b/tests/forecasts/methods/test_predictions.py @@ -1,7 +1,4 @@ -"""Test the `arima.predict()` and `ets.predict()` functions. - -We consider both "classical" time series prediction models. 
-""" +"""Test all the `*.predict()` functions in the `methods` sub-package.""" import datetime as dt @@ -9,10 +6,10 @@ import pandas as pd import pytest from tests import config as test_config -from tests.forecasts.conftest import VERTICAL_FREQUENCY from urban_meal_delivery import config from urban_meal_delivery.forecasts.methods import arima from urban_meal_delivery.forecasts.methods import ets +from urban_meal_delivery.forecasts.methods import extrapolate_season @pytest.fixture @@ -60,7 +57,9 @@ def forecast_time_step(): @pytest.mark.r -@pytest.mark.parametrize('func', [arima.predict, ets.predict]) +@pytest.mark.parametrize( + 'func', [arima.predict, ets.predict, extrapolate_season.predict], +) class TestMakePredictions: """Make predictions with `arima.predict()` and `ets.predict()`.""" @@ -74,7 +73,7 @@ class TestMakePredictions: func( training_ts=vertical_no_demand, forecast_interval=forecast_interval, - frequency=VERTICAL_FREQUENCY, + frequency=test_config.VERTICAL_FREQUENCY_LONG, ) def test_structure_of_returned_dataframe( @@ -84,7 +83,7 @@ class TestMakePredictions: result = func( training_ts=vertical_no_demand, forecast_interval=forecast_interval, - frequency=VERTICAL_FREQUENCY, + frequency=test_config.VERTICAL_FREQUENCY_LONG, ) assert isinstance(result, pd.DataFrame) @@ -123,7 +122,7 @@ class TestMakePredictions: predictions = func( training_ts=vertical_no_demand, forecast_interval=forecast_interval, - frequency=VERTICAL_FREQUENCY, + frequency=test_config.VERTICAL_FREQUENCY_LONG, ) result = predictions.sum().sum() diff --git a/tests/forecasts/timify/conftest.py b/tests/forecasts/timify/conftest.py index 6143cfe..cfb5fc7 100644 --- a/tests/forecasts/timify/conftest.py +++ b/tests/forecasts/timify/conftest.py @@ -47,7 +47,10 @@ def order_totals(good_pixel_id): @pytest.fixture def order_history(order_totals, grid): - """An `OrderHistory` object that does not need the database.""" + """An `OrderHistory` object that does not need the database. 
+ + Uses the LONG_TIME_STEP as the length of a time step. + """ oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) oh._data = order_totals From 796fdc919c3ae4d12961cb55ae0d86c74450ec16 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 1 Feb 2021 15:46:52 +0100 Subject: [PATCH 61/72] Add `Forecast.from_dataframe()` constructor - this alternative constructor takes the `pd.DataFrame`s from the `*Model.predict()` methods and converts them into ORM models --- src/urban_meal_delivery/db/forecasts.py | 64 ++++++++++++++- tests/db/test_forecasts.py | 104 +++++++++++++++++++++--- 2 files changed, 157 insertions(+), 11 deletions(-) diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index 352320e..d453fcd 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -1,5 +1,10 @@ """Provide the ORM's `Forecast` model.""" +from __future__ import annotations + +from typing import List + +import pandas as pd import sqlalchemy as sa from sqlalchemy import orm from sqlalchemy.dialects import postgresql @@ -10,7 +15,8 @@ from urban_meal_delivery.db import meta class Forecast(meta.Base): """A demand forecast for a `.pixel` and `.time_step` pair. - This table is denormalized on purpose to keep things simple. + This table is denormalized on purpose to keep things simple. In particular, + the `.model` and `.actual` hold redundant values. """ __tablename__ = 'forecasts' @@ -133,3 +139,59 @@ class Forecast(meta.Base): n_y=self.pixel.n_y, start_at=self.start_at, ) + + @classmethod + def from_dataframe( # noqa:WPS211 + cls, + pixel: db.Pixel, + time_step: int, + training_horizon: int, + model: str, + data: pd.Dataframe, + ) -> List[db.Forecast]: + """Convert results from the forecasting `*Model`s into `Forecast` objects. + + This is an alternative constructor method. 
+
+        Background: The functions in `urban_meal_delivery.forecasts.methods`
+        return `pd.Dataframe`s with "start_at" (i.e., `pd.Timestamp` objects)
+        values in the index and five columns "prediction", "low80", "high80",
+        "low95", and "high95" with `np.float` values. The `*Model.predict()`
+        methods in `urban_meal_delivery.forecasts.models` then add an "actual"
+        column. This constructor converts these results into ORM models.
+        Also, the `np.float` values are cast as plain `float` ones as
+        otherwise SQLAlchemy and the database would complain.
+
+        Args:
+            pixel: in which the forecast is made
+            time_step: length of one time step in minutes
+            training_horizon: length of the training horizon in weeks
+            model: name of the forecasting model
+            data: a `pd.Dataframe` as described above (i.e.,
+                with the six columns holding `float`s)
+
+        Returns:
+            forecasts: the `data` as `Forecast` objects
+        """  # noqa:RST215
+        forecasts = []
+
+        for timestamp_idx in data.index:
+            forecast = cls(
+                pixel=pixel,
+                start_at=timestamp_idx.to_pydatetime(),
+                time_step=time_step,
+                training_horizon=training_horizon,
+                model=model,
+                actual=int(data.loc[timestamp_idx, 'actual']),
+                prediction=round(data.loc[timestamp_idx, 'prediction'], 5),
+                low80=round(data.loc[timestamp_idx, 'low80'], 5),
+                high80=round(data.loc[timestamp_idx, 'high80'], 5),
+                low95=round(data.loc[timestamp_idx, 'low95'], 5),
+                high95=round(data.loc[timestamp_idx, 'high95'], 5),
+            )
+            forecasts.append(forecast)
+
+        return forecasts
+
+
+from urban_meal_delivery import db  # noqa:E402 isort:skip
diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py
index 8cf9703..a2cd1bb 100644
--- a/tests/db/test_forecasts.py
+++ b/tests/db/test_forecasts.py
@@ -1,23 +1,35 @@
 """Test the ORM's `Forecast` model."""
 
-import datetime
+import datetime as dt
 
+import pandas as pd
 import pytest
 import sqlalchemy as sqla
 from sqlalchemy import exc as sa_exc
 
+from tests import config as test_config
 from urban_meal_delivery import db
+MODEL = 'hets' + + @pytest.fixture def forecast(pixel): - """A `forecast` made in the `pixel`.""" + """A `forecast` made in the `pixel` at `NOON`.""" + start_at = dt.datetime( + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + ) + return db.Forecast( pixel=pixel, - start_at=datetime.datetime(2020, 1, 1, 12, 0), - time_step=60, - training_horizon=8, - model='hets', + start_at=start_at, + time_step=test_config.LONG_TIME_STEP, + training_horizon=test_config.LONG_TRAIN_HORIZON, + model=MODEL, actual=12, prediction=12.3, low80=1.23, @@ -76,7 +88,7 @@ class TestConstraints: self, db_session, forecast, hour, ): """Insert an instance with invalid data.""" - forecast.start_at = datetime.datetime( + forecast.start_at = dt.datetime( forecast.start_at.year, forecast.start_at.month, forecast.start_at.day, @@ -91,7 +103,7 @@ class TestConstraints: def test_invalid_start_at_not_quarter_of_hour(self, db_session, forecast): """Insert an instance with invalid data.""" - forecast.start_at += datetime.timedelta(minutes=1) + forecast.start_at += dt.timedelta(minutes=1) db_session.add(forecast) with pytest.raises( @@ -101,7 +113,7 @@ class TestConstraints: def test_invalid_start_at_seconds_set(self, db_session, forecast): """Insert an instance with invalid data.""" - forecast.start_at += datetime.timedelta(seconds=1) + forecast.start_at += dt.timedelta(seconds=1) db_session.add(forecast) with pytest.raises( @@ -111,7 +123,7 @@ class TestConstraints: def test_invalid_start_at_microseconds_set(self, db_session, forecast): """Insert an instance with invalid data.""" - forecast.start_at += datetime.timedelta(microseconds=1) + forecast.start_at += dt.timedelta(microseconds=1) db_session.add(forecast) with pytest.raises( @@ -419,3 +431,75 @@ class TestConstraints: with pytest.raises(sa_exc.IntegrityError, match='duplicate key value'): db_session.commit() + + +class TestFromDataFrameConstructor: + """Test the alternative `Forecast.from_dataframe()` 
constructor.""" + + @pytest.fixture + def prediction_data(self): + """A `pd.DataFrame` as returned by `*Model.predict()` ... + + ... and used as the `data` argument to `Forecast.from_dataframe()`. + + We assume the `data` come from some vertical forecasting `*Model` + and contain several rows (= `3` in this example) corresponding + to different time steps centered around `NOON`. + """ + noon_start_at = dt.datetime( + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + ) + + index = pd.Index( + [ + noon_start_at - dt.timedelta(minutes=test_config.LONG_TIME_STEP), + noon_start_at, + noon_start_at + dt.timedelta(minutes=test_config.LONG_TIME_STEP), + ], + ) + index.name = 'start_at' + + return pd.DataFrame( + data={ + 'actual': (11, 12, 13), + 'prediction': (11.3, 12.3, 13.3), + 'low80': (1.123, 1.23, 1.323), + 'high80': (112.34, 123.4, 132.34), + 'low95': (0.1123, 0.123, 0.1323), + 'high95': (1123.45, 1234.5, 1323.45), + }, + index=index, + ) + + def test_convert_dataframe_into_orm_objects(self, pixel, prediction_data): + """Call `Forecast.from_dataframe()`.""" + forecasts = db.Forecast.from_dataframe( + pixel=pixel, + time_step=test_config.LONG_TIME_STEP, + training_horizon=test_config.LONG_TRAIN_HORIZON, + model=MODEL, + data=prediction_data, + ) + + assert len(forecasts) == 3 + for forecast in forecasts: + assert isinstance(forecast, db.Forecast) + + @pytest.mark.db + def test_persist_predictions_into_database( + self, db_session, pixel, prediction_data, + ): + """Call `Forecast.from_dataframe()` and persist the results.""" + forecasts = db.Forecast.from_dataframe( + pixel=pixel, + time_step=test_config.LONG_TIME_STEP, + training_horizon=test_config.LONG_TRAIN_HORIZON, + model=MODEL, + data=prediction_data, + ) + + db_session.add_all(forecasts) + db_session.commit() From 67cd58cf16f4612f26c1b8581160cb57c10346cb Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 1 Feb 2021 20:39:52 +0100 Subject: [PATCH 62/72] Add 
`urban_meal_delivery.forecasts.models` sub-package - `*Model`s use the `methods.*.predict()` functions to predict demand given an order time series generated by `timify.OrderHistory` - `models.base.ForecastingModelABC` unifies how all `*Model`s work and implements a caching strategy - implement three `*Model`s for tactical forecasting, based on the hets, varima, and rtarima models described in the first research paper - add overall documentation for `urban_meal_delivery.forecasts` package - move the fixtures in `tests.forecasts.timify.conftest` to `tests.forecasts.conftest` and adjust the horizon of the test horizon from two to three weeks --- src/urban_meal_delivery/forecasts/__init__.py | 27 ++- .../forecasts/models/__init__.py | 34 ++++ .../forecasts/models/base.py | 116 +++++++++++ .../forecasts/models/tactical/__init__.py | 16 ++ .../forecasts/models/tactical/horizontal.py | 67 +++++++ .../forecasts/models/tactical/realtime.py | 117 +++++++++++ .../forecasts/models/tactical/vertical.py | 119 ++++++++++++ tests/config.py | 10 +- tests/forecasts/conftest.py | 64 ++++++- tests/forecasts/test_models.py | 181 ++++++++++++++++++ tests/forecasts/timify/conftest.py | 57 ------ .../forecasts/timify/test_make_time_series.py | 10 +- 12 files changed, 747 insertions(+), 71 deletions(-) create mode 100644 src/urban_meal_delivery/forecasts/models/__init__.py create mode 100644 src/urban_meal_delivery/forecasts/models/base.py create mode 100644 src/urban_meal_delivery/forecasts/models/tactical/__init__.py create mode 100644 src/urban_meal_delivery/forecasts/models/tactical/horizontal.py create mode 100644 src/urban_meal_delivery/forecasts/models/tactical/realtime.py create mode 100644 src/urban_meal_delivery/forecasts/models/tactical/vertical.py create mode 100644 tests/forecasts/test_models.py delete mode 100644 tests/forecasts/timify/conftest.py diff --git a/src/urban_meal_delivery/forecasts/__init__.py b/src/urban_meal_delivery/forecasts/__init__.py index 5ecdd1e..2dcd196 
100644 --- a/src/urban_meal_delivery/forecasts/__init__.py +++ b/src/urban_meal_delivery/forecasts/__init__.py @@ -1,4 +1,29 @@ -"""Demand forecasting utilities.""" +"""Demand forecasting utilities. + +This sub-package is divided into further sub-packages and modules as follows: + +`methods` contains various time series related statistical methods, implemented +as plain `function` objects that are used to predict into the future given a +time series of historic order counts. The methods are context-agnostic, meaning +that they only take and return `pd.Series/DataFrame`s holding numbers and +are not concerned with how these numbers were generated or what they mean. +Some functions, like `arima.predict()` or `ets.predict()` wrap functions called +in R using the `rpy2` library. Others, like `extrapolate_season.predict()`, are +written in plain Python. + +`timify` defines an `OrderHistory` class that abstracts away the communication +with the database and provides `pd.Series` objects with the order counts that +are fed into the `methods`. In particular, it uses SQL statements behind the +scenes to calculate the historic order counts on a per-`Pixel` level. Once the +data is loaded from the database, an `OrderHistory` instance provides various +ways to slice out, or generate, different kinds of order time series (e.g., +"horizontal" vs. "vertical" time series). + +`models` defines various forecasting `*Model`s that combine a given kind of +time series with one of the forecasting `methods`. For example, the ETS method +applied to a horizontal time series is implemented in the `HorizontalETSModel`. 
+""" from urban_meal_delivery.forecasts import methods +from urban_meal_delivery.forecasts import models from urban_meal_delivery.forecasts import timify diff --git a/src/urban_meal_delivery/forecasts/models/__init__.py b/src/urban_meal_delivery/forecasts/models/__init__.py new file mode 100644 index 0000000..9d33f71 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/__init__.py @@ -0,0 +1,34 @@ +"""Define the forecasting `*Model`s used in this project. + +`*Model`s are different from plain forecasting `methods` in that they are tied +to a given kind of historic order time series, as provided by the `OrderHistory` +class in the `timify` module. For example, the ARIMA model applied to a vertical +time series becomes the `VerticalARIMAModel`. + +An overview of the `*Model`s used for tactical forecasting can be found in section +"3.6 Forecasting Models" in the paper "Real-time Demand Forecasting for an Urban +Delivery Platform" that is part of the `urban-meal-delivery` research project. + +For the paper check: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + https://www.sciencedirect.com/science/article/pii/S1366554520307936 + +This sub-package is organized as follows. The `base` module defines an abstract +`ForecastingModelABC` class that unifies how the concrete `*Model`s work. +While the abstact `.predict()` method returns a `pd.DataFrame` (= basically, +the result of one of the forecasting `methods`, the concrete `.make_forecast()` +method converts the results into `Forecast` (=ORM) objects. +Also, `.make_forecast()` implements a caching strategy where already made +`Forecast`s are loaded from the database instead of calculating them again, +which could be a heavier computation. + +The `tactical` sub-package contains all the `*Model`s used to implement the +UDP's predictive routing strategy. + +A future `planning` sub-package will contain the `*Model`s used to plan the +`Courier`'s shifts a week ahead. 
+""" # noqa:RST215 + +from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalETSModel +from urban_meal_delivery.forecasts.models.tactical.realtime import RealtimeARIMAModel +from urban_meal_delivery.forecasts.models.tactical.vertical import VerticalARIMAModel diff --git a/src/urban_meal_delivery/forecasts/models/base.py b/src/urban_meal_delivery/forecasts/models/base.py new file mode 100644 index 0000000..8be733a --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/base.py @@ -0,0 +1,116 @@ +"""The abstract blueprint for a forecasting `*Model`.""" + +import abc +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import timify + + +class ForecastingModelABC(abc.ABC): + """An abstract interface of a forecasting `*Model`.""" + + def __init__(self, order_history: timify.OrderHistory) -> None: + """Initialize a new forecasting model. + + Args: + order_history: an abstraction providing the time series data + """ + self._order_history = order_history + + @property + @abc.abstractmethod + def name(self) -> str: + """The name of the model. + + Used to identify `Forecast`s of the same `*Model` in the database. + So, these must be chosen carefully and must not be changed later on! + + Example: "hets" or "varima" for tactical demand forecasting + """ + + @abc.abstractmethod + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Concrete implementation of how a `*Model` makes a prediction. + + This method is called by the unified `*Model.make_forecast()` method, + which implements the caching logic with the database. 
+ + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actuals, predictions, and possibly 80%/95% confidence intervals; + includes a row for the time step starting at `predict_at` and + may contain further rows for other time steps on the same day + """ # noqa:DAR202 + + def make_forecast( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> db.Forecast: + """Make a forecast for the time step starting at `predict_at`. + + Important: This method uses a unified `predict_at` argument. + Some `*Model`s, in particular vertical ones, are only trained once per + day and then make a prediction for all time steps on that day, and + therefore, work with a `predict_day` argument instead of `predict_at` + behind the scenes. Then, all `Forecast`s are stored into the database + and only the one starting at `predict_at` is returned. + + Args: + pixel: pixel in which the `Forecast` is made + predict_at: time step (i.e., "start_at") to make the `Forecast` for + train_horizon: weeks of historic data used to forecast `predict_at` + + Returns: + actual, prediction, and possibly 80%/95% confidence intervals + for the time step starting at `predict_at` + + # noqa:DAR401 RuntimeError + """ + if ( # noqa:WPS337 + cached_forecast := db.session.query(db.Forecast) # noqa:ECE001,WPS221 + .filter_by(pixel=pixel) + .filter_by(start_at=predict_at) + .filter_by(time_step=self._order_history.time_step) + .filter_by(training_horizon=train_horizon) + .filter_by(model=self.name) + .first() + ) : + return cached_forecast + + # Horizontal and real-time `*Model`s return a `pd.DataFrame` with one + # row corresponding to the time step starting at `predict_at` whereas + # vertical models return several rows, covering all time steps of a day. 
+ predictions = self.predict(pixel, predict_at, train_horizon) + + # Convert the `predictions` into a `list` of `Forecast` objects. + forecasts = db.Forecast.from_dataframe( + pixel=pixel, + time_step=self._order_history.time_step, + training_horizon=train_horizon, + model=self.name, + data=predictions, + ) + + # We persist all `Forecast`s into the database to + # not have to run the same model training again. + db.session.add_all(forecasts) + db.session.commit() + + # The one `Forecast` object asked for must be in `forecasts` + # if the concrete `*Model.predict()` method works correctly; ... + for forecast in forecasts: + if forecast.start_at == predict_at: + return forecast + + # ..., however, we put in a loud error, just in case. + raise RuntimeError( # pragma: no cover + '`Forecast` for `predict_at` was not returned by `*Model.predict()`', + ) diff --git a/src/urban_meal_delivery/forecasts/models/tactical/__init__.py b/src/urban_meal_delivery/forecasts/models/tactical/__init__.py new file mode 100644 index 0000000..df70622 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/__init__.py @@ -0,0 +1,16 @@ +"""Forecasting `*Model`s to predict demand for tactical purposes. + +The `*Model`s in this module predict only a small number (e.g., one) +of time steps into the near future and are used to implement the UDP's +predictive routing strategies. + +They are classified into "horizontal", "vertical", and "real-time" models +with respect to what historic data they are trained on and how often they +are re-trained on the day to be predicted. For the details, check section +"3.6 Forecasting Models" in the paper "Real-time Demand Forecasting for an +Urban Delivery Platform". 
+ +For the paper check: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + https://www.sciencedirect.com/science/article/pii/S1366554520307936 +""" # noqa:RST215 diff --git a/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py b/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py new file mode 100644 index 0000000..53e85be --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py @@ -0,0 +1,67 @@ +"""Horizontal forecasting `*Model`s to predict demand for tactical purposes. + +Horizontal `*Model`s take the historic order counts only from time steps +corresponding to the same time of day as the one to be predicted (i.e., the +one starting at `predict_at`). Then, they make a prediction for only one day +into the future. Thus, the training time series have a `frequency` of `7`, the +number of days in a week. +""" # noqa:RST215 + +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import methods +from urban_meal_delivery.forecasts.models import base + + +class HorizontalETSModel(base.ForecastingModelABC): + """The ETS model applied on a horizontal time series.""" + + name = 'hets' + + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Predict demand for a time step. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actual order counts (i.e., the "actual" column), + point forecasts (i.e., the "prediction" column), and + confidence intervals (i.e, the four "low/high/80/95" columns); + contains one row for the `predict_at` time step + + # noqa:DAR401 RuntimeError + """ + # Generate the historic (and horizontal) order time series. 
+ training_ts, frequency, actuals_ts = self._order_history.make_horizontal_ts( + pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon, + ) + + # Sanity check. + if frequency != 7: # pragma: no cover + raise RuntimeError('`frequency` should be `7`') + + # Make `predictions` with the seasonal ETS method ("ZZZ" model). + predictions = methods.ets.predict( + training_ts=training_ts, + forecast_interval=actuals_ts.index, + frequency=frequency, # `== 7`, the number of weekdays + seasonal_fit=True, # because there was no decomposition before + ) + + predictions.insert(loc=0, column='actual', value=actuals_ts) + + # Sanity checks. + if predictions.isnull().any().any(): # pragma: no cover + raise RuntimeError('missing predictions in hets model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/src/urban_meal_delivery/forecasts/models/tactical/realtime.py b/src/urban_meal_delivery/forecasts/models/tactical/realtime.py new file mode 100644 index 0000000..bf30ee5 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/realtime.py @@ -0,0 +1,117 @@ +"""Real-time forecasting `*Model`s to predict demand for tactical purposes. + +Real-time `*Model`s take order counts of all time steps in the training data +and make a prediction for only one time step on the day to be predicted (i.e., +the one starting at `predict_at`). Thus, the training time series have a +`frequency` of the number of weekdays, `7`, times the number of time steps on a +day. For example, for 60-minute time steps, the `frequency` becomes `7 * 12` +(= operating hours from 11 am to 11 pm), which is `84`. Real-time `*Model`s +train the forecasting `methods` on a seasonally decomposed time series internally. 
+""" # noqa:RST215 + +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import methods +from urban_meal_delivery.forecasts.models import base + + +class RealtimeARIMAModel(base.ForecastingModelABC): + """The ARIMA model applied on a real-time time series.""" + + name = 'rtarima' + + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Predict demand for a time step. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actual order counts (i.e., the "actual" column), + point forecasts (i.e., the "prediction" column), and + confidence intervals (i.e, the four "low/high/80/95" columns); + contains one row for the `predict_at` time step + + # noqa:DAR401 RuntimeError + """ + # Generate the historic (and real-time) order time series. + training_ts, frequency, actuals_ts = self._order_history.make_realtime_ts( + pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon, + ) + + # Decompose the `training_ts` to make predictions for the seasonal + # component and the seasonally adjusted observations separately. + decomposed_training_ts = methods.decomposition.stl( + time_series=training_ts, + frequency=frequency, + # "Periodic" `ns` parameter => same seasonal component value + # for observations of the same lag. + ns=999, + ) + + # Make predictions for the seasonal component by linear extrapolation. + seasonal_predictions = methods.extrapolate_season.predict( + training_ts=decomposed_training_ts['seasonal'], + forecast_interval=actuals_ts.index, + frequency=frequency, + ) + + # Make predictions with the ARIMA model on the seasonally adjusted time series. 
+ seasonally_adjusted_predictions = methods.arima.predict( + training_ts=( + decomposed_training_ts['trend'] + decomposed_training_ts['residual'] + ), + forecast_interval=actuals_ts.index, + # Because the seasonality was taken out before, + # the `training_ts` has, by definition, a `frequency` of `1`. + frequency=1, + seasonal_fit=False, + ) + + # The overall `predictions` are the sum of the separate predictions above. + # As the linear extrapolation of the seasonal component has no + # confidence interval, we put the one from the ARIMA model around + # the extrapolated seasonal component. + predictions = pd.DataFrame( + data={ + 'actual': actuals_ts, + 'prediction': ( + seasonal_predictions['prediction'] # noqa:WPS204 + + seasonally_adjusted_predictions['prediction'] + ), + 'low80': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['low80'] + ), + 'high80': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['high80'] + ), + 'low95': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['low95'] + ), + 'high95': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['high95'] + ), + }, + index=actuals_ts.index, + ) + + # Sanity checks. + if len(predictions) != 1: # pragma: no cover + raise RuntimeError('real-time models should predict exactly one time step') + if predictions.isnull().any().any(): # pragma: no cover + raise RuntimeError('missing predictions in rtarima model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/src/urban_meal_delivery/forecasts/models/tactical/vertical.py b/src/urban_meal_delivery/forecasts/models/tactical/vertical.py new file mode 100644 index 0000000..caf4317 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/vertical.py @@ -0,0 +1,119 @@ +"""Vertical forecasting `*Model`s to predict demand for tactical purposes. 
+ +Vertical `*Model`s take order counts of all time steps in the training data +and make a prediction for all time steps on the day to be predicted at once. +Thus, the training time series have a `frequency` of the number of weekdays, +`7`, times the number of time steps on a day. For example, with 60-minute time +steps, the `frequency` becomes `7 * 12` (= operating hours from 11 am to 11 pm), +which is `84`. Vertical `*Model`s train the forecasting `methods` on a seasonally +decomposed time series internally. +""" # noqa:RST215 + +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import methods +from urban_meal_delivery.forecasts.models import base + + +class VerticalARIMAModel(base.ForecastingModelABC): + """The ARIMA model applied on a vertical time series.""" + + name = 'varima' + + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Predict demand for a time step. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actual order counts (i.e., the "actual" column), + point forecasts (i.e., the "prediction" column), and + confidence intervals (i.e, the four "low/high/80/95" columns); + contains several rows, including one for the `predict_at` time step + + # noqa:DAR401 RuntimeError + """ + # Generate the historic (and vertical) order time series. + training_ts, frequency, actuals_ts = self._order_history.make_vertical_ts( + pixel_id=pixel.id, + predict_day=predict_at.date(), + train_horizon=train_horizon, + ) + + # Decompose the `training_ts` to make predictions for the seasonal + # component and the seasonally adjusted observations separately. 
+ decomposed_training_ts = methods.decomposition.stl( + time_series=training_ts, + frequency=frequency, + # "Periodic" `ns` parameter => same seasonal component value + # for observations of the same lag. + ns=999, + ) + + # Make predictions for the seasonal component by linear extrapolation. + seasonal_predictions = methods.extrapolate_season.predict( + training_ts=decomposed_training_ts['seasonal'], + forecast_interval=actuals_ts.index, + frequency=frequency, + ) + + # Make predictions with the ARIMA model on the seasonally adjusted time series. + seasonally_adjusted_predictions = methods.arima.predict( + training_ts=( + decomposed_training_ts['trend'] + decomposed_training_ts['residual'] + ), + forecast_interval=actuals_ts.index, + # Because the seasonality was taken out before, + # the `training_ts` has, by definition, a `frequency` of `1`. + frequency=1, + seasonal_fit=False, + ) + + # The overall `predictions` are the sum of the separate predictions above. + # As the linear extrapolation of the seasonal component has no + # confidence interval, we put the one from the ARIMA model around + # the extrapolated seasonal component. + predictions = pd.DataFrame( + data={ + 'actual': actuals_ts, + 'prediction': ( + seasonal_predictions['prediction'] # noqa:WPS204 + + seasonally_adjusted_predictions['prediction'] + ), + 'low80': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['low80'] + ), + 'high80': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['high80'] + ), + 'low95': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['low95'] + ), + 'high95': ( + seasonal_predictions['prediction'] + + seasonally_adjusted_predictions['high95'] + ), + }, + index=actuals_ts.index, + ) + + # Sanity checks. 
+ if len(predictions) <= 1: # pragma: no cover + raise RuntimeError('vertical models should predict several time steps') + if predictions.isnull().any().any(): # pragma: no cover + raise RuntimeError('missing predictions in varima model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/tests/config.py b/tests/config.py index 13fec36..7b1ec29 100644 --- a/tests/config.py +++ b/tests/config.py @@ -11,10 +11,10 @@ YEAR, MONTH, DAY = 2016, 7, 1 # The hour when most test cases take place. NOON = 12 -# `START` and `END` constitute a 15-day time span. -# That implies a maximum `train_horizon` of `2` as that needs full 7-day weeks. +# `START` and `END` constitute a 22-day time span. +# That implies a maximum `train_horizon` of `3` as that needs full 7-day weeks. START = datetime.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) -END = datetime.datetime(YEAR, MONTH, 15, config.SERVICE_END, 0) +END = datetime.datetime(YEAR, MONTH, DAY + 21, config.SERVICE_END, 0) # Default time steps (in minutes), for example, for `OrderHistory` objects. LONG_TIME_STEP = 60 @@ -28,6 +28,6 @@ VERTICAL_FREQUENCY_SHORT = 7 * 24 # Default training horizons, for example, for # `OrderHistory.make_horizontal_time_series()`. 
-LONG_TRAIN_HORIZON = 2 -SHORT_TRAIN_HORIZON = 1 +LONG_TRAIN_HORIZON = 3 +SHORT_TRAIN_HORIZON = 2 TRAIN_HORIZONS = (SHORT_TRAIN_HORIZON, LONG_TRAIN_HORIZON) diff --git a/tests/forecasts/conftest.py b/tests/forecasts/conftest.py index ede73ba..527b5b9 100644 --- a/tests/forecasts/conftest.py +++ b/tests/forecasts/conftest.py @@ -1,4 +1,4 @@ -"""Fixtures and globals for testing `urban_meal_delivery.forecasts`.""" +"""Fixtures for testing the `urban_meal_delivery.forecasts` sub-package.""" import datetime as dt @@ -7,6 +7,7 @@ import pytest from tests import config as test_config from urban_meal_delivery import config +from urban_meal_delivery.forecasts import timify @pytest.fixture @@ -28,7 +29,10 @@ def horizontal_datetime_index(): index = pd.Index(gen) index.name = 'start_at' - assert len(index) == 15 # sanity check + # Sanity check. + # `+1` as both the `START` and `END` day are included. + n_days = (test_config.END - test_config.START).days + 1 + assert len(index) == n_days return index @@ -58,7 +62,10 @@ def vertical_datetime_index(): index = pd.Index(gen) index.name = 'start_at' - assert len(index) == 15 * 12 # sanity check + # Sanity check: n_days * n_number_of_opening_hours. + # `+1` as both the `START` and `END` day are included. + n_days = (test_config.END - test_config.START).days + 1 + assert len(index) == n_days * 12 return index @@ -67,3 +74,54 @@ def vertical_datetime_index(): def vertical_no_demand(vertical_datetime_index): """A vertical time series with order totals: no demand.""" return pd.Series(0, index=vertical_datetime_index, name='n_orders') + + +@pytest.fixture +def good_pixel_id(pixel): + """A `pixel_id` that is on the `grid`.""" + return pixel.id # `== 1` + + +@pytest.fixture +def order_totals(good_pixel_id): + """A mock for `OrderHistory.totals`. + + To be a bit more realistic, we sample two pixels on the `grid`. + + Uses the LONG_TIME_STEP as the length of a time step. 
+ """ + pixel_ids = [good_pixel_id, good_pixel_id + 1] + + gen = ( + (pixel_id, start_at) + for pixel_id in pixel_ids + for start_at in pd.date_range( + test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', + ) + if config.SERVICE_START <= start_at.hour < config.SERVICE_END + ) + + # Re-index `data` filling in `0`s where there is no demand. + index = pd.MultiIndex.from_tuples(gen) + index.names = ['pixel_id', 'start_at'] + + df = pd.DataFrame(data={'n_orders': 1}, index=index) + + # Sanity check: n_pixels * n_time_steps_per_day * n_days. + # `+1` as both the `START` and `END` day are included. + n_days = (test_config.END - test_config.START).days + 1 + assert len(df) == 2 * 12 * n_days + + return df + + +@pytest.fixture +def order_history(order_totals, grid): + """An `OrderHistory` object that does not need the database. + + Uses the LONG_TIME_STEP as the length of a time step. + """ + oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) + oh._data = order_totals + + return oh diff --git a/tests/forecasts/test_models.py b/tests/forecasts/test_models.py new file mode 100644 index 0000000..c4b8a91 --- /dev/null +++ b/tests/forecasts/test_models.py @@ -0,0 +1,181 @@ +"""Tests for the `urban_meal_delivery.forecasts.models` sub-package.""" + +import datetime as dt + +import pandas as pd +import pytest + +from tests import config as test_config +from urban_meal_delivery import db +from urban_meal_delivery.forecasts import models + + +MODELS = ( + models.HorizontalETSModel, + models.RealtimeARIMAModel, + models.VerticalARIMAModel, +) + + +@pytest.mark.parametrize('model_cls', MODELS) +class TestGenericForecastingModelProperties: + """Test everything all concrete `*Model`s have in common. + + The test cases here replace testing the `ForecastingModelABC` class on its own. + + As uncertainty is in the nature of forecasting, we do not test the individual + point forecasts or confidence intervals themselves. 
Instead, we confirm + that all the `*Model`s adhere to the `ForecastingModelABC` generically. + So, these test cases are more like integration tests conceptually. + + Also, note that some `methods.*.predict()` functions use R behind the scenes. + """ # noqa:RST215 + + def test_create_model(self, model_cls, order_history): + """Test instantiation of a new and concrete `*Model` object.""" + model = model_cls(order_history=order_history) + + assert model is not None + + def test_model_has_a_name(self, model_cls, order_history): + """Access the `*Model.name` property.""" + model = model_cls(order_history=order_history) + + result = model.name + + assert isinstance(result, str) + + unique_model_names = set() + + def test_each_model_has_a_unique_name(self, model_cls, order_history): + """The `*Model.name` values must be unique across all `*Model`s. + + Important: this test case has a side effect that is visible + across the different parametrized versions of this case! + """ # noqa:RST215 + model = model_cls(order_history=order_history) + + assert model.name not in self.unique_model_names + + self.unique_model_names.add(model.name) + + @pytest.fixture + def predict_at(self) -> dt.datetime: + """`NOON` on the day to be predicted.""" + return dt.datetime( + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + ) + + @pytest.mark.r + def test_make_prediction_structure( + self, model_cls, order_history, pixel, predict_at, + ): + """`*Model.predict()` returns a `pd.DataFrame` ... + + ... with known columns. 
+ """ # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.predict( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, pd.DataFrame) + assert list(result.columns) == [ + 'actual', + 'prediction', + 'low80', + 'high80', + 'low95', + 'high95', + ] + + @pytest.mark.r + def test_make_prediction_for_given_time_step( + self, model_cls, order_history, pixel, predict_at, + ): + """`*Model.predict()` returns a row for ... + + ... the time step starting at `predict_at`. + """ # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.predict( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert predict_at in result.index + + @pytest.mark.r + def test_make_prediction_contains_actual_values( + self, model_cls, order_history, pixel, predict_at, + ): + """`*Model.predict()` returns a `pd.DataFrame` ... + + ... where the "actual" and "prediction" columns must not be empty. 
+ """ # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.predict( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert not result['actual'].isnull().any() + assert not result['prediction'].isnull().any() + + @pytest.mark.db + @pytest.mark.r + def test_make_forecast( # noqa:WPS211 + self, db_session, model_cls, order_history, pixel, predict_at, + ): + """`*Model.make_forecast()` returns a `Forecast` object.""" # noqa:RST215 + model = model_cls(order_history=order_history) + + result = model.make_forecast( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, db.Forecast) + assert result.pixel == pixel + assert result.start_at == predict_at + assert result.training_horizon == test_config.LONG_TRAIN_HORIZON + + @pytest.mark.db + @pytest.mark.r + def test_make_forecast_is_cached( # noqa:WPS211 + self, db_session, model_cls, order_history, pixel, predict_at, + ): + """`*Model.make_forecast()` caches the `Forecast` object.""" # noqa:RST215 + model = model_cls(order_history=order_history) + + assert db_session.query(db.Forecast).count() == 0 + + result1 = model.make_forecast( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + n_cached_forecasts = db_session.query(db.Forecast).count() + assert n_cached_forecasts >= 1 + + result2 = model.make_forecast( + pixel=pixel, + predict_at=predict_at, + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert n_cached_forecasts == db_session.query(db.Forecast).count() + + assert result1 == result2 diff --git a/tests/forecasts/timify/conftest.py b/tests/forecasts/timify/conftest.py deleted file mode 100644 index cfb5fc7..0000000 --- a/tests/forecasts/timify/conftest.py +++ /dev/null @@ -1,57 +0,0 @@ -"""Fixture for testing the `urban_meal_delivery.forecast.timify` module.""" - -import pandas as pd -import pytest - -from tests import config 
as test_config -from urban_meal_delivery import config -from urban_meal_delivery.forecasts import timify - - -@pytest.fixture -def good_pixel_id(pixel): - """A `pixel_id` that is on the `grid`.""" - return pixel.id # `== 1` - - -@pytest.fixture -def order_totals(good_pixel_id): - """A mock for `OrderHistory.totals`. - - To be a bit more realistic, we sample two pixels on the `grid`. - - Uses the LONG_TIME_STEP as the length of a time step. - """ - pixel_ids = [good_pixel_id, good_pixel_id + 1] - - gen = ( - (pixel_id, start_at) - for pixel_id in pixel_ids - for start_at in pd.date_range( - test_config.START, test_config.END, freq=f'{test_config.LONG_TIME_STEP}T', - ) - if config.SERVICE_START <= start_at.hour < config.SERVICE_END - ) - - # Re-index `data` filling in `0`s where there is no demand. - index = pd.MultiIndex.from_tuples(gen) - index.names = ['pixel_id', 'start_at'] - - df = pd.DataFrame(data={'n_orders': 1}, index=index) - - # Sanity check: n_pixels * n_time_steps_per_day * n_weekdays * n_weeks. - assert len(df) == 2 * 12 * (7 * 2 + 1) - - return df - - -@pytest.fixture -def order_history(order_totals, grid): - """An `OrderHistory` object that does not need the database. - - Uses the LONG_TIME_STEP as the length of a time step. - """ - oh = timify.OrderHistory(grid=grid, time_step=test_config.LONG_TIME_STEP) - oh._data = order_totals - - return oh diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index 78189c7..c47c14a 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -17,8 +17,8 @@ from urban_meal_delivery import config def good_predict_at(): """A `predict_at` within `START`-`END` and ... - ... a long enough history so that either `train_horizon=1` - or `train_horizon=2` works. + ... a long enough history so that either `SHORT_TRAIN_HORIZON` + or `LONG_TRAIN_HORIZON` works. 
""" return datetime.datetime( test_config.END.year, @@ -33,10 +33,10 @@ def good_predict_at(): def bad_predict_at(): """A `predict_at` within `START`-`END` but ... - ... not a long enough history so that both `train_horizon=1` - and `train_horizon=2` do not work. + ... not a long enough history so that both `SHORT_TRAIN_HORIZON` + and `LONG_TRAIN_HORIZON` do not work. """ - predict_day = test_config.END - datetime.timedelta(weeks=1, days=1) + predict_day = test_config.END - datetime.timedelta(weeks=2, days=1) return datetime.datetime( predict_day.year, predict_day.month, predict_day.day, test_config.NOON, 0, ) From cb7611d58763bb4885a479db871232655c031d28 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 1 Feb 2021 21:48:28 +0100 Subject: [PATCH 63/72] Add `OrderHistory.avg_daily_demand()` - the method calculates the number of daily `Order`s in a `Pixel` withing the `train_horizon` preceding the `predict_day` --- src/urban_meal_delivery/forecasts/timify.py | 35 ++++++++++++++++++ tests/forecasts/conftest.py | 11 ++++++ tests/forecasts/test_models.py | 11 ------ .../forecasts/timify/test_avg_daily_demand.py | 37 +++++++++++++++++++ 4 files changed, 83 insertions(+), 11 deletions(-) create mode 100644 tests/forecasts/timify/test_avg_daily_demand.py diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 4f85dfe..c5ecdb2 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -467,3 +467,38 @@ class OrderHistory: raise LookupError('`predict_at` is not in the order history') return training_ts, frequency, actuals_ts + + def avg_daily_demand( + self, pixel_id: int, predict_day: dt.date, train_horizon: int, + ) -> float: + """Calculate the average daily demand (ADD) for a `Pixel`. + + The ADD is defined as the average number of daily `Order`s in a + `Pixel` within the training horizon preceding the `predict_day`. 
+ + The ADD is primarily used for the rule-based heuristic to determine + the best forecasting model for a `Pixel` on the `predict_day`. + + Implementation note: To calculate the ADD, the order counts are + generated as a vertical time series. That must be so as we need to + include all time steps of the days before the `predict_day` and + no time step of the latter. + + Args: + pixel_id: pixel for which the ADD is calculated + predict_day: following the `train_horizon` on which the ADD is calculated + train_horizon: time horizon over which the ADD is calculated + + Returns: + average number of orders per day + """ + training_ts, _, _ = self.make_vertical_ts( # noqa:WPS434 + pixel_id=pixel_id, predict_day=predict_day, train_horizon=train_horizon, + ) + + first_day = training_ts.index.min().date() + last_day = training_ts.index.max().date() + # `+1` as both `first_day` and `last_day` are included. + n_days = (last_day - first_day).days + 1 + + return round(training_ts.sum() / n_days, 1) diff --git a/tests/forecasts/conftest.py b/tests/forecasts/conftest.py index 527b5b9..f258a3c 100644 --- a/tests/forecasts/conftest.py +++ b/tests/forecasts/conftest.py @@ -82,6 +82,17 @@ def good_pixel_id(pixel): return pixel.id # `== 1` +@pytest.fixture +def predict_at() -> dt.datetime: + """`NOON` on the day to be predicted.""" + return dt.datetime( + test_config.END.year, + test_config.END.month, + test_config.END.day, + test_config.NOON, + ) + + @pytest.fixture def order_totals(good_pixel_id): """A mock for `OrderHistory.totals`. 
diff --git a/tests/forecasts/test_models.py b/tests/forecasts/test_models.py index c4b8a91..2ce04b4 100644 --- a/tests/forecasts/test_models.py +++ b/tests/forecasts/test_models.py @@ -1,6 +1,5 @@ """Tests for the `urban_meal_delivery.forecasts.models` sub-package.""" -import datetime as dt import pandas as pd import pytest @@ -59,16 +58,6 @@ class TestGenericForecastingModelProperties: self.unique_model_names.add(model.name) - @pytest.fixture - def predict_at(self) -> dt.datetime: - """`NOON` on the day to be predicted.""" - return dt.datetime( - test_config.END.year, - test_config.END.month, - test_config.END.day, - test_config.NOON, - ) - @pytest.mark.r def test_make_prediction_structure( self, model_cls, order_history, pixel, predict_at, diff --git a/tests/forecasts/timify/test_avg_daily_demand.py b/tests/forecasts/timify/test_avg_daily_demand.py new file mode 100644 index 0000000..f8e2bb4 --- /dev/null +++ b/tests/forecasts/timify/test_avg_daily_demand.py @@ -0,0 +1,37 @@ +"""Tests for the `OrderHistory.avg_daily_demand()` method.""" + +from tests import config as test_config + + +def test_avg_daily_demand_with_constant_demand( + order_history, good_pixel_id, predict_at, +): + """The average daily demand must be the number of time steps ... + + ... if the demand is `1` at each time step. + + Note: The `order_history` fixture assumes `12` time steps per day as it + uses `LONG_TIME_STEP=60` as the length of a time step. 
+ """ + result = order_history.avg_daily_demand( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert result == 12.0 + + +def test_avg_daily_demand_with_no_demand( + order_history, good_pixel_id, predict_at, +): + """Without demand, the average daily demand must be `0.0`.""" + order_history._data.loc[:, 'n_orders'] = 0 + + result = order_history.avg_daily_demand( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert result == 0.0 From 8926e9ff28c79d8420b64c3cf761fe7989782140 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Mon, 1 Feb 2021 22:00:47 +0100 Subject: [PATCH 64/72] Fix nox session for slow CI tests - when running tests marked with "r" we still must not run tests marked with "db" on the CI server --- noxfile.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/noxfile.py b/noxfile.py index e43d2ba..eac123e 100644 --- a/noxfile.py +++ b/noxfile.py @@ -207,7 +207,7 @@ def test(session): # test cases that require the slow installation of R and some packages. 
if session.env.get('_slow_ci_tests'): session.run( - 'pytest', '--randomly-seed=4287', '-m', 'r', PYTEST_LOCATION, + 'pytest', '--randomly-seed=4287', '-m', 'r and not db', PYTEST_LOCATION, ) # In the "ci-tests-slow" session, we do not run any test tool From af82951485c272bb81756e865a64f23547975560 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 2 Feb 2021 11:29:27 +0100 Subject: [PATCH 65/72] Add `OrderHistory.choose_tactical_model()` - the method implements a heuristic from the first research paper that chooses the most promising forecasting `*Model` based on the average daily demand in a `Pixel` for a given `train_horizon` - adjust the test scenario => `LONG_TRAIN_HORIZON` becomes `8` as that is part of the rule implemented in the heuristic --- src/urban_meal_delivery/configuration.py | 5 +- .../forecasts/models/__init__.py | 1 + src/urban_meal_delivery/forecasts/timify.py | 53 ++++++ tests/config.py | 13 +- .../forecasts/timify/test_avg_daily_demand.py | 160 +++++++++++++++--- .../forecasts/timify/test_make_time_series.py | 2 +- 6 files changed, 199 insertions(+), 35 deletions(-) diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index ad813b7..9d8c924 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -48,8 +48,9 @@ class Config: # individual orders into time series. TIME_STEPS = [60] - # Training horizons (in full weeks) used - # to train the forecasting models. + # Training horizons (in full weeks) used to train the forecasting models. + # For now, we only use 8 weeks as that was the best performing in + # a previous study (note:4f79e8fa). TRAINING_HORIZONS = [8] # The demand forecasting methods used in the simulations. 
diff --git a/src/urban_meal_delivery/forecasts/models/__init__.py b/src/urban_meal_delivery/forecasts/models/__init__.py index 9d33f71..391efcf 100644 --- a/src/urban_meal_delivery/forecasts/models/__init__.py +++ b/src/urban_meal_delivery/forecasts/models/__init__.py @@ -29,6 +29,7 @@ A future `planning` sub-package will contain the `*Model`s used to plan the `Courier`'s shifts a week ahead. """ # noqa:RST215 +from urban_meal_delivery.forecasts.models.base import ForecastingModelABC from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalETSModel from urban_meal_delivery.forecasts.models.tactical.realtime import RealtimeARIMAModel from urban_meal_delivery.forecasts.models.tactical.vertical import VerticalARIMAModel diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index c5ecdb2..92674f7 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -1,5 +1,7 @@ """Obtain and work with time series data.""" +from __future__ import annotations + import datetime as dt from typing import Tuple @@ -7,6 +9,7 @@ import pandas as pd from urban_meal_delivery import config from urban_meal_delivery import db +from urban_meal_delivery.forecasts import models class OrderHistory: @@ -502,3 +505,53 @@ class OrderHistory: n_days = (last_day - first_day).days + 1 return round(training_ts.sum() / n_days, 1) + + def choose_tactical_model( + self, pixel_id: int, predict_day: dt.date, train_horizon: int, + ) -> models.ForecastingModelABC: + """Choose the most promising forecasting `*Model` for tactical purposes. + + The rules are deduced from "Table 1: Top-3 models by ..." in the article + "Real-time demand forecasting for an urban delivery platform", the first + research paper published for this `urban-meal-delivery` project. 
+ + According to the research findings in the article "Real-time demand + forecasting for an urban delivery platform", the best model is a function + of the average daily demand (ADD) and the length of the training horizon. + + For the paper check: + https://github.com/webartifex/urban-meal-delivery-demand-forecasting/blob/main/paper.pdf + https://www.sciencedirect.com/science/article/pii/S1366554520307936 + + Args: + pixel_id: pixel for which a forecasting `*Model` is chosen + predict_day: day for which demand is to be predicted with the `*Model` + train_horizon: time horizon available for training the `*Model` + + Returns: + most promising forecasting `*Model` + + # noqa:DAR401 RuntimeError + """ # noqa:RST215 + add = self.avg_daily_demand( + pixel_id=pixel_id, predict_day=predict_day, train_horizon=train_horizon, + ) + + # For now, we only make forecasts with 8 weeks + # as the training horizon (note:4f79e8fa). + if train_horizon == 8: + if add >= 25: # = "high demand" + return models.HorizontalETSModel(order_history=self) + elif add >= 10: # = "medium demand" + return models.HorizontalETSModel(order_history=self) + elif add >= 2.5: # = "low demand" + # TODO: create HorizontalSMAModel + return models.HorizontalETSModel(order_history=self) + + # = "no demand" + # TODO: create HorizontalTrivialModel + return models.HorizontalETSModel(order_history=self) + + raise RuntimeError( + 'no rule for the given average daily demand and training horizon', + ) diff --git a/tests/config.py b/tests/config.py index 7b1ec29..2af0d60 100644 --- a/tests/config.py +++ b/tests/config.py @@ -1,6 +1,6 @@ """Globals used when testing.""" -import datetime +import datetime as dt from urban_meal_delivery import config @@ -11,10 +11,11 @@ YEAR, MONTH, DAY = 2016, 7, 1 # The hour when most test cases take place. NOON = 12 -# `START` and `END` constitute a 22-day time span. -# That implies a maximum `train_horizon` of `3` as that needs full 7-day weeks. 
-START = datetime.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) -END = datetime.datetime(YEAR, MONTH, DAY + 21, config.SERVICE_END, 0) +# `START` and `END` constitute a 57-day time span, 8 full weeks plus 1 day. +# That implies a maximum `train_horizon` of `8` as that needs full 7-day weeks. +START = dt.datetime(YEAR, MONTH, DAY, config.SERVICE_START, 0) +_end = START + dt.timedelta(days=56) # `56` as `START` is not included +END = dt.datetime(_end.year, _end.month, _end.day, config.SERVICE_END, 0) # Default time steps (in minutes), for example, for `OrderHistory` objects. LONG_TIME_STEP = 60 @@ -28,6 +29,6 @@ VERTICAL_FREQUENCY_SHORT = 7 * 24 # Default training horizons, for example, for # `OrderHistory.make_horizontal_time_series()`. -LONG_TRAIN_HORIZON = 3 +LONG_TRAIN_HORIZON = 8 SHORT_TRAIN_HORIZON = 2 TRAIN_HORIZONS = (SHORT_TRAIN_HORIZON, LONG_TRAIN_HORIZON) diff --git a/tests/forecasts/timify/test_avg_daily_demand.py b/tests/forecasts/timify/test_avg_daily_demand.py index f8e2bb4..c8ab66f 100644 --- a/tests/forecasts/timify/test_avg_daily_demand.py +++ b/tests/forecasts/timify/test_avg_daily_demand.py @@ -1,37 +1,145 @@ -"""Tests for the `OrderHistory.avg_daily_demand()` method.""" +"""Tests for the `OrderHistory.avg_daily_demand()` and ... + +`OrderHistory.choose_tactical_model()` methods. + +We test both methods together as they take the same input and are really +two parts of the same conceptual step. +""" + +import pytest from tests import config as test_config +from urban_meal_delivery.forecasts import models -def test_avg_daily_demand_with_constant_demand( - order_history, good_pixel_id, predict_at, -): - """The average daily demand must be the number of time steps ... +class TestAverageDailyDemand: + """Tests for the `OrderHistory.avg_daily_demand()` method.""" - ... if the demand is `1` at each time step. 
+ def test_avg_daily_demand_with_constant_demand( + self, order_history, good_pixel_id, predict_at, + ): + """The average daily demand must be the number of time steps ... - Note: The `order_history` fixture assumes `12` time steps per day as it - uses `LONG_TIME_STEP=60` as the length of a time step. - """ - result = order_history.avg_daily_demand( - pixel_id=good_pixel_id, - predict_day=predict_at.date(), - train_horizon=test_config.LONG_TRAIN_HORIZON, - ) + ... if the demand is `1` at each time step. - assert result == 12.0 + Note: The `order_history` fixture assumes `12` time steps per day as it + uses `LONG_TIME_STEP=60` as the length of a time step. + """ + result = order_history.avg_daily_demand( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert result == 12.0 + + def test_avg_daily_demand_with_no_demand( + self, order_history, good_pixel_id, predict_at, + ): + """Without demand, the average daily demand must be `0.0`.""" + order_history._data.loc[:, 'n_orders'] = 0 + + result = order_history.avg_daily_demand( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert result == 0.0 -def test_avg_daily_demand_with_no_demand( - order_history, good_pixel_id, predict_at, -): - """Without demand, the average daily demand must be `0.0`.""" - order_history._data.loc[:, 'n_orders'] = 0 +class TestChooseTacticalModel: + """Tests for the `OrderHistory.choose_tactical_model()` method.""" - result = order_history.avg_daily_demand( - pixel_id=good_pixel_id, - predict_day=predict_at.date(), - train_horizon=test_config.LONG_TRAIN_HORIZON, - ) + def test_best_model_with_high_demand( + self, order_history, good_pixel_id, predict_at, + ): + """With high demand, the average daily demand is `.>= 25.0`.""" + # With 12 time steps per day, the ADD becomes `36.0`. 
+ order_history._data.loc[:, 'n_orders'] = 3 - assert result == 0.0 + result = order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, models.HorizontalETSModel) + + def test_best_model_with_medium_demand( + self, order_history, good_pixel_id, predict_at, + ): + """With medium demand, the average daily demand is `>= 10.0` and `< 25.0`.""" + # With 12 time steps per day, the ADD becomes `24.0`. + order_history._data.loc[:, 'n_orders'] = 2 + + result = order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + assert isinstance(result, models.HorizontalETSModel) + + def test_best_model_with_low_demand( + self, order_history, good_pixel_id, predict_at, + ): + """With low demand, the average daily demand is `>= 2.5` and `< 10.0`.""" + # With 12 time steps per day, the ADD becomes `12.0` ... + data = order_history._data + data.loc[:, 'n_orders'] = 1 + + # ... and we set three additional time steps per day to `0`. + data.loc[ # noqa:ECE001 + # all `Pixel`s, all `Order`s in time steps starting at 11 am + (slice(None), slice(data.index.levels[1][0], None, 12)), + 'n_orders', + ] = 0 + data.loc[ # noqa:ECE001 + # all `Pixel`s, all `Order`s in time steps starting at 12 am + (slice(None), slice(data.index.levels[1][1], None, 12)), + 'n_orders', + ] = 0 + data.loc[ # noqa:ECE001 + # all `Pixel`s, all `Order`s in time steps starting at 1 pm + (slice(None), slice(data.index.levels[1][2], None, 12)), + 'n_orders', + ] = 0 + + result = order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + # TODO: this should be the future `HorizontalSMAModel`. 
+ assert isinstance(result, models.HorizontalETSModel) + + def test_best_model_with_no_demand( + self, order_history, good_pixel_id, predict_at, + ): + """Without demand, the average daily demand is `< 2.5`.""" + order_history._data.loc[:, 'n_orders'] = 0 + + result = order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.LONG_TRAIN_HORIZON, + ) + + # TODO: this should be the future `HorizontalTrivialModel`. + assert isinstance(result, models.HorizontalETSModel) + + def test_best_model_for_unknown_train_horizon( + self, order_history, good_pixel_id, predict_at, # noqa:RST215 + ): + """For `train_horizon`s not included in the rule-based system ... + + ... the method raises a `RuntimeError`. + """ + with pytest.raises(RuntimeError, match='no rule'): + order_history.choose_tactical_model( + pixel_id=good_pixel_id, + predict_day=predict_at.date(), + train_horizon=test_config.SHORT_TRAIN_HORIZON, + ) diff --git a/tests/forecasts/timify/test_make_time_series.py b/tests/forecasts/timify/test_make_time_series.py index c47c14a..790eec6 100644 --- a/tests/forecasts/timify/test_make_time_series.py +++ b/tests/forecasts/timify/test_make_time_series.py @@ -36,7 +36,7 @@ def bad_predict_at(): ... not a long enough history so that both `SHORT_TRAIN_HORIZON` and `LONG_TRAIN_HORIZON` do not work. 
""" - predict_day = test_config.END - datetime.timedelta(weeks=2, days=1) + predict_day = test_config.END - datetime.timedelta(weeks=6, days=1) return datetime.datetime( predict_day.year, predict_day.month, predict_day.day, test_config.NOON, 0, ) From 015d304306e70fe2e1eb04d54a7308f12405af62 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 2 Feb 2021 12:40:53 +0100 Subject: [PATCH 66/72] Add `HorizontalSMAModel` - the model applies a simple moving average on horizontal time series - refactor `db.Forecast.from_dataframe()` to correctly convert `float('NaN')` values into `None`; otherwise, SQLAlchemy complains --- setup.cfg | 3 + src/urban_meal_delivery/db/forecasts.py | 62 +++++++++++++----- .../forecasts/models/__init__.py | 1 + .../forecasts/models/tactical/horizontal.py | 63 +++++++++++++++++++ tests/forecasts/test_models.py | 1 + 5 files changed, 116 insertions(+), 14 deletions(-) diff --git a/setup.cfg b/setup.cfg index 86a0f8d..d9a8249 100644 --- a/setup.cfg +++ b/setup.cfg @@ -153,6 +153,9 @@ per-file-ignores = src/urban_meal_delivery/forecasts/methods/extrapolate_season.py: # The module is not too complex. WPS232, + src/urban_meal_delivery/forecasts/models/tactical/horizontal.py: + # The many noqa's are ok. + WPS403, src/urban_meal_delivery/forecasts/timify.py: # No SQL injection as the inputs come from a safe source. 
S608, diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index d453fcd..0937f97 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -2,6 +2,7 @@ from __future__ import annotations +import math from typing import List import pandas as pd @@ -141,7 +142,7 @@ class Forecast(meta.Base): ) @classmethod - def from_dataframe( # noqa:WPS211 + def from_dataframe( # noqa:WPS210,WPS211 cls, pixel: db.Pixel, time_step: int, @@ -176,20 +177,53 @@ class Forecast(meta.Base): forecasts = [] for timestamp_idx in data.index: - forecast = cls( - pixel=pixel, - start_at=timestamp_idx.to_pydatetime(), - time_step=time_step, - training_horizon=training_horizon, - model=model, - actual=int(data.loc[timestamp_idx, 'actual']), - prediction=round(data.loc[timestamp_idx, 'prediction'], 5), - low80=round(data.loc[timestamp_idx, 'low80'], 5), - high80=round(data.loc[timestamp_idx, 'high80'], 5), - low95=round(data.loc[timestamp_idx, 'low95'], 5), - high95=round(data.loc[timestamp_idx, 'high95'], 5), + start_at = timestamp_idx.to_pydatetime() + actual = int(data.loc[timestamp_idx, 'actual']) + prediction = round(data.loc[timestamp_idx, 'prediction'], 5) + + # Explicit type casting. SQLAlchemy does not convert + # `float('NaN')`s into plain `None`s. 
+ + low80 = data.loc[timestamp_idx, 'low80'] + high80 = data.loc[timestamp_idx, 'high80'] + low95 = data.loc[timestamp_idx, 'low95'] + high95 = data.loc[timestamp_idx, 'high95'] + + if math.isnan(low80): + low80 = None + else: + low80 = round(low80, 5) + + if math.isnan(high80): + high80 = None + else: + high80 = round(high80, 5) + + if math.isnan(low95): + low95 = None + else: + low95 = round(low95, 5) + + if math.isnan(high95): + high95 = None + else: + high95 = round(high95, 5) + + forecasts.append( + cls( + pixel=pixel, + start_at=start_at, + time_step=time_step, + training_horizon=training_horizon, + model=model, + actual=actual, + prediction=prediction, + low80=low80, + high80=high80, + low95=low95, + high95=high95, + ), ) - forecasts.append(forecast) return forecasts diff --git a/src/urban_meal_delivery/forecasts/models/__init__.py b/src/urban_meal_delivery/forecasts/models/__init__.py index 391efcf..b236c79 100644 --- a/src/urban_meal_delivery/forecasts/models/__init__.py +++ b/src/urban_meal_delivery/forecasts/models/__init__.py @@ -31,5 +31,6 @@ A future `planning` sub-package will contain the `*Model`s used to plan the from urban_meal_delivery.forecasts.models.base import ForecastingModelABC from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalETSModel +from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalSMAModel from urban_meal_delivery.forecasts.models.tactical.realtime import RealtimeARIMAModel from urban_meal_delivery.forecasts.models.tactical.vertical import VerticalARIMAModel diff --git a/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py b/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py index 53e85be..3a18d76 100644 --- a/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py +++ b/src/urban_meal_delivery/forecasts/models/tactical/horizontal.py @@ -65,3 +65,66 @@ class HorizontalETSModel(base.ForecastingModelABC): raise RuntimeError('missing prediction for 
`predict_at`') return predictions + + +class HorizontalSMAModel(base.ForecastingModelABC): + """A simple moving average model applied on a horizontal time series.""" + + name = 'hsma' + + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Predict demand for a time step. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actual order counts (i.e., the "actual" column) and + point forecasts (i.e., the "prediction" column); + this model does not support confidence intervals; + contains one row for the `predict_at` time step + + # noqa:DAR401 RuntimeError + """ + # Generate the historic (and horizontal) order time series. + training_ts, frequency, actuals_ts = self._order_history.make_horizontal_ts( + pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon, + ) + + # Sanity checks. + if frequency != 7: # pragma: no cover + raise RuntimeError('`frequency` should be `7`') + if len(actuals_ts) != 1: # pragma: no cover + raise RuntimeError( + 'the hsma model can only predict one step into the future', + ) + + # The "prediction" is calculated as the `np.mean()`. + # As the `training_ts` covers only full week horizons, + # no adjustment regarding the weekly seasonality is needed. + predictions = pd.DataFrame( + data={ + 'actual': actuals_ts, + 'prediction': training_ts.values.mean(), + 'low80': float('NaN'), + 'high80': float('NaN'), + 'low95': float('NaN'), + 'high95': float('NaN'), + }, + index=actuals_ts.index, + ) + + # Sanity checks. 
+ if ( # noqa:WPS337 + predictions[['actual', 'prediction']].isnull().any().any() + ): # pragma: no cover + + raise RuntimeError('missing predictions in hsma model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/tests/forecasts/test_models.py b/tests/forecasts/test_models.py index 2ce04b4..4ebebd8 100644 --- a/tests/forecasts/test_models.py +++ b/tests/forecasts/test_models.py @@ -11,6 +11,7 @@ from urban_meal_delivery.forecasts import models MODELS = ( models.HorizontalETSModel, + models.HorizontalSMAModel, models.RealtimeARIMAModel, models.VerticalARIMAModel, ) From 6fd16f2a6cbf7ae98ca4ebf8ec399c4ae49de707 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 2 Feb 2021 12:45:26 +0100 Subject: [PATCH 67/72] Add `TrivialModel` - the trivial model simply predicts `0` demand for all time steps --- .../forecasts/models/__init__.py | 1 + .../forecasts/models/tactical/other.py | 75 +++++++++++++++++++ tests/forecasts/test_models.py | 1 + 3 files changed, 77 insertions(+) create mode 100644 src/urban_meal_delivery/forecasts/models/tactical/other.py diff --git a/src/urban_meal_delivery/forecasts/models/__init__.py b/src/urban_meal_delivery/forecasts/models/__init__.py index b236c79..c5c905f 100644 --- a/src/urban_meal_delivery/forecasts/models/__init__.py +++ b/src/urban_meal_delivery/forecasts/models/__init__.py @@ -32,5 +32,6 @@ A future `planning` sub-package will contain the `*Model`s used to plan the from urban_meal_delivery.forecasts.models.base import ForecastingModelABC from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalETSModel from urban_meal_delivery.forecasts.models.tactical.horizontal import HorizontalSMAModel +from urban_meal_delivery.forecasts.models.tactical.other import TrivialModel from urban_meal_delivery.forecasts.models.tactical.realtime import RealtimeARIMAModel from 
urban_meal_delivery.forecasts.models.tactical.vertical import VerticalARIMAModel diff --git a/src/urban_meal_delivery/forecasts/models/tactical/other.py b/src/urban_meal_delivery/forecasts/models/tactical/other.py new file mode 100644 index 0000000..b439957 --- /dev/null +++ b/src/urban_meal_delivery/forecasts/models/tactical/other.py @@ -0,0 +1,75 @@ +"""Forecasting `*Model`s to predict demand for tactical purposes ... + +... that cannot be classified into either "horizontal", "vertical", +or "real-time". +""" # noqa:RST215 + +import datetime as dt + +import pandas as pd + +from urban_meal_delivery import db +from urban_meal_delivery.forecasts.models import base + + +class TrivialModel(base.ForecastingModelABC): + """A trivial model predicting `0` demand. + + No need to distinguish between a "horizontal", "vertical", or + "real-time" model here as all give the same prediction for all time steps. + """ + + name = 'trivial' + + def predict( + self, pixel: db.Pixel, predict_at: dt.datetime, train_horizon: int, + ) -> pd.DataFrame: + """Predict demand for a time step. + + Args: + pixel: pixel in which the prediction is made + predict_at: time step (i.e., "start_at") to make the prediction for + train_horizon: weeks of historic data used to predict `predict_at` + + Returns: + actual order counts (i.e., the "actual" column) and + point forecasts (i.e., the "prediction" column); + this model does not support confidence intervals; + contains one row for the `predict_at` time step + + # noqa:DAR401 RuntimeError + """ + # Generate the historic order time series mainly to check if a valid + # `training_ts` exists (i.e., the demand history is long enough). + _, frequency, actuals_ts = self._order_history.make_horizontal_ts( + pixel_id=pixel.id, predict_at=predict_at, train_horizon=train_horizon, + ) + + # Sanity checks. 
+ if frequency != 7: # pragma: no cover + raise RuntimeError('`frequency` should be `7`') + if len(actuals_ts) != 1: # pragma: no cover + raise RuntimeError( + 'the trivial model can only predict one step into the future', + ) + + # The "prediction" is simply `0.0`. + predictions = pd.DataFrame( + data={ + 'actual': actuals_ts, + 'prediction': 0.0, + 'low80': float('NaN'), + 'high80': float('NaN'), + 'low95': float('NaN'), + 'high95': float('NaN'), + }, + index=actuals_ts.index, + ) + + # Sanity checks. + if predictions['actual'].isnull().any(): # pragma: no cover + raise RuntimeError('missing actuals in trivial model') + if predict_at not in predictions.index: # pragma: no cover + raise RuntimeError('missing prediction for `predict_at`') + + return predictions diff --git a/tests/forecasts/test_models.py b/tests/forecasts/test_models.py index 4ebebd8..19a21d2 100644 --- a/tests/forecasts/test_models.py +++ b/tests/forecasts/test_models.py @@ -14,6 +14,7 @@ MODELS = ( models.HorizontalSMAModel, models.RealtimeARIMAModel, models.VerticalARIMAModel, + models.TrivialModel, ) From 3f5b4a50bbd321c5239fb6f2a6ffb67f7dc0450c Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 2 Feb 2021 13:04:43 +0100 Subject: [PATCH 68/72] Rename `Forecast.training_horizon` into `.train_horizon` - we use that shorter name in `urban_meal_delivery.forecasts.*` and want to be consistent in the ORM layer as well --- ...ame_training_horizon_into_train_horizon.py | 48 +++++++++++++++++++ src/urban_meal_delivery/db/forecasts.py | 12 ++--- .../forecasts/models/base.py | 4 +- tests/db/test_forecasts.py | 12 ++--- tests/forecasts/test_models.py | 2 +- 5 files changed, 63 insertions(+), 15 deletions(-) create mode 100644 migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py diff --git a/migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py 
b/migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py new file mode 100644 index 0000000..fb5fc93 --- /dev/null +++ b/migrations/versions/rev_20210202_12_8bfb928a31f8_rename_training_horizon_into_train_horizon.py @@ -0,0 +1,48 @@ +"""Rename `Forecast.training_horizon` into `.train_horizon`. + +Revision: #8bfb928a31f8 at 2021-02-02 12:55:09 +Revises: #c2af85bada01 +""" + +import os + +from alembic import op + +from urban_meal_delivery import configuration + + +revision = '8bfb928a31f8' +down_revision = 'c2af85bada01' +branch_labels = None +depends_on = None + + +config = configuration.make_config('testing' if os.getenv('TESTING') else 'production') + + +def upgrade(): + """Upgrade to revision 8bfb928a31f8.""" + op.execute( + f""" + ALTER TABLE + {config.CLEAN_SCHEMA}.forecasts + RENAME COLUMN + training_horizon + TO + train_horizon; + """, + ) # noqa:WPS355 + + +def downgrade(): + """Downgrade to revision c2af85bada01.""" + op.execute( + f""" + ALTER TABLE + {config.CLEAN_SCHEMA}.forecasts + RENAME COLUMN + train_horizon + TO + training_horizon; + """, + ) # noqa:WPS355 diff --git a/src/urban_meal_delivery/db/forecasts.py b/src/urban_meal_delivery/db/forecasts.py index 0937f97..a85fa74 100644 --- a/src/urban_meal_delivery/db/forecasts.py +++ b/src/urban_meal_delivery/db/forecasts.py @@ -27,7 +27,7 @@ class Forecast(meta.Base): pixel_id = sa.Column(sa.Integer, nullable=False, index=True) start_at = sa.Column(sa.DateTime, nullable=False) time_step = sa.Column(sa.SmallInteger, nullable=False) - training_horizon = sa.Column(sa.SmallInteger, nullable=False) + train_horizon = sa.Column(sa.SmallInteger, nullable=False) model = sa.Column(sa.Unicode(length=20), nullable=False) # We also store the actual order counts for convenient retrieval. 
# A `UniqueConstraint` below ensures that redundant values that @@ -71,7 +71,7 @@ class Forecast(meta.Base): ), sa.CheckConstraint('time_step > 0', name='time_step_must_be_positive'), sa.CheckConstraint( - 'training_horizon > 0', name='training_horizon_must_be_positive', + 'train_horizon > 0', name='training_horizon_must_be_positive', ), sa.CheckConstraint('actual >= 0', name='actuals_must_be_non_negative'), sa.CheckConstraint( @@ -124,7 +124,7 @@ class Forecast(meta.Base): ), # There can be only one prediction per forecasting setting. sa.UniqueConstraint( - 'pixel_id', 'start_at', 'time_step', 'training_horizon', 'model', + 'pixel_id', 'start_at', 'time_step', 'train_horizon', 'model', ), ) @@ -146,7 +146,7 @@ class Forecast(meta.Base): cls, pixel: db.Pixel, time_step: int, - training_horizon: int, + train_horizon: int, model: str, data: pd.Dataframe, ) -> List[db.Forecast]: @@ -166,7 +166,7 @@ class Forecast(meta.Base): Args: pixel: in which the forecast is made time_step: length of one time step in minutes - training_horizon: length of the training horizon in weeks + train_horizon: length of the training horizon in weeks model: name of the forecasting model data: a `pd.Dataframe` as described above (i.e., with the six columns holding `float`s) @@ -214,7 +214,7 @@ class Forecast(meta.Base): pixel=pixel, start_at=start_at, time_step=time_step, - training_horizon=training_horizon, + train_horizon=train_horizon, model=model, actual=actual, prediction=prediction, diff --git a/src/urban_meal_delivery/forecasts/models/base.py b/src/urban_meal_delivery/forecasts/models/base.py index 8be733a..9a9cd72 100644 --- a/src/urban_meal_delivery/forecasts/models/base.py +++ b/src/urban_meal_delivery/forecasts/models/base.py @@ -79,7 +79,7 @@ class ForecastingModelABC(abc.ABC): .filter_by(pixel=pixel) .filter_by(start_at=predict_at) .filter_by(time_step=self._order_history.time_step) - .filter_by(training_horizon=train_horizon) + .filter_by(train_horizon=train_horizon) 
.filter_by(model=self.name) .first() ) : @@ -94,7 +94,7 @@ class ForecastingModelABC(abc.ABC): forecasts = db.Forecast.from_dataframe( pixel=pixel, time_step=self._order_history.time_step, - training_horizon=train_horizon, + train_horizon=train_horizon, model=self.name, data=predictions, ) diff --git a/tests/db/test_forecasts.py b/tests/db/test_forecasts.py index a2cd1bb..ff37dda 100644 --- a/tests/db/test_forecasts.py +++ b/tests/db/test_forecasts.py @@ -28,7 +28,7 @@ def forecast(pixel): pixel=pixel, start_at=start_at, time_step=test_config.LONG_TIME_STEP, - training_horizon=test_config.LONG_TRAIN_HORIZON, + train_horizon=test_config.LONG_TRAIN_HORIZON, model=MODEL, actual=12, prediction=12.3, @@ -143,9 +143,9 @@ class TestConstraints: db_session.commit() @pytest.mark.parametrize('value', [-1, 0]) - def test_positive_training_horizon(self, db_session, forecast, value): + def test_positive_train_horizon(self, db_session, forecast, value): """Insert an instance with invalid data.""" - forecast.training_horizon = value + forecast.train_horizon = value db_session.add(forecast) with pytest.raises( @@ -418,7 +418,7 @@ class TestConstraints: pixel=forecast.pixel, start_at=forecast.start_at, time_step=forecast.time_step, - training_horizon=forecast.training_horizon, + train_horizon=forecast.train_horizon, model=forecast.model, actual=forecast.actual, prediction=2, @@ -479,7 +479,7 @@ class TestFromDataFrameConstructor: forecasts = db.Forecast.from_dataframe( pixel=pixel, time_step=test_config.LONG_TIME_STEP, - training_horizon=test_config.LONG_TRAIN_HORIZON, + train_horizon=test_config.LONG_TRAIN_HORIZON, model=MODEL, data=prediction_data, ) @@ -496,7 +496,7 @@ class TestFromDataFrameConstructor: forecasts = db.Forecast.from_dataframe( pixel=pixel, time_step=test_config.LONG_TIME_STEP, - training_horizon=test_config.LONG_TRAIN_HORIZON, + train_horizon=test_config.LONG_TRAIN_HORIZON, model=MODEL, data=prediction_data, ) diff --git a/tests/forecasts/test_models.py 
b/tests/forecasts/test_models.py index 19a21d2..ef24d3c 100644 --- a/tests/forecasts/test_models.py +++ b/tests/forecasts/test_models.py @@ -140,7 +140,7 @@ class TestGenericForecastingModelProperties: assert isinstance(result, db.Forecast) assert result.pixel == pixel assert result.start_at == predict_at - assert result.training_horizon == test_config.LONG_TRAIN_HORIZON + assert result.train_horizon == test_config.LONG_TRAIN_HORIZON @pytest.mark.db @pytest.mark.r From 23391c2fa4f5adaeb5001dec9147ff22be8f180b Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Tue, 2 Feb 2021 15:20:02 +0100 Subject: [PATCH 69/72] Adjust `OrderHistory.choose_tactical_model()` heuristic - use the `HorizontalSMAModel` for low demand - use the `TrivialModel` for no demand --- src/urban_meal_delivery/forecasts/timify.py | 6 ++---- tests/forecasts/timify/test_avg_daily_demand.py | 6 ++---- 2 files changed, 4 insertions(+), 8 deletions(-) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 92674f7..3c9a147 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -545,12 +545,10 @@ class OrderHistory: elif add >= 10: # = "medium demand" return models.HorizontalETSModel(order_history=self) elif add >= 2.5: # = "low demand" - # TODO: create HorizontalSMAModel - return models.HorizontalETSModel(order_history=self) + return models.HorizontalSMAModel(order_history=self) # = "no demand" - # TODO: create HorizontalTrivialModel - return models.HorizontalETSModel(order_history=self) + return models.TrivialModel(order_history=self) raise RuntimeError( 'no rule for the given average daily demand and training horizon', diff --git a/tests/forecasts/timify/test_avg_daily_demand.py b/tests/forecasts/timify/test_avg_daily_demand.py index c8ab66f..4ad3c15 100644 --- a/tests/forecasts/timify/test_avg_daily_demand.py +++ b/tests/forecasts/timify/test_avg_daily_demand.py @@ -112,8 +112,7 @@ class 
TestChooseTacticalModel: train_horizon=test_config.LONG_TRAIN_HORIZON, ) - # TODO: this should be the future `HorizontalSMAModel`. - assert isinstance(result, models.HorizontalETSModel) + assert isinstance(result, models.HorizontalSMAModel) def test_best_model_with_no_demand( self, order_history, good_pixel_id, predict_at, @@ -127,8 +126,7 @@ class TestChooseTacticalModel: train_horizon=test_config.LONG_TRAIN_HORIZON, ) - # TODO: this should be the future `HorizontalTrivialModel`. - assert isinstance(result, models.HorizontalETSModel) + assert isinstance(result, models.TrivialModel) def test_best_model_for_unknown_train_horizon( self, order_history, good_pixel_id, predict_at, # noqa:RST215 From 50b35a828479fdda1293f547b2acf1eeea8fe302 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 4 Feb 2021 12:05:43 +0100 Subject: [PATCH 70/72] Add CLI script to run tactical forecasting heuristic --- setup.cfg | 3 + src/urban_meal_delivery/configuration.py | 2 +- src/urban_meal_delivery/console/__init__.py | 2 + src/urban_meal_delivery/console/forecasts.py | 144 +++++++++++++++++++ 4 files changed, 150 insertions(+), 1 deletion(-) create mode 100644 src/urban_meal_delivery/console/forecasts.py diff --git a/setup.cfg b/setup.cfg index d9a8249..47924d4 100644 --- a/setup.cfg +++ b/setup.cfg @@ -141,6 +141,9 @@ per-file-ignores = src/urban_meal_delivery/configuration.py: # Allow upper case class variables within classes. WPS115, + src/urban_meal_delivery/console/forecasts.py: + # The module is not too complex. + WPS232, src/urban_meal_delivery/db/customers.py: # The module is not too complex. WPS232, diff --git a/src/urban_meal_delivery/configuration.py b/src/urban_meal_delivery/configuration.py index 9d8c924..8e43cf5 100644 --- a/src/urban_meal_delivery/configuration.py +++ b/src/urban_meal_delivery/configuration.py @@ -51,7 +51,7 @@ class Config: # Training horizons (in full weeks) used to train the forecasting models. 
# For now, we only use 8 weeks as that was the best performing in # a previous study (note:4f79e8fa). - TRAINING_HORIZONS = [8] + TRAIN_HORIZONS = [8] # The demand forecasting methods used in the simulations. FORECASTING_METHODS = ['hets', 'rtarima'] diff --git a/src/urban_meal_delivery/console/__init__.py b/src/urban_meal_delivery/console/__init__.py index 60ac801..baa089c 100644 --- a/src/urban_meal_delivery/console/__init__.py +++ b/src/urban_meal_delivery/console/__init__.py @@ -1,9 +1,11 @@ """Provide CLI scripts for the project.""" +from urban_meal_delivery.console import forecasts from urban_meal_delivery.console import gridify from urban_meal_delivery.console import main cli = main.entry_point +cli.add_command(forecasts.tactical_heuristic, name='tactical-forecasts') cli.add_command(gridify.gridify) diff --git a/src/urban_meal_delivery/console/forecasts.py b/src/urban_meal_delivery/console/forecasts.py new file mode 100644 index 0000000..1a7bcf9 --- /dev/null +++ b/src/urban_meal_delivery/console/forecasts.py @@ -0,0 +1,144 @@ +"""CLI script to forecast demand. + +The main purpose of this script is to pre-populate the `db.Forecast` table +with demand predictions such that they can readily be used by the +predictive routing algorithms. 
+"""
+
+import datetime as dt
+import sys
+
+import click
+from sqlalchemy import func
+from sqlalchemy.orm import exc as orm_exc
+
+from urban_meal_delivery import config
+from urban_meal_delivery import db
+from urban_meal_delivery.console import decorators
+from urban_meal_delivery.forecasts import timify
+
+
+@click.command()
+@click.argument('city', default='Paris', type=str)
+@click.argument('side_length', default=1000, type=int)
+@click.argument('time_step', default=60, type=int)
+@click.argument('train_horizon', default=8, type=int)
+@decorators.db_revision('8bfb928a31f8')
+def tactical_heuristic(  # noqa:C901,WPS213,WPS216,WPS231
+    city: str, side_length: int, time_step: int, train_horizon: int,
+) -> None:  # pragma: no cover
+    """Predict demand for all pixels and days in a city.
+
+    This command makes demand `Forecast`s for all `Pixel`s and days
+    for tactical purposes with the heuristic specified in
+    `urban_meal_delivery.forecasts.timify.OrderHistory.choose_tactical_model()`.
+
+    According to this heuristic, there is exactly one `Forecast` per
+    `Pixel` and time step (e.g., hour of the day with 60-minute time steps)
+    given the lengths of the training horizon and a time step. That is so
+    as the heuristic chooses the most promising forecasting `*Model`.
+
+    All `Forecast`s are persisted to the database so that they can be readily
+    used by the predictive routing algorithms.
+
+    This command first checks which `Forecast`s still need to be made
+    and then does its work. So, it can be interrupted at any point in
+    time and then simply continues where it left off the next time it
+    is executed.
+
+    Important: In a future revision, this command may need to be adapted such
+    that it does not simply obtain the last time step for which a `Forecast`
+    was made and continues from there. The reason is that another future command
+    may make predictions using all available forecasting `*Model`s per `Pixel`
+    and time step.
+
+    Arguments:
+
+    CITY: one of "Bordeaux", "Lyon", or "Paris" (=default)
+
+    SIDE_LENGTH: of a pixel in the grid; defaults to `1000`
+
+    TIME_STEP: length of one time step in minutes; defaults to `60`
+
+    TRAIN_HORIZON: length of the training horizon; defaults to `8`
+    """  # noqa:D412,D417,RST215
+    # Input validation.
+
+    try:
+        city_obj = (
+            db.session.query(db.City).filter_by(name=city.title()).one()  # noqa:WPS221
+        )
+    except orm_exc.NoResultFound:
+        click.echo('NAME must be one of "Paris", "Lyon", or "Bordeaux"')
+        sys.exit(1)
+
+    for grid in city_obj.grids:
+        if grid.side_length == side_length:
+            break
+    else:
+        click.echo(f'SIDE_LENGTH must be in {config.GRID_SIDE_LENGTHS}')
+        sys.exit(1)
+
+    if time_step not in config.TIME_STEPS:
+        click.echo(f'TIME_STEP must be in {config.TIME_STEPS}')
+        sys.exit(1)
+
+    if train_horizon not in config.TRAIN_HORIZONS:
+        click.echo(f'TRAIN_HORIZON must be in {config.TRAIN_HORIZONS}')
+        sys.exit(1)
+
+    click.echo(
+        'Parameters: '
+        + f'city="{city}", grid.side_length={side_length}, '
+        + f'time_step={time_step}, train_horizon={train_horizon}',
+    )
+
+    # Load the historic order data.
+    order_history = timify.OrderHistory(grid=grid, time_step=time_step)  # noqa:WPS441
+    order_history.aggregate_orders()
+
+    # Run the tactical heuristic.
+
+    for pixel in grid.pixels:  # noqa:WPS441
+        # Important: this check may need to be adapted once further
+        # commands are added that make `Forecast`s without the heuristic!
+        # Continue with forecasting on the day the last prediction was made ...
+        last_predict_at = (  # noqa:ECE001
+            db.session.query(func.max(db.Forecast.start_at))
+            .filter(db.Forecast.pixel == pixel)
+            .first()
+        )[0]
+        # ... or start `train_horizon` weeks after the first `Order`
+        # if no `Forecast`s are in the database yet.
+ if last_predict_at is None: + predict_day = order_history.first_order_at(pixel_id=pixel.id).date() + predict_day += dt.timedelta(weeks=train_horizon) + else: + predict_day = last_predict_at.date() + + # Go over all days in chronological order ... + while predict_day <= order_history.last_order_at(pixel_id=pixel.id).date(): + # ... and choose the most promising `*Model` for that day. + model = order_history.choose_tactical_model( + pixel_id=pixel.id, predict_day=predict_day, train_horizon=train_horizon, + ) + click.echo( + f'Predicting pixel #{pixel.id} in {city} ' + + f'for {predict_day} with {model.name}', + ) + + # Only loop over the time steps corresponding to working hours. + predict_at = dt.datetime( + predict_day.year, + predict_day.month, + predict_day.day, + config.SERVICE_START, + ) + while predict_at.hour < config.SERVICE_END: + model.make_forecast( + pixel=pixel, predict_at=predict_at, train_horizon=train_horizon, + ) + + predict_at += dt.timedelta(minutes=time_step) + + predict_day += dt.timedelta(days=1) From 0da86e5f07f401cd9c8aa2789f3b0c951657bfd1 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 4 Feb 2021 12:21:41 +0100 Subject: [PATCH 71/72] Pin the dependencies ... ... 
after upgrading: - alembic - matplotlib - pandas - rpy2 - sqlalchemy - statsmodels - dev dependencies + coverage + factory-boy + faker + nox + packaging + pre-commit + flake8-annotations + pytest + pytest-cov + sphinx - research dependencies + numpy + pyty - transient dependencies + astpretty + atomicwrites + bleach + chardet + colorlog + darglint + flake8-comprehensions + gitpython + identify + ipykernel + ipython + jedi + jinja2 + jupyter-client + jupyter-core + mako + nbformat + nest-asyncio + notebook + parso + pluggy + prompt-toolkit + ptyprocess + pygments + pyyaml + pyzmq + requests + smmap + terminado + textfixtures + snowballstemmer + typed-ast + urllib3 + virtualenv - fix SQL statements written in raw text --- poetry.lock | 866 ++++++++++---------- src/urban_meal_delivery/db/cities.py | 18 +- src/urban_meal_delivery/forecasts/timify.py | 101 +-- 3 files changed, 504 insertions(+), 481 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6c1d5a9..5b0958b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -8,17 +8,17 @@ python-versions = "*" [[package]] name = "alembic" -version = "1.4.3" +version = "1.5.4" description = "A database migration tool for SQLAlchemy." category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" [package.dependencies] Mako = "*" python-dateutil = "*" python-editor = ">=0.3" -SQLAlchemy = ">=1.1.0" +SQLAlchemy = ">=1.3.0" [[package]] name = "appdirs" @@ -74,7 +74,7 @@ python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" [[package]] name = "astpretty" -version = "2.0.0" +version = "2.1.0" description = "Pretty print the output of python stdlib `ast.parse`." category = "dev" optional = false @@ -95,7 +95,7 @@ python-versions = ">=3.5" name = "atomicwrites" version = "1.4.0" description = "Atomic file writes." 
-category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -180,7 +180,7 @@ d = ["aiohttp (>=3.3.2)", "aiohttp-cors"] [[package]] name = "bleach" -version = "3.2.1" +version = "3.3.0" description = "An easy safelist-based HTML-sanitizing tool." category = "main" optional = true @@ -231,11 +231,11 @@ python-versions = ">=3.6.1" [[package]] name = "chardet" -version = "3.0.4" +version = "4.0.0" description = "Universal encoding detector for Python 2 and 3" category = "main" optional = false -python-versions = "*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "click" @@ -255,7 +255,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "colorlog" -version = "4.6.2" +version = "4.7.2" description = "Log formatting with colors!" category = "dev" optional = false @@ -266,7 +266,7 @@ colorama = {version = "*", markers = "sys_platform == \"win32\""} [[package]] name = "coverage" -version = "5.3" +version = "5.4" description = "Code coverage measurement for Python" category = "dev" optional = false @@ -288,7 +288,7 @@ six = "*" [[package]] name = "darglint" -version = "1.5.8" +version = "1.6.0" description = "A utility for ensuring Google-style docstrings stay up to date with the source code." category = "dev" optional = false @@ -344,22 +344,22 @@ python-versions = "*" [[package]] name = "factory-boy" -version = "3.1.0" +version = "3.2.0" description = "A versatile test fixtures replacement based on thoughtbot's factory_bot for Ruby." 
category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] Faker = ">=0.7.0" [package.extras] dev = ["coverage", "django", "flake8", "isort", "pillow", "sqlalchemy", "mongoengine", "wheel (>=0.32.0)", "tox", "zest.releaser"] -doc = ["sphinx", "sphinx-rtd-theme"] +doc = ["sphinx", "sphinx-rtd-theme", "sphinxcontrib-spelling"] [[package]] name = "faker" -version = "5.0.1" +version = "5.8.0" description = "Faker is a Python package that generates fake data for you." category = "dev" optional = false @@ -392,7 +392,7 @@ pyflakes = ">=2.2.0,<2.3.0" [[package]] name = "flake8-annotations" -version = "2.4.1" +version = "2.5.0" description = "Flake8 Type Annotation Checks" category = "dev" optional = false @@ -463,11 +463,11 @@ flake8 = ">=2,<4.0.0" [[package]] name = "flake8-comprehensions" -version = "3.3.0" +version = "3.3.1" description = "A flake8 plugin to help you write better list/set/dict comprehensions." category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] flake8 = ">=3.0,<3.2.0 || >3.2.0,<4" @@ -659,7 +659,7 @@ smmap = ">=3.0.1,<4" [[package]] name = "gitpython" -version = "3.1.11" +version = "3.1.12" description = "Python Git Library" category = "dev" optional = false @@ -670,7 +670,7 @@ gitdb = ">=4.0.1,<5" [[package]] name = "identify" -version = "1.5.10" +version = "1.5.13" description = "File identification library for Python" category = "dev" optional = false @@ -699,13 +699,13 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" name = "iniconfig" version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" -category = "main" +category = "dev" optional = false python-versions = "*" [[package]] name = "ipykernel" -version = "5.4.2" +version = "5.4.3" description = "IPython Kernel for Jupyter" category = "main" optional = true @@ -719,11 +719,11 @@ tornado = ">=4.2" traitlets = ">=4.1.0" [package.extras] -test = 
["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose"] +test = ["pytest (!=5.3.4)", "pytest-cov", "flaky", "nose", "jedi (<=0.17.2)"] [[package]] name = "ipython" -version = "7.19.0" +version = "7.20.0" description = "IPython: Productive Interactive Computing" category = "main" optional = true @@ -734,7 +734,7 @@ appnope = {version = "*", markers = "sys_platform == \"darwin\""} backcall = "*" colorama = {version = "*", markers = "sys_platform == \"win32\""} decorator = "*" -jedi = ">=0.10" +jedi = ">=0.16" pexpect = {version = ">4.3", markers = "sys_platform != \"win32\""} pickleshare = "*" prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" @@ -779,22 +779,22 @@ xdg_home = ["appdirs (>=1.4.0)"] [[package]] name = "jedi" -version = "0.17.2" +version = "0.18.0" description = "An autocompletion tool for Python that can be used for text editors." category = "main" optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] -parso = ">=0.7.0,<0.8.0" +parso = ">=0.8.0,<0.9.0" [package.extras] -qa = ["flake8 (==3.7.9)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (>=3.9.0,<5.0.0)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] [[package]] name = "jinja2" -version = "2.11.2" +version = "2.11.3" description = "A very fast and expressive template engine." 
category = "main" optional = false @@ -836,7 +836,7 @@ format_nongpl = ["idna", "jsonpointer (>1.13)", "webcolors", "rfc3986-validator [[package]] name = "jupyter-client" -version = "6.1.7" +version = "6.1.11" description = "Jupyter protocol implementation and client libraries" category = "main" optional = true @@ -850,11 +850,12 @@ tornado = ">=4.1" traitlets = "*" [package.extras] -test = ["ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] +doc = ["sphinx (>=1.3.6)", "sphinx-rtd-theme", "sphinxcontrib-github-alt"] +test = ["jedi (<=0.17.2)", "ipykernel", "ipython", "mock", "pytest", "pytest-asyncio", "async-generator", "pytest-timeout"] [[package]] name = "jupyter-core" -version = "4.7.0" +version = "4.7.1" description = "Jupyter core package. A base package on which Jupyter projects rely." category = "main" optional = true @@ -921,7 +922,7 @@ python-versions = ">=3.6" [[package]] name = "mako" -version = "1.1.3" +version = "1.1.4" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." 
category = "main" optional = false @@ -944,7 +945,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" [[package]] name = "matplotlib" -version = "3.3.3" +version = "3.3.4" description = "Python plotting package" category = "main" optional = false @@ -1061,7 +1062,7 @@ webpdf = ["pyppeteer (==0.2.2)"] [[package]] name = "nbformat" -version = "5.0.8" +version = "5.1.2" description = "The Jupyter Notebook format" category = "main" optional = true @@ -1075,11 +1076,11 @@ traitlets = ">=4.1" [package.extras] fast = ["fastjsonschema"] -test = ["fastjsonschema", "testpath", "pytest", "pytest-cov"] +test = ["check-manifest", "fastjsonschema", "testpath", "pytest", "pytest-cov"] [[package]] name = "nest-asyncio" -version = "1.4.3" +version = "1.5.1" description = "Patch asyncio to allow nested event loops" category = "main" optional = true @@ -1095,7 +1096,7 @@ python-versions = "*" [[package]] name = "notebook" -version = "6.1.5" +version = "6.2.0" description = "A web-based notebook environment for interactive computing" category = "main" optional = true @@ -1112,22 +1113,23 @@ nbconvert = "*" nbformat = "*" prometheus-client = "*" pyzmq = ">=17" -Send2Trash = "*" +Send2Trash = ">=1.5.0" terminado = ">=0.8.3" -tornado = ">=5.0" +tornado = ">=6.1" traitlets = ">=4.2.1" [package.extras] -docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt"] -test = ["nose", "coverage", "requests", "nose-warnings-filters", "nbval", "nose-exclude", "selenium", "pytest", "pytest-cov", "requests-unixsocket"] +docs = ["sphinx", "nbsphinx", "sphinxcontrib-github-alt", "sphinx-rtd-theme"] +json-logging = ["json-logging"] +test = ["pytest", "coverage", "requests", "nbval", "selenium", "pytest-cov", "requests-unixsocket"] [[package]] name = "nox" -version = "2020.8.22" +version = "2020.12.31" description = "Flexible test automation." 
category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] argcomplete = ">=1.9.4,<2.0" @@ -1140,15 +1142,15 @@ tox_to_nox = ["jinja2", "tox"] [[package]] name = "numpy" -version = "1.19.4" +version = "1.20.0" description = "NumPy is the fundamental package for array computing with Python." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" [[package]] name = "packaging" -version = "20.8" +version = "20.9" description = "Core utilities for Python packages" category = "main" optional = false @@ -1159,19 +1161,19 @@ pyparsing = ">=2.0.2" [[package]] name = "pandas" -version = "1.1.5" +version = "1.2.1" description = "Powerful data structures for data analysis, time series, and statistics" category = "main" optional = false -python-versions = ">=3.6.1" +python-versions = ">=3.7.1" [package.dependencies] -numpy = ">=1.15.4" +numpy = ">=1.16.5" python-dateutil = ">=2.7.3" -pytz = ">=2017.2" +pytz = ">=2017.3" [package.extras] -test = ["pytest (>=4.0.2)", "pytest-xdist", "hypothesis (>=3.58)"] +test = ["pytest (>=5.0.1)", "pytest-xdist", "hypothesis (>=3.58)"] [[package]] name = "pandocfilters" @@ -1183,14 +1185,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "parso" -version = "0.7.1" +version = "0.8.1" description = "A Python Parser" category = "main" optional = true -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.extras] -testing = ["docopt", "pytest (>=3.0.7)"] +qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] +testing = ["docopt", "pytest (<6.0.0)"] [[package]] name = "pathspec" @@ -1262,7 +1265,7 @@ python-versions = ">=3.6" name = "pluggy" version = "0.13.1" description = "plugin and hook calling mechanisms for python" -category = "main" +category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" @@ -1271,7 +1274,7 @@ dev = ["pre-commit", "tox"] 
[[package]] name = "pre-commit" -version = "2.9.3" +version = "2.10.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." category = "dev" optional = false @@ -1298,7 +1301,7 @@ twisted = ["twisted"] [[package]] name = "prompt-toolkit" -version = "3.0.8" +version = "3.0.14" description = "Library for building powerful interactive command lines in Python" category = "main" optional = true @@ -1317,7 +1320,7 @@ python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" [[package]] name = "ptyprocess" -version = "0.6.0" +version = "0.7.0" description = "Run a subprocess in a pseudo terminal" category = "main" optional = true @@ -1368,7 +1371,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pygments" -version = "2.7.3" +version = "2.7.4" description = "Pygments is a syntax highlighting package written in Python." category = "main" optional = false @@ -1392,9 +1395,9 @@ python-versions = ">=3.5" [[package]] name = "pytest" -version = "6.2.1" +version = "6.2.2" description = "pytest: simple powerful testing with Python" -category = "main" +category = "dev" optional = false python-versions = ">=3.6" @@ -1413,14 +1416,14 @@ testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xm [[package]] name = "pytest-cov" -version = "2.10.1" +version = "2.11.1" description = "Pytest plugin for measuring coverage." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] -coverage = ">=4.4" +coverage = ">=5.2.1" pytest = ">=4.6" [package.extras] @@ -1483,7 +1486,7 @@ python-versions = "*" [[package]] name = "pytz" -version = "2020.4" +version = "2020.5" description = "World timezone definitions, modern and historical" category = "main" optional = false @@ -1507,23 +1510,23 @@ python-versions = "*" [[package]] name = "pyyaml" -version = "5.3.1" +version = "5.4.1" description = "YAML parser and emitter for Python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [[package]] name = "pyzmq" -version = "20.0.0" +version = "22.0.2" description = "Python bindings for 0MQ" category = "main" optional = true -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] -cffi = {version = "*", markers = "implementation_name === \"pypy\""} -py = {version = "*", markers = "implementation_name === \"pypy\""} +cffi = {version = "*", markers = "implementation_name == \"pypy\""} +py = {version = "*", markers = "implementation_name == \"pypy\""} [[package]] name = "regex" @@ -1535,7 +1538,7 @@ python-versions = "*" [[package]] name = "requests" -version = "2.25.0" +version = "2.25.1" description = "Python HTTP for Humans." 
category = "main" optional = false @@ -1543,7 +1546,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<4" +chardet = ">=3.0.2,<5" idna = ">=2.5,<3" urllib3 = ">=1.21.1,<1.27" @@ -1564,7 +1567,7 @@ docutils = ">=0.11,<1.0" [[package]] name = "rpy2" -version = "3.4.1" +version = "3.4.2" description = "Python interface to the R language (embedded R)" category = "main" optional = false @@ -1573,10 +1576,15 @@ python-versions = "*" [package.dependencies] cffi = ">=1.10.0" jinja2 = "*" -pytest = "*" pytz = "*" tzlocal = "*" +[package.extras] +all = ["pandas", "numpy", "pytest"] +numpy = ["pandas"] +pandas = ["numpy", "pandas"] +test = ["pytest"] + [[package]] name = "scipy" version = "1.6.0" @@ -1619,7 +1627,7 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "smmap" -version = "3.0.4" +version = "3.0.5" description = "A pure Python implementation of a sliding window memory map manager" category = "dev" optional = false @@ -1627,15 +1635,15 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "snowballstemmer" -version = "2.0.0" -description = "This package provides 26 stemmers for 25 languages generated from Snowball algorithms." +version = "2.1.0" +description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
category = "dev" optional = false python-versions = "*" [[package]] name = "sphinx" -version = "3.3.1" +version = "3.4.3" description = "Python documentation generator" category = "dev" optional = false @@ -1661,8 +1669,8 @@ sphinxcontrib-serializinghtml = "*" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "flake8-import-order", "mypy (>=0.790)", "docutils-stubs"] -test = ["pytest", "pytest-cov", "html5lib", "typed-ast", "cython"] +lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.790)", "docutils-stubs"] +test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] [[package]] name = "sphinx-autodoc-typehints" @@ -1752,7 +1760,7 @@ test = ["pytest"] [[package]] name = "sqlalchemy" -version = "1.3.20" +version = "1.3.23" description = "Database Abstraction Library" category = "main" optional = false @@ -1765,14 +1773,14 @@ mssql_pyodbc = ["pyodbc"] mysql = ["mysqlclient"] oracle = ["cx-oracle"] postgresql = ["psycopg2"] -postgresql_pg8000 = ["pg8000"] +postgresql_pg8000 = ["pg8000 (<1.16.6)"] postgresql_psycopg2binary = ["psycopg2-binary"] postgresql_psycopg2cffi = ["psycopg2cffi"] -pymysql = ["pymysql"] +pymysql = ["pymysql (<1)", "pymysql"] [[package]] name = "statsmodels" -version = "0.12.1" +version = "0.12.2" description = "Statistical computations and models for Python" category = "main" optional = false @@ -1802,7 +1810,7 @@ pbr = ">=2.0.0,<2.1.0 || >2.1.0" [[package]] name = "terminado" -version = "0.9.1" +version = "0.9.2" description = "Tornado websocket backend for the Xterm.js Javascript terminal emulator library." category = "main" optional = true @@ -1815,7 +1823,7 @@ tornado = ">=4" [[package]] name = "testfixtures" -version = "6.16.0" +version = "6.17.1" description = "A collection of helpers and mock objects for unit tests and doc tests." 
category = "dev" optional = false @@ -1849,7 +1857,7 @@ python-versions = "*" name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" -category = "main" +category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" @@ -1877,7 +1885,7 @@ test = ["pytest"] [[package]] name = "typed-ast" -version = "1.4.1" +version = "1.4.2" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false @@ -1904,7 +1912,7 @@ pytz = "*" [[package]] name = "urllib3" -version = "1.26.2" +version = "1.26.3" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "main" optional = false @@ -1925,7 +1933,7 @@ python-versions = "*" [[package]] name = "virtualenv" -version = "20.2.2" +version = "20.4.2" description = "Virtual Python Environment builder" category = "dev" optional = false @@ -1939,7 +1947,7 @@ six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] -testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "pytest-xdist (>=1.31.0)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)", "xonsh (>=0.9.16)"] [[package]] name = "wcwidth" @@ -2018,8 +2026,7 @@ alabaster = [ {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, ] alembic = [ - {file = "alembic-1.4.3-py2.py3-none-any.whl", hash = 
"sha256:4e02ed2aa796bd179965041afa092c55b51fb077de19d61835673cc80672c01c"}, - {file = "alembic-1.4.3.tar.gz", hash = "sha256:5334f32314fb2a56d86b4c4dd1ae34b08c03cae4cb888bc699942104d66bc245"}, + {file = "alembic-1.5.4.tar.gz", hash = "sha256:e871118b6174681f7e9a9ea67cfcae954c6d18e05b49c6b17f662d2530c76bf5"}, ] appdirs = [ {file = "appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, @@ -2058,8 +2065,8 @@ astor = [ {file = "astor-0.8.1.tar.gz", hash = "sha256:6a6effda93f4e1ce9f618779b2dd1d9d84f1e32812c23a29b3fff6fd7f63fa5e"}, ] astpretty = [ - {file = "astpretty-2.0.0-py2.py3-none-any.whl", hash = "sha256:7f27633ed885033da8b58666e7079ffff7e8e01869ec1aa66484cb5185ea3aa4"}, - {file = "astpretty-2.0.0.tar.gz", hash = "sha256:e4724bfd753636ba4a84384702e9796e5356969f40af2596d846ce64addde086"}, + {file = "astpretty-2.1.0-py2.py3-none-any.whl", hash = "sha256:f81f14b5636f7af81fadb1e3c09ca7702ce4615500d9cc6d6829befb2dec2e3c"}, + {file = "astpretty-2.1.0.tar.gz", hash = "sha256:8a801fcda604ec741f010bb36d7cbadc3ec8a182ea6fb83e20ab663463e75ff6"}, ] async-generator = [ {file = "async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b"}, @@ -2093,8 +2100,8 @@ black = [ {file = "black-19.10b0.tar.gz", hash = "sha256:c2edb73a08e9e0e6f65a0e6af18b059b8b1cdd5bef997d7a0b181df93dc81539"}, ] bleach = [ - {file = "bleach-3.2.1-py2.py3-none-any.whl", hash = "sha256:9f8ccbeb6183c6e6cddea37592dfb0167485c1e3b13b3363bc325aa8bda3adbd"}, - {file = "bleach-3.2.1.tar.gz", hash = "sha256:52b5919b81842b1854196eaae5ca29679a2f2e378905c346d3ca8227c2c66080"}, + {file = "bleach-3.3.0-py2.py3-none-any.whl", hash = "sha256:6123ddc1052673e52bab52cdc955bcb57a015264a1c57d37bea2f6b817af0125"}, + {file = "bleach-3.3.0.tar.gz", hash = "sha256:98b3170739e5e83dd9dc19633f074727ad848cbedb6026708c8ac2d3b697a433"}, ] branca = [ {file = "branca-0.4.2-py3-none-any.whl", hash = 
"sha256:62c2e777f074fc1830cd40ba9e650beb941861075980babafead8d97856b1a4b"}, @@ -2147,8 +2154,8 @@ cfgv = [ {file = "cfgv-3.2.0.tar.gz", hash = "sha256:cf22deb93d4bcf92f345a5c3cd39d3d41d6340adc60c78bbbd6588c384fda6a1"}, ] chardet = [ - {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, - {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, + {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, + {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, ] click = [ {file = "click-7.1.2-py2.py3-none-any.whl", hash = "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc"}, @@ -2159,52 +2166,67 @@ colorama = [ {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] colorlog = [ - {file = "colorlog-4.6.2-py2.py3-none-any.whl", hash = "sha256:edd5ada5de03e880e42b2526f8be5570fd9b692f8eb7cf6b1fdcac3e3fb23976"}, - {file = "colorlog-4.6.2.tar.gz", hash = "sha256:54e5f153419c22afc283c130c4201db19a3dbd83221a0f4657d5ee66234a2ea4"}, + {file = "colorlog-4.7.2-py2.py3-none-any.whl", hash = "sha256:0a9dcdba6cab68e8a768448b418a858d73c52b37b6e8dea2568296faece393bd"}, + {file = "colorlog-4.7.2.tar.gz", hash = "sha256:18d05b616438a75762d7d214b9ec3b05d274466c9f3ddd92807e755840c88251"}, ] coverage = [ - {file = "coverage-5.3-cp27-cp27m-macosx_10_13_intel.whl", hash = "sha256:bd3166bb3b111e76a4f8e2980fa1addf2920a4ca9b2b8ca36a3bc3dedc618270"}, - {file = "coverage-5.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9342dd70a1e151684727c9c91ea003b2fb33523bf19385d4554f7897ca0141d4"}, - {file = "coverage-5.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:63808c30b41f3bbf65e29f7280bf793c79f54fb807057de7e5238ffc7cc4d7b9"}, - {file = 
"coverage-5.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:4d6a42744139a7fa5b46a264874a781e8694bb32f1d76d8137b68138686f1729"}, - {file = "coverage-5.3-cp27-cp27m-win32.whl", hash = "sha256:86e9f8cd4b0cdd57b4ae71a9c186717daa4c5a99f3238a8723f416256e0b064d"}, - {file = "coverage-5.3-cp27-cp27m-win_amd64.whl", hash = "sha256:7858847f2d84bf6e64c7f66498e851c54de8ea06a6f96a32a1d192d846734418"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:530cc8aaf11cc2ac7430f3614b04645662ef20c348dce4167c22d99bec3480e9"}, - {file = "coverage-5.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:381ead10b9b9af5f64646cd27107fb27b614ee7040bb1226f9c07ba96625cbb5"}, - {file = "coverage-5.3-cp35-cp35m-macosx_10_13_x86_64.whl", hash = "sha256:71b69bd716698fa62cd97137d6f2fdf49f534decb23a2c6fc80813e8b7be6822"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1d44bb3a652fed01f1f2c10d5477956116e9b391320c94d36c6bf13b088a1097"}, - {file = "coverage-5.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:1c6703094c81fa55b816f5ae542c6ffc625fec769f22b053adb42ad712d086c9"}, - {file = "coverage-5.3-cp35-cp35m-win32.whl", hash = "sha256:cedb2f9e1f990918ea061f28a0f0077a07702e3819602d3507e2ff98c8d20636"}, - {file = "coverage-5.3-cp35-cp35m-win_amd64.whl", hash = "sha256:7f43286f13d91a34fadf61ae252a51a130223c52bfefb50310d5b2deb062cf0f"}, - {file = "coverage-5.3-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:c851b35fc078389bc16b915a0a7c1d5923e12e2c5aeec58c52f4aa8085ac8237"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:aac1ba0a253e17889550ddb1b60a2063f7474155465577caa2a3b131224cfd54"}, - {file = "coverage-5.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2b31f46bf7b31e6aa690d4c7a3d51bb262438c6dcb0d528adde446531d0d3bb7"}, - {file = "coverage-5.3-cp36-cp36m-win32.whl", hash = "sha256:c5f17ad25d2c1286436761b462e22b5020d83316f8e8fcb5deb2b3151f8f1d3a"}, - {file = "coverage-5.3-cp36-cp36m-win_amd64.whl", hash = 
"sha256:aef72eae10b5e3116bac6957de1df4d75909fc76d1499a53fb6387434b6bcd8d"}, - {file = "coverage-5.3-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:e8caf961e1b1a945db76f1b5fa9c91498d15f545ac0ababbe575cfab185d3bd8"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:29a6272fec10623fcbe158fdf9abc7a5fa032048ac1d8631f14b50fbfc10d17f"}, - {file = "coverage-5.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:2d43af2be93ffbad25dd959899b5b809618a496926146ce98ee0b23683f8c51c"}, - {file = "coverage-5.3-cp37-cp37m-win32.whl", hash = "sha256:c3888a051226e676e383de03bf49eb633cd39fc829516e5334e69b8d81aae751"}, - {file = "coverage-5.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9669179786254a2e7e57f0ecf224e978471491d660aaca833f845b72a2df3709"}, - {file = "coverage-5.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0203acd33d2298e19b57451ebb0bed0ab0c602e5cf5a818591b4918b1f97d516"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:582ddfbe712025448206a5bc45855d16c2e491c2dd102ee9a2841418ac1c629f"}, - {file = "coverage-5.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:0f313707cdecd5cd3e217fc68c78a960b616604b559e9ea60cc16795c4304259"}, - {file = "coverage-5.3-cp38-cp38-win32.whl", hash = "sha256:78e93cc3571fd928a39c0b26767c986188a4118edc67bc0695bc7a284da22e82"}, - {file = "coverage-5.3-cp38-cp38-win_amd64.whl", hash = "sha256:8f264ba2701b8c9f815b272ad568d555ef98dfe1576802ab3149c3629a9f2221"}, - {file = "coverage-5.3-cp39-cp39-macosx_10_13_x86_64.whl", hash = "sha256:50691e744714856f03a86df3e2bff847c2acede4c191f9a1da38f088df342978"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9361de40701666b034c59ad9e317bae95c973b9ff92513dd0eced11c6adf2e21"}, - {file = "coverage-5.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:c1b78fb9700fc961f53386ad2fd86d87091e06ede5d118b8a50dea285a071c24"}, - {file = "coverage-5.3-cp39-cp39-win32.whl", hash = 
"sha256:cb7df71de0af56000115eafd000b867d1261f786b5eebd88a0ca6360cccfaca7"}, - {file = "coverage-5.3-cp39-cp39-win_amd64.whl", hash = "sha256:47a11bdbd8ada9b7ee628596f9d97fbd3851bd9999d398e9436bd67376dbece7"}, - {file = "coverage-5.3.tar.gz", hash = "sha256:280baa8ec489c4f542f8940f9c4c2181f0306a8ee1a54eceba071a449fb870a0"}, + {file = "coverage-5.4-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6d9c88b787638a451f41f97446a1c9fd416e669b4d9717ae4615bd29de1ac135"}, + {file = "coverage-5.4-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:66a5aae8233d766a877c5ef293ec5ab9520929c2578fd2069308a98b7374ea8c"}, + {file = "coverage-5.4-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:9754a5c265f991317de2bac0c70a746efc2b695cf4d49f5d2cddeac36544fb44"}, + {file = "coverage-5.4-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:fbb17c0d0822684b7d6c09915677a32319f16ff1115df5ec05bdcaaee40b35f3"}, + {file = "coverage-5.4-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:b7f7421841f8db443855d2854e25914a79a1ff48ae92f70d0a5c2f8907ab98c9"}, + {file = "coverage-5.4-cp27-cp27m-win32.whl", hash = "sha256:4a780807e80479f281d47ee4af2eb2df3e4ccf4723484f77da0bb49d027e40a1"}, + {file = "coverage-5.4-cp27-cp27m-win_amd64.whl", hash = "sha256:87c4b38288f71acd2106f5d94f575bc2136ea2887fdb5dfe18003c881fa6b370"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c6809ebcbf6c1049002b9ac09c127ae43929042ec1f1dbd8bb1615f7cd9f70a0"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:ba7ca81b6d60a9f7a0b4b4e175dcc38e8fef4992673d9d6e6879fd6de00dd9b8"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:89fc12c6371bf963809abc46cced4a01ca4f99cba17be5e7d416ed7ef1245d19"}, + {file = "coverage-5.4-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a8eb7785bd23565b542b01fb39115a975fefb4a82f23d407503eee2c0106247"}, + {file = "coverage-5.4-cp35-cp35m-macosx_10_9_x86_64.whl", hash = 
"sha256:7e40d3f8eb472c1509b12ac2a7e24158ec352fc8567b77ab02c0db053927e339"}, + {file = "coverage-5.4-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:1ccae21a076d3d5f471700f6d30eb486da1626c380b23c70ae32ab823e453337"}, + {file = "coverage-5.4-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:755c56beeacac6a24c8e1074f89f34f4373abce8b662470d3aa719ae304931f3"}, + {file = "coverage-5.4-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:322549b880b2d746a7672bf6ff9ed3f895e9c9f108b714e7360292aa5c5d7cf4"}, + {file = "coverage-5.4-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:60a3307a84ec60578accd35d7f0c71a3a971430ed7eca6567399d2b50ef37b8c"}, + {file = "coverage-5.4-cp35-cp35m-win32.whl", hash = "sha256:1375bb8b88cb050a2d4e0da901001347a44302aeadb8ceb4b6e5aa373b8ea68f"}, + {file = "coverage-5.4-cp35-cp35m-win_amd64.whl", hash = "sha256:16baa799ec09cc0dcb43a10680573269d407c159325972dd7114ee7649e56c66"}, + {file = "coverage-5.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:2f2cf7a42d4b7654c9a67b9d091ec24374f7c58794858bff632a2039cb15984d"}, + {file = "coverage-5.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b62046592b44263fa7570f1117d372ae3f310222af1fc1407416f037fb3af21b"}, + {file = "coverage-5.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:812eaf4939ef2284d29653bcfee9665f11f013724f07258928f849a2306ea9f9"}, + {file = "coverage-5.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:859f0add98707b182b4867359e12bde806b82483fb12a9ae868a77880fc3b7af"}, + {file = "coverage-5.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:04b14e45d6a8e159c9767ae57ecb34563ad93440fc1b26516a89ceb5b33c1ad5"}, + {file = "coverage-5.4-cp36-cp36m-win32.whl", hash = "sha256:ebfa374067af240d079ef97b8064478f3bf71038b78b017eb6ec93ede1b6bcec"}, + {file = "coverage-5.4-cp36-cp36m-win_amd64.whl", hash = "sha256:84df004223fd0550d0ea7a37882e5c889f3c6d45535c639ce9802293b39cd5c9"}, + {file = "coverage-5.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = 
"sha256:1b811662ecf72eb2d08872731636aee6559cae21862c36f74703be727b45df90"}, + {file = "coverage-5.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6b588b5cf51dc0fd1c9e19f622457cc74b7d26fe295432e434525f1c0fae02bc"}, + {file = "coverage-5.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3fe50f1cac369b02d34ad904dfe0771acc483f82a1b54c5e93632916ba847b37"}, + {file = "coverage-5.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:32ab83016c24c5cf3db2943286b85b0a172dae08c58d0f53875235219b676409"}, + {file = "coverage-5.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:68fb816a5dd901c6aff352ce49e2a0ffadacdf9b6fae282a69e7a16a02dad5fb"}, + {file = "coverage-5.4-cp37-cp37m-win32.whl", hash = "sha256:a636160680c6e526b84f85d304e2f0bb4e94f8284dd765a1911de9a40450b10a"}, + {file = "coverage-5.4-cp37-cp37m-win_amd64.whl", hash = "sha256:bb32ca14b4d04e172c541c69eec5f385f9a075b38fb22d765d8b0ce3af3a0c22"}, + {file = "coverage-5.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4d7165a4e8f41eca6b990c12ee7f44fef3932fac48ca32cecb3a1b2223c21f"}, + {file = "coverage-5.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a565f48c4aae72d1d3d3f8e8fb7218f5609c964e9c6f68604608e5958b9c60c3"}, + {file = "coverage-5.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:fff1f3a586246110f34dc762098b5afd2de88de507559e63553d7da643053786"}, + {file = "coverage-5.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:a839e25f07e428a87d17d857d9935dd743130e77ff46524abb992b962eb2076c"}, + {file = "coverage-5.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:6625e52b6f346a283c3d563d1fd8bae8956daafc64bb5bbd2b8f8a07608e3994"}, + {file = "coverage-5.4-cp38-cp38-win32.whl", hash = "sha256:5bee3970617b3d74759b2d2df2f6a327d372f9732f9ccbf03fa591b5f7581e39"}, + {file = "coverage-5.4-cp38-cp38-win_amd64.whl", hash = "sha256:03ed2a641e412e42cc35c244508cf186015c217f0e4d496bf6d7078ebe837ae7"}, + {file = "coverage-5.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:14a9f1887591684fb59fdba8feef7123a0da2424b0652e1b58dd5b9a7bb1188c"}, + {file = "coverage-5.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9564ac7eb1652c3701ac691ca72934dd3009997c81266807aef924012df2f4b3"}, + {file = "coverage-5.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0f48fc7dc82ee14aeaedb986e175a429d24129b7eada1b7e94a864e4f0644dde"}, + {file = "coverage-5.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:107d327071061fd4f4a2587d14c389a27e4e5c93c7cba5f1f59987181903902f"}, + {file = "coverage-5.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:0cdde51bfcf6b6bd862ee9be324521ec619b20590787d1655d005c3fb175005f"}, + {file = "coverage-5.4-cp39-cp39-win32.whl", hash = "sha256:c67734cff78383a1f23ceba3b3239c7deefc62ac2b05fa6a47bcd565771e5880"}, + {file = "coverage-5.4-cp39-cp39-win_amd64.whl", hash = "sha256:c669b440ce46ae3abe9b2d44a913b5fd86bb19eb14a8701e88e3918902ecd345"}, + {file = "coverage-5.4-pp36-none-any.whl", hash = "sha256:c0ff1c1b4d13e2240821ef23c1efb1f009207cb3f56e16986f713c2b0e7cd37f"}, + {file = "coverage-5.4-pp37-none-any.whl", hash = "sha256:cd601187476c6bed26a0398353212684c427e10a903aeafa6da40c63309d438b"}, + {file = "coverage-5.4.tar.gz", hash = "sha256:6d2e262e5e8da6fa56e774fb8e2643417351427604c2b177f8e8c5f75fc928ca"}, ] cycler = [ {file = "cycler-0.10.0-py2.py3-none-any.whl", hash = "sha256:1d8a5ae1ff6c5cf9b93e8811e581232ad8920aeec647c37316ceac982b08cb2d"}, {file = "cycler-0.10.0.tar.gz", hash = "sha256:cd7b2d1018258d7247a71425e9f26463dfb444d411c39569972f4ce586b0c9d8"}, ] darglint = [ - {file = "darglint-1.5.8-py3-none-any.whl", hash = "sha256:2e1012945a09d19a15cc87f9d15e7b14c18473ec9cf7769c641951b348de1353"}, - {file = "darglint-1.5.8.tar.gz", hash = "sha256:529f4969029d5ff5f74bfec48adc14b6f003409141f722b6cc4b787dddc8a4dd"}, + {file = "darglint-1.6.0-py3-none-any.whl", hash = "sha256:c80849fd83a06d0bec3c93240360214cf56979691c6d18e2abb293aa404bf443"}, + {file = "darglint-1.6.0.tar.gz", hash = 
"sha256:9c91a1dd93f6cdbdd626ecea8ae2849fafe6588bb6d6dbbf7066f9ae69fca771"}, ] decorator = [ {file = "decorator-4.4.2-py2.py3-none-any.whl", hash = "sha256:41fa54c2a0cc4ba648be4fd43cff00aedf5b9465c9bf18d64325bc225f08f760"}, @@ -2230,12 +2252,12 @@ eradicate = [ {file = "eradicate-1.0.tar.gz", hash = "sha256:4ffda82aae6fd49dfffa777a857cb758d77502a1f2e0f54c9ac5155a39d2d01a"}, ] factory-boy = [ - {file = "factory_boy-3.1.0-py2.py3-none-any.whl", hash = "sha256:d8626622550c8ba31392f9e19fdbcef9f139cf1ad643c5923f20490a7b3e2e3d"}, - {file = "factory_boy-3.1.0.tar.gz", hash = "sha256:ded73e49135c24bd4d3f45bf1eb168f8d290090f5cf4566b8df3698317dc9c08"}, + {file = "factory_boy-3.2.0-py2.py3-none-any.whl", hash = "sha256:1d3db4b44b8c8c54cdd8b83ae4bdb9aeb121e464400035f1f03ae0e1eade56a4"}, + {file = "factory_boy-3.2.0.tar.gz", hash = "sha256:401cc00ff339a022f84d64a4339503d1689e8263a4478d876e58a3295b155c5b"}, ] faker = [ - {file = "Faker-5.0.1-py3-none-any.whl", hash = "sha256:1fcb415562ee6e2395b041e85fa6901d4708d30b84d54015226fa754ed0822c3"}, - {file = "Faker-5.0.1.tar.gz", hash = "sha256:e8beccb398ee9b8cc1a91d9295121d66512b6753b4846eb1e7370545d46b3311"}, + {file = "Faker-5.8.0-py3-none-any.whl", hash = "sha256:0783729c61501d52efea2967aff6e6fcb8370f0f6b5a558f2a81233642ae529a"}, + {file = "Faker-5.8.0.tar.gz", hash = "sha256:6b2995ffff6c2b02bc5daad96f8c24c021e5bd491d9d53d31bcbd66f348181d4"}, ] filelock = [ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, @@ -2246,8 +2268,8 @@ flake8 = [ {file = "flake8-3.8.4.tar.gz", hash = "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b"}, ] flake8-annotations = [ - {file = "flake8-annotations-2.4.1.tar.gz", hash = "sha256:0bcebb0792f1f96d617ded674dca7bf64181870bfe5dace353a1483551f8e5f1"}, - {file = "flake8_annotations-2.4.1-py3-none-any.whl", hash = "sha256:bebd11a850f6987a943ce8cdff4159767e0f5f89b3c88aca64680c2175ee02df"}, + {file = 
"flake8-annotations-2.5.0.tar.gz", hash = "sha256:e17947a48a5b9f632fe0c72682fc797c385e451048e7dfb20139f448a074cb3e"}, + {file = "flake8_annotations-2.5.0-py3-none-any.whl", hash = "sha256:3a377140556aecf11fa9f3bb18c10db01f5ea56dc79a730e2ec9b4f1f49e2055"}, ] flake8-bandit = [ {file = "flake8_bandit-2.1.2.tar.gz", hash = "sha256:687fc8da2e4a239b206af2e54a90093572a60d0954f3054e23690739b0b0de3b"}, @@ -2268,8 +2290,8 @@ flake8-commas = [ {file = "flake8_commas-2.0.0-py2.py3-none-any.whl", hash = "sha256:ee2141a3495ef9789a3894ed8802d03eff1eaaf98ce6d8653a7c573ef101935e"}, ] flake8-comprehensions = [ - {file = "flake8-comprehensions-3.3.0.tar.gz", hash = "sha256:355ef47288523cad7977cb9c1bc81b71c82b7091e425cd9fbcd7e5c19a613677"}, - {file = "flake8_comprehensions-3.3.0-py3-none-any.whl", hash = "sha256:c1dd6d8a00e9722619a5c5e0e6c5747f5cf23c089032c86eaf614c14a2e40adb"}, + {file = "flake8-comprehensions-3.3.1.tar.gz", hash = "sha256:e734bf03806bb562886d9bf635d23a65a1a995c251b67d7e007a7b608af9bd22"}, + {file = "flake8_comprehensions-3.3.1-py3-none-any.whl", hash = "sha256:6d80dfafda0d85633f88ea5bc7de949485f71f1e28db7af7719563fe5f62dcb1"}, ] flake8-debugger = [ {file = "flake8-debugger-3.2.1.tar.gz", hash = "sha256:712d7c1ff69ddf3f0130e94cc88c2519e720760bce45e8c330bfdcb61ab4090d"}, @@ -2327,12 +2349,12 @@ gitdb = [ {file = "gitdb-4.0.5.tar.gz", hash = "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9"}, ] gitpython = [ - {file = "GitPython-3.1.11-py3-none-any.whl", hash = "sha256:6eea89b655917b500437e9668e4a12eabdcf00229a0df1762aabd692ef9b746b"}, - {file = "GitPython-3.1.11.tar.gz", hash = "sha256:befa4d101f91bad1b632df4308ec64555db684c360bd7d2130b4807d49ce86b8"}, + {file = "GitPython-3.1.12-py3-none-any.whl", hash = "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5"}, + {file = "GitPython-3.1.12.tar.gz", hash = "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac"}, ] identify = [ - {file = 
"identify-1.5.10-py2.py3-none-any.whl", hash = "sha256:cc86e6a9a390879dcc2976cef169dd9cc48843ed70b7380f321d1b118163c60e"}, - {file = "identify-1.5.10.tar.gz", hash = "sha256:943cd299ac7f5715fcb3f684e2fc1594c1e0f22a90d15398e5888143bd4144b5"}, + {file = "identify-1.5.13-py2.py3-none-any.whl", hash = "sha256:9dfb63a2e871b807e3ba62f029813552a24b5289504f5b071dea9b041aee9fe4"}, + {file = "identify-1.5.13.tar.gz", hash = "sha256:70b638cf4743f33042bebb3b51e25261a0a10e80f978739f17e7fd4837664a66"}, ] idna = [ {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, @@ -2347,12 +2369,12 @@ iniconfig = [ {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] ipykernel = [ - {file = "ipykernel-5.4.2-py3-none-any.whl", hash = "sha256:63b4b96c513e1138874934e3e783a8e5e13c02b9036e37107bfe042ac8955005"}, - {file = "ipykernel-5.4.2.tar.gz", hash = "sha256:e20ceb7e52cb4d250452e1230be76e0b2323f33bd46c6b2bc7abb6601740e182"}, + {file = "ipykernel-5.4.3-py3-none-any.whl", hash = "sha256:4ed205700001a83b5832d4821c46a5733f1bf4b1c55744314ae3c756be6b6095"}, + {file = "ipykernel-5.4.3.tar.gz", hash = "sha256:697103d218e9a8828025af7986e033c89e0b36e2b6eb84a5bda4739b9a27f3cb"}, ] ipython = [ - {file = "ipython-7.19.0-py3-none-any.whl", hash = "sha256:c987e8178ced651532b3b1ff9965925bfd445c279239697052561a9ab806d28f"}, - {file = "ipython-7.19.0.tar.gz", hash = "sha256:cbb2ef3d5961d44e6a963b9817d4ea4e1fa2eb589c371a470fed14d8d40cbd6a"}, + {file = "ipython-7.20.0-py3-none-any.whl", hash = "sha256:1918dea4bfdc5d1a830fcfce9a710d1d809cbed123e85eab0539259cb0f56640"}, + {file = "ipython-7.20.0.tar.gz", hash = "sha256:1923af00820a8cf58e91d56b89efc59780a6e81363b94464a0f17c039dffff9e"}, ] ipython-genutils = [ {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, @@ -2363,12 +2385,12 @@ isort = [ 
{file = "isort-4.3.21.tar.gz", hash = "sha256:54da7e92468955c4fceacd0c86bd0ec997b0e1ee80d97f67c35a78b719dccab1"}, ] jedi = [ - {file = "jedi-0.17.2-py2.py3-none-any.whl", hash = "sha256:98cc583fa0f2f8304968199b01b6b4b94f469a1f4a74c1560506ca2a211378b5"}, - {file = "jedi-0.17.2.tar.gz", hash = "sha256:86ed7d9b750603e4ba582ea8edc678657fb4007894a12bcf6f4bb97892f31d20"}, + {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, + {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, ] jinja2 = [ - {file = "Jinja2-2.11.2-py2.py3-none-any.whl", hash = "sha256:f0a4641d3cf955324a89c04f3d94663aa4d638abe8f733ecd3582848e1c37035"}, - {file = "Jinja2-2.11.2.tar.gz", hash = "sha256:89aab215427ef59c34ad58735269eb58b1a5808103067f7bb9d5836c651b3bb0"}, + {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, + {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, ] json5 = [ {file = "json5-0.9.5-py2.py3-none-any.whl", hash = "sha256:af1a1b9a2850c7f62c23fde18be4749b3599fd302f494eebf957e2ada6b9e42c"}, @@ -2379,12 +2401,12 @@ jsonschema = [ {file = "jsonschema-3.2.0.tar.gz", hash = "sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"}, ] jupyter-client = [ - {file = "jupyter_client-6.1.7-py3-none-any.whl", hash = "sha256:c958d24d6eacb975c1acebb68ac9077da61b5f5c040f22f6849928ad7393b950"}, - {file = "jupyter_client-6.1.7.tar.gz", hash = "sha256:49e390b36fe4b4226724704ea28d9fb903f1a3601b6882ce3105221cd09377a1"}, + {file = "jupyter_client-6.1.11-py3-none-any.whl", hash = "sha256:5eaaa41df449167ebba5e1cf6ca9b31f7fd4f71625069836e2e4fee07fe3cb13"}, + {file = "jupyter_client-6.1.11.tar.gz", hash = "sha256:649ca3aca1e28f27d73ef15868a7c7f10d6e70f761514582accec3ca6bb13085"}, ] jupyter-core = [ - {file = 
"jupyter_core-4.7.0-py3-none-any.whl", hash = "sha256:0a451c9b295e4db772bdd8d06f2f1eb31caeec0e81fbb77ba37d4a3024e3b315"}, - {file = "jupyter_core-4.7.0.tar.gz", hash = "sha256:aa1f9496ab3abe72da4efe0daab0cb2233997914581f9a071e07498c6add8ed3"}, + {file = "jupyter_core-4.7.1-py3-none-any.whl", hash = "sha256:8c6c0cac5c1b563622ad49321d5ec47017bd18b94facb381c6973a0486395f8e"}, + {file = "jupyter_core-4.7.1.tar.gz", hash = "sha256:79025cb3225efcd36847d0840f3fc672c0abd7afd0de83ba8a1d3837619122b4"}, ] jupyterlab = [ {file = "jupyterlab-2.2.9-py3-none-any.whl", hash = "sha256:59af02c26a15ec2d2862a15bc72e41ae304b406a0b0d3f4f705eeb7caf91902b"}, @@ -2433,8 +2455,7 @@ kiwisolver = [ {file = "kiwisolver-1.3.1.tar.gz", hash = "sha256:950a199911a8d94683a6b10321f9345d5a3a8433ec58b217ace979e18f16e248"}, ] mako = [ - {file = "Mako-1.1.3-py2.py3-none-any.whl", hash = "sha256:93729a258e4ff0747c876bd9e20df1b9758028946e976324ccd2d68245c7b6a9"}, - {file = "Mako-1.1.3.tar.gz", hash = "sha256:8195c8c1400ceb53496064314c6736719c6f25e7479cd24c77be3d9361cddc27"}, + {file = "Mako-1.1.4.tar.gz", hash = "sha256:17831f0b7087c313c0ffae2bcbbd3c1d5ba9eeac9c38f2eb7b50e8c99fe9d5ab"}, ] markupsafe = [ {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, @@ -2472,31 +2493,31 @@ markupsafe = [ {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, ] matplotlib = [ - {file = "matplotlib-3.3.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:b2a5e1f637a92bb6f3526cc54cc8af0401112e81ce5cba6368a1b7908f9e18bc"}, - {file = "matplotlib-3.3.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c586ac1d64432f92857c3cf4478cfb0ece1ae18b740593f8a39f2f0b27c7fda5"}, - {file = "matplotlib-3.3.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9b03722c89a43a61d4d148acfc89ec5bb54cd0fd1539df25b10eb9c5fa6c393a"}, - {file = "matplotlib-3.3.3-cp36-cp36m-win32.whl", hash = 
"sha256:2c2c5041608cb75c39cbd0ed05256f8a563e144234a524c59d091abbfa7a868f"}, - {file = "matplotlib-3.3.3-cp36-cp36m-win_amd64.whl", hash = "sha256:c092fc4673260b1446b8578015321081d5db73b94533fe4bf9b69f44e948d174"}, - {file = "matplotlib-3.3.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:27c9393fada62bd0ad7c730562a0fecbd3d5aaa8d9ed80ba7d3ebb8abc4f0453"}, - {file = "matplotlib-3.3.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b8ba2a1dbb4660cb469fe8e1febb5119506059e675180c51396e1723ff9b79d9"}, - {file = "matplotlib-3.3.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0caa687fce6174fef9b27d45f8cc57cbc572e04e98c81db8e628b12b563d59a2"}, - {file = "matplotlib-3.3.3-cp37-cp37m-win32.whl", hash = "sha256:b7b09c61a91b742cb5460b72efd1fe26ef83c1c704f666e0af0df156b046aada"}, - {file = "matplotlib-3.3.3-cp37-cp37m-win_amd64.whl", hash = "sha256:6ffd2d80d76df2e5f9f0c0140b5af97e3b87dd29852dcdb103ec177d853ec06b"}, - {file = "matplotlib-3.3.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:5111d6d47a0f5b8f3e10af7a79d5e7eb7e73a22825391834734274c4f312a8a0"}, - {file = "matplotlib-3.3.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:a4fe54eab2c7129add75154823e6543b10261f9b65b2abe692d68743a4999f8c"}, - {file = "matplotlib-3.3.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:83e6c895d93fdf93eeff1a21ee96778ba65ef258e5d284160f7c628fee40c38f"}, - {file = "matplotlib-3.3.3-cp38-cp38-win32.whl", hash = "sha256:b26c472847911f5a7eb49e1c888c31c77c4ddf8023c1545e0e8e0367ba74fb15"}, - {file = "matplotlib-3.3.3-cp38-cp38-win_amd64.whl", hash = "sha256:09225edca87a79815822eb7d3be63a83ebd4d9d98d5aa3a15a94f4eee2435954"}, - {file = "matplotlib-3.3.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eb6b6700ea454bb88333d98601e74928e06f9669c1ea231b4c4c666c1d7701b4"}, - {file = "matplotlib-3.3.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2d31aff0c8184b05006ad756b9a4dc2a0805e94d28f3abc3187e881b6673b302"}, - {file = "matplotlib-3.3.3-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:d082f77b4ed876ae94a9373f0db96bf8768a7cca6c58fc3038f94e30ffde1880"}, - {file = "matplotlib-3.3.3-cp39-cp39-win32.whl", hash = "sha256:e71cdd402047e657c1662073e9361106c6981e9621ab8c249388dfc3ec1de07b"}, - {file = "matplotlib-3.3.3-cp39-cp39-win_amd64.whl", hash = "sha256:756ee498b9ba35460e4cbbd73f09018e906daa8537fff61da5b5bf8d5e9de5c7"}, - {file = "matplotlib-3.3.3-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ad44f2c74c50567c694ee91c6fa16d67e7c8af6f22c656b80469ad927688457"}, - {file = "matplotlib-3.3.3-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:3a4c3e9be63adf8e9b305aa58fb3ec40ecc61fd0f8fd3328ce55bc30e7a2aeb0"}, - {file = "matplotlib-3.3.3-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:746897fbd72bd462b888c74ed35d812ca76006b04f717cd44698cdfc99aca70d"}, - {file = "matplotlib-3.3.3-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:5ed3d3342698c2b1f3651f8ea6c099b0f196d16ee00e33dc3a6fee8cb01d530a"}, - {file = "matplotlib-3.3.3.tar.gz", hash = "sha256:b1b60c6476c4cfe9e5cf8ab0d3127476fd3d5f05de0f343a452badaad0e4bdec"}, + {file = "matplotlib-3.3.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:672960dd114e342b7c610bf32fb99d14227f29919894388b41553217457ba7ef"}, + {file = "matplotlib-3.3.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:7c155437ae4fd366e2700e2716564d1787700687443de46bcb895fe0f84b761d"}, + {file = "matplotlib-3.3.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:a17f0a10604fac7627ec82820439e7db611722e80c408a726cd00d8c974c2fb3"}, + {file = "matplotlib-3.3.4-cp36-cp36m-win32.whl", hash = "sha256:215e2a30a2090221a9481db58b770ce56b8ef46f13224ae33afe221b14b24dc1"}, + {file = "matplotlib-3.3.4-cp36-cp36m-win_amd64.whl", hash = "sha256:348e6032f666ffd151b323342f9278b16b95d4a75dfacae84a11d2829a7816ae"}, + {file = "matplotlib-3.3.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:94bdd1d55c20e764d8aea9d471d2ae7a7b2c84445e0fa463f02e20f9730783e1"}, + {file = "matplotlib-3.3.4-cp37-cp37m-manylinux1_i686.whl", 
hash = "sha256:a1acb72f095f1d58ecc2538ed1b8bca0b57df313b13db36ed34b8cdf1868e674"}, + {file = "matplotlib-3.3.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:46b1a60a04e6d884f0250d5cc8dc7bd21a9a96c584a7acdaab44698a44710bab"}, + {file = "matplotlib-3.3.4-cp37-cp37m-win32.whl", hash = "sha256:ed4a9e6dcacba56b17a0a9ac22ae2c72a35b7f0ef0693aa68574f0b2df607a89"}, + {file = "matplotlib-3.3.4-cp37-cp37m-win_amd64.whl", hash = "sha256:c24c05f645aef776e8b8931cb81e0f1632d229b42b6d216e30836e2e145a2b40"}, + {file = "matplotlib-3.3.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7310e353a4a35477c7f032409966920197d7df3e757c7624fd842f3eeb307d3d"}, + {file = "matplotlib-3.3.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:451cc89cb33d6652c509fc6b588dc51c41d7246afdcc29b8624e256b7663ed1f"}, + {file = "matplotlib-3.3.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:3d2eb9c1cc254d0ffa90bc96fde4b6005d09c2228f99dfd493a4219c1af99644"}, + {file = "matplotlib-3.3.4-cp38-cp38-win32.whl", hash = "sha256:e15fa23d844d54e7b3b7243afd53b7567ee71c721f592deb0727ee85e668f96a"}, + {file = "matplotlib-3.3.4-cp38-cp38-win_amd64.whl", hash = "sha256:1de0bb6cbfe460725f0e97b88daa8643bcf9571c18ba90bb8e41432aaeca91d6"}, + {file = "matplotlib-3.3.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f44149a0ef5b4991aaef12a93b8e8d66d6412e762745fea1faa61d98524e0ba9"}, + {file = "matplotlib-3.3.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:746a1df55749629e26af7f977ea426817ca9370ad1569436608dc48d1069b87c"}, + {file = "matplotlib-3.3.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:5f571b92a536206f7958f7cb2d367ff6c9a1fa8229dc35020006e4cdd1ca0acd"}, + {file = "matplotlib-3.3.4-cp39-cp39-win32.whl", hash = "sha256:9265ae0fb35e29f9b8cc86c2ab0a2e3dcddc4dd9de4b85bf26c0f63fe5c1c2ca"}, + {file = "matplotlib-3.3.4-cp39-cp39-win_amd64.whl", hash = "sha256:9a79e5dd7bb797aa611048f5b70588b23c5be05b63eefd8a0d152ac77c4243db"}, + {file = "matplotlib-3.3.4-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:1e850163579a8936eede29fad41e202b25923a0a8d5ffd08ce50fc0a97dcdc93"}, + {file = "matplotlib-3.3.4-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:d738acfdfb65da34c91acbdb56abed46803db39af259b7f194dc96920360dbe4"}, + {file = "matplotlib-3.3.4-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:aa49571d8030ad0b9ac39708ee77bd2a22f87815e12bdee52ecaffece9313ed8"}, + {file = "matplotlib-3.3.4-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:cf3a7e54eff792f0815dbbe9b85df2f13d739289c93d346925554f71d484be78"}, + {file = "matplotlib-3.3.4.tar.gz", hash = "sha256:3e477db76c22929e4c6876c44f88d790aacdf3c3f8f3a90cb1975c0bf37825b0"}, ] mccabe = [ {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, @@ -2538,97 +2559,80 @@ nbconvert = [ {file = "nbconvert-6.0.7.tar.gz", hash = "sha256:cbbc13a86dfbd4d1b5dee106539de0795b4db156c894c2c5dc382062bbc29002"}, ] nbformat = [ - {file = "nbformat-5.0.8-py3-none-any.whl", hash = "sha256:aa9450c16d29286dc69b92ea4913c1bffe86488f90184445996ccc03a2f60382"}, - {file = "nbformat-5.0.8.tar.gz", hash = "sha256:f545b22138865bfbcc6b1ffe89ed5a2b8e2dc5d4fe876f2ca60d8e6f702a30f8"}, + {file = "nbformat-5.1.2-py3-none-any.whl", hash = "sha256:3949fdc8f5fa0b1afca16fb307546e78494fa7a7bceff880df8168eafda0e7ac"}, + {file = "nbformat-5.1.2.tar.gz", hash = "sha256:1d223e64a18bfa7cdf2db2e9ba8a818312fc2a0701d2e910b58df66809385a56"}, ] nest-asyncio = [ - {file = "nest_asyncio-1.4.3-py3-none-any.whl", hash = "sha256:dbe032f3e9ff7f120e76be22bf6e7958e867aed1743e6894b8a9585fe8495cc9"}, - {file = "nest_asyncio-1.4.3.tar.gz", hash = "sha256:eaa09ef1353ebefae19162ad423eef7a12166bcc63866f8bff8f3635353cd9fa"}, + {file = "nest_asyncio-1.5.1-py3-none-any.whl", hash = "sha256:76d6e972265063fe92a90b9cc4fb82616e07d586b346ed9d2c89a4187acea39c"}, + {file = "nest_asyncio-1.5.1.tar.gz", hash = "sha256:afc5a1c515210a23c461932765691ad39e8eba6551c055ac8d5546e69250d0aa"}, ] nodeenv 
= [ {file = "nodeenv-1.5.0-py2.py3-none-any.whl", hash = "sha256:5304d424c529c997bc888453aeaa6362d242b6b4631e90f3d4bf1b290f1c84a9"}, {file = "nodeenv-1.5.0.tar.gz", hash = "sha256:ab45090ae383b716c4ef89e690c41ff8c2b257b85b309f01f3654df3d084bd7c"}, ] notebook = [ - {file = "notebook-6.1.5-py3-none-any.whl", hash = "sha256:508cf9dad7cdb3188f1aa27017dc78179029dfe83814fc505329f689bc2ab50f"}, - {file = "notebook-6.1.5.tar.gz", hash = "sha256:3db37ae834c5f3b6378381229d0e5dfcbfb558d08c8ce646b1ad355147f5e91d"}, + {file = "notebook-6.2.0-py3-none-any.whl", hash = "sha256:25ad93c982b623441b491e693ef400598d1a46cdf11b8c9c0b3be6c61ebbb6cd"}, + {file = "notebook-6.2.0.tar.gz", hash = "sha256:0464b28e18e7a06cec37e6177546c2322739be07962dd13bf712bcb88361f013"}, ] nox = [ - {file = "nox-2020.8.22-py3-none-any.whl", hash = "sha256:55f8cab16bcfaaea08b141c83bf2b7c779e943518d0de6cd9c38cd8da95d11ea"}, - {file = "nox-2020.8.22.tar.gz", hash = "sha256:efa5adcf1134012f96bcd0a496ccebd4c9e9da53a831888a2a779462440eebcf"}, + {file = "nox-2020.12.31-py3-none-any.whl", hash = "sha256:f179d6990f7a0a9cebad01b9ecea34556518b8d3340dfcafdc1d85f2c1a37ea0"}, + {file = "nox-2020.12.31.tar.gz", hash = "sha256:58a662070767ed4786beb46ce3a789fca6f1e689ed3ac15c73c4d0094e4f9dc4"}, ] numpy = [ - {file = "numpy-1.19.4-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e9b30d4bd69498fc0c3fe9db5f62fffbb06b8eb9321f92cc970f2969be5e3949"}, - {file = "numpy-1.19.4-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:fedbd128668ead37f33917820b704784aff695e0019309ad446a6d0b065b57e4"}, - {file = "numpy-1.19.4-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8ece138c3a16db8c1ad38f52eb32be6086cc72f403150a79336eb2045723a1ad"}, - {file = "numpy-1.19.4-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:64324f64f90a9e4ef732be0928be853eee378fd6a01be21a0a8469c4f2682c83"}, - {file = "numpy-1.19.4-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:ad6f2ff5b1989a4899bf89800a671d71b1612e5ff40866d1f4d8bcf48d4e5764"}, - {file = 
"numpy-1.19.4-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d6c7bb82883680e168b55b49c70af29b84b84abb161cbac2800e8fcb6f2109b6"}, - {file = "numpy-1.19.4-cp36-cp36m-win32.whl", hash = "sha256:13d166f77d6dc02c0a73c1101dd87fdf01339febec1030bd810dcd53fff3b0f1"}, - {file = "numpy-1.19.4-cp36-cp36m-win_amd64.whl", hash = "sha256:448ebb1b3bf64c0267d6b09a7cba26b5ae61b6d2dbabff7c91b660c7eccf2bdb"}, - {file = "numpy-1.19.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:27d3f3b9e3406579a8af3a9f262f5339005dd25e0ecf3cf1559ff8a49ed5cbf2"}, - {file = "numpy-1.19.4-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:16c1b388cc31a9baa06d91a19366fb99ddbe1c7b205293ed072211ee5bac1ed2"}, - {file = "numpy-1.19.4-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e5b6ed0f0b42317050c88022349d994fe72bfe35f5908617512cd8c8ef9da2a9"}, - {file = "numpy-1.19.4-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:18bed2bcb39e3f758296584337966e68d2d5ba6aab7e038688ad53c8f889f757"}, - {file = "numpy-1.19.4-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:fe45becb4c2f72a0907c1d0246ea6449fe7a9e2293bb0e11c4e9a32bb0930a15"}, - {file = "numpy-1.19.4-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:6d7593a705d662be5bfe24111af14763016765f43cb6923ed86223f965f52387"}, - {file = "numpy-1.19.4-cp37-cp37m-win32.whl", hash = "sha256:6ae6c680f3ebf1cf7ad1d7748868b39d9f900836df774c453c11c5440bc15b36"}, - {file = "numpy-1.19.4-cp37-cp37m-win_amd64.whl", hash = "sha256:9eeb7d1d04b117ac0d38719915ae169aa6b61fca227b0b7d198d43728f0c879c"}, - {file = "numpy-1.19.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cb1017eec5257e9ac6209ac172058c430e834d5d2bc21961dceeb79d111e5909"}, - {file = "numpy-1.19.4-cp38-cp38-manylinux1_i686.whl", hash = "sha256:edb01671b3caae1ca00881686003d16c2209e07b7ef8b7639f1867852b948f7c"}, - {file = "numpy-1.19.4-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:f29454410db6ef8126c83bd3c968d143304633d45dc57b51252afbd79d700893"}, - {file = 
"numpy-1.19.4-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:ec149b90019852266fec2341ce1db513b843e496d5a8e8cdb5ced1923a92faab"}, - {file = "numpy-1.19.4-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1aeef46a13e51931c0b1cf8ae1168b4a55ecd282e6688fdb0a948cc5a1d5afb9"}, - {file = "numpy-1.19.4-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:08308c38e44cc926bdfce99498b21eec1f848d24c302519e64203a8da99a97db"}, - {file = "numpy-1.19.4-cp38-cp38-win32.whl", hash = "sha256:5734bdc0342aba9dfc6f04920988140fb41234db42381cf7ccba64169f9fe7ac"}, - {file = "numpy-1.19.4-cp38-cp38-win_amd64.whl", hash = "sha256:09c12096d843b90eafd01ea1b3307e78ddd47a55855ad402b157b6c4862197ce"}, - {file = "numpy-1.19.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e452dc66e08a4ce642a961f134814258a082832c78c90351b75c41ad16f79f63"}, - {file = "numpy-1.19.4-cp39-cp39-manylinux1_i686.whl", hash = "sha256:a5d897c14513590a85774180be713f692df6fa8ecf6483e561a6d47309566f37"}, - {file = "numpy-1.19.4-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:a09f98011236a419ee3f49cedc9ef27d7a1651df07810ae430a6b06576e0b414"}, - {file = "numpy-1.19.4-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:50e86c076611212ca62e5a59f518edafe0c0730f7d9195fec718da1a5c2bb1fc"}, - {file = "numpy-1.19.4-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f0d3929fe88ee1c155129ecd82f981b8856c5d97bcb0d5f23e9b4242e79d1de3"}, - {file = "numpy-1.19.4-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:c42c4b73121caf0ed6cd795512c9c09c52a7287b04d105d112068c1736d7c753"}, - {file = "numpy-1.19.4-cp39-cp39-win32.whl", hash = "sha256:8cac8790a6b1ddf88640a9267ee67b1aee7a57dfa2d2dd33999d080bc8ee3a0f"}, - {file = "numpy-1.19.4-cp39-cp39-win_amd64.whl", hash = "sha256:4377e10b874e653fe96985c05feed2225c912e328c8a26541f7fc600fb9c637b"}, - {file = "numpy-1.19.4-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:2a2740aa9733d2e5b2dfb33639d98a64c3b0f24765fed86b0fd2aec07f6a0a08"}, - {file = "numpy-1.19.4.zip", hash = 
"sha256:141ec3a3300ab89c7f2b0775289954d193cc8edb621ea05f99db9cb181530512"}, + {file = "numpy-1.20.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:89bd70c9ad540febe6c28451ba225eb4e49d27f64728357f512c808002325dfa"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:1264c66129f5ef63187649dd43f1ca59532e8c098723643336a85131c0dcce3f"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e9c5fd330d2fedf06051bafb996252de9b032fcb2ec03eefc9a543e56efa66d4"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:db5e69d08756a2fa75a42b4e433880b6187768fe1bc73d21819def893e5128c6"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:1abc02e30e3efd81a4571e00f8e62bf42e343c76698e0a3e11d9c2b3ee0d77a7"}, + {file = "numpy-1.20.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:5ae765dd29c71a555f8102281f6fb15a3f4dbd35f6e7daf36af9df6d9dd716a5"}, + {file = "numpy-1.20.0-cp37-cp37m-win32.whl", hash = "sha256:b51b9ef0624f4b01b846c981034c10d2e30db33f9f8be71e992f3900741f6f77"}, + {file = "numpy-1.20.0-cp37-cp37m-win_amd64.whl", hash = "sha256:afeee581b50df20ef07b736e62ca612858f1fcdba96651d26ab44e3d567a4e6e"}, + {file = "numpy-1.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2bf0e68c92ef077fe766e53f8937d8ac341bdbca68ec128ae049b7d5c34e3206"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:2445a96fbae23a4109c61be0f0af0f3bc273905dc5687a710850c1dfde0fc994"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:33edfc0eb229f86f539493917b34035054313a11afbed48404aaf9f86bf4b0f6"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:894aaee60043a98b03f0ad992c810f62e3a15f98a701e1c0f58a4f4a0df13429"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:b66a6c15d793eda7cdad986e737775aa31b9306d588c14dd0277d2dda5546150"}, + {file = "numpy-1.20.0-cp38-cp38-manylinux2014_aarch64.whl", hash = 
"sha256:eee454d3aa3955d0c0069a0f265fea47f1e1384c35a110a95efed358eb6e1562"}, + {file = "numpy-1.20.0-cp38-cp38-win32.whl", hash = "sha256:abdfa075e293d73638ece434708aa60b510dc6e70d805f57f481a0f550b25a9e"}, + {file = "numpy-1.20.0-cp38-cp38-win_amd64.whl", hash = "sha256:f1e9424e9aa3834ea27cc12f9c6ea8ace5da18ee60a720bb3a85b2f733f41782"}, + {file = "numpy-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cb257bb0c0a3176c32782a63cfab2eace7eabfa2a3b2dfd85a13700617ccaf28"}, + {file = "numpy-1.20.0-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:cf5d9dcbdbe523fa665c5309cce5f144648d94a7fddbf5a40f8e0d5c9f5b596d"}, + {file = "numpy-1.20.0-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:93c2abea7bb69f47029b84ceac30ab46dfcfdb99b671ad850a333ff794a765e4"}, + {file = "numpy-1.20.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d28a54afcf46f1f9ebd163e49ad6b49087f22986fefd01a23ca0c1cdda25ca6"}, + {file = "numpy-1.20.0-cp39-cp39-win32.whl", hash = "sha256:d1bc331e1706fd1809a1bc8a31205329e5b30cf5ba50461c624da267e99f6ae6"}, + {file = "numpy-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:e3db646af9f6a145f0c57202f4b55d4a33f975e395e78fb7b394644c17c1a3a6"}, + {file = "numpy-1.20.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:4d592264d2a4f368afbb4288b5ceb646d4cbaf559c0249c096fbb0a149806b90"}, + {file = "numpy-1.20.0.zip", hash = "sha256:3d8233c03f116d068d5365fed4477f2947c7229582dad81e5953088989294cec"}, ] packaging = [ - {file = "packaging-20.8-py2.py3-none-any.whl", hash = "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858"}, - {file = "packaging-20.8.tar.gz", hash = "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093"}, + {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, + {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, ] pandas = [ - {file = 
"pandas-1.1.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:bf23a3b54d128b50f4f9d4675b3c1857a688cc6731a32f931837d72effb2698d"}, - {file = "pandas-1.1.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5a780260afc88268a9d3ac3511d8f494fdcf637eece62fb9eb656a63d53eb7ca"}, - {file = "pandas-1.1.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:b61080750d19a0122469ab59b087380721d6b72a4e7d962e4d7e63e0c4504814"}, - {file = "pandas-1.1.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:0de3ddb414d30798cbf56e642d82cac30a80223ad6fe484d66c0ce01a84d6f2f"}, - {file = "pandas-1.1.5-cp36-cp36m-win32.whl", hash = "sha256:70865f96bb38fec46f7ebd66d4b5cfd0aa6b842073f298d621385ae3898d28b5"}, - {file = "pandas-1.1.5-cp36-cp36m-win_amd64.whl", hash = "sha256:19a2148a1d02791352e9fa637899a78e371a3516ac6da5c4edc718f60cbae648"}, - {file = "pandas-1.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26fa92d3ac743a149a31b21d6f4337b0594b6302ea5575b37af9ca9611e8981a"}, - {file = "pandas-1.1.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c16d59c15d946111d2716856dd5479221c9e4f2f5c7bc2d617f39d870031e086"}, - {file = "pandas-1.1.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:3be7a7a0ca71a2640e81d9276f526bca63505850add10206d0da2e8a0a325dae"}, - {file = "pandas-1.1.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:573fba5b05bf2c69271a32e52399c8de599e4a15ab7cec47d3b9c904125ab788"}, - {file = "pandas-1.1.5-cp37-cp37m-win32.whl", hash = "sha256:21b5a2b033380adbdd36b3116faaf9a4663e375325831dac1b519a44f9e439bb"}, - {file = "pandas-1.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:24c7f8d4aee71bfa6401faeba367dd654f696a77151a8a28bc2013f7ced4af98"}, - {file = "pandas-1.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:2860a97cbb25444ffc0088b457da0a79dc79f9c601238a3e0644312fcc14bf11"}, - {file = "pandas-1.1.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:5008374ebb990dad9ed48b0f5d0038124c73748f5384cc8c46904dace27082d9"}, - {file = "pandas-1.1.5-cp38-cp38-manylinux1_x86_64.whl", 
hash = "sha256:2c2f7c670ea4e60318e4b7e474d56447cf0c7d83b3c2a5405a0dbb2600b9c48e"}, - {file = "pandas-1.1.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0a643bae4283a37732ddfcecab3f62dd082996021b980f580903f4e8e01b3c5b"}, - {file = "pandas-1.1.5-cp38-cp38-win32.whl", hash = "sha256:5447ea7af4005b0daf695a316a423b96374c9c73ffbd4533209c5ddc369e644b"}, - {file = "pandas-1.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:4c62e94d5d49db116bef1bd5c2486723a292d79409fc9abd51adf9e05329101d"}, - {file = "pandas-1.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:731568be71fba1e13cae212c362f3d2ca8932e83cb1b85e3f1b4dd77d019254a"}, - {file = "pandas-1.1.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:c61c043aafb69329d0f961b19faa30b1dab709dd34c9388143fc55680059e55a"}, - {file = "pandas-1.1.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:2b1c6cd28a0dfda75c7b5957363333f01d370936e4c6276b7b8e696dd500582a"}, - {file = "pandas-1.1.5-cp39-cp39-win32.whl", hash = "sha256:c94ff2780a1fd89f190390130d6d36173ca59fcfb3fe0ff596f9a56518191ccb"}, - {file = "pandas-1.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:edda9bacc3843dfbeebaf7a701763e68e741b08fccb889c003b0a52f0ee95782"}, - {file = "pandas-1.1.5.tar.gz", hash = "sha256:f10fc41ee3c75a474d3bdf68d396f10782d013d7f67db99c0efbfd0acb99701b"}, + {file = "pandas-1.2.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:50e6c0a17ef7f831b5565fd0394dbf9bfd5d615ee4dd4bb60a3d8c9d2e872323"}, + {file = "pandas-1.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:324e60bea729cf3b55c1bf9e88fe8b9932c26f8669d13b928e3c96b3a1453dff"}, + {file = "pandas-1.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:37443199f451f8badfe0add666e43cdb817c59fa36bceedafd9c543a42f236ca"}, + {file = "pandas-1.2.1-cp37-cp37m-win32.whl", hash = "sha256:23ac77a3a222d9304cb2a7934bb7b4805ff43d513add7a42d1a22dc7df14edd2"}, + {file = "pandas-1.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:496fcc29321e9a804d56d5aa5d7ec1320edfd1898eee2f451aa70171cf1d5a29"}, + {file = 
"pandas-1.2.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:30e9e8bc8c5c17c03d943e8d6f778313efff59e413b8dbdd8214c2ed9aa165f6"}, + {file = "pandas-1.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:055647e7f4c5e66ba92c2a7dcae6c2c57898b605a3fb007745df61cc4015937f"}, + {file = "pandas-1.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:9d45f58b03af1fea4b48e44aa38a819a33dccb9821ef9e1d68f529995f8a632f"}, + {file = "pandas-1.2.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:b26e2dabda73d347c7af3e6fed58483161c7b87a886a4e06d76ccfe55a044aa9"}, + {file = "pandas-1.2.1-cp38-cp38-win32.whl", hash = "sha256:47ec0808a8357ab3890ce0eca39a63f79dcf941e2e7f494470fe1c9ec43f6091"}, + {file = "pandas-1.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:57d5c7ac62925a8d2ab43ea442b297a56cc8452015e71e24f4aa7e4ed6be3d77"}, + {file = "pandas-1.2.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d7cca42dba13bfee369e2944ae31f6549a55831cba3117e17636955176004088"}, + {file = "pandas-1.2.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:cfd237865d878da9b65cfee883da5e0067f5e2ff839e459466fb90565a77bda3"}, + {file = "pandas-1.2.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:050ed2c9d825ef36738e018454e6d055c63d947c1d52010fbadd7584f09df5db"}, + {file = "pandas-1.2.1-cp39-cp39-win32.whl", hash = "sha256:fe7de6fed43e7d086e3d947651ec89e55ddf00102f9dd5758763d56d182f0564"}, + {file = "pandas-1.2.1-cp39-cp39-win_amd64.whl", hash = "sha256:2de012a36cc507debd9c3351b4d757f828d5a784a5fc4e6766eafc2b56e4b0f5"}, + {file = "pandas-1.2.1.tar.gz", hash = "sha256:5527c5475d955c0bc9689c56865aaa2a7b13c504d6c44f0aadbf57b565af5ebd"}, ] pandocfilters = [ {file = "pandocfilters-1.4.3.tar.gz", hash = "sha256:bc63fbb50534b4b1f8ebe1860889289e8af94a23bff7445259592df25a3906eb"}, ] parso = [ - {file = "parso-0.7.1-py2.py3-none-any.whl", hash = "sha256:97218d9159b2520ff45eb78028ba8b50d2bc61dcc062a9682666f2dc4bd331ea"}, - {file = "parso-0.7.1.tar.gz", hash = 
"sha256:caba44724b994a8a5e086460bb212abc5a8bc46951bf4a9a1210745953622eb9"}, + {file = "parso-0.8.1-py2.py3-none-any.whl", hash = "sha256:15b00182f472319383252c18d5913b69269590616c947747bc50bf4ac768f410"}, + {file = "parso-0.8.1.tar.gz", hash = "sha256:8519430ad07087d4c997fda3a7918f7cfa27cb58972a8c89c2a0295a1c940e9e"}, ] pathspec = [ {file = "pathspec-0.8.1-py2.py3-none-any.whl", hash = "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d"}, @@ -2693,16 +2697,16 @@ pluggy = [ {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, ] pre-commit = [ - {file = "pre_commit-2.9.3-py2.py3-none-any.whl", hash = "sha256:6c86d977d00ddc8a60d68eec19f51ef212d9462937acf3ea37c7adec32284ac0"}, - {file = "pre_commit-2.9.3.tar.gz", hash = "sha256:ee784c11953e6d8badb97d19bc46b997a3a9eded849881ec587accd8608d74a4"}, + {file = "pre_commit-2.10.0-py2.py3-none-any.whl", hash = "sha256:391ed331fdd0a21d0be48c1b9919921e9d372dfd60f6dc77b8f01dd6b13161c1"}, + {file = "pre_commit-2.10.0.tar.gz", hash = "sha256:f413348d3a8464b77987e36ef6e02c3372dadb823edf0dfe6fb0c3dc2f378ef9"}, ] prometheus-client = [ {file = "prometheus_client-0.9.0-py2.py3-none-any.whl", hash = "sha256:b08c34c328e1bf5961f0b4352668e6c8f145b4a087e09b7296ef62cbe4693d35"}, {file = "prometheus_client-0.9.0.tar.gz", hash = "sha256:9da7b32f02439d8c04f7777021c304ed51d9ec180604700c1ba72a4d44dceb03"}, ] prompt-toolkit = [ - {file = "prompt_toolkit-3.0.8-py3-none-any.whl", hash = "sha256:7debb9a521e0b1ee7d2fe96ee4bd60ef03c6492784de0547337ca4433e46aa63"}, - {file = "prompt_toolkit-3.0.8.tar.gz", hash = "sha256:25c95d2ac813909f813c93fde734b6e44406d1477a9faef7c915ff37d39c0a8c"}, + {file = "prompt_toolkit-3.0.14-py3-none-any.whl", hash = "sha256:c96b30925025a7635471dc083ffb6af0cc67482a00611bd81aeaeeeb7e5a5e12"}, + {file = "prompt_toolkit-3.0.14.tar.gz", hash = "sha256:7e966747c18ececaec785699626b771c1ba8344c8d31759a1915d6b12fad6525"}, ] psycopg2 = [ {file = 
"psycopg2-2.8.6-cp27-cp27m-win32.whl", hash = "sha256:068115e13c70dc5982dfc00c5d70437fe37c014c808acce119b5448361c03725"}, @@ -2722,8 +2726,8 @@ psycopg2 = [ {file = "psycopg2-2.8.6.tar.gz", hash = "sha256:fb23f6c71107c37fd667cb4ea363ddeb936b348bbd6449278eb92c189699f543"}, ] ptyprocess = [ - {file = "ptyprocess-0.6.0-py2.py3-none-any.whl", hash = "sha256:d7cc528d76e76342423ca640335bd3633420dc1366f258cb31d05e865ef5ca1f"}, - {file = "ptyprocess-0.6.0.tar.gz", hash = "sha256:923f299cc5ad920c68f2bc0bc98b75b9f838b93b599941a6b63ddbc2476394c0"}, + {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, + {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, @@ -2746,8 +2750,8 @@ pyflakes = [ {file = "pyflakes-2.2.0.tar.gz", hash = "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8"}, ] pygments = [ - {file = "Pygments-2.7.3-py3-none-any.whl", hash = "sha256:f275b6c0909e5dafd2d6269a656aa90fa58ebf4a74f8fcf9053195d226b24a08"}, - {file = "Pygments-2.7.3.tar.gz", hash = "sha256:ccf3acacf3782cbed4a989426012f1c535c9a90d3a7fc3f16d231b9372d2b716"}, + {file = "Pygments-2.7.4-py3-none-any.whl", hash = "sha256:bc9591213a8f0e0ca1a5e68a479b4887fdc3e75d0774e5c71c31920c427de435"}, + {file = "Pygments-2.7.4.tar.gz", hash = "sha256:df49d09b498e83c1a73128295860250b0b7edd4c723a32e9bc0d295c7c2ec337"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, @@ -2757,12 +2761,12 @@ pyrsistent = [ {file = "pyrsistent-0.17.3.tar.gz", hash = "sha256:2e636185d9eb976a18a8a8e96efce62f2905fea90041958d8cc2a189756ebf3e"}, ] pytest = [ - {file = "pytest-6.2.1-py3-none-any.whl", hash = 
"sha256:1969f797a1a0dbd8ccf0fecc80262312729afea9c17f1d70ebf85c5e76c6f7c8"}, - {file = "pytest-6.2.1.tar.gz", hash = "sha256:66e419b1899bc27346cb2c993e12c5e5e8daba9073c1fbce33b9807abc95c306"}, + {file = "pytest-6.2.2-py3-none-any.whl", hash = "sha256:b574b57423e818210672e07ca1fa90aaf194a4f63f3ab909a2c67ebb22913839"}, + {file = "pytest-6.2.2.tar.gz", hash = "sha256:9d1edf9e7d0b84d72ea3dbcdfd22b35fb543a5e8f2a60092dd578936bf63d7f9"}, ] pytest-cov = [ - {file = "pytest-cov-2.10.1.tar.gz", hash = "sha256:47bd0ce14056fdd79f93e1713f88fad7bdcc583dcd7783da86ef2f085a0bb88e"}, - {file = "pytest_cov-2.10.1-py2.py3-none-any.whl", hash = "sha256:45ec2d5182f89a81fc3eb29e3d1ed3113b9e9a873bcddb2a71faaab066110191"}, + {file = "pytest-cov-2.11.1.tar.gz", hash = "sha256:359952d9d39b9f822d9d29324483e7ba04a3a17dd7d05aa6beb7ea01e359e5f7"}, + {file = "pytest_cov-2.11.1-py2.py3-none-any.whl", hash = "sha256:bdb9fdb0b85a7cc825269a4c56b48ccaa5c7e365054b6038772c32ddcdc969da"}, ] pytest-env = [ {file = "pytest-env-0.6.2.tar.gz", hash = "sha256:7e94956aef7f2764f3c147d216ce066bf6c42948bb9e293169b1b1c880a580c2"}, @@ -2787,8 +2791,8 @@ python-editor = [ {file = "python_editor-1.0.4-py3.5.egg", hash = "sha256:c3da2053dbab6b29c94e43c486ff67206eafbe7eb52dbec7390b5e2fb05aac77"}, ] pytz = [ - {file = "pytz-2020.4-py2.py3-none-any.whl", hash = "sha256:5c55e189b682d420be27c6995ba6edce0c0a77dd67bfbe2ae6607134d5851ffd"}, - {file = "pytz-2020.4.tar.gz", hash = "sha256:3e6b7dd2d1e0a59084bcee14a17af60c5c562cdc16d828e8eba2e683d3a7e268"}, + {file = "pytz-2020.5-py2.py3-none-any.whl", hash = "sha256:16962c5fb8db4a8f63a26646d8886e9d769b6c511543557bc84e9569fb9a9cb4"}, + {file = "pytz-2020.5.tar.gz", hash = "sha256:180befebb1927b16f6b57101720075a984c019ac16b1b7575673bea42c6c3da5"}, ] pywin32 = [ {file = "pywin32-300-cp35-cp35m-win32.whl", hash = "sha256:1c204a81daed2089e55d11eefa4826c05e604d27fe2be40b6bf8db7b6a39da63"}, @@ -2815,50 +2819,61 @@ pywinpty = [ {file = "pywinpty-0.5.7.tar.gz", hash = 
"sha256:2d7e9c881638a72ffdca3f5417dd1563b60f603e1b43e5895674c2a1b01f95a0"}, ] pyyaml = [ - {file = "PyYAML-5.3.1-cp27-cp27m-win32.whl", hash = "sha256:74809a57b329d6cc0fdccee6318f44b9b8649961fa73144a98735b0aaf029f1f"}, - {file = "PyYAML-5.3.1-cp27-cp27m-win_amd64.whl", hash = "sha256:240097ff019d7c70a4922b6869d8a86407758333f02203e0fc6ff79c5dcede76"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win32.whl", hash = "sha256:4f4b913ca1a7319b33cfb1369e91e50354d6f07a135f3b901aca02aa95940bd2"}, - {file = "PyYAML-5.3.1-cp35-cp35m-win_amd64.whl", hash = "sha256:cc8955cfbfc7a115fa81d85284ee61147059a753344bc51098f3ccd69b0d7e0c"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win32.whl", hash = "sha256:7739fc0fa8205b3ee8808aea45e968bc90082c10aef6ea95e855e10abf4a37b2"}, - {file = "PyYAML-5.3.1-cp36-cp36m-win_amd64.whl", hash = "sha256:69f00dca373f240f842b2931fb2c7e14ddbacd1397d57157a9b005a6a9942648"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win32.whl", hash = "sha256:d13155f591e6fcc1ec3b30685d50bf0711574e2c0dfffd7644babf8b5102ca1a"}, - {file = "PyYAML-5.3.1-cp37-cp37m-win_amd64.whl", hash = "sha256:73f099454b799e05e5ab51423c7bcf361c58d3206fa7b0d555426b1f4d9a3eaf"}, - {file = "PyYAML-5.3.1-cp38-cp38-win32.whl", hash = "sha256:06a0d7ba600ce0b2d2fe2e78453a470b5a6e000a985dd4a4e54e436cc36b0e97"}, - {file = "PyYAML-5.3.1-cp38-cp38-win_amd64.whl", hash = "sha256:95f71d2af0ff4227885f7a6605c37fd53d3a106fcab511b8860ecca9fcf400ee"}, - {file = "PyYAML-5.3.1-cp39-cp39-win32.whl", hash = "sha256:ad9c67312c84def58f3c04504727ca879cb0013b2517c85a9a253f0cb6380c0a"}, - {file = "PyYAML-5.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:6034f55dab5fea9e53f436aa68fa3ace2634918e8b5994d82f3621c04ff5ed2e"}, - {file = "PyYAML-5.3.1.tar.gz", hash = "sha256:b8eac752c5e14d3eca0e6dd9199cd627518cb5ec06add0de9d32baeee6fe645d"}, + {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win32.whl", hash = 
"sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393"}, + {file = "PyYAML-5.4.1-cp27-cp27m-win_amd64.whl", hash = "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8"}, + {file = "PyYAML-5.4.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185"}, + {file = "PyYAML-5.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253"}, + {file = "PyYAML-5.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win32.whl", hash = "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5"}, + {file = "PyYAML-5.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df"}, + {file = "PyYAML-5.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018"}, + {file = "PyYAML-5.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win32.whl", hash = "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b"}, + {file = "PyYAML-5.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf"}, + {file = "PyYAML-5.4.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46"}, + {file = "PyYAML-5.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb"}, + {file = "PyYAML-5.4.1-cp38-cp38-win32.whl", hash = "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc"}, + {file = "PyYAML-5.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696"}, + 
{file = "PyYAML-5.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77"}, + {file = "PyYAML-5.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183"}, + {file = "PyYAML-5.4.1-cp39-cp39-win32.whl", hash = "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10"}, + {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, + {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] pyzmq = [ - {file = "pyzmq-20.0.0-cp35-cp35m-macosx_10_9_intel.whl", hash = "sha256:523d542823cabb94065178090e05347bd204365f6e7cb260f0071c995d392fc2"}, - {file = "pyzmq-20.0.0-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:225774a48ed7414c0395335e7123ef8c418dbcbe172caabdc2496133b03254c2"}, - {file = "pyzmq-20.0.0-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:bc7dd697356b31389d5118b9bcdef3e8d8079e8181800c4e8d72dccd56e1ff68"}, - {file = "pyzmq-20.0.0-cp35-cp35m-win32.whl", hash = "sha256:d81184489369ec325bd50ba1c935361e63f31f578430b9ad95471899361a8253"}, - {file = "pyzmq-20.0.0-cp35-cp35m-win_amd64.whl", hash = "sha256:7113eb93dcd0a5750c65d123ed0099e036a3a3f2dcb48afedd025ffa125c983b"}, - {file = "pyzmq-20.0.0-cp36-cp36m-macosx_10_9_intel.whl", hash = "sha256:b62113eeb9a0649cebed9b21fd578f3a0175ef214a2a91dcb7b31bbf55805295"}, - {file = "pyzmq-20.0.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:f0beef935efe78a63c785bb21ed56c1c24448511383e3994927c8bb2caf5e714"}, - {file = "pyzmq-20.0.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:46250789730489009fe139cbf576679557c070a6a3628077d09a4153d52fd381"}, - {file = "pyzmq-20.0.0-cp36-cp36m-win32.whl", hash = "sha256:bf755905a7d30d2749079611b9a89924c1f2da2695dc09ce221f42122c9808e3"}, - {file = "pyzmq-20.0.0-cp36-cp36m-win_amd64.whl", hash = 
"sha256:2742e380d186673eee6a570ef83d4568741945434ba36d92b98d36cdbfedbd44"}, - {file = "pyzmq-20.0.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:1e9b75a119606732023a305d1c214146c09a91f8116f6aff3e8b7d0a60b6f0ff"}, - {file = "pyzmq-20.0.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:03638e46d486dd1c118e03c8bf9c634bdcae679600eac6573ae1e54906de7c2f"}, - {file = "pyzmq-20.0.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:63ee08e35be72fdd7568065a249a5b5cf51a2e8ab6ee63cf9f73786fcb9e710b"}, - {file = "pyzmq-20.0.0-cp37-cp37m-win32.whl", hash = "sha256:c95dda497a7c1b1e734b5e8353173ca5dd7b67784d8821d13413a97856588057"}, - {file = "pyzmq-20.0.0-cp37-cp37m-win_amd64.whl", hash = "sha256:cc09c5cd1a4332611c8564d65e6a432dc6db3e10793d0254da9fa1e31d9ffd6d"}, - {file = "pyzmq-20.0.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6e24907857c80dc67692e31f5bf3ad5bf483ee0142cec95b3d47e2db8c43bdda"}, - {file = "pyzmq-20.0.0-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:53706f4a792cdae422121fb6a5e65119bad02373153364fc9d004cf6a90394de"}, - {file = "pyzmq-20.0.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:895695be380f0f85d2e3ec5ccf68a93c92d45bd298567525ad5633071589872c"}, - {file = "pyzmq-20.0.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:d92c7f41a53ece82b91703ea433c7d34143248cf0cead33aa11c5fc621c764bf"}, - {file = "pyzmq-20.0.0-cp38-cp38-win32.whl", hash = "sha256:309d763d89ec1845c0e0fa14e1fb6558fd8c9ef05ed32baec27d7a8499cc7bb0"}, - {file = "pyzmq-20.0.0-cp38-cp38-win_amd64.whl", hash = "sha256:0e554fd390021edbe0330b67226325a820b0319c5b45e1b0a59bf22ccc36e793"}, - {file = "pyzmq-20.0.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:cfa54a162a7b32641665e99b2c12084555afe9fc8fe80ec8b2f71a57320d10e1"}, - {file = "pyzmq-20.0.0-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:dc2f48b575dff6edefd572f1ac84cf0c3f18ad5fcf13384de32df740a010594a"}, - {file = "pyzmq-20.0.0-cp39-cp39-manylinux1_i686.whl", hash = 
"sha256:5efe02bdcc5eafcac0aab531292294298f0ab8d28ed43be9e507d0e09173d1a4"}, - {file = "pyzmq-20.0.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0af84f34f27b5c6a0e906c648bdf46d4caebf9c8e6e16db0728f30a58141cad6"}, - {file = "pyzmq-20.0.0-cp39-cp39-win32.whl", hash = "sha256:c63fafd2556d218368c51d18588f8e6f8d86d09d493032415057faf6de869b34"}, - {file = "pyzmq-20.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:f110a4d3f8f01209eec304ed542f6c8054cce9b0f16dfe3d571e57c290e4e133"}, - {file = "pyzmq-20.0.0-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4d9259a5eb3f71abbaf61f165cacf42240bfeea3783bebd8255341abdfe206f1"}, - {file = "pyzmq-20.0.0.tar.gz", hash = "sha256:824ad5888331aadeac772bce27e1c2fbcab82fade92edbd234542c4e12f0dca9"}, + {file = "pyzmq-22.0.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c2a8d70fe2a321a83d274970481eb244bff027b58511e943ef564721530ba786"}, + {file = "pyzmq-22.0.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:b68033181dc2e622bb5baa9b16d5933303779a03dc89860f4c44f629426d802c"}, + {file = "pyzmq-22.0.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:9bae89912cac9f03d41adb66981f6e753cfd4e451937b2cd435d732fd4ccb1a3"}, + {file = "pyzmq-22.0.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:75b68890219231bd60556a1c6e0d2dc05fa1b179a26c876442c83a0d77958bc9"}, + {file = "pyzmq-22.0.2-cp36-cp36m-win32.whl", hash = "sha256:c6b1d235a08f2c42480cb9a0a5cd2a29c391052d8bc8f43db86aa15387734a33"}, + {file = "pyzmq-22.0.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f3ad3f77ed6a3cf31f61170fc1733afd83a4cf8e02edde0762d4e630bce2a97e"}, + {file = "pyzmq-22.0.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:490a9fe5509b09369722b18b85ef494abdf7c51cb1c9484cf83c3921961c2038"}, + {file = "pyzmq-22.0.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:303b8ebafce9906fc1e8eb35734b9dba4786ca3da7cdc88e04a8997dde2372d3"}, + {file = "pyzmq-22.0.2-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:1ffb81b08bcaaac30ba913adef686ff41b257252e96fca32497029fdc3962ff0"}, + {file = "pyzmq-22.0.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:75fa832c79ce30a23cd44a4e89224c651ef6bf5144b842ad066246e914b92233"}, + {file = "pyzmq-22.0.2-cp37-cp37m-win32.whl", hash = "sha256:d77f6eb839097e4bce96fcac7e05e33b677efe0385bd0ab6c2a9ea818ed7e8f9"}, + {file = "pyzmq-22.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:5a565af3729b2bf7c2ce1d563084d0cd90a312290ba5e571a0c3ec770ea8a287"}, + {file = "pyzmq-22.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ff236d8653f8bb74198223c7af77b9378714f411d6d95255d97c2d69bf991b20"}, + {file = "pyzmq-22.0.2-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:37beae88d6cf102419bb0ec79acb19c062dcea6765b57cf2b265dac5542bcdad"}, + {file = "pyzmq-22.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:bc9f2c26485dc76520084ee8d76f18171cc89f24f801bed8402302ee99dbbcd9"}, + {file = "pyzmq-22.0.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:0b32bd5e7346e534fddb57eab309933ff6b3b177c0106b908b6193dfa75fdabe"}, + {file = "pyzmq-22.0.2-cp38-cp38-win32.whl", hash = "sha256:58a074afa254a53872202e92594b59c0ba8cda62effc6437e34ae7048559dd38"}, + {file = "pyzmq-22.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:66d1190eec0a78bd07d39d1615b7923190ed1ba8aa04742d963b09bc66628681"}, + {file = "pyzmq-22.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:013e1343b41aaeb482f40605f3fadcfeb841706039625d7b30d12ae8fa0d3cd0"}, + {file = "pyzmq-22.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d66724bf0d423aa18c9ea43a1bf24ed5c1d143f00bdace7c1b7fc3034f188cc9"}, + {file = "pyzmq-22.0.2-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:86cb0982b02b4fc2fbd4a65155289e0e4e5015982dbe2db14f8856c303cffa08"}, + {file = "pyzmq-22.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:7b6c855c562d1c1bf7a1ba72c2617c8298e0fa1b1c08dc8d60e225031567ad9e"}, + {file = "pyzmq-22.0.2-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:034f5b9e4ff0bcc67e49fe8f55a1b209ea5761c8fd00c246195c8d0cb6ce096d"}, + {file = "pyzmq-22.0.2-cp39-cp39-win32.whl", hash = "sha256:849444c1699c244d5770d3a684c51f024e95c538f71dd3d1ff423a91745bab7f"}, + {file = "pyzmq-22.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:506d4716ca6e5798345038e75adcb05b4118112a36700941967925285637198b"}, + {file = "pyzmq-22.0.2-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:888d850d4b7e1426d210e901bd93075991b36fe0e2ae2547ce5c18b96df95250"}, + {file = "pyzmq-22.0.2-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:03c001be8c3817d5721137660ed21d90f6175002f0e583306079c791b1d9a855"}, + {file = "pyzmq-22.0.2-pp36-pypy36_pp73-win32.whl", hash = "sha256:3f4e6574d2589e3e22514a3669e86a7bf18a95d3c3ae65733fa6a0a769ec4c9d"}, + {file = "pyzmq-22.0.2-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:35c8c5c8160f0f0fc6d4588037243b668c3f20d981c1b8e7b5d9c33f8eeb7eb6"}, + {file = "pyzmq-22.0.2-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:841e9563ce9bd33fe9f227ec680ac033e9f1060977d613568c1dcbff09e74cc9"}, + {file = "pyzmq-22.0.2-pp37-pypy37_pp73-win32.whl", hash = "sha256:cc814880ba27f2ea8cea48ff3b480076266d4dd9c3fe29ef6e5a0a807639abe7"}, + {file = "pyzmq-22.0.2.tar.gz", hash = "sha256:d7b82a959e5e22d492f4f5a1e650e909a6c8c76ede178f538313ddb9d1e92963"}, ] regex = [ {file = "regex-2020.11.13-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85"}, @@ -2904,17 +2919,17 @@ regex = [ {file = "regex-2020.11.13.tar.gz", hash = "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562"}, ] requests = [ - {file = "requests-2.25.0-py2.py3-none-any.whl", hash = "sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998"}, - {file = "requests-2.25.0.tar.gz", hash = "sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8"}, + {file = "requests-2.25.1-py2.py3-none-any.whl", hash = 
"sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, + {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, ] restructuredtext-lint = [ {file = "restructuredtext_lint-1.3.2.tar.gz", hash = "sha256:d3b10a1fe2ecac537e51ae6d151b223b78de9fafdd50e5eb6b08c243df173c80"}, ] rpy2 = [ - {file = "rpy2-3.4.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:3f2d56bc80c2af0fe8118c53da7fd29f1809bc159a88cb10f9e2869321a21deb"}, - {file = "rpy2-3.4.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:344ac89c966b2ec91bbf9e623b7ff9c121820b5e53da2ffc75fa10f158023cd7"}, - {file = "rpy2-3.4.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:ebbd7fceef359279f56b481d7ea2dd60db91928abb3726010a88fbb3362213af"}, - {file = "rpy2-3.4.1.tar.gz", hash = "sha256:644360b569656700dfe13f59878ec1cf8c116c128d4f2f0bf96144031f95d2e2"}, + {file = "rpy2-3.4.2-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:dd2d1e9b4733a449f5c48e2f1da165bf77bc33c43ffcf9dacf051c6eb9a417d7"}, + {file = "rpy2-3.4.2-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:1eb4f7fd414a60dbe2555751b6789f64353db770b2d40917dac1bd20b7bec333"}, + {file = "rpy2-3.4.2-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:10944c3e38fb2c89fbe89c42308189936b055a9ad011d5c72e96f7ba8720dec1"}, + {file = "rpy2-3.4.2.tar.gz", hash = "sha256:8f7d1348b77bc45425b846a0d625f24a51a1c4f32ef2cd1c07a24222aa64e2e0"}, ] scipy = [ {file = "scipy-1.6.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d4303e3e21d07d9557b26a1707bb9fc065510ee8501c9bf22a0157249a82fd0"}, @@ -2970,16 +2985,16 @@ six = [ {file = "six-1.15.0.tar.gz", hash = "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259"}, ] smmap = [ - {file = "smmap-3.0.4-py2.py3-none-any.whl", hash = "sha256:54c44c197c819d5ef1991799a7e30b662d1e520f2ac75c9efbeb54a742214cf4"}, - {file = "smmap-3.0.4.tar.gz", hash = "sha256:9c98bbd1f9786d22f14b3d4126894d56befb835ec90cef151af566c7e19b5d24"}, + 
{file = "smmap-3.0.5-py2.py3-none-any.whl", hash = "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714"}, + {file = "smmap-3.0.5.tar.gz", hash = "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50"}, ] snowballstemmer = [ - {file = "snowballstemmer-2.0.0-py2.py3-none-any.whl", hash = "sha256:209f257d7533fdb3cb73bdbd24f436239ca3b2fa67d56f6ff88e86be08cc5ef0"}, - {file = "snowballstemmer-2.0.0.tar.gz", hash = "sha256:df3bac3df4c2c01363f3dd2cfa78cce2840a79b9f1c2d2de9ce8d31683992f52"}, + {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, + {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, ] sphinx = [ - {file = "Sphinx-3.3.1-py3-none-any.whl", hash = "sha256:d4e59ad4ea55efbb3c05cde3bfc83bfc14f0c95aa95c3d75346fcce186a47960"}, - {file = "Sphinx-3.3.1.tar.gz", hash = "sha256:1e8d592225447104d1172be415bc2972bd1357e3e12fdc76edf2261105db4300"}, + {file = "Sphinx-3.4.3-py3-none-any.whl", hash = "sha256:c314c857e7cd47c856d2c5adff514ac2e6495f8b8e0f886a8a37e9305dfea0d8"}, + {file = "Sphinx-3.4.3.tar.gz", hash = "sha256:41cad293f954f7d37f803d97eb184158cfd90f51195131e94875bc07cd08b93c"}, ] sphinx-autodoc-typehints = [ {file = "sphinx-autodoc-typehints-1.11.1.tar.gz", hash = "sha256:244ba6d3e2fdb854622f643c7763d6f95b6886eba24bec28e86edf205e4ddb20"}, @@ -3010,78 +3025,79 @@ sphinxcontrib-serializinghtml = [ {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, ] sqlalchemy = [ - {file = "SQLAlchemy-1.3.20-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:bad73f9888d30f9e1d57ac8829f8a12091bdee4949b91db279569774a866a18e"}, - {file = "SQLAlchemy-1.3.20-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:e32e3455db14602b6117f0f422f46bc297a3853ae2c322ecd1e2c4c04daf6ed5"}, - {file = 
"SQLAlchemy-1.3.20-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:5cdfe54c1e37279dc70d92815464b77cd8ee30725adc9350f06074f91dbfeed2"}, - {file = "SQLAlchemy-1.3.20-cp27-cp27m-win32.whl", hash = "sha256:2e9bd5b23bba8ae8ce4219c9333974ff5e103c857d9ff0e4b73dc4cb244c7d86"}, - {file = "SQLAlchemy-1.3.20-cp27-cp27m-win_amd64.whl", hash = "sha256:5d92c18458a4aa27497a986038d5d797b5279268a2de303cd00910658e8d149c"}, - {file = "SQLAlchemy-1.3.20-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:53fd857c6c8ffc0aa6a5a3a2619f6a74247e42ec9e46b836a8ffa4abe7aab327"}, - {file = "SQLAlchemy-1.3.20-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:0a92745bb1ebbcb3985ed7bda379b94627f0edbc6c82e9e4bac4fb5647ae609a"}, - {file = "SQLAlchemy-1.3.20-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:b6f036ecc017ec2e2cc2a40615b41850dc7aaaea6a932628c0afc73ab98ba3fb"}, - {file = "SQLAlchemy-1.3.20-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:3aa6d45e149a16aa1f0c46816397e12313d5e37f22205c26e06975e150ffcf2a"}, - {file = "SQLAlchemy-1.3.20-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:ed53209b5f0f383acb49a927179fa51a6e2259878e164273ebc6815f3a752465"}, - {file = "SQLAlchemy-1.3.20-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:d3b709d64b5cf064972b3763b47139e4a0dc4ae28a36437757f7663f67b99710"}, - {file = "SQLAlchemy-1.3.20-cp35-cp35m-win32.whl", hash = "sha256:950f0e17ffba7a7ceb0dd056567bc5ade22a11a75920b0e8298865dc28c0eff6"}, - {file = "SQLAlchemy-1.3.20-cp35-cp35m-win_amd64.whl", hash = "sha256:8dcbf377529a9af167cbfc5b8acec0fadd7c2357fc282a1494c222d3abfc9629"}, - {file = "SQLAlchemy-1.3.20-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:0157c269701d88f5faf1fa0e4560e4d814f210c01a5b55df3cab95e9346a8bcc"}, - {file = "SQLAlchemy-1.3.20-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:7cd40cb4bc50d9e87b3540b23df6e6b24821ba7e1f305c1492b0806c33dbdbec"}, - {file = "SQLAlchemy-1.3.20-cp36-cp36m-manylinux2010_x86_64.whl", hash = 
"sha256:c092fe282de83d48e64d306b4bce03114859cdbfe19bf8a978a78a0d44ddadb1"}, - {file = "SQLAlchemy-1.3.20-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:166917a729b9226decff29416f212c516227c2eb8a9c9f920d69ced24e30109f"}, - {file = "SQLAlchemy-1.3.20-cp36-cp36m-win32.whl", hash = "sha256:632b32183c0cb0053194a4085c304bc2320e5299f77e3024556fa2aa395c2a8b"}, - {file = "SQLAlchemy-1.3.20-cp36-cp36m-win_amd64.whl", hash = "sha256:bbc58fca72ce45a64bb02b87f73df58e29848b693869e58bd890b2ddbb42d83b"}, - {file = "SQLAlchemy-1.3.20-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:b15002b9788ffe84e42baffc334739d3b68008a973d65fad0a410ca5d0531980"}, - {file = "SQLAlchemy-1.3.20-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:9e379674728f43a0cd95c423ac0e95262500f9bfd81d33b999daa8ea1756d162"}, - {file = "SQLAlchemy-1.3.20-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:2b5dafed97f778e9901b79cc01b88d39c605e0545b4541f2551a2fd785adc15b"}, - {file = "SQLAlchemy-1.3.20-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:fcdb3755a7c355bc29df1b5e6fb8226d5c8b90551d202d69d0076a8a5649d68b"}, - {file = "SQLAlchemy-1.3.20-cp37-cp37m-win32.whl", hash = "sha256:bca4d367a725694dae3dfdc86cf1d1622b9f414e70bd19651f5ac4fb3aa96d61"}, - {file = "SQLAlchemy-1.3.20-cp37-cp37m-win_amd64.whl", hash = "sha256:f605f348f4e6a2ba00acb3399c71d213b92f27f2383fc4abebf7a37368c12142"}, - {file = "SQLAlchemy-1.3.20-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:84f0ac4a09971536b38cc5d515d6add7926a7e13baa25135a1dbb6afa351a376"}, - {file = "SQLAlchemy-1.3.20-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2909dffe5c9a615b7e6c92d1ac2d31e3026dc436440a4f750f4749d114d88ceb"}, - {file = "SQLAlchemy-1.3.20-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:c3ab23ee9674336654bf9cac30eb75ac6acb9150dc4b1391bec533a7a4126471"}, - {file = "SQLAlchemy-1.3.20-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:009e8388d4d551a2107632921320886650b46332f61dc935e70c8bcf37d8e0d6"}, - {file = 
"SQLAlchemy-1.3.20-cp38-cp38-win32.whl", hash = "sha256:bf53d8dddfc3e53a5bda65f7f4aa40fae306843641e3e8e701c18a5609471edf"}, - {file = "SQLAlchemy-1.3.20-cp38-cp38-win_amd64.whl", hash = "sha256:7c735c7a6db8ee9554a3935e741cf288f7dcbe8706320251eb38c412e6a4281d"}, - {file = "SQLAlchemy-1.3.20-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:4bdbdb8ca577c6c366d15791747c1de6ab14529115a2eb52774240c412a7b403"}, - {file = "SQLAlchemy-1.3.20-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:ce64a44c867d128ab8e675f587aae7f61bd2db836a3c4ba522d884cd7c298a77"}, - {file = "SQLAlchemy-1.3.20-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:be41d5de7a8e241864189b7530ca4aaf56a5204332caa70555c2d96379e18079"}, - {file = "SQLAlchemy-1.3.20-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:1f5f369202912be72fdf9a8f25067a5ece31a2b38507bb869306f173336348da"}, - {file = "SQLAlchemy-1.3.20-cp39-cp39-win32.whl", hash = "sha256:0cca1844ba870e81c03633a99aa3dc62256fb96323431a5dec7d4e503c26372d"}, - {file = "SQLAlchemy-1.3.20-cp39-cp39-win_amd64.whl", hash = "sha256:d05cef4a164b44ffda58200efcb22355350979e000828479971ebca49b82ddb1"}, - {file = "SQLAlchemy-1.3.20.tar.gz", hash = "sha256:d2f25c7f410338d31666d7ddedfa67570900e248b940d186b48461bd4e5569a1"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:fd3b96f8c705af8e938eaa99cbd8fd1450f632d38cad55e7367c33b263bf98ec"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:29cccc9606750fe10c5d0e8bd847f17a97f3850b8682aef1f56f5d5e1a5a64b1"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:927ce09e49bff3104459e1451ce82983b0a3062437a07d883a4c66f0b344c9b5"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-win32.whl", hash = "sha256:b4b0e44d586cd64b65b507fa116a3814a1a53d55dce4836d7c1a6eb2823ff8d1"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27m-win_amd64.whl", hash = "sha256:6b8b8c80c7f384f06825612dd078e4a31f0185e8f1f6b8c19e188ff246334205"}, + {file = 
"SQLAlchemy-1.3.23-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:9e9c25522933e569e8b53ccc644dc993cab87e922fb7e142894653880fdd419d"}, + {file = "SQLAlchemy-1.3.23-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:a0e306e9bb76fd93b29ae3a5155298e4c1b504c7cbc620c09c20858d32d16234"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-macosx_10_14_x86_64.whl", hash = "sha256:6c9e6cc9237de5660bcddea63f332428bb83c8e2015c26777281f7ffbd2efb84"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:94f667d86be82dd4cb17d08de0c3622e77ca865320e0b95eae6153faa7b4ecaf"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:751934967f5336a3e26fc5993ccad1e4fee982029f9317eb6153bc0bc3d2d2da"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:63677d0c08524af4c5893c18dbe42141de7178001360b3de0b86217502ed3601"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-win32.whl", hash = "sha256:ddfb511e76d016c3a160910642d57f4587dc542ce5ee823b0d415134790eeeb9"}, + {file = "SQLAlchemy-1.3.23-cp35-cp35m-win_amd64.whl", hash = "sha256:040bdfc1d76a9074717a3f43455685f781c581f94472b010cd6c4754754e1862"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:d1a85dfc5dee741bf49cb9b6b6b8d2725a268e4992507cf151cba26b17d97c37"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:639940bbe1108ac667dcffc79925db2966826c270112e9159439ab6bb14f8d80"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:e8a1750b44ad6422ace82bf3466638f1aa0862dbb9689690d5f2f48cce3476c8"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:e5bb3463df697279e5459a7316ad5a60b04b0107f9392e88674d0ece70e9cf70"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-win32.whl", hash = "sha256:e273367f4076bd7b9a8dc2e771978ef2bfd6b82526e80775a7db52bff8ca01dd"}, + {file = "SQLAlchemy-1.3.23-cp36-cp36m-win_amd64.whl", hash = 
"sha256:ac2244e64485c3778f012951fdc869969a736cd61375fde6096d08850d8be729"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:23927c3981d1ec6b4ea71eb99d28424b874d9c696a21e5fbd9fa322718be3708"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d90010304abb4102123d10cbad2cdf2c25a9f2e66a50974199b24b468509bad5"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:a8bfc1e1afe523e94974132d7230b82ca7fa2511aedde1f537ec54db0399541a"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:269990b3ab53cb035d662dcde51df0943c1417bdab707dc4a7e4114a710504b4"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-win32.whl", hash = "sha256:fdd2ed7395df8ac2dbb10cefc44737b66c6a5cd7755c92524733d7a443e5b7e2"}, + {file = "SQLAlchemy-1.3.23-cp37-cp37m-win_amd64.whl", hash = "sha256:6a939a868fdaa4b504e8b9d4a61f21aac11e3fecc8a8214455e144939e3d2aea"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:24f9569e82a009a09ce2d263559acb3466eba2617203170e4a0af91e75b4f075"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:2578dbdbe4dbb0e5126fb37ffcd9793a25dcad769a95f171a2161030bea850ff"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1fe5d8d39118c2b018c215c37b73fd6893c3e1d4895be745ca8ff6eb83333ed3"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:c7dc052432cd5d060d7437e217dd33c97025287f99a69a50e2dc1478dd610d64"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-win32.whl", hash = "sha256:ecce8c021894a77d89808222b1ff9687ad84db54d18e4bd0500ca766737faaf6"}, + {file = "SQLAlchemy-1.3.23-cp38-cp38-win_amd64.whl", hash = "sha256:37b83bf81b4b85dda273aaaed5f35ea20ad80606f672d94d2218afc565fb0173"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:8be835aac18ec85351385e17b8665bd4d63083a7160a017bef3d640e8e65cadb"}, + {file = 
"SQLAlchemy-1.3.23-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6ec1044908414013ebfe363450c22f14698803ce97fbb47e53284d55c5165848"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:eab063a70cca4a587c28824e18be41d8ecc4457f8f15b2933584c6c6cccd30f0"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:baeb451ee23e264de3f577fee5283c73d9bbaa8cb921d0305c0bbf700094b65b"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-win32.whl", hash = "sha256:94208867f34e60f54a33a37f1c117251be91a47e3bfdb9ab8a7847f20886ad06"}, + {file = "SQLAlchemy-1.3.23-cp39-cp39-win_amd64.whl", hash = "sha256:f4d972139d5000105fcda9539a76452039434013570d6059993120dc2a65e447"}, + {file = "SQLAlchemy-1.3.23.tar.gz", hash = "sha256:6fca33672578666f657c131552c4ef8979c1606e494f78cd5199742dfb26918b"}, ] statsmodels = [ - {file = "statsmodels-0.12.1-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:3b482ab9759b89cc1c4777b71c1ccf272e868a7551fc6b74da300557407d8379"}, - {file = "statsmodels-0.12.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:33c6cbed74f075b8816cec37e5c7853ed31dcacebfdbbc3af898b4907911544e"}, - {file = "statsmodels-0.12.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:142eacd5a1bd8728358ff48101ee0e51ca3d42a93f6e5cb61fcfacf613977bcf"}, - {file = "statsmodels-0.12.1-cp36-none-win32.whl", hash = "sha256:ef3a54b3594f4c49c295388de1fdd840a8c63a857a5252125aaf92a03ea1e3a6"}, - {file = "statsmodels-0.12.1-cp36-none-win_amd64.whl", hash = "sha256:830d59d94841332429edf735430180031ad5dc660de26728d723e347f414c59d"}, - {file = "statsmodels-0.12.1-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:aa964ca1d65f066b9b096c94fe298aab1441e11731ce6b154ffb5f8d4a4e9ccf"}, - {file = "statsmodels-0.12.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:7614ef58ebb96cc0d4c45150116f5252a2f1e0bd15e809700776163e5a246b8c"}, - {file = "statsmodels-0.12.1-cp37-cp37m-manylinux1_x86_64.whl", hash = 
"sha256:588c0f7e29403161ca952dcdad3d67970583742e9f11f66c7c5b08ac97a0408c"}, - {file = "statsmodels-0.12.1-cp37-none-win32.whl", hash = "sha256:7be4c6d43f1f3a6b28614a4b18fdcf202bd305faf15f4c558e901cbe099ca9ea"}, - {file = "statsmodels-0.12.1-cp37-none-win_amd64.whl", hash = "sha256:e5e426fb962f41d58a07a7d2f7daf32f83e911ff578368caddbcdd1886887ed1"}, - {file = "statsmodels-0.12.1-cp38-cp38-macosx_10_13_x86_64.whl", hash = "sha256:3582c0a497a9cda473470b4dd59ecd103739e3cfef1eb2e20d48dd1a2239f2e4"}, - {file = "statsmodels-0.12.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:02679bf39d35a2aceb2d9f6d332b4e1cda1797157df792fe867b45f2a14d20d3"}, - {file = "statsmodels-0.12.1-cp38-none-win32.whl", hash = "sha256:a652d8bfb4ec430b706a69e3fcbdac1cdf930823e3f9b8468e3e179d47097bbb"}, - {file = "statsmodels-0.12.1-cp38-none-win_amd64.whl", hash = "sha256:74c6c863d6f8a1f021d42f965b1b97eeea05293d3b18e3690c46eac0cf6d64d9"}, - {file = "statsmodels-0.12.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:62be4dd5b4a254d59b7feb8093623ba6158080aa6758c2eb19105609da4b40fb"}, - {file = "statsmodels-0.12.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:630b9d938b0388488c66394597500dfba877e3b53da536369393a9a840b8f2a0"}, - {file = "statsmodels-0.12.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f585c02b716c161f00e6a2d10f9f3497f57191183dbd6ae7eaa988707023b1ee"}, - {file = "statsmodels-0.12.1-cp39-none-win32.whl", hash = "sha256:3dd59b7cd35843f4764b8a1476be20cf959d3da700327975f7cd2bf2a1b630b2"}, - {file = "statsmodels-0.12.1-cp39-none-win_amd64.whl", hash = "sha256:78813784f5fa612b4399c4963414799fbbb031188f1ad630a501c6b2af7e94e0"}, - {file = "statsmodels-0.12.1.tar.gz", hash = "sha256:a271b4ccec190148dccda25f0cbdcbf871f408fc1394a10a7dc1af4a62b91c8e"}, + {file = "statsmodels-0.12.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c1d98ce2072f5e772cbf91d05475490368da5d3ee4a3150062330c7b83221ceb"}, + {file = "statsmodels-0.12.2-cp36-cp36m-manylinux1_i686.whl", hash = 
"sha256:4184487e9c281acad3d0bda19445c69db292f0dbb18f25ebf56a7966a0a28eef"}, + {file = "statsmodels-0.12.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:37e107fa11299090ed90f93c7172162b850c28fd09999937b971926813e887c5"}, + {file = "statsmodels-0.12.2-cp36-none-win32.whl", hash = "sha256:5d3e7333e1c5b234797ed57c3d1533371374c1e1e7e7ed54d27805611f96e2d5"}, + {file = "statsmodels-0.12.2-cp36-none-win_amd64.whl", hash = "sha256:aaf3c75fd22cb9dcf9c1b28f8ae87521310870f4dd8a6a4f1010f1e46d992377"}, + {file = "statsmodels-0.12.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:c48b7cbb37a651bb1cd23614abc10f447845ad3c3a713bf74e2aad20cfc94ae7"}, + {file = "statsmodels-0.12.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:a3bd3922463dda8ad33e5e5075d2080e9e012aeb2032b5cdaeea9b79c2472000"}, + {file = "statsmodels-0.12.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:43de84bc08c8b9f778502aed7a476d6e68674e6878718e533b07d569cf0927a9"}, + {file = "statsmodels-0.12.2-cp37-none-win32.whl", hash = "sha256:0197855aa1d40c42532d6a75b4ca72e30826a50d90ec3047a404f9702d8b814f"}, + {file = "statsmodels-0.12.2-cp37-none-win_amd64.whl", hash = "sha256:93273aa1c31caf59bcce9790ca4c3f54fdc45a37c61084d06f1ba4fbe56e7752"}, + {file = "statsmodels-0.12.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:3e94306d4c07e332532ea4911d1f1d1f661c79aa73f22c5bb22e6dd47b40d562"}, + {file = "statsmodels-0.12.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:f3a7622f3d0ce2fc204f43b74de4e03e42775609705bf94d656b730482ca935a"}, + {file = "statsmodels-0.12.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:587deb788e7f8f3f866d28e812cf5c082b4d4a2d3f5beea94d0e9699ea71ef22"}, + {file = "statsmodels-0.12.2-cp38-none-win32.whl", hash = "sha256:cbbdf6f708c9a1f1fad5cdea5e4342d6fdb37e42e92288c2cf906b99976ffe15"}, + {file = "statsmodels-0.12.2-cp38-none-win_amd64.whl", hash = "sha256:1fa720e895112a1b04b27002218b0ea7f10dd1d9cffd1c018c88bbfb82520f57"}, + {file = "statsmodels-0.12.2-cp39-cp39-macosx_10_15_x86_64.whl", 
hash = "sha256:c3782ce846a52862ac72f89d22b6b1ca13d877bc593872309228a6f05d934321"}, + {file = "statsmodels-0.12.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:8f93cb3f7d87c1fc7e51b3b239371c25a17a0a8e782467fdf4788cfef600724a"}, + {file = "statsmodels-0.12.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f61f33f64760a22100b6b146217823f73cfedd251c9bdbd58453ca94e63326c7"}, + {file = "statsmodels-0.12.2-cp39-none-win32.whl", hash = "sha256:3aab85174444f1bcad1e9218a3d3db08f0f86eeb97985236ca8605a0a39ce305"}, + {file = "statsmodels-0.12.2-cp39-none-win_amd64.whl", hash = "sha256:94d3632d56c13eebebaefb52bd4b43144ad5a131337b57842f46db826fa7d2d3"}, + {file = "statsmodels-0.12.2.tar.gz", hash = "sha256:8ad7a7ae7cdd929095684118e3b05836c0ccb08b6a01fe984159475d174a1b10"}, ] stevedore = [ {file = "stevedore-3.3.0-py3-none-any.whl", hash = "sha256:50d7b78fbaf0d04cd62411188fa7eedcb03eb7f4c4b37005615ceebe582aa82a"}, {file = "stevedore-3.3.0.tar.gz", hash = "sha256:3a5bbd0652bf552748871eaa73a4a8dc2899786bc497a2aa1fcb4dcdb0debeee"}, ] terminado = [ - {file = "terminado-0.9.1-py3-none-any.whl", hash = "sha256:c55f025beb06c2e2669f7ba5a04f47bb3304c30c05842d4981d8f0fc9ab3b4e3"}, - {file = "terminado-0.9.1.tar.gz", hash = "sha256:3da72a155b807b01c9e8a5babd214e052a0a45a975751da3521a1c3381ce6d76"}, + {file = "terminado-0.9.2-py3-none-any.whl", hash = "sha256:23a053e06b22711269563c8bb96b36a036a86be8b5353e85e804f89b84aaa23f"}, + {file = "terminado-0.9.2.tar.gz", hash = "sha256:89e6d94b19e4bc9dce0ffd908dfaf55cc78a9bf735934e915a4a96f65ac9704c"}, ] testfixtures = [ - {file = "testfixtures-6.16.0-py2.py3-none-any.whl", hash = "sha256:017f1924f464189915e67162f530758537175ddd1461b211c666f0587ebc2939"}, - {file = "testfixtures-6.16.0.tar.gz", hash = "sha256:f3f567f35b3d004b0e19ee7dff61e75e3c05f3ffc1c41580d967b1fe144b3de9"}, + {file = "testfixtures-6.17.1-py2.py3-none-any.whl", hash = "sha256:9ed31e83f59619e2fa17df053b241e16e0608f4580f7b5a9333a0c9bdcc99137"}, + {file = "testfixtures-6.17.1.tar.gz", 
hash = "sha256:5ec3a0dd6f71cc4c304fbc024a10cc293d3e0b852c868014b9f233203e149bda"}, ] testpath = [ {file = "testpath-0.4.4-py2.py3-none-any.whl", hash = "sha256:bfcf9411ef4bf3db7579063e0546938b1edda3d69f4e1fb8756991f5951f85d4"}, @@ -3143,36 +3159,36 @@ traitlets = [ {file = "traitlets-5.0.5.tar.gz", hash = "sha256:178f4ce988f69189f7e523337a3e11d91c786ded9360174a3d9ca83e79bc5396"}, ] typed-ast = [ - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:73d785a950fc82dd2a25897d525d003f6378d1cb23ab305578394694202a58c3"}, - {file = "typed_ast-1.4.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:aaee9905aee35ba5905cfb3c62f3e83b3bec7b39413f0a7f19be4e547ea01ebb"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win32.whl", hash = "sha256:0c2c07682d61a629b68433afb159376e24e5b2fd4641d35424e462169c0a7919"}, - {file = "typed_ast-1.4.1-cp35-cp35m-win_amd64.whl", hash = "sha256:4083861b0aa07990b619bd7ddc365eb7fa4b817e99cf5f8d9cf21a42780f6e01"}, - {file = "typed_ast-1.4.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:269151951236b0f9a6f04015a9004084a5ab0d5f19b57de779f908621e7d8b75"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:24995c843eb0ad11a4527b026b4dde3da70e1f2d8806c99b7b4a7cf491612652"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:fe460b922ec15dd205595c9b5b99e2f056fd98ae8f9f56b888e7a17dc2b757e7"}, - {file = "typed_ast-1.4.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:fcf135e17cc74dbfbc05894ebca928ffeb23d9790b3167a674921db19082401f"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win32.whl", hash = "sha256:4e3e5da80ccbebfff202a67bf900d081906c358ccc3d5e3c8aea42fdfdfd51c1"}, - {file = "typed_ast-1.4.1-cp36-cp36m-win_amd64.whl", hash = "sha256:249862707802d40f7f29f6e1aad8d84b5aa9e44552d2cc17384b209f091276aa"}, - {file = "typed_ast-1.4.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:8ce678dbaf790dbdb3eba24056d5364fb45944f33553dd5869b7580cdbb83614"}, - {file = 
"typed_ast-1.4.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:c9e348e02e4d2b4a8b2eedb48210430658df6951fa484e59de33ff773fbd4b41"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:bcd3b13b56ea479b3650b82cabd6b5343a625b0ced5429e4ccad28a8973f301b"}, - {file = "typed_ast-1.4.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:f208eb7aff048f6bea9586e61af041ddf7f9ade7caed625742af423f6bae3298"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win32.whl", hash = "sha256:d5d33e9e7af3b34a40dc05f498939f0ebf187f07c385fd58d591c533ad8562fe"}, - {file = "typed_ast-1.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:0666aa36131496aed8f7be0410ff974562ab7eeac11ef351def9ea6fa28f6355"}, - {file = "typed_ast-1.4.1-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:d205b1b46085271b4e15f670058ce182bd1199e56b317bf2ec004b6a44f911f6"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:6daac9731f172c2a22ade6ed0c00197ee7cc1221aa84cfdf9c31defeb059a907"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:498b0f36cc7054c1fead3d7fc59d2150f4d5c6c56ba7fb150c013fbc683a8d2d"}, - {file = "typed_ast-1.4.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:7e4c9d7658aaa1fc80018593abdf8598bf91325af6af5cce4ce7c73bc45ea53d"}, - {file = "typed_ast-1.4.1-cp38-cp38-win32.whl", hash = "sha256:715ff2f2df46121071622063fc7543d9b1fd19ebfc4f5c8895af64a77a8c852c"}, - {file = "typed_ast-1.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc0fea399acb12edbf8a628ba8d2312f583bdbdb3335635db062fa98cf71fca4"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:d43943ef777f9a1c42bf4e552ba23ac77a6351de620aa9acf64ad54933ad4d34"}, - {file = "typed_ast-1.4.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:92c325624e304ebf0e025d1224b77dd4e6393f18aab8d829b5b7e04afe9b7a2c"}, - {file = "typed_ast-1.4.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d648b8e3bf2fe648745c8ffcee3db3ff903d0817a01a12dd6a6ea7a8f4889072"}, - {file = 
"typed_ast-1.4.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:fac11badff8313e23717f3dada86a15389d0708275bddf766cca67a84ead3e91"}, - {file = "typed_ast-1.4.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:0d8110d78a5736e16e26213114a38ca35cb15b6515d535413b090bd50951556d"}, - {file = "typed_ast-1.4.1-cp39-cp39-win32.whl", hash = "sha256:b52ccf7cfe4ce2a1064b18594381bccf4179c2ecf7f513134ec2f993dd4ab395"}, - {file = "typed_ast-1.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:3742b32cf1c6ef124d57f95be609c473d7ec4c14d0090e5a5e05a15269fb4d0c"}, - {file = "typed_ast-1.4.1.tar.gz", hash = "sha256:8c8aaad94455178e3187ab22c8b01a3837f8ee50e09cf31f1ba129eb293ec30b"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487"}, + {file = "typed_ast-1.4.2-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win32.whl", hash = "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400"}, + {file = "typed_ast-1.4.2-cp35-cp35m-win_amd64.whl", hash = "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606"}, + {file = "typed_ast-1.4.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc"}, + {file = "typed_ast-1.4.2-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a"}, + {file = 
"typed_ast-1.4.2-cp36-cp36m-win32.whl", hash = "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151"}, + {file = "typed_ast-1.4.2-cp36-cp36m-win_amd64.whl", hash = "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3"}, + {file = "typed_ast-1.4.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581"}, + {file = "typed_ast-1.4.2-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win32.whl", hash = "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd"}, + {file = "typed_ast-1.4.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496"}, + {file = "typed_ast-1.4.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea"}, + {file = "typed_ast-1.4.2-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787"}, + {file = "typed_ast-1.4.2-cp38-cp38-win32.whl", hash = "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2"}, + {file = "typed_ast-1.4.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937"}, + {file = 
"typed_ast-1.4.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166"}, + {file = "typed_ast-1.4.2-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d"}, + {file = "typed_ast-1.4.2-cp39-cp39-win32.whl", hash = "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b"}, + {file = "typed_ast-1.4.2-cp39-cp39-win_amd64.whl", hash = "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440"}, + {file = "typed_ast-1.4.2.tar.gz", hash = "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a"}, ] typing-extensions = [ {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, @@ -3184,15 +3200,15 @@ tzlocal = [ {file = "tzlocal-2.1.tar.gz", hash = "sha256:643c97c5294aedc737780a49d9df30889321cbe1204eac2c2ec6134035a92e44"}, ] urllib3 = [ - {file = "urllib3-1.26.2-py2.py3-none-any.whl", hash = "sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473"}, - {file = "urllib3-1.26.2.tar.gz", hash = "sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08"}, + {file = "urllib3-1.26.3-py2.py3-none-any.whl", hash = "sha256:1b465e494e3e0d8939b50680403e3aedaa2bc434b7d5af64dfd3c958d7f5ae80"}, + {file = "urllib3-1.26.3.tar.gz", hash = "sha256:de3eedaad74a2683334e282005cd8d7f22f4d55fa690a2a1020a416cb0a47e73"}, ] utm = [ {file = "utm-0.7.0.tar.gz", hash = "sha256:3c9a3650e98bb6eecec535418d0dfd4db8f88c8ceaca112a0ff0787e116566e2"}, ] virtualenv = [ - {file = "virtualenv-20.2.2-py2.py3-none-any.whl", hash = 
"sha256:54b05fc737ea9c9ee9f8340f579e5da5b09fb64fd010ab5757eb90268616907c"}, - {file = "virtualenv-20.2.2.tar.gz", hash = "sha256:b7a8ec323ee02fb2312f098b6b4c9de99559b462775bc8fe3627a73706603c1b"}, + {file = "virtualenv-20.4.2-py2.py3-none-any.whl", hash = "sha256:2be72df684b74df0ea47679a7df93fd0e04e72520022c57b479d8f881485dbe3"}, + {file = "virtualenv-20.4.2.tar.gz", hash = "sha256:147b43894e51dd6bba882cf9c282447f780e2251cd35172403745fc381a0a80d"}, ] wcwidth = [ {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, diff --git a/src/urban_meal_delivery/db/cities.py b/src/urban_meal_delivery/db/cities.py index bd5932f..b6cf4e0 100644 --- a/src/urban_meal_delivery/db/cities.py +++ b/src/urban_meal_delivery/db/cities.py @@ -218,14 +218,16 @@ class City(meta.Base): all_zip_codes = sorted( row[0] for row in db.session.execute( - f""" -- # noqa:S608 - SELECT DISTINCT - zip_code - FROM - {config.CLEAN_SCHEMA}.addresses - WHERE - city_id = {self.id}; - """, + sa.text( + f""" -- # noqa:S608 + SELECT DISTINCT + zip_code + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {self.id}; + """, + ), ) ) cmap = utils.make_random_cmap(len(all_zip_codes), bright=False) diff --git a/src/urban_meal_delivery/forecasts/timify.py b/src/urban_meal_delivery/forecasts/timify.py index 3c9a147..b0b6497 100644 --- a/src/urban_meal_delivery/forecasts/timify.py +++ b/src/urban_meal_delivery/forecasts/timify.py @@ -6,6 +6,7 @@ import datetime as dt from typing import Tuple import pandas as pd +import sqlalchemy as sa from urban_meal_delivery import config from urban_meal_delivery import db @@ -68,64 +69,68 @@ class OrderHistory: # `data` is probably missing "pixel_id"-"start_at" pairs. # This happens when there is no demand in the `Pixel` in the given `time_step`. 
data = pd.read_sql_query( - f"""-- # noqa:E501,WPS221 - SELECT - pixel_id, - start_at, - COUNT(*) AS n_orders - FROM ( + sa.text( + f""" -- # noqa:WPS221 SELECT pixel_id, - placed_at_without_seconds - minutes_to_be_cut AS start_at + start_at, + COUNT(*) AS n_orders FROM ( SELECT - pixels.pixel_id, - DATE_TRUNC('MINUTE', orders.placed_at) AS placed_at_without_seconds, - (( - EXTRACT(MINUTES FROM orders.placed_at)::INTEGER % {self._time_step} - )::TEXT || ' MINUTES')::INTERVAL - AS minutes_to_be_cut + pixel_id, + placed_at_without_seconds - minutes_to_be_cut AS start_at FROM ( SELECT - id, - placed_at, - pickup_address_id - FROM - {config.CLEAN_SCHEMA}.orders + pixels.pixel_id, + DATE_TRUNC('MINUTE', orders.placed_at) + AS placed_at_without_seconds, + (( + EXTRACT(MINUTES FROM orders.placed_at)::INTEGER + % {self._time_step} + )::TEXT || ' MINUTES')::INTERVAL + AS minutes_to_be_cut + FROM ( + SELECT + id, + placed_at, + pickup_address_id + FROM + {config.CLEAN_SCHEMA}.orders + INNER JOIN ( + SELECT + id AS address_id + FROM + {config.CLEAN_SCHEMA}.addresses + WHERE + city_id = {self._grid.city.id} + ) AS in_city + ON orders.pickup_address_id = in_city.address_id + WHERE + ad_hoc IS TRUE + ) AS + orders INNER JOIN ( SELECT - id AS address_id + address_id, + pixel_id FROM - {config.CLEAN_SCHEMA}.addresses + {config.CLEAN_SCHEMA}.addresses_pixels WHERE - city_id = {self._grid.city.id} - ) AS in_city - ON orders.pickup_address_id = in_city.address_id - WHERE - ad_hoc IS TRUE - ) AS - orders - INNER JOIN ( - SELECT - address_id, - pixel_id - FROM - {config.CLEAN_SCHEMA}.addresses_pixels - WHERE - grid_id = {self._grid.id} - AND - city_id = {self._grid.city.id} -- redundant -> sanity check - ) AS pixels - ON orders.pickup_address_id = pixels.address_id - ) AS placed_at_aggregated_into_start_at - ) AS pixel_start_at_combinations - GROUP BY - pixel_id, - start_at - ORDER BY - pixel_id, - start_at; - """, + grid_id = {self._grid.id} + AND + city_id = {self._grid.city.id} -- 
-> sanity check + ) AS pixels + ON orders.pickup_address_id = pixels.address_id + ) AS placed_at_aggregated_into_start_at + ) AS pixel_start_at_combinations + GROUP BY + pixel_id, + start_at + ORDER BY + pixel_id, + start_at; + """, + ), # noqa:WPS355 con=db.connection, index_col=['pixel_id', 'start_at'], ) From d4ca85b55a6eeb9de14d37736c88dc86e1676930 Mon Sep 17 00:00:00 2001 From: Alexander Hess Date: Thu, 4 Feb 2021 13:13:26 +0100 Subject: [PATCH 72/72] Finalize release 0.3.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d07df8f..ac86668 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ target-version = ["py38"] [tool.poetry] name = "urban-meal-delivery" -version = "0.3.0.dev0" +version = "0.3.0" authors = ["Alexander Hess "] description = "Optimizing an urban meal delivery platform"