From 738b1449b2ebf62ed223fd3721341afd05c52e24 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 23 Jan 2019 12:25:45 +0100 Subject: [PATCH 01/47] Scaffold as packagestarting in version 0.3.0 --- .gitignore | 5 ++++ LICENSE | 21 ++++++++++++++ MANIFEST.in | 4 +++ README.md | 36 ++++++++++++++++++++++++ docker-compose.yml | 7 +++++ requirements.txt | 0 setup.cfg | 2 ++ setup.py | 70 ++++++++++++++++++++++++++++++++++++++++++++++ 8 files changed, 145 insertions(+) create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 README.md create mode 100644 docker-compose.yml create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py diff --git a/.gitignore b/.gitignore index 24482e2..ac9fc99 100644 --- a/.gitignore +++ b/.gitignore @@ -114,3 +114,8 @@ dmypy.json # Pyre type checker .pyre/ +# VScode IDE +.vscode + +# PyCharm IDE +.idea \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..7d63f15 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2017 Geografía Aplicada S.L + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..6b9eb5e --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,4 @@ +# Include the license file +include LICENSE +include requirements.txt +include README.md diff --git a/README.md b/README.md new file mode 100644 index 0000000..de053da --- /dev/null +++ b/README.md @@ -0,0 +1,36 @@ +# Longitude + +A **new** bunch of middleware functions to build applications on top of CARTO. + +How to use: +```bash +pip install geographica-longitude +``` + +Or install from GitHub: +```bash +pip install -e git+https://github.com/GeographicaGS/Longitude#egg=longitude +``` + +## Upload a new version to PyPi + +You need to be part of *Geographica's development team* to be able to accomplish this task. 
+ + +Start docker +``` +docker-compose run --rm python bash +``` + +Install needed dependencies +``` +pip install -r requirements.txt +``` + +Set version at ```setup.py``` + +Upload: +``` +python setup.py sdist +twine upload dist/geographica-longitude-.tar.gz +``` diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..e53abf4 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,7 @@ +version: "3" +services: + python: + image: python:3.6.3-onbuild + command: bash + volumes: + - .:/usr/src/app diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..b88034e --- /dev/null +++ b/setup.cfg @@ -0,0 +1,2 @@ +[metadata] +description-file = README.md diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..e96d70e --- /dev/null +++ b/setup.py @@ -0,0 +1,70 @@ +# Always prefer setuptools over distutils +from setuptools import setup, find_packages +# To use a consistent encoding +from codecs import open +from os import path + +here = path.abspath(path.dirname(__file__)) + +# Get the long description from the README file +with open(path.join(here, 'README.md'), encoding='utf-8') as f: + long_description = f.read() + +# get the requirements +with open('requirements.txt') as f: + required = f.read().splitlines() + +setup( + name='geographica-longitude', + + version='0.3.0', + + description='Longitude', + long_description=long_description, + + # The project's main homepage. + url='https://github.com/GeographicaGS/Longitude', + + # Author details + author='Geographica', + author_email='pypi@geographica.gs', + + # Choose your license + license='MIT', + + # See https://pypi.python.org/pypi?%3Aaction=list_classifiers + classifiers=[ + # How mature is this project? 
Common values are + # 3 - Alpha + # 4 - Beta + # 5 - Production/Stable + 'Development Status :: 3 - Alpha', + + # Indicate who your project is intended for + 'Intended Audience :: Developers', + 'Intended Audience :: Information Technology', + 'Topic :: Database', + 'Topic :: Scientific/Engineering :: GIS', + 'Topic :: Scientific/Engineering :: Information Analysis', + 'Topic :: Software Development :: Libraries', + + # Pick your license as you wish (should match "license" above) + 'License :: OSI Approved :: MIT License', + + # Specify the Python versions you support here. In particular, ensure + # that you indicate whether you support Python 2, Python 3 or both. + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.3', + 'Programming Language :: Python :: 3.4', + 'Programming Language :: Python :: 3.5', + 'Programming Language :: Python :: 3.6' + ], + + # What does your project relate to? + keywords='carto longitude', + + packages=find_packages(exclude=['contrib', 'docs', 'tests']), + + install_requires=[required], + +) From 8845ea5ed077fec570ccae37ab8739280e29fb19 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 23 Jan 2019 13:17:00 +0100 Subject: [PATCH 02/47] Scaffold structure including initial unit test structure + coverage configuration. 
--- .coveragerc | 26 ++++ .gitignore | 5 +- Pipfile | 15 ++ Pipfile.lock | 214 +++++++++++++++++++++++++++ README.md | 19 +++ requirements.txt => core/__init__.py | 0 core/data_source.py | 2 + core/data_sources/__init__.py | 0 core/data_sources/carto.py | 5 + core/data_sources/postgre.py | 5 + tests/test_carto_data_source.py | 1 + tests/test_data_source.py | 1 + tests/test_postgre_data_source.py | 1 + 13 files changed, 293 insertions(+), 1 deletion(-) create mode 100644 .coveragerc create mode 100644 Pipfile create mode 100644 Pipfile.lock rename requirements.txt => core/__init__.py (100%) create mode 100644 core/data_source.py create mode 100644 core/data_sources/__init__.py create mode 100644 core/data_sources/carto.py create mode 100644 core/data_sources/postgre.py create mode 100644 tests/test_carto_data_source.py create mode 100644 tests/test_data_source.py create mode 100644 tests/test_postgre_data_source.py diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..f971665 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,26 @@ +# .coveragerc to control coverage.py +[run] +branch = True + +[report] +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain about missing debug-only code: + def __repr__ + if self\.debug + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +ignore_errors = True + +[html] +directory = coverage_html_report \ No newline at end of file diff --git a/.gitignore b/.gitignore index ac9fc99..d34324f 100644 --- a/.gitignore +++ b/.gitignore @@ -118,4 +118,7 @@ dmypy.json .vscode # PyCharm IDE -.idea \ No newline at end of file +.idea + +# Coverage report +coverage_html_report diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..432c140 --- /dev/null +++ b/Pipfile @@ -0,0 
+1,15 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[dev-packages] +pytest = "*" +pylint = "*" +coverage = "*" +pytest-cov = "*" + +[packages] + +[requires] +python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..6cf91c9 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,214 @@ +{ + "_meta": { + "hash": { + "sha256": "b2981eef83d1abd74aa04e93645e9acad199828ac6882fb3ad9f508a83be00d8" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.6" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": {}, + "develop": { + "astroid": { + "hashes": [ + "sha256:35b032003d6a863f5dcd7ec11abd5cd5893428beaa31ab164982403bcb311f22", + "sha256:6a5d668d7dc69110de01cdf7aeec69a679ef486862a0850cc0fd5571505b6b7e" + ], + "version": "==2.1.0" + }, + "atomicwrites": { + "hashes": [ + "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", + "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" + ], + "version": "==1.2.1" + }, + "attrs": { + "hashes": [ + "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", + "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb" + ], + "version": "==18.2.0" + }, + "coverage": { + "hashes": [ + "sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", + "sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", + "sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", + "sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", + "sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", + "sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", + "sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", + "sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", + 
"sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", + "sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", + "sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", + "sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", + "sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", + "sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", + "sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", + "sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", + "sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", + "sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", + "sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", + "sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", + "sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", + "sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", + "sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", + "sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", + "sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", + "sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", + "sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", + "sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", + "sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", + "sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", + "sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9" + ], + "index": "pypi", + "version": "==4.5.2" + }, + "isort": { + "hashes": [ + "sha256:1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", + 
"sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", + "sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" + ], + "version": "==4.3.4" + }, + "lazy-object-proxy": { + "hashes": [ + "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33", + "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39", + "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019", + "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088", + "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b", + "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e", + "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6", + "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b", + "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5", + "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff", + "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd", + "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7", + "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff", + "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d", + "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2", + "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35", + "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4", + "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514", + "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252", + "sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109", + "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f", + "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c", + "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92", + 
"sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577", + "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d", + "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d", + "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f", + "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a", + "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b" + ], + "version": "==1.3.1" + }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "more-itertools": { + "hashes": [ + "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", + "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", + "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + ], + "version": "==5.0.0" + }, + "pluggy": { + "hashes": [ + "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", + "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" + ], + "version": "==0.8.1" + }, + "py": { + "hashes": [ + "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", + "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" + ], + "version": "==1.7.0" + }, + "pylint": { + "hashes": [ + "sha256:689de29ae747642ab230c6d37be2b969bf75663176658851f456619aacf27492", + "sha256:771467c434d0d9f081741fec1d64dfb011ed26e65e12a28fe06ca2f61c4d556c" + ], + "index": "pypi", + "version": "==2.2.2" + }, + "pytest": { + "hashes": [ + "sha256:41568ea7ecb4a68d7f63837cf65b92ce8d0105e43196ff2b26622995bb3dc4b2", + "sha256:c3c573a29d7c9547fb90217ece8a8843aa0c1328a797e200290dc3d0b4b823be" + ], + "index": "pypi", + "version": "==4.1.1" + }, + "pytest-cov": { + "hashes": [ + 
"sha256:0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", + "sha256:230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f" + ], + "index": "pypi", + "version": "==2.6.1" + }, + "six": { + "hashes": [ + "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", + "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + ], + "version": "==1.12.0" + }, + "typed-ast": { + "hashes": [ + "sha256:023625bfa9359e29bd6e24cac2a4503495b49761d48a5f1e38333fc4ac4d93fe", + "sha256:07591f7a5fdff50e2e566c4c1e9df545c75d21e27d98d18cb405727ed0ef329c", + "sha256:153e526b0f4ffbfada72d0bb5ffe8574ba02803d2f3a9c605c8cf99dfedd72a2", + "sha256:3ad2bdcd46a4a1518d7376e9f5016d17718a9ed3c6a3f09203d832f6c165de4a", + "sha256:3ea98c84df53ada97ee1c5159bb3bc784bd734231235a1ede14c8ae0775049f7", + "sha256:51a7141ccd076fa561af107cfb7a8b6d06a008d92451a1ac7e73149d18e9a827", + "sha256:52c93cd10e6c24e7ac97e8615da9f224fd75c61770515cb323316c30830ddb33", + "sha256:6344c84baeda3d7b33e157f0b292e4dd53d05ddb57a63f738178c01cac4635c9", + "sha256:64699ca1b3bd5070bdeb043e6d43bc1d0cebe08008548f4a6bee782b0ecce032", + "sha256:74903f2e56bbffe29282ef8a5487d207d10be0f8513b41aff787d954a4cf91c9", + "sha256:7891710dba83c29ee2bd51ecaa82f60f6bede40271af781110c08be134207bf2", + "sha256:91976c56224e26c256a0de0f76d2004ab885a29423737684b4f7ebdd2f46dde2", + "sha256:9bad678a576ecc71f25eba9f1e3fd8d01c28c12a2834850b458428b3e855f062", + "sha256:b4726339a4c180a8b6ad9d8b50d2b6dc247e1b79b38fe2290549c98e82e4fd15", + "sha256:ba36f6aa3f8933edf94ea35826daf92cbb3ec248b89eccdc053d4a815d285357", + "sha256:bbc96bde544fd19e9ef168e4dfa5c3dfe704bfa78128fa76f361d64d6b0f731a", + "sha256:c0c927f1e44469056f7f2dada266c79b577da378bbde3f6d2ada726d131e4824", + "sha256:c0f9a3708008aa59f560fa1bd22385e05b79b8e38e0721a15a8402b089243442", + "sha256:f0bf6f36ff9c5643004171f11d2fdc745aa3953c5aacf2536a0685db9ceb3fb1", + "sha256:f5be39a0146be663cbf210a4d95c3c58b2d7df7b043c9047c5448e358f0550a2", + 
"sha256:fcd198bf19d9213e5cbf2cde2b9ef20a9856e716f76f9476157f90ae6de06cc6" + ], + "markers": "python_version < '3.7' and implementation_name == 'cpython'", + "version": "==1.2.0" + }, + "wrapt": { + "hashes": [ + "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533" + ], + "version": "==1.11.1" + } + } +} diff --git a/README.md b/README.md index de053da..d1cfab5 100644 --- a/README.md +++ b/README.md @@ -12,6 +12,25 @@ Or install from GitHub: pip install -e git+https://github.com/GeographicaGS/Longitude#egg=longitude ``` +## Configure development environment + +Install pipenv in your development machine if you still do not have it. + +Set up Python environment: + +```shell +$ cd [path-to-longitude-folder] +$ pipenv install +``` + +To activate the virtual environment: `$ pipenv shell`. If the environment variables are defined in a `.env` file, they are loaded in this shell. + +## Testing and coverage + +The ```pytest-cov``` plugin is being used. Coverage configuration is at ```.coveragerc``` (including output folder). + +You can run something like: ```pytest --cov-report=html --cov=core core``` and the results will go in the defined html folder. + ## Upload a new version to PyPi You need to be part of *Geographica's development team* to be able to accomplish this task. 
diff --git a/requirements.txt b/core/__init__.py similarity index 100% rename from requirements.txt rename to core/__init__.py diff --git a/core/data_source.py b/core/data_source.py new file mode 100644 index 0000000..0c1a3bd --- /dev/null +++ b/core/data_source.py @@ -0,0 +1,2 @@ +class DataSource: + pass diff --git a/core/data_sources/__init__.py b/core/data_sources/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py new file mode 100644 index 0000000..6429de5 --- /dev/null +++ b/core/data_sources/carto.py @@ -0,0 +1,5 @@ +from core.data_source import DataSource + + +class CartoDataSource(DataSource): + pass diff --git a/core/data_sources/postgre.py b/core/data_sources/postgre.py new file mode 100644 index 0000000..d2494f8 --- /dev/null +++ b/core/data_sources/postgre.py @@ -0,0 +1,5 @@ +from core.data_source import DataSource + + +class PostgreDataSource(DataSource): + pass diff --git a/tests/test_carto_data_source.py b/tests/test_carto_data_source.py new file mode 100644 index 0000000..88fa8c0 --- /dev/null +++ b/tests/test_carto_data_source.py @@ -0,0 +1 @@ +from core.data_sources.carto import CartoDataSource \ No newline at end of file diff --git a/tests/test_data_source.py b/tests/test_data_source.py new file mode 100644 index 0000000..c01ad32 --- /dev/null +++ b/tests/test_data_source.py @@ -0,0 +1 @@ +from core.data_source import DataSource \ No newline at end of file diff --git a/tests/test_postgre_data_source.py b/tests/test_postgre_data_source.py new file mode 100644 index 0000000..3dbc08a --- /dev/null +++ b/tests/test_postgre_data_source.py @@ -0,0 +1 @@ +from core.data_sources.postgre import PostgreDataSource \ No newline at end of file From 64073a740949f4f8e213e243c7d807c80651e762 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 23 Jan 2019 17:38:37 +0100 Subject: [PATCH 03/47] Scaffold for carto data source including tests. 
--- .coveragerc | 1 + Pipfile | 1 + Pipfile.lock | 73 ++++++++++++++++++- README.md | 2 +- core/data_source.py | 45 +++++++++++- core/data_sources/carto.py | 30 +++++++- core/tests/__init__.py | 0 core/tests/test_carto_data_source.py | 41 +++++++++++ core/tests/test_data_source.py | 32 ++++++++ .../tests}/test_postgre_data_source.py | 0 generate_core_coverage.sh | 3 + tests/test_carto_data_source.py | 1 - tests/test_data_source.py | 1 - 13 files changed, 223 insertions(+), 7 deletions(-) create mode 100644 core/tests/__init__.py create mode 100644 core/tests/test_carto_data_source.py create mode 100644 core/tests/test_data_source.py rename {tests => core/tests}/test_postgre_data_source.py (100%) create mode 100755 generate_core_coverage.sh delete mode 100644 tests/test_carto_data_source.py delete mode 100644 tests/test_data_source.py diff --git a/.coveragerc b/.coveragerc index f971665..58b58e5 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,6 +1,7 @@ # .coveragerc to control coverage.py [run] branch = True +omit = tests/* [report] # Regexes for lines to exclude from consideration diff --git a/Pipfile b/Pipfile index 432c140..2882383 100644 --- a/Pipfile +++ b/Pipfile @@ -10,6 +10,7 @@ coverage = "*" pytest-cov = "*" [packages] +carto = "*" [requires] python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock index 6cf91c9..8a8c4e1 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "b2981eef83d1abd74aa04e93645e9acad199828ac6882fb3ad9f508a83be00d8" + "sha256": "686700a73ddd35aeb2c8718ed2b94bf73198540a3d195baabc8b5dc142e2c5e7" }, "pipfile-spec": 6, "requires": { @@ -15,7 +15,76 @@ } ] }, - "default": {}, + "default": { + "carto": { + "hashes": [ + "sha256:9a54ece9d8f940bc3de3cb742e189c4ea681494d5ec251fec469319a39093dbc" + ], + "index": "pypi", + "version": "==1.4.0" + }, + "certifi": { + "hashes": [ + "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", + 
"sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033" + ], + "version": "==2018.11.29" + }, + "chardet": { + "hashes": [ + "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", + "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + ], + "version": "==3.0.4" + }, + "future": { + "hashes": [ + "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" + ], + "version": "==0.17.1" + }, + "idna": { + "hashes": [ + "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", + "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" + ], + "version": "==2.8" + }, + "pyrestcli": { + "hashes": [ + "sha256:b8f9b67380bf9024f8c73dc25bf0466afe4b0714732590247acdabeb8137deb9" + ], + "version": "==0.6.7" + }, + "python-dateutil": { + "hashes": [ + "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", + "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02" + ], + "version": "==2.7.5" + }, + "requests": { + "hashes": [ + "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", + "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" + ], + "version": "==2.21.0" + }, + "six": { + "hashes": [ + "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", + "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" + ], + "version": "==1.12.0" + }, + "urllib3": { + "hashes": [ + "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", + "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" + ], + "version": "==1.24.1" + } + }, "develop": { "astroid": { "hashes": [ diff --git a/README.md b/README.md index d1cfab5..ff5578a 100644 --- a/README.md +++ b/README.md @@ -27,7 +27,7 @@ To activate the virtual environment: `$ pipenv shell`. If the environment variab ## Testing and coverage -The ```pytest-cov``` plugin is being used. 
Coverage configuration is at ```.coveragerc``` (including output folder). +The [```pytest-cov```](https://pytest-cov.readthedocs.io/en/latest/) plugin is being used. Coverage configuration is at ```.coveragerc``` (including output folder). You can run something like: ```pytest --cov-report=html --cov=core core``` and the results will go in the defined html folder. diff --git a/core/data_source.py b/core/data_source.py index 0c1a3bd..b77dae9 100644 --- a/core/data_source.py +++ b/core/data_source.py @@ -1,2 +1,45 @@ +import logging + + class DataSource: - pass + logger = logging.getLogger(__name__) + default_config = {} + + def __init__(self, config=None): + if config is None: + config = {} + + if not isinstance(config, dict): + raise TypeError('Config object must be a dictionary') + + default_keys = set(self.default_config.keys()) + config_keys = set(config.keys()) + unexpected_config_keys = list(config_keys.difference(default_keys)) + using_defaults_for = list(default_keys.difference(config_keys)) + + unexpected_config_keys.sort() + using_defaults_for.sort() + + for k in unexpected_config_keys: + self.logger.warning("%s is an unexpected config value" % k) + + for k in using_defaults_for: + self.logger.info("%s key is using default value" % k) + + self._config = config + + @property + def is_ready(self): + return NotImplementedError + + def query(self, params): + raise NotImplementedError + + def get_config(self, key): + try: + return self._config[key] + except KeyError: + try: + return self.default_config[key] + except KeyError: + return None diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py index 6429de5..dbd7106 100644 --- a/core/data_sources/carto.py +++ b/core/data_sources/carto.py @@ -1,5 +1,33 @@ from core.data_source import DataSource +from carto.auth import APIKeyAuthClient +from carto.sql import BatchSQLClient, SQLClient class CartoDataSource(DataSource): - pass + default_config = { + 'api_version': 'v2', + 'uses_batch': False, + 
'api_key': '', + 'user_url': '' + } + + def __init__(self, config=None): + super().__init__(config) + self._sql_client = None + self._batch_client = None + + def setup(self): + auth_client = APIKeyAuthClient(api_key=self.get_config('api_key'), base_url=self.get_config('user_url')) + self._sql_client = SQLClient(auth_client, api_version=self.get_config('api_version')) + + if self.get_config('uses_batch'): + self._batch_client = BatchSQLClient(auth_client) + + @property + def is_ready(self): + sql_setup_ready = self._sql_client is not None + batch_setup_ready = not self.get_config('uses_batch') or (self._batch_client is not None) + return sql_setup_ready and batch_setup_ready + + def query(self, params): + pass diff --git a/core/tests/__init__.py b/core/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/tests/test_carto_data_source.py b/core/tests/test_carto_data_source.py new file mode 100644 index 0000000..67bd921 --- /dev/null +++ b/core/tests/test_carto_data_source.py @@ -0,0 +1,41 @@ +from unittest import TestCase, mock + +from carto.exceptions import CartoException + +from core.data_sources.carto import CartoDataSource + + +class TestCartoDataSource(TestCase): + + def test_default_configuration_loads(self): + with self.assertLogs(level='INFO') as log_test: + carto_ds = CartoDataSource() + self.assertEqual(log_test.output, + ['INFO:core.data_source:api_key key is using default value', + 'INFO:core.data_source:api_version key is using default value', + 'INFO:core.data_source:user_url key is using default value', + 'INFO:core.data_source:uses_batch key is using default value'] + ) + + self.assertEqual('', carto_ds.get_config('api_key')) + self.assertEqual('v2', carto_ds.get_config('api_version')) + self.assertEqual('', carto_ds.get_config('user_url')) + self.assertFalse(carto_ds.get_config('uses_batch')) + + def test_setup_fails_with_default_config(self): + import warnings + # Default config MUST NOT BE USABLE for authentication + with 
warnings.catch_warnings(record=True) as w: + with self.assertRaises(CartoException) as error: + CartoDataSource().setup() + self.assertEqual(1, len(w), 'Carto will warn us about not using https') + + def test_setup_needs_a_valid_user_url(self): + config = { + 'user_url': 'https://fake_user.carto.com', + 'uses_batch': True + + } + carto_ds = CartoDataSource(config=config) + carto_ds.setup() + self.assertTrue(carto_ds.is_ready) diff --git a/core/tests/test_data_source.py b/core/tests/test_data_source.py new file mode 100644 index 0000000..bae496c --- /dev/null +++ b/core/tests/test_data_source.py @@ -0,0 +1,32 @@ +from unittest import TestCase +from core.data_source import DataSource + + +class TestDataSource(TestCase): + def test_config(self): + + # Config must be a dictionary + with self.assertRaises(TypeError): + DataSource([]) + with self.assertRaises(TypeError): + DataSource("") + with self.assertRaises(TypeError): + DataSource(0) + + # Any values can go in the configuration dictionary but not expected ones trigger a warning + config = {"some_config_value": 0, "some_another_config_value": "tomato"} + with self.assertLogs(level='WARNING') as log_test: + ds = DataSource(config) + self.assertEqual(log_test.output, + ['WARNING:core.data_source:some_another_config_value is an unexpected config value', + 'WARNING:core.data_source:some_config_value is an unexpected config value']) + + # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. 
+ self.assertEqual(0, ds.get_config('some_config_value')) + self.assertEqual("tomato", ds.get_config('some_another_config_value')) + self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) + + def test_query_is_custom(self): + ds = DataSource({}) + with self.assertRaises(NotImplementedError): + ds.query({}) diff --git a/tests/test_postgre_data_source.py b/core/tests/test_postgre_data_source.py similarity index 100% rename from tests/test_postgre_data_source.py rename to core/tests/test_postgre_data_source.py diff --git a/generate_core_coverage.sh b/generate_core_coverage.sh new file mode 100755 index 0000000..2fe1ea3 --- /dev/null +++ b/generate_core_coverage.sh @@ -0,0 +1,3 @@ +#!/usr/bin/env bash +pytest --cov-report=html --cov=core core/tests/ +sensible-browser coverage_html_report/index.html diff --git a/tests/test_carto_data_source.py b/tests/test_carto_data_source.py deleted file mode 100644 index 88fa8c0..0000000 --- a/tests/test_carto_data_source.py +++ /dev/null @@ -1 +0,0 @@ -from core.data_sources.carto import CartoDataSource \ No newline at end of file diff --git a/tests/test_data_source.py b/tests/test_data_source.py deleted file mode 100644 index c01ad32..0000000 --- a/tests/test_data_source.py +++ /dev/null @@ -1 +0,0 @@ -from core.data_source import DataSource \ No newline at end of file From 37f443a77a292257797b8a32ad9947f0c908b17a Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 24 Jan 2019 10:19:44 +0100 Subject: [PATCH 04/47] Added per-query configuration (+ default). Exception wrapping to hide the CARTO layer. 
--- core/data_source.py | 45 -------- core/data_sources/base.py | 161 +++++++++++++++++++++++++++ core/data_sources/carto.py | 20 +++- core/data_sources/postgre.py | 2 +- core/samples/__init__.py | 0 core/samples/carto_sample.py | 12 ++ core/tests/test_carto_data_source.py | 10 +- core/tests/test_data_source.py | 8 +- 8 files changed, 199 insertions(+), 59 deletions(-) delete mode 100644 core/data_source.py create mode 100644 core/data_sources/base.py create mode 100644 core/samples/__init__.py create mode 100644 core/samples/carto_sample.py diff --git a/core/data_source.py b/core/data_source.py deleted file mode 100644 index b77dae9..0000000 --- a/core/data_source.py +++ /dev/null @@ -1,45 +0,0 @@ -import logging - - -class DataSource: - logger = logging.getLogger(__name__) - default_config = {} - - def __init__(self, config=None): - if config is None: - config = {} - - if not isinstance(config, dict): - raise TypeError('Config object must be a dictionary') - - default_keys = set(self.default_config.keys()) - config_keys = set(config.keys()) - unexpected_config_keys = list(config_keys.difference(default_keys)) - using_defaults_for = list(default_keys.difference(config_keys)) - - unexpected_config_keys.sort() - using_defaults_for.sort() - - for k in unexpected_config_keys: - self.logger.warning("%s is an unexpected config value" % k) - - for k in using_defaults_for: - self.logger.info("%s key is using default value" % k) - - self._config = config - - @property - def is_ready(self): - return NotImplementedError - - def query(self, params): - raise NotImplementedError - - def get_config(self, key): - try: - return self._config[key] - except KeyError: - try: - return self.default_config[key] - except KeyError: - return None diff --git a/core/data_sources/base.py b/core/data_sources/base.py new file mode 100644 index 0000000..0925a47 --- /dev/null +++ b/core/data_sources/base.py @@ -0,0 +1,161 @@ +import re +import logging + + +class LongitudeBaseException(Exception): 
+ pass + + +class LongitudeRetriesExceeded(LongitudeBaseException): + pass + + +class LongitudeQueryCannotBeExecutedException(LongitudeBaseException): + pass + + +class LongitudeWrongQueryException(LongitudeBaseException): + pass + + +def is_write_query(sql_statement): + """ + Check if a query string is a write query + """ + write_cmds = 'drop|delete|insert|update|grant|execute|perform|create|begin|commit|alter' + is_write = re.search(write_cmds, sql_statement.lower()) + return is_write + + +class DataSourceQueryConfig: + def __init__(self, enable_writing=False, retries=0, custom=None): + self.enable_writing = enable_writing + self.retries = retries + + self.custom = custom or {} # Depending on the specific interface, sometimes we also need to specify per-query values + + def copy(self): + return DataSourceQueryConfig(self.enable_writing, self.retries, self.custom) + + +class DataSource: + default_config = {} + + def __init__(self, config=None): + self.logger = logging.getLogger(self.__class__.__module__) + self._default_query_config = DataSourceQueryConfig() + + if config is None: + config = {} + + if not isinstance(config, dict): + raise TypeError('Config object must be a dictionary') + + default_keys = set(self.default_config.keys()) + config_keys = set(config.keys()) + unexpected_config_keys = list(config_keys.difference(default_keys)) + using_defaults_for = list(default_keys.difference(config_keys)) + + unexpected_config_keys.sort() + using_defaults_for.sort() + + for k in unexpected_config_keys: + self.logger.warning("%s is an unexpected config value" % k) + + for k in using_defaults_for: + self.logger.info("%s key is using default value" % k) + + self._config = config + + def enable_writing_queries(self): + self._default_query_config.enable_writing = True + + def disable_writing_queries(self): + self._default_query_config.enable_writing = False + + @property + def tries(self): + return self._default_query_config.retries + 1 + + def set_retries(self, 
value=0): + """ + Sets the amount of times that a query will be re-asked in case of failure. + Zero means that there will be no RE-tries, BUT the first try will be done so the query is sent once at least. + + :param value: Amount of desired retries. Negative values will be forced to 0. + """ + self._default_query_config.retries = max(0, value) + + def set_custom_query_default(self, key, value): + self._default_query_config.custom[key] = value + + def copy_default_query_config(self): + """ + Helper for custom queries. When doing a query with some different configuration, copy the default one, modify it + and pass it to the query. + + :return: A new object with the same content as the current default query config + """ + return self._default_query_config.copy() + + @property + def is_ready(self): + """ + This method must be implemented by children classes. + :return: True if setup() call was successful. False if not. + """ + return NotImplementedError + + def get_config(self, key: str): + """ + Getter for configuration values + :param key: Key in the configuration dictionary + :return: Current value of the chosen key + """ + try: + return self._config[key] + except KeyError: + try: + return self.default_config[key] + except KeyError: + return None + + def query(self, statement, params=None, query_config=None, **opts): + """ + This method has to be called to interact with the data source. Each children class will have to implement + its own .execute_query(...) with the specific behavior for each interface. + + :param statement: Unformatted SQL query + :param params: Values to be passed to the query when formatting it + :param query_config: Specific query configuration. If None, the default one will be used. + :param opts: + :return: Result of querying the database + """ + if params is None: + params = {} + + if query_config is None: + query_config = self._default_query_config + + if is_write_query(statement): + raise LongitudeWrongQueryException('Aborted query. 
No write queries allowed.') + + for r in range(self.tries): + try: + return self.execute_query(formatted_statement=statement.format(**params), query_config=query_config, + **opts) + except LongitudeQueryCannotBeExecutedException: + self.logger.error('Query could not be executed. Retries left: %d' % (self.tries - r)) + + raise LongitudeRetriesExceeded + + def execute_query(self, formatted_statement, query_config, **opts): + """ + + :raise LongitudeQueryCannotBeExecutedException + :param formatted_statement: + :param query_config: + :param opts: + :return: + """ + raise NotImplementedError diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py index dbd7106..a6b8511 100644 --- a/core/data_sources/carto.py +++ b/core/data_sources/carto.py @@ -1,4 +1,6 @@ -from core.data_source import DataSource +from carto.exceptions import CartoException + +from core.data_sources.base import DataSource, LongitudeQueryCannotBeExecutedException from carto.auth import APIKeyAuthClient from carto.sql import BatchSQLClient, SQLClient @@ -15,6 +17,9 @@ def __init__(self, config=None): super().__init__(config) self._sql_client = None self._batch_client = None + self.set_custom_query_default('do_post', False) + self.set_custom_query_default('parse_json', True) + self.set_custom_query_default('format', None) def setup(self): auth_client = APIKeyAuthClient(api_key=self.get_config('api_key'), base_url=self.get_config('user_url')) @@ -29,5 +34,14 @@ def is_ready(self): batch_setup_ready = not self.get_config('uses_batch') or (self._batch_client is not None) return sql_setup_ready and batch_setup_ready - def query(self, params): - pass + def execute_query(self, formatted_statement, query_config, **opts): + parse_json = query_config.custom['parse_json'] + do_post = query_config.custom['do_post'] + format_ = query_config.custom['format'] + try: + return self._sql_client.send(self, formatted_statement, + parse_json=parse_json, + do_post=do_post, + format=format_) + except 
CartoException: + raise LongitudeQueryCannotBeExecutedException diff --git a/core/data_sources/postgre.py b/core/data_sources/postgre.py index d2494f8..d4c20c9 100644 --- a/core/data_sources/postgre.py +++ b/core/data_sources/postgre.py @@ -1,4 +1,4 @@ -from core.data_source import DataSource +from core.data_sources.base import DataSource class PostgreDataSource(DataSource): diff --git a/core/samples/__init__.py b/core/samples/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/samples/carto_sample.py b/core/samples/carto_sample.py new file mode 100644 index 0000000..9039403 --- /dev/null +++ b/core/samples/carto_sample.py @@ -0,0 +1,12 @@ +import os +from core.data_sources.carto import CartoDataSource + +if __name__ == "__main__": + config = { + 'api_version': 'v2', + 'uses_batch': False, + 'api_key': os.environ('SAMPLE_CARTO_API_KEY'), + 'user_url': os.environ('SAMPLE_CARTO_USER_URL') + } + + ds = CartoDataSource(config) diff --git a/core/tests/test_carto_data_source.py b/core/tests/test_carto_data_source.py index 67bd921..955cf55 100644 --- a/core/tests/test_carto_data_source.py +++ b/core/tests/test_carto_data_source.py @@ -1,7 +1,5 @@ from unittest import TestCase, mock - from carto.exceptions import CartoException - from core.data_sources.carto import CartoDataSource @@ -11,10 +9,10 @@ def test_default_configuration_loads(self): with self.assertLogs(level='INFO') as log_test: carto_ds = CartoDataSource() self.assertEqual(log_test.output, - ['INFO:core.data_source:api_key key is using default value', - 'INFO:core.data_source:api_version key is using default value', - 'INFO:core.data_source:user_url key is using default value', - 'INFO:core.data_source:uses_batch key is using default value'] + ['INFO:core.data_sources.carto:api_key key is using default value', + 'INFO:core.data_sources.carto:api_version key is using default value', + 'INFO:core.data_sources.carto:user_url key is using default value', + 
'INFO:core.data_sources.carto:uses_batch key is using default value'] ) self.assertEqual('', carto_ds.get_config('api_key')) diff --git a/core/tests/test_data_source.py b/core/tests/test_data_source.py index bae496c..3fb8195 100644 --- a/core/tests/test_data_source.py +++ b/core/tests/test_data_source.py @@ -1,5 +1,5 @@ from unittest import TestCase -from core.data_source import DataSource +from core.data_sources.base import DataSource class TestDataSource(TestCase): @@ -18,8 +18,8 @@ def test_config(self): with self.assertLogs(level='WARNING') as log_test: ds = DataSource(config) self.assertEqual(log_test.output, - ['WARNING:core.data_source:some_another_config_value is an unexpected config value', - 'WARNING:core.data_source:some_config_value is an unexpected config value']) + ['WARNING:core.data_sources.base:some_another_config_value is an unexpected config value', + 'WARNING:core.data_sources.base:some_config_value is an unexpected config value']) # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. 
self.assertEqual(0, ds.get_config('some_config_value')) @@ -29,4 +29,4 @@ def test_config(self): def test_query_is_custom(self): ds = DataSource({}) with self.assertRaises(NotImplementedError): - ds.query({}) + ds.query(statement='whatever') From 44bf4139ebcb6707add44611306be2b0496d847e Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 24 Jan 2019 11:26:22 +0100 Subject: [PATCH 05/47] Basic example added to query Carto --- .coveragerc | 7 +++++-- core/data_sources/carto.py | 10 ++++------ core/samples/.gitignore | 1 + core/samples/carto_sample.py | 19 ++++++++++++++++--- 4 files changed, 26 insertions(+), 11 deletions(-) create mode 100644 core/samples/.gitignore diff --git a/.coveragerc b/.coveragerc index 58b58e5..6c01260 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,7 +1,7 @@ # .coveragerc to control coverage.py [run] branch = True -omit = tests/* +omit = core/tests/*, **/__init__.py, core/samples/** [report] # Regexes for lines to exclude from consideration @@ -22,6 +22,9 @@ exclude_lines = if __name__ == .__main__.: ignore_errors = True +fail_under = 85 +show_missing = True [html] -directory = coverage_html_report \ No newline at end of file +title = Longitude Core Coverage +directory = coverage_html_report diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py index a6b8511..c11ede7 100644 --- a/core/data_sources/carto.py +++ b/core/data_sources/carto.py @@ -19,7 +19,7 @@ def __init__(self, config=None): self._batch_client = None self.set_custom_query_default('do_post', False) self.set_custom_query_default('parse_json', True) - self.set_custom_query_default('format', None) + self.set_custom_query_default('format', 'json') def setup(self): auth_client = APIKeyAuthClient(api_key=self.get_config('api_key'), base_url=self.get_config('user_url')) @@ -39,9 +39,7 @@ def execute_query(self, formatted_statement, query_config, **opts): do_post = query_config.custom['do_post'] format_ = query_config.custom['format'] try: - return 
self._sql_client.send(self, formatted_statement, - parse_json=parse_json, - do_post=do_post, - format=format_) - except CartoException: + return self._sql_client.send(formatted_statement, parse_json=parse_json, do_post=do_post, format= format_) + + except CartoException as e: raise LongitudeQueryCannotBeExecutedException diff --git a/core/samples/.gitignore b/core/samples/.gitignore new file mode 100644 index 0000000..730f397 --- /dev/null +++ b/core/samples/.gitignore @@ -0,0 +1 @@ +carto_sample_config.py diff --git a/core/samples/carto_sample.py b/core/samples/carto_sample.py index 9039403..9963ae4 100644 --- a/core/samples/carto_sample.py +++ b/core/samples/carto_sample.py @@ -1,12 +1,25 @@ -import os +from core.data_sources.base import LongitudeRetriesExceeded from core.data_sources.carto import CartoDataSource +# This module is IGNORED in git. Create one in your repo and add the needed fields. +# Ask your PM about where to find these values +from core.samples.carto_sample_config import CARTO_API_KEY, CARTO_URL + if __name__ == "__main__": config = { 'api_version': 'v2', 'uses_batch': False, - 'api_key': os.environ('SAMPLE_CARTO_API_KEY'), - 'user_url': os.environ('SAMPLE_CARTO_USER_URL') + 'api_key': CARTO_API_KEY, + 'user_url': CARTO_URL } ds = CartoDataSource(config) + ds.setup() + if ds.is_ready: + try: + data = ds.query('select 1 + 1') + print(data) + except LongitudeRetriesExceeded: + print ("caca") + else: + print("tararí") From fb53a068d1086caf73f222762848f311ed0c6e6e Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 24 Jan 2019 13:54:23 +0100 Subject: [PATCH 06/47] 100% coverage for Carto data source. Common configuration for on premise and regular instances. 
--- core/data_sources/base.py | 14 +++++++ core/data_sources/carto.py | 21 ++++++++-- core/samples/README.md | 7 ++++ core/samples/carto_sample.py | 12 +++--- core/tests/test_carto_data_source.py | 60 ++++++++++++++++++++-------- 5 files changed, 86 insertions(+), 28 deletions(-) create mode 100644 core/samples/README.md diff --git a/core/data_sources/base.py b/core/data_sources/base.py index 0925a47..32e5ade 100644 --- a/core/data_sources/base.py +++ b/core/data_sources/base.py @@ -159,3 +159,17 @@ def execute_query(self, formatted_statement, query_config, **opts): :return: """ raise NotImplementedError + + def parse_response(self, response): + """" + :param response from an succesfully executed query + :return: A QueryResponse object + """ + raise NotImplementedError + + +class QueryResponse: + def __init__(self): + self.rows = [] + self.profiling = {} + self.fields = [] diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py index c11ede7..9ddbeb9 100644 --- a/core/data_sources/carto.py +++ b/core/data_sources/carto.py @@ -6,11 +6,14 @@ class CartoDataSource(DataSource): + SUBDOMAIN_URL_PATTERN = "https://%s.carto.com" + ON_PREMISE_URL_PATTERN = "https://%s/user/%s" default_config = { 'api_version': 'v2', 'uses_batch': False, + 'on_premise_domain': '', 'api_key': '', - 'user_url': '' + 'user': '' } def __init__(self, config=None): @@ -22,24 +25,34 @@ def __init__(self, config=None): self.set_custom_query_default('format', 'json') def setup(self): - auth_client = APIKeyAuthClient(api_key=self.get_config('api_key'), base_url=self.get_config('user_url')) + auth_client = APIKeyAuthClient(api_key=self.get_config('api_key'), base_url=self.base_url) self._sql_client = SQLClient(auth_client, api_version=self.get_config('api_version')) if self.get_config('uses_batch'): self._batch_client = BatchSQLClient(auth_client) + @property + def base_url(self): + user = self.get_config('user') + on_premise_domain = self.get_config('on_premise_domain') + if 
on_premise_domain: + base_url = self.ON_PREMISE_URL_PATTERN % (on_premise_domain, user) + else: + base_url = self.SUBDOMAIN_URL_PATTERN % user + return base_url + @property def is_ready(self): sql_setup_ready = self._sql_client is not None batch_setup_ready = not self.get_config('uses_batch') or (self._batch_client is not None) - return sql_setup_ready and batch_setup_ready + return sql_setup_ready and batch_setup_ready and self.get_config('user') != '' def execute_query(self, formatted_statement, query_config, **opts): parse_json = query_config.custom['parse_json'] do_post = query_config.custom['do_post'] format_ = query_config.custom['format'] try: - return self._sql_client.send(formatted_statement, parse_json=parse_json, do_post=do_post, format= format_) + return self._sql_client.send(formatted_statement, parse_json=parse_json, do_post=do_post, format=format_) except CartoException as e: raise LongitudeQueryCannotBeExecutedException diff --git a/core/samples/README.md b/core/samples/README.md new file mode 100644 index 0000000..5c09f48 --- /dev/null +++ b/core/samples/README.md @@ -0,0 +1,7 @@ +# Longitude SAMPLE scripts + +In this folder you will find examples about how to use the features in the library. + +Please, keep in mind that these will be not so updated as the ```tests```. You should rely on the ```tests``` tp fully understand how the library works. + +Please, if you find any wrong example or something to improve, submit a PR :) \ No newline at end of file diff --git a/core/samples/carto_sample.py b/core/samples/carto_sample.py index 9963ae4..88240c6 100644 --- a/core/samples/carto_sample.py +++ b/core/samples/carto_sample.py @@ -3,23 +3,21 @@ # This module is IGNORED in git. Create one in your repo and add the needed fields. 
# Ask your PM about where to find these values -from core.samples.carto_sample_config import CARTO_API_KEY, CARTO_URL +from core.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER if __name__ == "__main__": config = { - 'api_version': 'v2', - 'uses_batch': False, 'api_key': CARTO_API_KEY, - 'user_url': CARTO_URL + 'user': CARTO_USER } ds = CartoDataSource(config) ds.setup() if ds.is_ready: try: - data = ds.query('select 1 + 1') + data = ds.query('select * from county_population') print(data) except LongitudeRetriesExceeded: - print ("caca") + print("Too many retries and no success...") else: - print("tararí") + print("Data source is not properly configured.") diff --git a/core/tests/test_carto_data_source.py b/core/tests/test_carto_data_source.py index 955cf55..3bc6a4e 100644 --- a/core/tests/test_carto_data_source.py +++ b/core/tests/test_carto_data_source.py @@ -1,5 +1,8 @@ from unittest import TestCase, mock + from carto.exceptions import CartoException + +from core.data_sources.base import LongitudeRetriesExceeded from core.data_sources.carto import CartoDataSource @@ -11,29 +14,52 @@ def test_default_configuration_loads(self): self.assertEqual(log_test.output, ['INFO:core.data_sources.carto:api_key key is using default value', 'INFO:core.data_sources.carto:api_version key is using default value', - 'INFO:core.data_sources.carto:user_url key is using default value', + 'INFO:core.data_sources.carto:on_premise_domain key is using default value', + 'INFO:core.data_sources.carto:user key is using default value', 'INFO:core.data_sources.carto:uses_batch key is using default value'] ) self.assertEqual('', carto_ds.get_config('api_key')) self.assertEqual('v2', carto_ds.get_config('api_version')) - self.assertEqual('', carto_ds.get_config('user_url')) + self.assertEqual('', carto_ds.get_config('on_premise_domain')) + self.assertEqual('', carto_ds.get_config('user')) self.assertFalse(carto_ds.get_config('uses_batch')) - def 
test_setup_fails_with_default_config(self): - import warnings - # Default config MUST NOT BE USABLE for authentication - with warnings.catch_warnings(record=True) as w: - with self.assertRaises(CartoException) as error: - CartoDataSource().setup() - self.assertEqual(1, len(w), 'Carto will warn us about not using https') - - def test_setup_needs_a_valid_user_url(self): - config = { - 'user_url': 'https://fake_user.carto.com', - 'uses_batch': True - - } - carto_ds = CartoDataSource(config=config) + def test_setup_not_ready_if_empty_user(self): + carto_ds = CartoDataSource({ + 'uses_batch': True # Just to enable that coverage branch for now + }) + carto_ds.setup() + self.assertFalse(carto_ds.is_ready) + + def test_setup_needs_some_user(self): + carto_ds = CartoDataSource({ + 'user': 'some_user' + }) carto_ds.setup() self.assertTrue(carto_ds.is_ready) + self.assertEqual('https://some_user.carto.com', carto_ds.base_url) + + def test_setup_can_accept_on_premise_domain(self): + carto_ds = CartoDataSource({ + 'user': 'some_on_premise_user', + 'on_premise_domain': 'some_cool_domain.io' + }) + carto_ds.setup() + self.assertTrue(carto_ds.is_ready) + self.assertEqual('https://some_cool_domain.io/user/some_on_premise_user', carto_ds.base_url) + + def test_succesful_query(self): + ds = CartoDataSource() + ds._sql_client = mock.MagicMock() + ds._sql_client.send.return_value = "{}" + result = ds.query('some query') + ds._sql_client.send.assert_called_with('some query', do_post=False, format='json', parse_json=True) + self.assertEqual("{}", result) + + def test_wrong_query(self): + ds = CartoDataSource() + ds._sql_client = mock.MagicMock() + ds._sql_client.send.side_effect = CartoException + with self.assertRaises(LongitudeRetriesExceeded): + ds.query('some irrelevant query') From 7be594a522301e747a03ac61cc8dffd1ead5f356 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 24 Jan 2019 14:52:06 +0100 Subject: [PATCH 07/47] Added class to wrap query responses, regardless of the 
database specific response. Basic preview of returned values as table. --- core/data_sources/base.py | 55 ++++++++++++----- core/data_sources/carto.py | 11 +++- core/samples/carto_sample.py | 2 +- core/tests/test_base_data_source.py | 91 ++++++++++++++++++++++++++++ core/tests/test_carto_data_source.py | 5 +- core/tests/test_data_source.py | 32 ---------- 6 files changed, 146 insertions(+), 50 deletions(-) create mode 100644 core/tests/test_base_data_source.py delete mode 100644 core/tests/test_data_source.py diff --git a/core/data_sources/base.py b/core/data_sources/base.py index 32e5ade..9db1a02 100644 --- a/core/data_sources/base.py +++ b/core/data_sources/base.py @@ -67,12 +67,6 @@ def __init__(self, config=None): self._config = config - def enable_writing_queries(self): - self._default_query_config.enable_writing = True - - def disable_writing_queries(self): - self._default_query_config.enable_writing = False - @property def tries(self): return self._default_query_config.retries + 1 @@ -104,7 +98,7 @@ def is_ready(self): This method must be implemented by children classes. :return: True if setup() call was successful. False if not. """ - return NotImplementedError + raise NotImplementedError def get_config(self, key: str): """ @@ -142,8 +136,9 @@ def query(self, statement, params=None, query_config=None, **opts): for r in range(self.tries): try: - return self.execute_query(formatted_statement=statement.format(**params), query_config=query_config, - **opts) + response = self.execute_query(formatted_statement=statement.format(**params), query_config=query_config, + **opts) + return self.parse_response(response) except LongitudeQueryCannotBeExecutedException: self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) @@ -168,8 +163,40 @@ def parse_response(self, response): raise NotImplementedError -class QueryResponse: - def __init__(self): - self.rows = [] - self.profiling = {} - self.fields = [] +class LongitudeQueryResponse: + def __init__(self, rows=None, fields=None, profiling=None): + self.rows = rows or [] + self.fields = fields or {} + self.profiling = profiling or {} + + def preview_top(self): + return self._preview(10) + + def preview_bottom(self): + return self._preview(-10) + + def _preview(self, limit): + def render_line(values): + def render_value(value): + value = str(value) + if len(value) > 20: + value = value[:14] + ' (...)' + return value + + values = [render_value(v) + '\t' for v in values] + return '| ' + '| '.join(values) + '\t|' + + if limit > 0: + preview_list = self.rows[:limit] + else: + preview_list = self.rows[limit:] + + lines = [render_line(l) for l in preview_list] + headers = [k for k, v in self.fields.items()] + + lines = [render_line(headers)] + lines + render = '\n'.join(lines) + '\n\n' + '... 
time = %f' % self.profiling['response_time'] + return render + + def __str__(self): + return self.preview_top() diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py index 9ddbeb9..e64abf2 100644 --- a/core/data_sources/carto.py +++ b/core/data_sources/carto.py @@ -1,6 +1,6 @@ from carto.exceptions import CartoException -from core.data_sources.base import DataSource, LongitudeQueryCannotBeExecutedException +from core.data_sources.base import DataSource, LongitudeQueryCannotBeExecutedException, LongitudeQueryResponse from carto.auth import APIKeyAuthClient from carto.sql import BatchSQLClient, SQLClient @@ -56,3 +56,12 @@ def execute_query(self, formatted_statement, query_config, **opts): except CartoException as e: raise LongitudeQueryCannotBeExecutedException + + def parse_response(self, response): + return LongitudeQueryResponse( + rows=[[v for k, v in dictionary.items()] for dictionary in response['rows']], + fields=response['fields'], + profiling={ + 'response_time': response['time'] + } + ) diff --git a/core/samples/carto_sample.py b/core/samples/carto_sample.py index 88240c6..c3aa4be 100644 --- a/core/samples/carto_sample.py +++ b/core/samples/carto_sample.py @@ -15,7 +15,7 @@ ds.setup() if ds.is_ready: try: - data = ds.query('select * from county_population') + data = ds.query('select * from county_population limit 30') print(data) except LongitudeRetriesExceeded: print("Too many retries and no success...") diff --git a/core/tests/test_base_data_source.py b/core/tests/test_base_data_source.py new file mode 100644 index 0000000..ab210f4 --- /dev/null +++ b/core/tests/test_base_data_source.py @@ -0,0 +1,91 @@ +from unittest import TestCase +from core.data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse + + +class TestLongitudeQueryResponse(TestCase): + def test_preview(self): + qr = LongitudeQueryResponse( + rows=[['A' + str(v), 'B' + str(v)] for v in range(20)], + fields={'As': {'type': 'string'}, 'Bs': {'type': 
'string'}}, + profiling={'response_time': 42.0} + ) + + render_top = qr.preview_top() + expected_render_top = \ +"""| As | Bs | +| A0 | B0 | +| A1 | B1 | +| A2 | B2 | +| A3 | B3 | +| A4 | B4 | +| A5 | B5 | +| A6 | B6 | +| A7 | B7 | +| A8 | B8 | +| A9 | B9 | + +... time = 42.000000""" + self.assertEqual(expected_render_top, render_top) + + render_bottom = qr.preview_bottom() + expected_render_bottom = \ +"""| As | Bs | +| A10 | B10 | +| A11 | B11 | +| A12 | B12 | +| A13 | B13 | +| A14 | B14 | +| A15 | B15 | +| A16 | B16 | +| A17 | B17 | +| A18 | B18 | +| A19 | B19 | + +... time = 42.000000""" + self.assertEqual(expected_render_bottom, render_bottom) + + +class TestDataSourceQueryConfig(TestCase): + def test_copy(self): + a = DataSourceQueryConfig() + b = a.copy() + + self.assertNotEqual(a, b) + self.assertEqual(a.__dict__, b.__dict__) + + +class TestDataSource(TestCase): + def test_config(self): + # Config must be a dictionary + with self.assertRaises(TypeError): + DataSource([]) + with self.assertRaises(TypeError): + DataSource("") + with self.assertRaises(TypeError): + DataSource(0) + + # Any values can go in the configuration dictionary but not expected ones trigger a warning + config = {"some_config_value": 0, "some_another_config_value": "tomato"} + with self.assertLogs(level='WARNING') as log_test: + ds = DataSource(config) + self.assertEqual(log_test.output, + ['WARNING:core.data_sources.base:some_another_config_value is an unexpected config value', + 'WARNING:core.data_sources.base:some_config_value is an unexpected config value']) + + # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. 
+ self.assertEqual(0, ds.get_config('some_config_value')) + self.assertEqual("tomato", ds.get_config('some_another_config_value')) + self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) + + def test_abstract_methods_are_not_implemented(self): + ds = DataSource({}) + with self.assertRaises(NotImplementedError): + ds.is_ready + with self.assertRaises(NotImplementedError): + ds.query(statement='whatever') + + def test_copy_default_query_config(self): + ds = DataSource({}) + the_copy = ds.copy_default_query_config() + self.assertNotEqual(the_copy, ds._default_query_config) + self.assertEqual(the_copy.__dict__, ds._default_query_config.__dict__) diff --git a/core/tests/test_carto_data_source.py b/core/tests/test_carto_data_source.py index 3bc6a4e..8a58d37 100644 --- a/core/tests/test_carto_data_source.py +++ b/core/tests/test_carto_data_source.py @@ -52,10 +52,11 @@ def test_setup_can_accept_on_premise_domain(self): def test_succesful_query(self): ds = CartoDataSource() ds._sql_client = mock.MagicMock() - ds._sql_client.send.return_value = "{}" + ds._sql_client.send.return_value = {'rows': [], 'time': 42.0, 'fields': {}} result = ds.query('some query') ds._sql_client.send.assert_called_with('some query', do_post=False, format='json', parse_json=True) - self.assertEqual("{}", result) + self.assertEqual([], result.rows) + self.assertEqual(42, result.profiling['response_time']) def test_wrong_query(self): ds = CartoDataSource() diff --git a/core/tests/test_data_source.py b/core/tests/test_data_source.py deleted file mode 100644 index 3fb8195..0000000 --- a/core/tests/test_data_source.py +++ /dev/null @@ -1,32 +0,0 @@ -from unittest import TestCase -from core.data_sources.base import DataSource - - -class TestDataSource(TestCase): - def test_config(self): - - # Config must be a dictionary - with self.assertRaises(TypeError): - DataSource([]) - with self.assertRaises(TypeError): - DataSource("") - with self.assertRaises(TypeError): 
- DataSource(0) - - # Any values can go in the configuration dictionary but not expected ones trigger a warning - config = {"some_config_value": 0, "some_another_config_value": "tomato"} - with self.assertLogs(level='WARNING') as log_test: - ds = DataSource(config) - self.assertEqual(log_test.output, - ['WARNING:core.data_sources.base:some_another_config_value is an unexpected config value', - 'WARNING:core.data_sources.base:some_config_value is an unexpected config value']) - - # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. - self.assertEqual(0, ds.get_config('some_config_value')) - self.assertEqual("tomato", ds.get_config('some_another_config_value')) - self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) - - def test_query_is_custom(self): - ds = DataSource({}) - with self.assertRaises(NotImplementedError): - ds.query(statement='whatever') From 2c74c55295be613e23944eb3934ba647dc2f49ac Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 24 Jan 2019 15:11:07 +0100 Subject: [PATCH 08/47] Updated doc --- README.md | 13 +++++++++++-- core/samples/carto_sample.py | 15 ++++++++++++--- 2 files changed, 23 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index ff5578a..b1b2f72 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,8 @@ A **new** bunch of middleware functions to build applications on top of CARTO. +## As final user... + How to use: ```bash pip install geographica-longitude @@ -12,7 +14,7 @@ Or install from GitHub: pip install -e git+https://github.com/GeographicaGS/Longitude#egg=longitude ``` -## Configure development environment +## As developer... Install pipenv in your development machine if you still do not have it. @@ -25,17 +27,24 @@ $ pipenv install To activate the virtual environment: `$ pipenv shell`. If the environment variables are defined in a `.env` file, they are loaded in this shell. 
+## Sample scripts + +These are intended to be used with real databases (i.e. those in your profile) to check features of the library. + +You will probably need to provide credentials/api keys/urls/username/... Check each script and it will be explained there. + ## Testing and coverage The [```pytest-cov```](https://pytest-cov.readthedocs.io/en/latest/) plugin is being used. Coverage configuration is at ```.coveragerc``` (including output folder). You can run something like: ```pytest --cov-report=html --cov=core core``` and the results will go in the defined html folder. +There is a bash script called ```generate_core_coverage.sh``` that runs the coverage analysis and shows the report in your browser. + ## Upload a new version to PyPi You need to be part of *Geographica's development team* to be able to accomplish this task. - Start docker ``` docker-compose run --rm python bash diff --git a/core/samples/carto_sample.py b/core/samples/carto_sample.py index c3aa4be..0c6e565 100644 --- a/core/samples/carto_sample.py +++ b/core/samples/carto_sample.py @@ -1,8 +1,17 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'carto_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! 
+""" from core.data_sources.base import LongitudeRetriesExceeded from core.data_sources.carto import CartoDataSource - -# This module is IGNORED in git. Create one in your repo and add the needed fields. -# Ask your PM about where to find these values from core.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER if __name__ == "__main__": From 8fb5849263ed19bc2632d69360535f22ed646d11 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Fri, 25 Jan 2019 12:28:25 +0100 Subject: [PATCH 09/47] Transparent cache logic inserted in base data source logic. README now has the roadmap checklist. --- README.md | 52 +++++- core/caches/__init__.py | 0 core/caches/base.py | 52 ++++++ core/caches/ram.py | 5 + core/caches/redis.py | 5 + core/data_sources/base.py | 75 +++++--- core/data_sources/carto.py | 15 +- core/data_sources/{postgre.py => postgres.py} | 2 +- core/data_sources/util.py | 10 ++ .../raw_text/query_response_render_bottom.txt | 13 ++ .../raw_text/query_response_render_top.txt | 13 ++ core/tests/test_base_data_source.py | 91 ---------- core/tests/test_cache_base.py | 43 +++++ core/tests/test_cache_ram.py | 6 + core/tests/test_cache_redis.py | 6 + core/tests/test_data_source_base.py | 163 ++++++++++++++++++ ...ta_source.py => test_data_source_carto.py} | 0 core/tests/test_data_source_postgres.py | 1 + core/tests/test_postgre_data_source.py | 1 - 19 files changed, 427 insertions(+), 126 deletions(-) create mode 100644 core/caches/__init__.py create mode 100644 core/caches/base.py create mode 100644 core/caches/ram.py create mode 100644 core/caches/redis.py rename core/data_sources/{postgre.py => postgres.py} (60%) create mode 100644 core/data_sources/util.py create mode 100644 core/tests/raw_text/query_response_render_bottom.txt create mode 100644 core/tests/raw_text/query_response_render_top.txt delete mode 100644 core/tests/test_base_data_source.py create mode 100644 core/tests/test_cache_base.py create mode 100644 core/tests/test_cache_ram.py create mode 
100644 core/tests/test_cache_redis.py create mode 100644 core/tests/test_data_source_base.py rename core/tests/{test_carto_data_source.py => test_data_source_carto.py} (100%) create mode 100644 core/tests/test_data_source_postgres.py delete mode 100644 core/tests/test_postgre_data_source.py diff --git a/README.md b/README.md index b1b2f72..284b6fb 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,57 @@ A **new** bunch of middleware functions to build applications on top of CARTO. -## As final user... +## Roadmap + +A live document for the roadmap is [shared here](https://docs.google.com/document/d/1nO_JLaKFmr5h6MudDklFutv96CfkNjJxfd0xyh1szwM/edit#heading=h.44g51xumzfku) + +- [ ] Database model + - [x] CARTO data source + - [x] Basic parametrized queries + - [ ] Bind/dynamic parameters in queries + - [ ] Postgres data source + - [ ] driver 1 + - [ ] driver 2 + - [ ] Cache + - [ ] Documentation + - [ ] Unit tests + - [ ] Sample scripts + +- [ ] Config + +- [ ] CI PyPi versioning + +- [ ] Datos + - [ ] Carto + - [ ] DataFrame read/write + - [ ] COPY + -[ ] Postgres + - [ ] DataFrame read/write + - [ ] COPY + +- [ ] Validations + - [ ] Marshmallow + - [ ] Wrapper (?) + - [ ] Documentation + +- [ ] Swagger + - [ ] Decorators + - [ ] Flassger (?) + - [ ] OAuth integration + - [ ] Postman integration + - [ ] Documentation + +- [ ] SQL Alchemy + - [ ] Model definition + - [ ] Jenkins integration + - [ ] Documentation + +- [ ] OAuth + - [ ] Role mapping + - [ ] Token storage + - [ ] Documentation + + ## As final user... 
How to use: ```bash diff --git a/core/caches/__init__.py b/core/caches/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/core/caches/base.py b/core/caches/base.py new file mode 100644 index 0000000..c0dd629 --- /dev/null +++ b/core/caches/base.py @@ -0,0 +1,52 @@ +import hashlib +from core.data_sources.util import is_write_query + + +class LongitudeCache: + @staticmethod + def generate_key(formatted_query): + """ + This is the default key generation algorithm, based in a digest from the sha256 hash of the query. + + Override this method to provide your own key generation in case you need a specific way to store your cache. + + :param formatted_query: Final query as it should be asked to the database + :return: An (most likely) unique hash, generated from the query text + """ + + return hashlib.sha256(formatted_query.encode('utf-8')).hexdigest() + + def setup(self): + raise NotImplementedError + + @property + def is_ready(self): + raise NotImplementedError + + def get(self, formatted_query): + if is_write_query(formatted_query): + return None + else: + return self.execute_get(self.generate_key(formatted_query)) + + def put(self, formatted_query, payload): + if is_write_query(formatted_query): + return None + else: + self.execute_put(self.generate_key(formatted_query), payload) + + def execute_get(self, key): + """ + Custom get action over the cache. + + :return: Query response as it was saved if hit. None if miss. + """ + raise NotImplementedError + + def execute_put(self, key, payload): + """ + Custom put action over the cache. + + :return: True if key was overwritten. False if key was new in the cache. 
+ """ + raise NotImplementedError diff --git a/core/caches/ram.py b/core/caches/ram.py new file mode 100644 index 0000000..c1f7e82 --- /dev/null +++ b/core/caches/ram.py @@ -0,0 +1,5 @@ +from core.caches.base import LongitudeCache + + +class RamCache(LongitudeCache): + pass diff --git a/core/caches/redis.py b/core/caches/redis.py new file mode 100644 index 0000000..b767431 --- /dev/null +++ b/core/caches/redis.py @@ -0,0 +1,5 @@ +from core.caches.base import LongitudeCache + + +class RedisCache(LongitudeCache): + pass diff --git a/core/data_sources/base.py b/core/data_sources/base.py index 9db1a02..0313c80 100644 --- a/core/data_sources/base.py +++ b/core/data_sources/base.py @@ -1,5 +1,8 @@ -import re import logging +from typing import Type + +from core.caches.base import LongitudeCache +from .util import is_write_query class LongitudeBaseException(Exception): @@ -18,21 +21,14 @@ class LongitudeWrongQueryException(LongitudeBaseException): pass -def is_write_query(sql_statement): - """ - Check if a query string is a write query - """ - write_cmds = 'drop|delete|insert|update|grant|execute|perform|create|begin|commit|alter' - is_write = re.search(write_cmds, sql_statement.lower()) - return is_write - - class DataSourceQueryConfig: - def __init__(self, enable_writing=False, retries=0, custom=None): + def __init__(self, enable_writing=False, retries=0, custom=None, use_cache=True): + self.use_cache = use_cache self.enable_writing = enable_writing self.retries = retries - self.custom = custom or {} # Depending on the specific interface, sometimes we also need to specify per-query values + # Depending on the specific interface (i.e.: CARTO, Postgres...), we might also need to specify per-query values + self.custom = custom or {} def copy(self): return DataSourceQueryConfig(self.enable_writing, self.retries, self.custom) @@ -41,9 +37,10 @@ def copy(self): class DataSource: default_config = {} - def __init__(self, config=None): + def __init__(self, config=None, 
cache_class: Type[LongitudeCache] = None): self.logger = logging.getLogger(self.__class__.__module__) self._default_query_config = DataSourceQueryConfig() + self._cache = None if config is None: config = {} @@ -51,6 +48,12 @@ def __init__(self, config=None): if not isinstance(config, dict): raise TypeError('Config object must be a dictionary') + if cache_class: + if not issubclass(cache_class, LongitudeCache): + raise TypeError('Cache must derive from LongitudeCache or be None') + else: + self._cache = cache_class() + default_keys = set(self.default_config.keys()) config_keys = set(config.keys()) unexpected_config_keys = list(config_keys.difference(default_keys)) @@ -67,6 +70,10 @@ def __init__(self, config=None): self._config = config + def setup(self): + if self._cache: + self._cache.setup() + @property def tries(self): return self._default_query_config.retries + 1 @@ -95,10 +102,10 @@ def copy_default_query_config(self): @property def is_ready(self): """ - This method must be implemented by children classes. + This method must be implemented by children classes to reflect that setup was ok and must call super().is_ready :return: True if setup() call was successful. False if not. """ - raise NotImplementedError + return not self._cache or self._cache.is_ready def get_config(self, key: str): """ @@ -131,24 +138,38 @@ def query(self, statement, params=None, query_config=None, **opts): if query_config is None: query_config = self._default_query_config - if is_write_query(statement): - raise LongitudeWrongQueryException('Aborted query. No write queries allowed.') + query_is_writing = is_write_query(statement) - for r in range(self.tries): - try: - response = self.execute_query(formatted_statement=statement.format(**params), query_config=query_config, - **opts) - return self.parse_response(response) - except LongitudeQueryCannotBeExecutedException: - self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) + if query_is_writing: + raise LongitudeWrongQueryException('Aborted query. No write queries allowed for now.') + + formatted_query = statement.format(**params) + + response = None + if self._cache and query_config.use_cache and not query_is_writing: + response = self._cache.get(formatted_query) - raise LongitudeRetriesExceeded + if not response: + for r in range(self.tries): + try: + response = self.execute_query(formatted_query=formatted_query, + query_config=query_config, + **opts) + if self._cache and query_config.use_cache: + self._cache.put(formatted_query, response) + + return self.parse_response(response) + except LongitudeQueryCannotBeExecutedException: + self.logger.error('Query could not be executed. Retries left: %d' % (self.tries - r)) + raise LongitudeRetriesExceeded + else: + return self.parse_response(response) - def execute_query(self, formatted_statement, query_config, **opts): + def execute_query(self, formatted_query, query_config, **opts): """ :raise LongitudeQueryCannotBeExecutedException - :param formatted_statement: + :param formatted_query: :param query_config: :param opts: :return: diff --git a/core/data_sources/carto.py b/core/data_sources/carto.py index e64abf2..5cca537 100644 --- a/core/data_sources/carto.py +++ b/core/data_sources/carto.py @@ -30,6 +30,7 @@ def setup(self): if self.get_config('uses_batch'): self._batch_client = BatchSQLClient(auth_client) + super().setup() @property def base_url(self): @@ -43,16 +44,20 @@ def base_url(self): @property def is_ready(self): - sql_setup_ready = self._sql_client is not None - batch_setup_ready = not self.get_config('uses_batch') or (self._batch_client is not None) - return sql_setup_ready and batch_setup_ready and self.get_config('user') != '' + if super().is_ready: + sql_setup_ready = self._sql_client is not None + batch_setup_ready = not self.get_config('uses_batch') or (self._batch_client is not None) + is_ready = sql_setup_ready and 
batch_setup_ready and self.get_config('user') != '' + return is_ready + else: + return False - def execute_query(self, formatted_statement, query_config, **opts): + def execute_query(self, formatted_query, query_config, **opts): parse_json = query_config.custom['parse_json'] do_post = query_config.custom['do_post'] format_ = query_config.custom['format'] try: - return self._sql_client.send(formatted_statement, parse_json=parse_json, do_post=do_post, format=format_) + return self._sql_client.send(formatted_query, parse_json=parse_json, do_post=do_post, format=format_) except CartoException as e: raise LongitudeQueryCannotBeExecutedException diff --git a/core/data_sources/postgre.py b/core/data_sources/postgres.py similarity index 60% rename from core/data_sources/postgre.py rename to core/data_sources/postgres.py index d4c20c9..a4997df 100644 --- a/core/data_sources/postgre.py +++ b/core/data_sources/postgres.py @@ -1,5 +1,5 @@ from core.data_sources.base import DataSource -class PostgreDataSource(DataSource): +class PostgresDataSource(DataSource): pass diff --git a/core/data_sources/util.py b/core/data_sources/util.py new file mode 100644 index 0000000..ba4aea7 --- /dev/null +++ b/core/data_sources/util.py @@ -0,0 +1,10 @@ +import re + + +def is_write_query(sql_statement): + """ + Check if a query string is a write query + """ + write_cmds = 'drop|delete|insert|update|grant|execute|perform|create|begin|commit|alter' + is_write = re.search(write_cmds, sql_statement.lower()) + return is_write diff --git a/core/tests/raw_text/query_response_render_bottom.txt b/core/tests/raw_text/query_response_render_bottom.txt new file mode 100644 index 0000000..3323432 --- /dev/null +++ b/core/tests/raw_text/query_response_render_bottom.txt @@ -0,0 +1,13 @@ +| As | Bs | +| A10 | B10 | +| A11 | B11 | +| A12 | B12 | +| A13 | B13 | +| A14 | B14 | +| A15 | B15 | +| A16 | B16 | +| A17 | B17 | +| A18 | B18 | +| A19 | B19 | + +... 
time = 42.000000 \ No newline at end of file diff --git a/core/tests/raw_text/query_response_render_top.txt b/core/tests/raw_text/query_response_render_top.txt new file mode 100644 index 0000000..81587b1 --- /dev/null +++ b/core/tests/raw_text/query_response_render_top.txt @@ -0,0 +1,13 @@ +| As | Bs | +| A0 | B0 | +| A1 | B1 | +| A2 | B2 | +| A3 | B3 | +| A4 | B4 | +| A5 | B5 | +| A6 | B6 | +| A7 | B7 | +| A8 | B8 | +| A9 | B9 | + +... time = 42.000000 \ No newline at end of file diff --git a/core/tests/test_base_data_source.py b/core/tests/test_base_data_source.py deleted file mode 100644 index ab210f4..0000000 --- a/core/tests/test_base_data_source.py +++ /dev/null @@ -1,91 +0,0 @@ -from unittest import TestCase -from core.data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse - - -class TestLongitudeQueryResponse(TestCase): - def test_preview(self): - qr = LongitudeQueryResponse( - rows=[['A' + str(v), 'B' + str(v)] for v in range(20)], - fields={'As': {'type': 'string'}, 'Bs': {'type': 'string'}}, - profiling={'response_time': 42.0} - ) - - render_top = qr.preview_top() - expected_render_top = \ -"""| As | Bs | -| A0 | B0 | -| A1 | B1 | -| A2 | B2 | -| A3 | B3 | -| A4 | B4 | -| A5 | B5 | -| A6 | B6 | -| A7 | B7 | -| A8 | B8 | -| A9 | B9 | - -... time = 42.000000""" - self.assertEqual(expected_render_top, render_top) - - render_bottom = qr.preview_bottom() - expected_render_bottom = \ -"""| As | Bs | -| A10 | B10 | -| A11 | B11 | -| A12 | B12 | -| A13 | B13 | -| A14 | B14 | -| A15 | B15 | -| A16 | B16 | -| A17 | B17 | -| A18 | B18 | -| A19 | B19 | - -... 
time = 42.000000""" - self.assertEqual(expected_render_bottom, render_bottom) - - -class TestDataSourceQueryConfig(TestCase): - def test_copy(self): - a = DataSourceQueryConfig() - b = a.copy() - - self.assertNotEqual(a, b) - self.assertEqual(a.__dict__, b.__dict__) - - -class TestDataSource(TestCase): - def test_config(self): - # Config must be a dictionary - with self.assertRaises(TypeError): - DataSource([]) - with self.assertRaises(TypeError): - DataSource("") - with self.assertRaises(TypeError): - DataSource(0) - - # Any values can go in the configuration dictionary but not expected ones trigger a warning - config = {"some_config_value": 0, "some_another_config_value": "tomato"} - with self.assertLogs(level='WARNING') as log_test: - ds = DataSource(config) - self.assertEqual(log_test.output, - ['WARNING:core.data_sources.base:some_another_config_value is an unexpected config value', - 'WARNING:core.data_sources.base:some_config_value is an unexpected config value']) - - # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. 
- self.assertEqual(0, ds.get_config('some_config_value')) - self.assertEqual("tomato", ds.get_config('some_another_config_value')) - self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) - - def test_abstract_methods_are_not_implemented(self): - ds = DataSource({}) - with self.assertRaises(NotImplementedError): - ds.is_ready - with self.assertRaises(NotImplementedError): - ds.query(statement='whatever') - - def test_copy_default_query_config(self): - ds = DataSource({}) - the_copy = ds.copy_default_query_config() - self.assertNotEqual(the_copy, ds._default_query_config) - self.assertEqual(the_copy.__dict__, ds._default_query_config.__dict__) diff --git a/core/tests/test_cache_base.py b/core/tests/test_cache_base.py new file mode 100644 index 0000000..201278a --- /dev/null +++ b/core/tests/test_cache_base.py @@ -0,0 +1,43 @@ +from unittest import TestCase, mock +from core.caches.base import LongitudeCache + + +class TestLongitudeCache(TestCase): + def test_generate_key(self): + import string + import random + # The interesting point here is to ensure that it is extremely difficult to have collisions + # We will test really similar payloads and test for unique hashes + queries_population = 100000 + + QUERY_PATTERN = "SELECT * FROM table_%s" + random_queries = set([QUERY_PATTERN % ''.join(random.choices(string.ascii_uppercase + string.digits, k=20)) + for _ in range(queries_population)]) + + keys = set([LongitudeCache.generate_key(q) for q in random_queries]) + + # By using sets we ensure uniqueness. 
+ self.assertEqual(len(random_queries), len(keys)) + + # Also, function must return ALWAYS the same value for the same query, regarless of how many times the + # function is called + unique_key = set([LongitudeCache.generate_key('SOME_QUERY_OVER_AND_OVER') for _ in range(100)]) + self.assertEqual(1, len(unique_key)) + + @mock.patch('core.caches.base.is_write_query') + def test_get_and_put_returns_none_for_write_queries(self, is_write_mock): + is_write_mock.return_value = True + cache = LongitudeCache() + self.assertIsNone(cache.get('some_query')) + self.assertIsNone(cache.put('some_query', payload='whatever')) + self.assertEqual(2, is_write_mock.call_count) + + @mock.patch('core.caches.base.is_write_query') + def test_get_nor_put_are_implemented_in_base_class(self, is_write_mock): + is_write_mock.return_value = False + cache = LongitudeCache() + with self.assertRaises(NotImplementedError): + cache.get('some_query') + with self.assertRaises(NotImplementedError): + cache.put('some_query', payload='whatever') + self.assertEqual(2, is_write_mock.call_count) diff --git a/core/tests/test_cache_ram.py b/core/tests/test_cache_ram.py new file mode 100644 index 0000000..9263889 --- /dev/null +++ b/core/tests/test_cache_ram.py @@ -0,0 +1,6 @@ +from unittest import TestCase +from core.caches.redis import RedisCache + + +class TestRedisCache(TestCase): + pass diff --git a/core/tests/test_cache_redis.py b/core/tests/test_cache_redis.py new file mode 100644 index 0000000..9263889 --- /dev/null +++ b/core/tests/test_cache_redis.py @@ -0,0 +1,6 @@ +from unittest import TestCase +from core.caches.redis import RedisCache + + +class TestRedisCache(TestCase): + pass diff --git a/core/tests/test_data_source_base.py b/core/tests/test_data_source_base.py new file mode 100644 index 0000000..17bb1fd --- /dev/null +++ b/core/tests/test_data_source_base.py @@ -0,0 +1,163 @@ +import os +from unittest import TestCase, mock + +from core.caches.base import LongitudeCache +from 
core.data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse, \ + LongitudeWrongQueryException + + +def load_raw_text(filename): + file_path = os.path.join(os.path.dirname(__file__), 'raw_text', filename) + with open(file_path, 'r') as f: + return f.read() + + +class TestLongitudeQueryResponse(TestCase): + def test_preview(self): + qr = LongitudeQueryResponse( + rows=[['A' + str(v), 'B' + str(v)] for v in range(20)], + fields={'As': {'type': 'string'}, 'Bs': {'type': 'string'}}, + profiling={'response_time': 42.0} + ) + + render_top = qr.preview_top() + expected_render_top = load_raw_text('query_response_render_top.txt') + self.assertEqual(expected_render_top, render_top) + + render_bottom = qr.preview_bottom() + expected_render_bottom = load_raw_text('query_response_render_bottom.txt') + self.assertEqual(expected_render_bottom, render_bottom) + + +class TestDataSourceQueryConfig(TestCase): + def test_copy(self): + a = DataSourceQueryConfig() + b = a.copy() + + self.assertNotEqual(a, b) + self.assertEqual(a.__dict__, b.__dict__) + + +class TestDataSource(TestCase): + def setUp(self): + class FakeCache(LongitudeCache): + @staticmethod + def generate_key(formatted_query): + if formatted_query == 'some_query_in_cache': + return 'hit' + return 'miss' + + def setup(self): + pass + + @property + def is_ready(self): + return True + + def execute_get(self, key): + if key == 'hit': + return 'cache hit' + return None + + def execute_put(self, key, payload): + return True + + self._cache_class = FakeCache + + def test_cache_must_extend_longitude_cache(self): + class PoorlyImplementedCache: + pass + + with self.assertRaises(TypeError): + DataSource({}, cache_class=PoorlyImplementedCache) + + @mock.patch('core.data_sources.base.is_write_query') + def test_write_queries_do_not_use_cache(self, is_write_mock): + ds = DataSource({}, cache_class=self._cache_class) + ds.setup() + self.assertTrue(ds.is_ready) + + is_write_mock.return_value = True + with 
self.assertRaises(LongitudeWrongQueryException): + ds.query('some_query') + + @mock.patch('core.data_sources.base.is_write_query') + @mock.patch('core.data_sources.base.DataSource.parse_response') + def test_cache_hit(self, parse_response_mock, is_write_mock): + ds = DataSource({}, cache_class=self._cache_class) + ds.setup() + # At high level, ds.query will return a normalized LongitudeQueryResponse + # In this test we are interested in triggering that call to the parse function that would return such object, + # but we do not care, in the abstract class, about what content is generated there. + is_write_mock.return_value = False + parse_response_mock.return_value = 'normalized cache hit' + self.assertEqual('normalized cache hit', ds.query('some_query_in_cache')) + parse_response_mock.assert_called_once_with('cache hit') + + @mock.patch('core.data_sources.base.is_write_query') + @mock.patch('core.data_sources.base.DataSource.parse_response') + @mock.patch('core.data_sources.base.DataSource.execute_query') + def test_cache_miss(self, execute_query_mock, parse_response_mock, is_write_mock): + ds = DataSource({}, cache_class=self._cache_class) + ds.setup() + is_write_mock.return_value = False + execute_query_mock.return_value = 'some response from the server' + parse_response_mock.return_value = 'normalized response from data source' + self.assertEqual('normalized response from data source', ds.query('some_query_not_in_cache')) + parse_response_mock.assert_called_once_with('some response from the server') + + def test_config(self): + # Config must be a dictionary + with self.assertRaises(TypeError): + DataSource([]) + with self.assertRaises(TypeError): + DataSource("") + with self.assertRaises(TypeError): + DataSource(0) + + # Any values can go in the configuration dictionary but not expected ones trigger a warning + config = {"some_config_value": 0, "some_another_config_value": "tomato"} + with self.assertLogs(level='WARNING') as log_test: + ds = DataSource(config) + 
self.assertEqual(log_test.output, + ['WARNING:core.data_sources.base:some_another_config_value is an unexpected config value', + 'WARNING:core.data_sources.base:some_config_value is an unexpected config value']) + + # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. + self.assertEqual(0, ds.get_config('some_config_value')) + self.assertEqual("tomato", ds.get_config('some_another_config_value')) + self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) + + def test_abstract_methods_are_not_implemented(self): + ds = DataSource({}) + + with self.assertRaises(NotImplementedError): + ds.query(statement='whatever') + + def test_is_ready(self): + class FakeReadyCache(LongitudeCache): + def setup(self): + pass + + @property + def is_ready(self): + return True + + class FakeNotReadyCache(LongitudeCache): + def setup(self): + pass + + @property + def is_ready(self): + return False + + ds = DataSource(config={}, cache_class=FakeReadyCache) + self.assertTrue(ds.is_ready) + ds = DataSource(config={}, cache_class=FakeNotReadyCache) + self.assertFalse(ds.is_ready) + + def test_copy_default_query_config(self): + ds = DataSource({}) + the_copy = ds.copy_default_query_config() + self.assertNotEqual(the_copy, ds._default_query_config) + self.assertEqual(the_copy.__dict__, ds._default_query_config.__dict__) diff --git a/core/tests/test_carto_data_source.py b/core/tests/test_data_source_carto.py similarity index 100% rename from core/tests/test_carto_data_source.py rename to core/tests/test_data_source_carto.py diff --git a/core/tests/test_data_source_postgres.py b/core/tests/test_data_source_postgres.py new file mode 100644 index 0000000..36af547 --- /dev/null +++ b/core/tests/test_data_source_postgres.py @@ -0,0 +1 @@ +from core.data_sources.postgres import PostgresDataSource \ No newline at end of file diff --git a/core/tests/test_postgre_data_source.py 
b/core/tests/test_postgre_data_source.py deleted file mode 100644 index 3dbc08a..0000000 --- a/core/tests/test_postgre_data_source.py +++ /dev/null @@ -1 +0,0 @@ -from core.data_sources.postgre import PostgreDataSource \ No newline at end of file From ed0023b5212fd75ddbafa43ef536d221d2d53eaa Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 10:58:30 +0100 Subject: [PATCH 10/47] Basic RAM cache implementation. Source code folder structure rework to separate samples. --- .coveragerc | 2 +- core/caches/ram.py | 5 -- core/tests/test_data_source_postgres.py | 1 - generate_core_coverage.sh | 2 +- src/__init__.py | 1 + {core => src/core}/__init__.py | 0 {core => src/core}/caches/__init__.py | 0 {core => src/core}/caches/base.py | 2 +- src/core/caches/ram.py | 23 ++++++ {core => src/core}/caches/redis.py | 2 +- {core => src/core}/data_sources/__init__.py | 0 {core => src/core}/data_sources/base.py | 30 ++++++-- {core => src/core}/data_sources/carto.py | 6 +- {core => src/core}/data_sources/postgres.py | 2 +- {core => src/core}/data_sources/util.py | 0 {core/samples => src/core/tests}/__init__.py | 0 .../raw_text/query_response_render_bottom.txt | 0 .../raw_text/query_response_render_top.txt | 0 {core => src/core}/tests/test_cache_base.py | 6 +- {core => src/core}/tests/test_cache_ram.py | 2 +- {core => src/core}/tests/test_cache_redis.py | 2 +- .../core}/tests/test_data_source_base.py | 25 +++---- .../core}/tests/test_data_source_carto.py | 14 ++-- src/core/tests/test_data_source_postgres.py | 2 + {core => src}/samples/.gitignore | 0 {core => src}/samples/README.md | 0 src/samples/__init__.py | 1 + {core => src}/samples/carto_sample.py | 12 ++- src/samples/carto_sample_with_ram_cache.py | 75 +++++++++++++++++++ .../samples/load_sys_path.py | 0 30 files changed, 166 insertions(+), 49 deletions(-) delete mode 100644 core/caches/ram.py delete mode 100644 core/tests/test_data_source_postgres.py create mode 100644 src/__init__.py rename {core => 
src/core}/__init__.py (100%) rename {core => src/core}/caches/__init__.py (100%) rename {core => src/core}/caches/base.py (96%) create mode 100644 src/core/caches/ram.py rename {core => src/core}/caches/redis.py (50%) rename {core => src/core}/data_sources/__init__.py (100%) rename {core => src/core}/data_sources/base.py (90%) rename {core => src/core}/data_sources/carto.py (92%) rename {core => src/core}/data_sources/postgres.py (51%) rename {core => src/core}/data_sources/util.py (100%) rename {core/samples => src/core/tests}/__init__.py (100%) rename {core => src/core}/tests/raw_text/query_response_render_bottom.txt (100%) rename {core => src/core}/tests/raw_text/query_response_render_top.txt (100%) rename {core => src/core}/tests/test_cache_base.py (92%) rename {core => src/core}/tests/test_cache_ram.py (64%) rename {core => src/core}/tests/test_cache_redis.py (64%) rename {core => src/core}/tests/test_data_source_base.py (85%) rename {core => src/core}/tests/test_data_source_carto.py (78%) create mode 100644 src/core/tests/test_data_source_postgres.py rename {core => src}/samples/.gitignore (100%) rename {core => src}/samples/README.md (100%) create mode 100644 src/samples/__init__.py rename {core => src}/samples/carto_sample.py (86%) create mode 100644 src/samples/carto_sample_with_ram_cache.py rename core/tests/__init__.py => src/samples/load_sys_path.py (100%) diff --git a/.coveragerc b/.coveragerc index 6c01260..fda03bc 100644 --- a/.coveragerc +++ b/.coveragerc @@ -1,7 +1,7 @@ # .coveragerc to control coverage.py [run] branch = True -omit = core/tests/*, **/__init__.py, core/samples/** +omit = src/core/tests/*, **/__init__.py, src/samples/** [report] # Regexes for lines to exclude from consideration diff --git a/core/caches/ram.py b/core/caches/ram.py deleted file mode 100644 index c1f7e82..0000000 --- a/core/caches/ram.py +++ /dev/null @@ -1,5 +0,0 @@ -from core.caches.base import LongitudeCache - - -class RamCache(LongitudeCache): - pass diff --git 
a/core/tests/test_data_source_postgres.py b/core/tests/test_data_source_postgres.py deleted file mode 100644 index 36af547..0000000 --- a/core/tests/test_data_source_postgres.py +++ /dev/null @@ -1 +0,0 @@ -from core.data_sources.postgres import PostgresDataSource \ No newline at end of file diff --git a/generate_core_coverage.sh b/generate_core_coverage.sh index 2fe1ea3..5ca1975 100755 --- a/generate_core_coverage.sh +++ b/generate_core_coverage.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -pytest --cov-report=html --cov=core core/tests/ +pytest --cov-report=html --cov=src.core src/core/tests/ sensible-browser coverage_html_report/index.html diff --git a/src/__init__.py b/src/__init__.py new file mode 100644 index 0000000..bb67a43 --- /dev/null +++ b/src/__init__.py @@ -0,0 +1 @@ +from .core import * diff --git a/core/__init__.py b/src/core/__init__.py similarity index 100% rename from core/__init__.py rename to src/core/__init__.py diff --git a/core/caches/__init__.py b/src/core/caches/__init__.py similarity index 100% rename from core/caches/__init__.py rename to src/core/caches/__init__.py diff --git a/core/caches/base.py b/src/core/caches/base.py similarity index 96% rename from core/caches/base.py rename to src/core/caches/base.py index c0dd629..b756ec7 100644 --- a/core/caches/base.py +++ b/src/core/caches/base.py @@ -1,5 +1,5 @@ import hashlib -from core.data_sources.util import is_write_query +from ..data_sources.util import is_write_query class LongitudeCache: diff --git a/src/core/caches/ram.py b/src/core/caches/ram.py new file mode 100644 index 0000000..cd68b18 --- /dev/null +++ b/src/core/caches/ram.py @@ -0,0 +1,23 @@ +from .base import LongitudeCache + + +class RamCache(LongitudeCache): + """ + This is the simplest cache we can use: a dictionary in memory. 
+ """ + _values = {} + + def setup(self): + self._values = {} + + @property + def is_ready(self): + return True + + def execute_get(self, key): + return self._values.get(key) + + def execute_put(self, key, payload): + is_overwrite = key in self._values.keys() + self._values[key] = payload + return is_overwrite diff --git a/core/caches/redis.py b/src/core/caches/redis.py similarity index 50% rename from core/caches/redis.py rename to src/core/caches/redis.py index b767431..553f303 100644 --- a/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -1,4 +1,4 @@ -from core.caches.base import LongitudeCache +from .base import LongitudeCache class RedisCache(LongitudeCache): diff --git a/core/data_sources/__init__.py b/src/core/data_sources/__init__.py similarity index 100% rename from core/data_sources/__init__.py rename to src/core/data_sources/__init__.py diff --git a/core/data_sources/base.py b/src/core/data_sources/base.py similarity index 90% rename from core/data_sources/base.py rename to src/core/data_sources/base.py index 0313c80..b6bc796 100644 --- a/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -1,7 +1,7 @@ import logging from typing import Type -from core.caches.base import LongitudeCache +from ..caches.base import LongitudeCache from .util import is_write_query @@ -40,6 +40,7 @@ class DataSource: def __init__(self, config=None, cache_class: Type[LongitudeCache] = None): self.logger = logging.getLogger(self.__class__.__module__) self._default_query_config = DataSourceQueryConfig() + self.use_cache = True self._cache = None if config is None: @@ -121,6 +122,12 @@ def get_config(self, key: str): except KeyError: return None + def enable_cache(self): + self.use_cache = True + + def disable_cache(self): + self.use_cache = False + def query(self, statement, params=None, query_config=None, **opts): """ This method has to be called to interact with the data source. 
Each children class will have to implement @@ -146,24 +153,27 @@ def query(self, statement, params=None, query_config=None, **opts): formatted_query = statement.format(**params) response = None - if self._cache and query_config.use_cache and not query_is_writing: + if self._cache and self.use_cache and query_config.use_cache and not query_is_writing: response = self._cache.get(formatted_query) - if not response: + if response: + # TODO: cached responses should, by default, be stored parsed/normalized + parsed_response = self.parse_response(response) + parsed_response.mark_as_cached() + return parsed_response + else: for r in range(self.tries): try: response = self.execute_query(formatted_query=formatted_query, query_config=query_config, **opts) - if self._cache and query_config.use_cache: + if self._cache and self.use_cache and query_config.use_cache: self._cache.put(formatted_query, response) return self.parse_response(response) except LongitudeQueryCannotBeExecutedException: self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) raise LongitudeRetriesExceeded - else: - return self.parse_response(response) def execute_query(self, formatted_query, query_config, **opts): """ @@ -189,6 +199,14 @@ def __init__(self, rows=None, fields=None, profiling=None): self.rows = rows or [] self.fields = fields or {} self.profiling = profiling or {} + self._from_cache = False + + @property + def comes_from_cache(self): + return self._from_cache + + def mark_as_cached(self): + self._from_cache = True def preview_top(self): return self._preview(10) diff --git a/core/data_sources/carto.py b/src/core/data_sources/carto.py similarity index 92% rename from core/data_sources/carto.py rename to src/core/data_sources/carto.py index 5cca537..d0d3c9b 100644 --- a/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -1,6 +1,6 @@ from carto.exceptions import CartoException -from core.data_sources.base import DataSource, LongitudeQueryCannotBeExecutedException, LongitudeQueryResponse +from .base import DataSource, LongitudeQueryCannotBeExecutedException, LongitudeQueryResponse from carto.auth import APIKeyAuthClient from carto.sql import BatchSQLClient, SQLClient @@ -16,8 +16,8 @@ class CartoDataSource(DataSource): 'user': '' } - def __init__(self, config=None): - super().__init__(config) + def __init__(self, config=None, cache_class=None): + super().__init__(config, cache_class=cache_class) self._sql_client = None self._batch_client = None self.set_custom_query_default('do_post', False) diff --git a/core/data_sources/postgres.py b/src/core/data_sources/postgres.py similarity index 51% rename from core/data_sources/postgres.py rename to src/core/data_sources/postgres.py index a4997df..b2bca5d 100644 --- a/core/data_sources/postgres.py +++ b/src/core/data_sources/postgres.py @@ -1,4 +1,4 @@ -from core.data_sources.base import DataSource +from .base import DataSource class PostgresDataSource(DataSource): diff --git a/core/data_sources/util.py 
b/src/core/data_sources/util.py similarity index 100% rename from core/data_sources/util.py rename to src/core/data_sources/util.py diff --git a/core/samples/__init__.py b/src/core/tests/__init__.py similarity index 100% rename from core/samples/__init__.py rename to src/core/tests/__init__.py diff --git a/core/tests/raw_text/query_response_render_bottom.txt b/src/core/tests/raw_text/query_response_render_bottom.txt similarity index 100% rename from core/tests/raw_text/query_response_render_bottom.txt rename to src/core/tests/raw_text/query_response_render_bottom.txt diff --git a/core/tests/raw_text/query_response_render_top.txt b/src/core/tests/raw_text/query_response_render_top.txt similarity index 100% rename from core/tests/raw_text/query_response_render_top.txt rename to src/core/tests/raw_text/query_response_render_top.txt diff --git a/core/tests/test_cache_base.py b/src/core/tests/test_cache_base.py similarity index 92% rename from core/tests/test_cache_base.py rename to src/core/tests/test_cache_base.py index 201278a..9bb4bda 100644 --- a/core/tests/test_cache_base.py +++ b/src/core/tests/test_cache_base.py @@ -1,5 +1,5 @@ from unittest import TestCase, mock -from core.caches.base import LongitudeCache +from ..caches.base import LongitudeCache class TestLongitudeCache(TestCase): @@ -24,7 +24,7 @@ def test_generate_key(self): unique_key = set([LongitudeCache.generate_key('SOME_QUERY_OVER_AND_OVER') for _ in range(100)]) self.assertEqual(1, len(unique_key)) - @mock.patch('core.caches.base.is_write_query') + @mock.patch('src.core.caches.base.is_write_query') def test_get_and_put_returns_none_for_write_queries(self, is_write_mock): is_write_mock.return_value = True cache = LongitudeCache() @@ -32,7 +32,7 @@ def test_get_and_put_returns_none_for_write_queries(self, is_write_mock): self.assertIsNone(cache.put('some_query', payload='whatever')) self.assertEqual(2, is_write_mock.call_count) - @mock.patch('core.caches.base.is_write_query') + 
@mock.patch('src.core.caches.base.is_write_query') def test_get_nor_put_are_implemented_in_base_class(self, is_write_mock): is_write_mock.return_value = False cache = LongitudeCache() diff --git a/core/tests/test_cache_ram.py b/src/core/tests/test_cache_ram.py similarity index 64% rename from core/tests/test_cache_ram.py rename to src/core/tests/test_cache_ram.py index 9263889..7286294 100644 --- a/core/tests/test_cache_ram.py +++ b/src/core/tests/test_cache_ram.py @@ -1,5 +1,5 @@ from unittest import TestCase -from core.caches.redis import RedisCache +from ..caches.redis import RedisCache class TestRedisCache(TestCase): diff --git a/core/tests/test_cache_redis.py b/src/core/tests/test_cache_redis.py similarity index 64% rename from core/tests/test_cache_redis.py rename to src/core/tests/test_cache_redis.py index 9263889..7286294 100644 --- a/core/tests/test_cache_redis.py +++ b/src/core/tests/test_cache_redis.py @@ -1,5 +1,5 @@ from unittest import TestCase -from core.caches.redis import RedisCache +from ..caches.redis import RedisCache class TestRedisCache(TestCase): diff --git a/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py similarity index 85% rename from core/tests/test_data_source_base.py rename to src/core/tests/test_data_source_base.py index 17bb1fd..d99e68f 100644 --- a/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -1,9 +1,8 @@ import os from unittest import TestCase, mock -from core.caches.base import LongitudeCache -from core.data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse, \ - LongitudeWrongQueryException +from ..caches.base import LongitudeCache +from ..data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse, LongitudeWrongQueryException def load_raw_text(filename): @@ -71,7 +70,7 @@ class PoorlyImplementedCache: with self.assertRaises(TypeError): DataSource({}, cache_class=PoorlyImplementedCache) - 
@mock.patch('core.data_sources.base.is_write_query') + @mock.patch('src.core.data_sources.base.is_write_query') def test_write_queries_do_not_use_cache(self, is_write_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() @@ -81,8 +80,8 @@ def test_write_queries_do_not_use_cache(self, is_write_mock): with self.assertRaises(LongitudeWrongQueryException): ds.query('some_query') - @mock.patch('core.data_sources.base.is_write_query') - @mock.patch('core.data_sources.base.DataSource.parse_response') + @mock.patch('src.core.data_sources.base.is_write_query') + @mock.patch('src.core.data_sources.base.DataSource.parse_response') def test_cache_hit(self, parse_response_mock, is_write_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() @@ -90,13 +89,13 @@ def test_cache_hit(self, parse_response_mock, is_write_mock): # In this test we are interested in triggering that call to the parse function that would return such object, # but we do not care, in the abstract class, about what content is generated there. 
is_write_mock.return_value = False - parse_response_mock.return_value = 'normalized cache hit' - self.assertEqual('normalized cache hit', ds.query('some_query_in_cache')) + parse_response_mock.mark_as_cached = True + self.assertTrue(ds.query('some_query_in_cache').comes_from_cache) parse_response_mock.assert_called_once_with('cache hit') - @mock.patch('core.data_sources.base.is_write_query') - @mock.patch('core.data_sources.base.DataSource.parse_response') - @mock.patch('core.data_sources.base.DataSource.execute_query') + @mock.patch('src.core.data_sources.base.is_write_query') + @mock.patch('src.core.data_sources.base.DataSource.parse_response') + @mock.patch('src.core.data_sources.base.DataSource.execute_query') def test_cache_miss(self, execute_query_mock, parse_response_mock, is_write_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() @@ -120,8 +119,8 @@ def test_config(self): with self.assertLogs(level='WARNING') as log_test: ds = DataSource(config) self.assertEqual(log_test.output, - ['WARNING:core.data_sources.base:some_another_config_value is an unexpected config value', - 'WARNING:core.data_sources.base:some_config_value is an unexpected config value']) + ['WARNING:src.core.data_sources.base:some_another_config_value is an unexpected config value', + 'WARNING:src.core.data_sources.base:some_config_value is an unexpected config value']) # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. 
self.assertEqual(0, ds.get_config('some_config_value')) diff --git a/core/tests/test_data_source_carto.py b/src/core/tests/test_data_source_carto.py similarity index 78% rename from core/tests/test_data_source_carto.py rename to src/core/tests/test_data_source_carto.py index 8a58d37..acf0f6e 100644 --- a/core/tests/test_data_source_carto.py +++ b/src/core/tests/test_data_source_carto.py @@ -2,8 +2,8 @@ from carto.exceptions import CartoException -from core.data_sources.base import LongitudeRetriesExceeded -from core.data_sources.carto import CartoDataSource +from ..data_sources.base import LongitudeRetriesExceeded +from ..data_sources.carto import CartoDataSource class TestCartoDataSource(TestCase): @@ -12,11 +12,11 @@ def test_default_configuration_loads(self): with self.assertLogs(level='INFO') as log_test: carto_ds = CartoDataSource() self.assertEqual(log_test.output, - ['INFO:core.data_sources.carto:api_key key is using default value', - 'INFO:core.data_sources.carto:api_version key is using default value', - 'INFO:core.data_sources.carto:on_premise_domain key is using default value', - 'INFO:core.data_sources.carto:user key is using default value', - 'INFO:core.data_sources.carto:uses_batch key is using default value'] + ['INFO:src.core.data_sources.carto:api_key key is using default value', + 'INFO:src.core.data_sources.carto:api_version key is using default value', + 'INFO:src.core.data_sources.carto:on_premise_domain key is using default value', + 'INFO:src.core.data_sources.carto:user key is using default value', + 'INFO:src.core.data_sources.carto:uses_batch key is using default value'] ) self.assertEqual('', carto_ds.get_config('api_key')) diff --git a/src/core/tests/test_data_source_postgres.py b/src/core/tests/test_data_source_postgres.py new file mode 100644 index 0000000..849b865 --- /dev/null +++ b/src/core/tests/test_data_source_postgres.py @@ -0,0 +1,2 @@ +from ..data_sources.postgres import PostgresDataSource + diff --git 
a/core/samples/.gitignore b/src/samples/.gitignore similarity index 100% rename from core/samples/.gitignore rename to src/samples/.gitignore diff --git a/core/samples/README.md b/src/samples/README.md similarity index 100% rename from core/samples/README.md rename to src/samples/README.md diff --git a/src/samples/__init__.py b/src/samples/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/samples/__init__.py @@ -0,0 +1 @@ + diff --git a/core/samples/carto_sample.py b/src/samples/carto_sample.py similarity index 86% rename from core/samples/carto_sample.py rename to src/samples/carto_sample.py index 0c6e565..b866763 100644 --- a/core/samples/carto_sample.py +++ b/src/samples/carto_sample.py @@ -10,9 +10,13 @@ That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! """ -from core.data_sources.base import LongitudeRetriesExceeded -from core.data_sources.carto import CartoDataSource -from core.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.data_sources.base import LongitudeRetriesExceeded +from src.core.data_sources.carto import CartoDataSource +from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME if __name__ == "__main__": config = { @@ -24,7 +28,7 @@ ds.setup() if ds.is_ready: try: - data = ds.query('select * from county_population limit 30') + data = ds.query('select * from %s limit 30' % CARTO_TABLE_NAME) print(data) except LongitudeRetriesExceeded: print("Too many retries and no success...") diff --git a/src/samples/carto_sample_with_ram_cache.py b/src/samples/carto_sample_with_ram_cache.py new file mode 100644 index 0000000..21eaae1 --- /dev/null +++ b/src/samples/carto_sample_with_ram_cache.py @@ -0,0 +1,75 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ 
████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'carto_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! +""" + +import time +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.caches.ram import RamCache +from src.core.data_sources.base import LongitudeRetriesExceeded +from src.core.data_sources.carto import CartoDataSource +from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME + +if __name__ == "__main__": + config = { + 'api_key': CARTO_API_KEY, + 'user': CARTO_USER + } + + ds = CartoDataSource(config, cache_class=RamCache) + ds.setup() + if ds.is_ready: + try: + + REPEATED_QUERY = 'select * from %s limit 30' % CARTO_TABLE_NAME + start = time.time() + data = ds.query(REPEATED_QUERY) + elapsed = time.time() - start + print("It took %s without cache" % elapsed) + print('Uses cache? ' + str(data.comes_from_cache)) + + # Repeated read queries return cached values + start_with_cache = time.time() + cached_data = ds.query(REPEATED_QUERY) + elapsed_with_cache = time.time() - start_with_cache + print("It took %s with cache" % elapsed_with_cache) + print('Uses cache? ' + str(cached_data.comes_from_cache)) + + # Data is the same... 
+ assert str(data) == str(cached_data) + + # You can also disable the cache for a while (nothing gets read or written) + ds.disable_cache() + start = time.time() + data = ds.query(REPEATED_QUERY) + elapsed = time.time() - start + print('It took %s with disabled cache' % str(elapsed)) + print('Uses cache? ' + str(data.comes_from_cache)) + ds.enable_cache() + + # Or disable specific queries via query_config (nothing gets read or written) + query_config = ds.copy_default_query_config() + query_config.use_cache = False + start = time.time() + data = ds.query(REPEATED_QUERY, query_config=query_config) + elapsed = time.time() - start + print('It took %s with disabled cache (per-query)' % str(elapsed)) + print('Uses cache? ' + str(data.comes_from_cache)) + + print('If you see decreasing times it is probably because CARTOs cache doing its job!') + + except LongitudeRetriesExceeded: + print("Too many retries and no success...") + else: + print("Data source is not properly configured.") diff --git a/core/tests/__init__.py b/src/samples/load_sys_path.py similarity index 100% rename from core/tests/__init__.py rename to src/samples/load_sys_path.py From 63ba5162543ac9487b83636b8f02b5163012bd9f Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 11:37:32 +0100 Subject: [PATCH 11/47] Basic Redis put/get feature. Cache config added. Updated README. 
--- Pipfile | 1 + Pipfile.lock | 14 ++- README.md | 15 +++- docker-compose.yml | 10 +++ src/core/caches/base.py | 7 ++ src/core/caches/redis.py | 34 +++++++- src/core/data_sources/base.py | 2 +- src/samples/carto_sample_with_redis_cache.py | 89 ++++++++++++++++++++ 8 files changed, 164 insertions(+), 8 deletions(-) create mode 100644 src/samples/carto_sample_with_redis_cache.py diff --git a/Pipfile b/Pipfile index 2882383..0863cd0 100644 --- a/Pipfile +++ b/Pipfile @@ -11,6 +11,7 @@ pytest-cov = "*" [packages] carto = "*" +redis = "*" [requires] python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock index 8a8c4e1..02c9b38 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "686700a73ddd35aeb2c8718ed2b94bf73198540a3d195baabc8b5dc142e2c5e7" + "sha256": "dc93ba0a0c855d03807424c2598879586f22ed0da579923bc4eaaf3a8e2ba566" }, "pipfile-spec": 6, "requires": { @@ -52,9 +52,9 @@ }, "pyrestcli": { "hashes": [ - "sha256:b8f9b67380bf9024f8c73dc25bf0466afe4b0714732590247acdabeb8137deb9" + "sha256:4e98b5cfba0a300acc78a7a4b7c91826edf56b12b588aa316cae4bff8696c644" ], - "version": "==0.6.7" + "version": "==0.6.8" }, "python-dateutil": { "hashes": [ @@ -63,6 +63,14 @@ ], "version": "==2.7.5" }, + "redis": { + "hashes": [ + "sha256:2100750629beff143b6a200a2ea8e719fcf26420adabb81402895e144c5083cf", + "sha256:8e0bdd2de02e829b6225b25646f9fb9daffea99a252610d040409a6738541f0a" + ], + "index": "pypi", + "version": "==3.0.1" + }, "requests": { "hashes": [ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", diff --git a/README.md b/README.md index 284b6fb..6bd7a4f 100644 --- a/README.md +++ b/README.md @@ -8,12 +8,21 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [ ] Database model - [x] CARTO data source - - [x] Basic parametrized queries - - [ ] Bind/dynamic parameters in queries + - [x] Basic parametrized queries (i.e. 
templated queries) + - [ ] Protected parametrized queries (i.e. avoiding injection) + - [ ] Bind/dynamic parameters in queries (server-side render) - [ ] Postgres data source - [ ] driver 1 - [ ] driver 2 - [ ] Cache + - [ ] Base cache + - [x] Put + - [x] Get + - [x] Key generation + - [ ] Flush + - [ ] Expiration + - [x] Ram Cache + - [x] Redis Cache - [ ] Documentation - [ ] Unit tests - [ ] Sample scripts @@ -22,7 +31,7 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [ ] CI PyPi versioning -- [ ] Datos +- [ ] Data manipulation - [ ] Carto - [ ] DataFrame read/write - [ ] COPY diff --git a/docker-compose.yml b/docker-compose.yml index e53abf4..ff8a05c 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -5,3 +5,13 @@ services: command: bash volumes: - .:/usr/src/app + + cache: + image: redis:alpine + ports: + - "6379:6379" + command: + - redis-server + - --maxmemory 256mb + - --maxmemory-policy allkeys-lru + restart: unless-stopped \ No newline at end of file diff --git a/src/core/caches/base.py b/src/core/caches/base.py index b756ec7..628396c 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -1,8 +1,15 @@ import hashlib +import logging from ..data_sources.util import is_write_query class LongitudeCache: + default_config = {} + + def __init__(self, config=None): + self._config = config or self.default_config + self.logger = logging.getLogger(self.__class__.__module__) + @staticmethod def generate_key(formatted_query): """ diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 553f303..1c8b994 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -1,5 +1,37 @@ +import redis +import pickle from .base import LongitudeCache class RedisCache(LongitudeCache): - pass + default_config = { + 'host': 'localhost', + 'port': 6379, + 'db': 0 + } + + _values = None + + def setup(self): + self._values = redis.Redis(host=self._config['host'], port=self._config['port'], 
db=self._config['db']) + + @property + def is_ready(self): + try: + self._values.ping() + return True + except (ConnectionError, TimeoutError): + return False + except redis.exceptions.ConnectionError as e: + self.logger.error('Cannot connect to Redis server.') + self.logger.error(e) + return False + + def execute_get(self, key): + value = self._values.get(name=key) + if value: + return pickle.loads(value) + return None + + def execute_put(self, key, payload): + self._values.set(name=key, value=pickle.dumps(payload)) diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index b6bc796..8baa1e3 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -53,7 +53,7 @@ def __init__(self, config=None, cache_class: Type[LongitudeCache] = None): if not issubclass(cache_class, LongitudeCache): raise TypeError('Cache must derive from LongitudeCache or be None') else: - self._cache = cache_class() + self._cache = cache_class(config=config.get('cache')) default_keys = set(self.default_config.keys()) config_keys = set(config.keys()) diff --git a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py new file mode 100644 index 0000000..f0766fc --- /dev/null +++ b/src/samples/carto_sample_with_redis_cache.py @@ -0,0 +1,89 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'carto_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be 
ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! + +There is a cache entry in the docker-compose.yaml file. You can use it to run a local Redis container to test this: + +> sudo docker-compose up -d cache + +Also, you can connect to that container and check the cache using the CLI while running this program. + +> sudo docker exec -it longitude_cache_1 redis-cli + +""" + +import time +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.caches.redis import RedisCache +from src.core.data_sources.base import LongitudeRetriesExceeded +from src.core.data_sources.carto import CartoDataSource +from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME + +if __name__ == "__main__": + config = { + 'api_key': CARTO_API_KEY, + 'user': CARTO_USER, + 'cache': { + 'host': 'localhost', + 'port': 6379, + 'db': 0 + } + } + + ds = CartoDataSource(config, cache_class=RedisCache) + ds.setup() + if ds.is_ready: + try: + + REPEATED_QUERY = 'select * from %s limit 30' % CARTO_TABLE_NAME + start = time.time() + data = ds.query(REPEATED_QUERY) + elapsed = time.time() - start + print("It took %s without cache" % elapsed) + print('Uses cache? ' + str(data.comes_from_cache)) + + # Repeated read queries return cached values + start_with_cache = time.time() + cached_data = ds.query(REPEATED_QUERY) + elapsed_with_cache = time.time() - start_with_cache + print("It took %s with cache" % elapsed_with_cache) + print('Uses cache? ' + str(cached_data.comes_from_cache)) + + # Data is the same... + assert str(data) == str(cached_data) + + # You can also disable the cache for a while (nothing gets read or written) + ds.disable_cache() + start = time.time() + data = ds.query(REPEATED_QUERY) + elapsed = time.time() - start + print('It took %s with disabled cache' % str(elapsed)) + print('Uses cache? 
' + str(data.comes_from_cache)) + ds.enable_cache() + + # Or disable specific queries via query_config (nothing gets read or written) + query_config = ds.copy_default_query_config() + query_config.use_cache = False + start = time.time() + data = ds.query(REPEATED_QUERY, query_config=query_config) + elapsed = time.time() - start + print('It took %s with disabled cache (per-query)' % str(elapsed)) + print('Uses cache? ' + str(data.comes_from_cache)) + + print('If you see decreasing times it is probably because CARTOs cache doing its job!') + + except LongitudeRetriesExceeded: + print("Too many retries and no success...") + else: + print("Data source is not properly configured.") From 2458d02e6f2a9ffa473516112f28ad4b984ae242 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 12:02:19 +0100 Subject: [PATCH 12/47] Added flush to caches --- README.md | 2 +- src/core/caches/base.py | 8 ++++++++ src/core/caches/ram.py | 5 ++++- src/core/caches/redis.py | 3 +++ src/core/data_sources/base.py | 3 +++ src/samples/carto_sample_with_redis_cache.py | 3 +++ 6 files changed, 22 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 6bd7a4f..6cf96a6 100644 --- a/README.md +++ b/README.md @@ -19,7 +19,7 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [x] Put - [x] Get - [x] Key generation - - [ ] Flush + - [x] Flush - [ ] Expiration - [x] Ram Cache - [x] Redis Cache diff --git a/src/core/caches/base.py b/src/core/caches/base.py index 628396c..c45fad2 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -57,3 +57,11 @@ def execute_put(self, key, payload): :return: True if key was overwritten. False if key was new in the cache. 
""" raise NotImplementedError + + def flush(self): + """ + Custom action to make the cache empty + + :return: + """ + raise NotImplementedError diff --git a/src/core/caches/ram.py b/src/core/caches/ram.py index cd68b18..45edec7 100644 --- a/src/core/caches/ram.py +++ b/src/core/caches/ram.py @@ -8,7 +8,7 @@ class RamCache(LongitudeCache): _values = {} def setup(self): - self._values = {} + self.flush() @property def is_ready(self): @@ -21,3 +21,6 @@ def execute_put(self, key, payload): is_overwrite = key in self._values.keys() self._values[key] = payload return is_overwrite + + def flush(self): + self._values = {} diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 1c8b994..e198425 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -35,3 +35,6 @@ def execute_get(self, key): def execute_put(self, key, payload): self._values.set(name=key, value=pickle.dumps(payload)) + + def flush(self): + self._values.flushall() diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 8baa1e3..298c7d3 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -193,6 +193,9 @@ def parse_response(self, response): """ raise NotImplementedError + def flush_cache(self): + if self._cache: + self._cache.flush() class LongitudeQueryResponse: def __init__(self, rows=None, fields=None, profiling=None): diff --git a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py index f0766fc..efa06bf 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/src/samples/carto_sample_with_redis_cache.py @@ -83,6 +83,9 @@ print('If you see decreasing times it is probably because CARTOs cache doing its job!') + # As Redis is persistent for this script, we flush it after execution so next run does not hit at start + ds.flush_cache() + except LongitudeRetriesExceeded: print("Too many retries and no success...") else: From 000e826fb3a44f30773cfa00610378fa5df643d8 Mon Sep 17 
00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 12:31:40 +0100 Subject: [PATCH 13/47] Picke as default cache serializer. --- src/core/caches/base.py | 18 ++++++++++++++++-- src/core/caches/ram.py | 8 ++++++++ src/core/caches/redis.py | 8 ++------ src/core/data_sources/carto.py | 3 ++- 4 files changed, 28 insertions(+), 9 deletions(-) diff --git a/src/core/caches/base.py b/src/core/caches/base.py index c45fad2..9d28b2d 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -1,5 +1,6 @@ import hashlib import logging +import pickle from ..data_sources.util import is_write_query @@ -34,13 +35,14 @@ def get(self, formatted_query): if is_write_query(formatted_query): return None else: - return self.execute_get(self.generate_key(formatted_query)) + payload = self.execute_get(self.generate_key(formatted_query)) + return self.deserialize_payload(payload) def put(self, formatted_query, payload): if is_write_query(formatted_query): return None else: - self.execute_put(self.generate_key(formatted_query), payload) + self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) def execute_get(self, key): """ @@ -65,3 +67,15 @@ def flush(self): :return: """ raise NotImplementedError + + @staticmethod + def serialize_payload(payload): + if payload: + return pickle.dumps(payload) + return None + + @staticmethod + def deserialize_payload(payload): + if payload: + return pickle.loads(payload) + return None diff --git a/src/core/caches/ram.py b/src/core/caches/ram.py index 45edec7..fec705d 100644 --- a/src/core/caches/ram.py +++ b/src/core/caches/ram.py @@ -24,3 +24,11 @@ def execute_put(self, key, payload): def flush(self): self._values = {} + + @staticmethod + def serialize_payload(payload): + return payload + + @staticmethod + def deserialize_payload(payload): + return payload diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index e198425..5d8a731 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ 
-1,5 +1,4 @@ import redis -import pickle from .base import LongitudeCache @@ -28,13 +27,10 @@ def is_ready(self): return False def execute_get(self, key): - value = self._values.get(name=key) - if value: - return pickle.loads(value) - return None + return self._values.get(name=key) def execute_put(self, key, payload): - self._values.set(name=key, value=pickle.dumps(payload)) + self._values.set(name=key, value=payload) def flush(self): self._values.flushall() diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index d0d3c9b..6b51736 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -13,7 +13,8 @@ class CartoDataSource(DataSource): 'uses_batch': False, 'on_premise_domain': '', 'api_key': '', - 'user': '' + 'user': '', + 'cache': None } def __init__(self, config=None, cache_class=None): From 3fff4069a7eb66a42b39576f80e44facbd70b95d Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 12:39:07 +0100 Subject: [PATCH 14/47] Cache now stores normalized payloads only --- README.md | 8 +++++--- src/core/data_sources/base.py | 21 +++++++++++---------- 2 files changed, 16 insertions(+), 13 deletions(-) diff --git a/README.md b/README.md index 6cf96a6..5e9aa95 100644 --- a/README.md +++ b/README.md @@ -20,9 +20,11 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [x] Get - [x] Key generation - [x] Flush - - [ ] Expiration - - [x] Ram Cache - - [x] Redis Cache + - [ ] Tests + - [ ] Ram Cache + - [ ] Tests + - [ ] Redis Cache + - [ ] Tests - [ ] Documentation - [ ] Unit tests - [ ] Sample scripts diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 298c7d3..ae0de55 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -152,25 +152,25 @@ def query(self, statement, params=None, query_config=None, **opts): formatted_query = statement.format(**params) - response = None + normalized_response = None if self._cache and 
self.use_cache and query_config.use_cache and not query_is_writing: - response = self._cache.get(formatted_query) + normalized_response = self._cache.get(formatted_query) - if response: - # TODO: cached responses should, by default, be stored parsed/normalized - parsed_response = self.parse_response(response) - parsed_response.mark_as_cached() - return parsed_response + if normalized_response: + + normalized_response.mark_as_cached() + return normalized_response else: for r in range(self.tries): try: response = self.execute_query(formatted_query=formatted_query, query_config=query_config, **opts) + normalized_response = self.parse_response(response) if self._cache and self.use_cache and query_config.use_cache: - self._cache.put(formatted_query, response) + self._cache.put(formatted_query, normalized_response) - return self.parse_response(response) + return normalized_response except LongitudeQueryCannotBeExecutedException: self.logger.error('Query could not be executed. Retries left: %d' % (self.tries - r)) raise LongitudeRetriesExceeded @@ -189,7 +189,7 @@ def execute_query(self, formatted_query, query_config, **opts): def parse_response(self, response): """" :param response from an succesfully executed query - :return: A QueryResponse object + :return: A LongitudeQueryResponse object """ raise NotImplementedError @@ -197,6 +197,7 @@ def flush_cache(self): if self._cache: self._cache.flush() + class LongitudeQueryResponse: def __init__(self, rows=None, fields=None, profiling=None): self.rows = rows or [] From dcd2f107c1664700cf3b10d58d13e5a7259eba5c Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 13:05:25 +0100 Subject: [PATCH 15/47] Updated tests --- src/core/tests/test_data_source_base.py | 15 ++++++++------- src/core/tests/test_data_source_carto.py | 1 + 2 files changed, 9 insertions(+), 7 deletions(-) diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index d99e68f..5f32c61 100644 --- 
a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -39,7 +39,10 @@ def test_copy(self): class TestDataSource(TestCase): def setUp(self): + import pickle + class FakeCache(LongitudeCache): + @staticmethod def generate_key(formatted_query): if formatted_query == 'some_query_in_cache': @@ -55,7 +58,7 @@ def is_ready(self): def execute_get(self, key): if key == 'hit': - return 'cache hit' + return pickle.dumps(LongitudeQueryResponse()) return None def execute_put(self, key, payload): @@ -81,17 +84,14 @@ def test_write_queries_do_not_use_cache(self, is_write_mock): ds.query('some_query') @mock.patch('src.core.data_sources.base.is_write_query') - @mock.patch('src.core.data_sources.base.DataSource.parse_response') - def test_cache_hit(self, parse_response_mock, is_write_mock): + def test_cache_hit(self, is_write_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() # At high level, ds.query will return a normalized LongitudeQueryResponse # In this test we are interested in triggering that call to the parse function that would return such object, # but we do not care, in the abstract class, about what content is generated there. 
is_write_mock.return_value = False - parse_response_mock.mark_as_cached = True self.assertTrue(ds.query('some_query_in_cache').comes_from_cache) - parse_response_mock.assert_called_once_with('cache hit') @mock.patch('src.core.data_sources.base.is_write_query') @mock.patch('src.core.data_sources.base.DataSource.parse_response') @@ -119,8 +119,9 @@ def test_config(self): with self.assertLogs(level='WARNING') as log_test: ds = DataSource(config) self.assertEqual(log_test.output, - ['WARNING:src.core.data_sources.base:some_another_config_value is an unexpected config value', - 'WARNING:src.core.data_sources.base:some_config_value is an unexpected config value']) + [ + 'WARNING:src.core.data_sources.base:some_another_config_value is an unexpected config value', + 'WARNING:src.core.data_sources.base:some_config_value is an unexpected config value']) # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. self.assertEqual(0, ds.get_config('some_config_value')) diff --git a/src/core/tests/test_data_source_carto.py b/src/core/tests/test_data_source_carto.py index acf0f6e..39c9892 100644 --- a/src/core/tests/test_data_source_carto.py +++ b/src/core/tests/test_data_source_carto.py @@ -14,6 +14,7 @@ def test_default_configuration_loads(self): self.assertEqual(log_test.output, ['INFO:src.core.data_sources.carto:api_key key is using default value', 'INFO:src.core.data_sources.carto:api_version key is using default value', + 'INFO:src.core.data_sources.carto:cache key is using default value', 'INFO:src.core.data_sources.carto:on_premise_domain key is using default value', 'INFO:src.core.data_sources.carto:user key is using default value', 'INFO:src.core.data_sources.carto:uses_batch key is using default value'] From 08aa39781ac908fb4c28d77a969132a93511cc4e Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 13:47:29 +0100 Subject: [PATCH 16/47] Added tests for Redis cache and RAM cache. 
Improved total coverage up to 95%. --- README.md | 18 +++++------ src/core/caches/base.py | 2 +- src/core/caches/redis.py | 7 +++-- src/core/data_sources/base.py | 2 +- src/core/tests/test_cache_ram.py | 28 +++++++++++++++-- src/core/tests/test_cache_redis.py | 50 ++++++++++++++++++++++++++++-- 6 files changed, 88 insertions(+), 19 deletions(-) diff --git a/README.md b/README.md index 5e9aa95..e5af7ef 100644 --- a/README.md +++ b/README.md @@ -14,20 +14,20 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [ ] Postgres data source - [ ] driver 1 - [ ] driver 2 - - [ ] Cache - - [ ] Base cache + - [x] Cache + - [x] Base cache - [x] Put - [x] Get - [x] Key generation - [x] Flush - - [ ] Tests - - [ ] Ram Cache - - [ ] Tests - - [ ] Redis Cache - - [ ] Tests + - [x] Tests + - [x] Ram Cache + - [x] Tests + - [x] Redis Cache + - [x] Tests - [ ] Documentation - - [ ] Unit tests - - [ ] Sample scripts + - [x] Unit tests + - [x] Sample scripts - [ ] Config diff --git a/src/core/caches/base.py b/src/core/caches/base.py index 9d28b2d..9b65903 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -42,7 +42,7 @@ def put(self, formatted_query, payload): if is_write_query(formatted_query): return None else: - self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) + return self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) def execute_get(self, key): """ diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 5d8a731..784658b 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -19,18 +19,19 @@ def is_ready(self): try: self._values.ping() return True - except (ConnectionError, TimeoutError): + except TimeoutError: return False except redis.exceptions.ConnectionError as e: - self.logger.error('Cannot connect to Redis server.') - self.logger.error(e) + self.logger.error('Cannot connect to Redis server at %s:%d.' 
% (self._config['host'], self._config['port'])) return False def execute_get(self, key): return self._values.get(name=key) def execute_put(self, key, payload): + overwrite = self._values.exists(key) == 1 self._values.set(name=key, value=payload) + return overwrite def flush(self): self._values.flushall() diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index ae0de55..efbe2fd 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -194,7 +194,7 @@ def parse_response(self, response): raise NotImplementedError def flush_cache(self): - if self._cache: + if self._cache and self._cache.is_ready: self._cache.flush() diff --git a/src/core/tests/test_cache_ram.py b/src/core/tests/test_cache_ram.py index 7286294..b533c41 100644 --- a/src/core/tests/test_cache_ram.py +++ b/src/core/tests/test_cache_ram.py @@ -1,6 +1,28 @@ -from unittest import TestCase -from ..caches.redis import RedisCache +from unittest import TestCase, mock +from ..caches.ram import RamCache class TestRedisCache(TestCase): - pass + cache = None + + def setUp(self): + self.cache = RamCache() + + def test_setup_must_clean_cache(self): + with mock.patch('src.core.caches.ram.RamCache.flush') as fake_flush: + self.cache.setup() + fake_flush.assert_called_once() + self.assertTrue(self.cache.is_ready) + + def test_serialization_does_nothing(self): + self.assertEqual('value', self.cache.serialize_payload('value')) + self.assertEqual('value', self.cache.deserialize_payload('value')) + + def test_read_write_flush_cycle(self): + self.assertIsNone(self.cache.get('fake_key')) + self.assertFalse(self.cache.put('key', 'value')) + self.assertEqual('value', self.cache.get('key')) + self.assertTrue(self.cache.put('key', 'another value')) + self.assertEqual('another value', self.cache.get('key')) + self.cache.flush() + self.assertIsNone(self.cache.get('key')) diff --git a/src/core/tests/test_cache_redis.py b/src/core/tests/test_cache_redis.py index 7286294..719a74c 100644 
--- a/src/core/tests/test_cache_redis.py +++ b/src/core/tests/test_cache_redis.py @@ -1,6 +1,52 @@ -from unittest import TestCase +import redis.exceptions +from unittest import TestCase, mock from ..caches.redis import RedisCache +@mock.patch('src.core.caches.redis.redis.Redis') class TestRedisCache(TestCase): - pass + cache = None + + def setUp(self): + test_config = { + 'host': 'some_host', + 'port': 666, + 'db': 0 + } + self.cache = RedisCache(config=test_config) + + def test_is_ready_if_redis_returns_ping(self, redis_mock): + redis_mock.return_value.ping.return_value = True + self.cache.setup() + self.assertTrue(self.cache.is_ready) + + redis_mock.return_value.get.return_value = None + self.assertIsNone(self.cache.get('fake_key')) + redis_mock.return_value.get.assert_called_once() + + redis_mock.return_value.set.return_value = None + self.assertFalse(self.cache.put('some_key', 'some_payload')) + redis_mock.return_value.exists.return_value = 1 + self.assertTrue(self.cache.put('some_key', 'some_payload')) + self.assertEqual(2, redis_mock.return_value.set.call_count) + + redis_mock.return_value.flushall.return_value = None + self.cache.flush() + redis_mock.return_value.flushall.assert_called_once() + + def test_is_not_ready_if_redis_fails_ping_because_of_connection_error(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ConnectionError + self.cache.setup() + + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + expected_log = [ + 'ERROR:src.core.caches.redis:Cannot connect to Redis server at some_host:666.' 
+ ] + + self.assertEqual(expected_log, log_test.output) + + def test_is_not_ready_if_redis_fails_ping_because_of_timeout(self, redis_mock): + redis_mock.return_value.ping.side_effect = TimeoutError + self.cache.setup() + self.assertFalse(self.cache.is_ready) From fec13592ecca16161dcbf3c23883f22e9d42d791 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 28 Jan 2019 16:54:02 +0100 Subject: [PATCH 17/47] Cache configuration wrapped in class instead of dictionary (only for Redis for now). Password field added to Redis configuration, including associated error messages. --- docker-compose.yml | 1 + src/core/caches/redis.py | 33 +++++++++++++++----- src/core/tests/test_cache_redis.py | 30 +++++++++++++----- src/samples/carto_sample_with_redis_cache.py | 8 ++--- 4 files changed, 51 insertions(+), 21 deletions(-) diff --git a/docker-compose.yml b/docker-compose.yml index ff8a05c..0de2861 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -12,6 +12,7 @@ services: - "6379:6379" command: - redis-server + - --requirepass longitude - --maxmemory 256mb - --maxmemory-policy allkeys-lru restart: unless-stopped \ No newline at end of file diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 784658b..8821347 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -2,17 +2,26 @@ from .base import LongitudeCache +class RedisCacheConfig: + def __init__(self, host='localhost', port=6379, db=0, password=None): + self.host = host + self.port = port + self.db = db + self.password = password + + class RedisCache(LongitudeCache): - default_config = { - 'host': 'localhost', - 'port': 6379, - 'db': 0 - } + _default_config = RedisCacheConfig() _values = None def setup(self): - self._values = redis.Redis(host=self._config['host'], port=self._config['port'], db=self._config['db']) + self._values = redis.Redis( + host=self._config.host, + port=self._config.port, + db=self._config.db, + password=self._config.password + ) @property def is_ready(self): 
@@ -21,8 +30,16 @@ def is_ready(self): return True except TimeoutError: return False - except redis.exceptions.ConnectionError as e: - self.logger.error('Cannot connect to Redis server at %s:%d.' % (self._config['host'], self._config['port'])) + except redis.exceptions.ConnectionError: + self.logger.error('Cannot connect to Redis server at %s:%d.' % (self._config.host, self._config.port)) + return False + except redis.exceptions.ResponseError as e: + msg = str(e) + if str(e) == 'invalid password': + msg = 'Redis password is wrong.' + elif str(e) == "NOAUTH Authentication required.": + msg = 'Redis password required.' + self.logger.error(msg) return False def execute_get(self, key): diff --git a/src/core/tests/test_cache_redis.py b/src/core/tests/test_cache_redis.py index 719a74c..defd7cf 100644 --- a/src/core/tests/test_cache_redis.py +++ b/src/core/tests/test_cache_redis.py @@ -1,6 +1,6 @@ import redis.exceptions from unittest import TestCase, mock -from ..caches.redis import RedisCache +from ..caches.redis import RedisCache, RedisCacheConfig @mock.patch('src.core.caches.redis.redis.Redis') @@ -8,12 +8,7 @@ class TestRedisCache(TestCase): cache = None def setUp(self): - test_config = { - 'host': 'some_host', - 'port': 666, - 'db': 0 - } - self.cache = RedisCache(config=test_config) + self.cache = RedisCache(config=RedisCacheConfig(host='some_host', port=666, db=0, password='some_pass')) def test_is_ready_if_redis_returns_ping(self, redis_mock): redis_mock.return_value.ping.return_value = True @@ -50,3 +45,24 @@ def test_is_not_ready_if_redis_fails_ping_because_of_timeout(self, redis_mock): redis_mock.return_value.ping.side_effect = TimeoutError self.cache.setup() self.assertFalse(self.cache.is_ready) + + def test_is_not_ready_because_no_password(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('NOAUTH Authentication required.') + self.cache.setup() + with self.assertLogs(level='ERROR') as log_test: + 
self.assertFalse(self.cache.is_ready) + self.assertEqual(['ERROR:src.core.caches.redis:Redis password required.'], log_test.output) + + def test_is_not_ready_because_wrong_password(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('invalid password') + self.cache.setup() + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + self.assertEqual(['ERROR:src.core.caches.redis:Redis password is wrong.'], log_test.output) + + def test_is_not_ready_because_of_generic_response_error(self, redis_mock): + redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('some error text') + self.cache.setup() + with self.assertLogs(level='ERROR') as log_test: + self.assertFalse(self.cache.is_ready) + self.assertEqual(['ERROR:src.core.caches.redis:some error text'], log_test.output) diff --git a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py index efa06bf..cfbea64 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/src/samples/carto_sample_with_redis_cache.py @@ -25,7 +25,7 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.caches.redis import RedisCache +from src.core.caches.redis import RedisCache, RedisCacheConfig from src.core.data_sources.base import LongitudeRetriesExceeded from src.core.data_sources.carto import CartoDataSource from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME @@ -34,11 +34,7 @@ config = { 'api_key': CARTO_API_KEY, 'user': CARTO_USER, - 'cache': { - 'host': 'localhost', - 'port': 6379, - 'db': 0 - } + 'cache': RedisCacheConfig(password='as') } ds = CartoDataSource(config, cache_class=RedisCache) From 1d9c0fe86cd9e974b23b354365c48e5f142dbd82 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Tue, 29 Jan 2019 12:06:44 +0100 Subject: [PATCH 18/47] Initial scaffold for postgres data source --- Pipfile | 1 + Pipfile.lock | 38 
+++++++++++++++++++++- README.md | 5 +-- docker-compose.yml | 12 ++++++- src/core/data_sources/postgres.py | 5 --- src/core/data_sources/postgres/__init__.py | 0 src/core/data_sources/postgres/default.py | 27 +++++++++++++++ 7 files changed, 79 insertions(+), 9 deletions(-) delete mode 100644 src/core/data_sources/postgres.py create mode 100644 src/core/data_sources/postgres/__init__.py create mode 100644 src/core/data_sources/postgres/default.py diff --git a/Pipfile b/Pipfile index 0863cd0..e096406 100644 --- a/Pipfile +++ b/Pipfile @@ -12,6 +12,7 @@ pytest-cov = "*" [packages] carto = "*" redis = "*" +psycopg2 = "*" [requires] python_version = "3.6" diff --git a/Pipfile.lock b/Pipfile.lock index 02c9b38..34dcfb1 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "dc93ba0a0c855d03807424c2598879586f22ed0da579923bc4eaaf3a8e2ba566" + "sha256": "c11c363cdb089b2f39c8f73ccac6ac44520fdd7e4fda64dcda17563757148f03" }, "pipfile-spec": 6, "requires": { @@ -50,6 +50,42 @@ ], "version": "==2.8" }, + "psycopg2": { + "hashes": [ + "sha256:02445ebbb3a11a3fe8202c413d5e6faf38bb75b4e336203ee144ca2c46529f94", + "sha256:0e9873e60f98f0c52339abf8f0339d1e22bfe5aae0bcf7aabd40c055175035ec", + "sha256:1148a5eb29073280bf9057c7fc45468592c1bb75a28f6df1591adb93c8cb63d0", + "sha256:259a8324e109d4922b0fcd046e223e289830e2568d6f4132a3702439e5fd532b", + "sha256:28dffa9ed4595429e61bacac41d3f9671bb613d1442ff43bcbec63d4f73ed5e8", + "sha256:314a74302d4737a3865d40ea50e430ce1543c921ba10f39d562e807cfe2edf2a", + "sha256:36b60201b6d215d7658a71493fdf6bd5e60ad9a0cffed39906627ff9f4f3afd3", + "sha256:3f9d532bce54c4234161176ff3b8688ff337575ca441ea27597e112dfcd0ee0c", + "sha256:5d222983847b40af989ad96c07fc3f07e47925e463baa5de716be8f805b41d9b", + "sha256:6757a6d2fc58f7d8f5d471ad180a0bd7b4dd3c7d681f051504fbea7ae29c8d6f", + "sha256:6a0e0f1e74edb0ab57d89680e59e7bfefad2bfbdf7c80eb38304d897d43674bb", + "sha256:6ca703ccdf734e886a1cf53eb702261110f6a8b0ed74bcad15f1399f74d3f189", 
+ "sha256:8513b953d8f443c446aa79a4cc8a898bd415fc5e29349054f03a7d696d495542", + "sha256:9262a5ce2038570cb81b4d6413720484cb1bc52c064b2f36228d735b1f98b794", + "sha256:97441f851d862a0c844d981cbee7ee62566c322ebb3d68f86d66aa99d483985b", + "sha256:a07feade155eb8e69b54dd6774cf6acf2d936660c61d8123b8b6b1f9247b67d6", + "sha256:a9b9c02c91b1e3ec1f1886b2d0a90a0ea07cc529cb7e6e472b556bc20ce658f3", + "sha256:ae88216f94728d691b945983140bf40d51a1ff6c7fe57def93949bf9339ed54a", + "sha256:b360ffd17659491f1a6ad7c928350e229c7b7bd83a2b922b6ee541245c7a776f", + "sha256:b4221957ceccf14b2abdabef42d806e791350be10e21b260d7c9ce49012cc19e", + "sha256:b90758e49d5e6b152a460d10b92f8a6ccf318fcc0ee814dcf53f3a6fc5328789", + "sha256:c669ea986190ed05fb289d0c100cc88064351f2b85177cbfd3564c4f4847d18c", + "sha256:d1b61999d15c79cf7f4f7cc9021477aef35277fc52452cf50fd13b713c84424d", + "sha256:de7bb043d1adaaf46e38d47e7a5f703bb3dab01376111e522b07d25e1a79c1e1", + "sha256:e393568e288d884b94d263f2669215197840d097c7e5b0acd1a51c1ea7d1aba8", + "sha256:ed7e0849337bd37d89f2c2b0216a0de863399ee5d363d31b1e5330a99044737b", + "sha256:f153f71c3164665d269a5d03c7fa76ba675c7a8de9dc09a4e2c2cdc9936a7b41", + "sha256:f1fb5a8427af099beb7f65093cbdb52e021b8e6dbdfaf020402a623f4181baf5", + "sha256:f36b333e9f86a2fba960c72b90c34be6ca71819e300f7b1fc3d2b0f0b2c546cd", + "sha256:f4526d078aedd5187d0508aa5f9a01eae6a48a470ed678406da94b4cd6524b7e" + ], + "index": "pypi", + "version": "==2.7.7" + }, "pyrestcli": { "hashes": [ "sha256:4e98b5cfba0a300acc78a7a4b7c91826edf56b12b588aa316cae4bff8696c644" diff --git a/README.md b/README.md index e5af7ef..b21896e 100644 --- a/README.md +++ b/README.md @@ -12,8 +12,9 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [ ] Protected parametrized queries (i.e. 
avoiding injection) - [ ] Bind/dynamic parameters in queries (server-side render) - [ ] Postgres data source - - [ ] driver 1 - - [ ] driver 2 + - [ ] psycopg2 + - [ ] SQLAlchemy + - [ ] Asyncpg - [x] Cache - [x] Base cache - [x] Put diff --git a/docker-compose.yml b/docker-compose.yml index 0de2861..02354c6 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -15,4 +15,14 @@ services: - --requirepass longitude - --maxmemory 256mb - --maxmemory-policy allkeys-lru - restart: unless-stopped \ No newline at end of file + restart: unless-stopped + + postgres: + image: kartoza/postgis + ports: + - "5432:5432" + environment: + POSTGRES_USER: longitude + POSTGRES_PASS: longitude + volumes: + - ./data/:/var/lib/postgresql/data/pgdata diff --git a/src/core/data_sources/postgres.py b/src/core/data_sources/postgres.py deleted file mode 100644 index b2bca5d..0000000 --- a/src/core/data_sources/postgres.py +++ /dev/null @@ -1,5 +0,0 @@ -from .base import DataSource - - -class PostgresDataSource(DataSource): - pass diff --git a/src/core/data_sources/postgres/__init__.py b/src/core/data_sources/postgres/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py new file mode 100644 index 0000000..f66afe9 --- /dev/null +++ b/src/core/data_sources/postgres/default.py @@ -0,0 +1,27 @@ +import psycopg2 +from ..base import DataSource + + +class DefaultPostgresDataSource(DataSource): + default_config = { + 'user': 'postgres', + 'password': '', + 'domain': 'localhost', + 'port': 5432 + + } + + def __init__(self, config=None, cache_class=None): + super().__init__(config, cache_class=cache_class) + + def setup(self): + super().setup() + + def is_ready(self): + return super().is_ready + + def execute_query(self, formatted_query, query_config, **opts): + pass + + def parse_response(self, response): + pass From 20ece33429f8aa4af6f4a9692f7fa0984b33df42 Mon Sep 17 00:00:00 2001 From: Dani 
Ramirez Date: Tue, 29 Jan 2019 12:15:07 +0100 Subject: [PATCH 19/47] Requied Python version updated to 3.7. Reference to shared google doc with roadmap removed. --- Pipfile | 4 +-- Pipfile.lock | 37 +++----------------- README.md | 26 +------------- src/samples/carto_sample_with_redis_cache.py | 2 +- 4 files changed, 9 insertions(+), 60 deletions(-) diff --git a/Pipfile b/Pipfile index 0863cd0..cb85bfc 100644 --- a/Pipfile +++ b/Pipfile @@ -4,14 +4,14 @@ url = "https://pypi.org/simple" verify_ssl = true [dev-packages] -pytest = "*" pylint = "*" coverage = "*" pytest-cov = "*" +pytest = "*" [packages] carto = "*" redis = "*" [requires] -python_version = "3.6" +python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 02c9b38..2275b54 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,11 +1,11 @@ { "_meta": { "hash": { - "sha256": "dc93ba0a0c855d03807424c2598879586f22ed0da579923bc4eaaf3a8e2ba566" + "sha256": "ab3156acf3fa3052e180aa64a4edc4d6ae09a8384fdd1f420fa11b21d4075d92" }, "pipfile-spec": 6, "requires": { - "python_version": "3.6" + "python_version": "3.7" }, "sources": [ { @@ -65,11 +65,11 @@ }, "redis": { "hashes": [ - "sha256:2100750629beff143b6a200a2ea8e719fcf26420adabb81402895e144c5083cf", - "sha256:8e0bdd2de02e829b6225b25646f9fb9daffea99a252610d040409a6738541f0a" + "sha256:74c892041cba46078ae1ef845241548baa3bd3634f9a6f0f952f006eb1619c71", + "sha256:7ba8612bbfd966dea8c62322543fed0095da2834dbd5a7c124afbc617a156aa7" ], "index": "pypi", - "version": "==3.0.1" + "version": "==3.1.0" }, "requests": { "hashes": [ @@ -254,33 +254,6 @@ ], "version": "==1.12.0" }, - "typed-ast": { - "hashes": [ - "sha256:023625bfa9359e29bd6e24cac2a4503495b49761d48a5f1e38333fc4ac4d93fe", - "sha256:07591f7a5fdff50e2e566c4c1e9df545c75d21e27d98d18cb405727ed0ef329c", - "sha256:153e526b0f4ffbfada72d0bb5ffe8574ba02803d2f3a9c605c8cf99dfedd72a2", - "sha256:3ad2bdcd46a4a1518d7376e9f5016d17718a9ed3c6a3f09203d832f6c165de4a", - 
"sha256:3ea98c84df53ada97ee1c5159bb3bc784bd734231235a1ede14c8ae0775049f7", - "sha256:51a7141ccd076fa561af107cfb7a8b6d06a008d92451a1ac7e73149d18e9a827", - "sha256:52c93cd10e6c24e7ac97e8615da9f224fd75c61770515cb323316c30830ddb33", - "sha256:6344c84baeda3d7b33e157f0b292e4dd53d05ddb57a63f738178c01cac4635c9", - "sha256:64699ca1b3bd5070bdeb043e6d43bc1d0cebe08008548f4a6bee782b0ecce032", - "sha256:74903f2e56bbffe29282ef8a5487d207d10be0f8513b41aff787d954a4cf91c9", - "sha256:7891710dba83c29ee2bd51ecaa82f60f6bede40271af781110c08be134207bf2", - "sha256:91976c56224e26c256a0de0f76d2004ab885a29423737684b4f7ebdd2f46dde2", - "sha256:9bad678a576ecc71f25eba9f1e3fd8d01c28c12a2834850b458428b3e855f062", - "sha256:b4726339a4c180a8b6ad9d8b50d2b6dc247e1b79b38fe2290549c98e82e4fd15", - "sha256:ba36f6aa3f8933edf94ea35826daf92cbb3ec248b89eccdc053d4a815d285357", - "sha256:bbc96bde544fd19e9ef168e4dfa5c3dfe704bfa78128fa76f361d64d6b0f731a", - "sha256:c0c927f1e44469056f7f2dada266c79b577da378bbde3f6d2ada726d131e4824", - "sha256:c0f9a3708008aa59f560fa1bd22385e05b79b8e38e0721a15a8402b089243442", - "sha256:f0bf6f36ff9c5643004171f11d2fdc745aa3953c5aacf2536a0685db9ceb3fb1", - "sha256:f5be39a0146be663cbf210a4d95c3c58b2d7df7b043c9047c5448e358f0550a2", - "sha256:fcd198bf19d9213e5cbf2cde2b9ef20a9856e716f76f9476157f90ae6de06cc6" - ], - "markers": "python_version < '3.7' and implementation_name == 'cpython'", - "version": "==1.2.0" - }, "wrapt": { "hashes": [ "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533" diff --git a/README.md b/README.md index e5af7ef..0e7517b 100644 --- a/README.md +++ b/README.md @@ -4,8 +4,6 @@ A **new** bunch of middleware functions to build applications on top of CARTO. ## Roadmap -A live document for the roadmap is [shared here](https://docs.google.com/document/d/1nO_JLaKFmr5h6MudDklFutv96CfkNjJxfd0xyh1szwM/edit#heading=h.44g51xumzfku) - - [ ] Database model - [x] CARTO data source - [x] Basic parametrized queries (i.e. 
templated queries) @@ -63,7 +61,7 @@ A live document for the roadmap is [shared here](https://docs.google.com/documen - [ ] Token storage - [ ] Documentation - ## As final user... +## As final user... How to use: ```bash @@ -101,25 +99,3 @@ The [```pytest-cov```](https://pytest-cov.readthedocs.io/en/latest/) plugin is b You can run something like: ```pytest --cov-report=html --cov=core core``` and the results will go in the defined html folder. There is a bash script called ```generate_core_coverage.sh``` that runs the coverage analysis and shows the report in your browser. - -## Upload a new version to PyPi - -You need to be part of *Geographica's development team* to be able to accomplish this task. - -Start docker -``` -docker-compose run --rm python bash -``` - -Install needed dependencies -``` -pip install -r requirements.txt -``` - -Set version at ```setup.py``` - -Upload: -``` -python setup.py sdist -twine upload dist/geographica-longitude-.tar.gz -``` diff --git a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py index cfbea64..1c75a7b 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/src/samples/carto_sample_with_redis_cache.py @@ -34,7 +34,7 @@ config = { 'api_key': CARTO_API_KEY, 'user': CARTO_USER, - 'cache': RedisCacheConfig(password='as') + 'cache': RedisCacheConfig(password='longitude') } ds = CartoDataSource(config, cache_class=RedisCache) From 2ecf601efea50cffb5fa2aaf102377af6b2f9e0a Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Tue, 29 Jan 2019 19:03:21 +0100 Subject: [PATCH 20/47] Basic Postgres sample running --- src/core/data_sources/base.py | 9 ++--- src/core/data_sources/postgres/default.py | 48 +++++++++++++++++++---- src/samples/.gitignore | 1 + src/samples/postgres_sample.py | 44 +++++++++++++++++++++ 4 files changed, 89 insertions(+), 13 deletions(-) create mode 100644 src/samples/postgres_sample.py diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 
efbe2fd..902d40c 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -146,10 +146,6 @@ def query(self, statement, params=None, query_config=None, **opts): query_config = self._default_query_config query_is_writing = is_write_query(statement) - - if query_is_writing: - raise LongitudeWrongQueryException('Aborted query. No write queries allowed for now.') - formatted_query = statement.format(**params) normalized_response = None @@ -157,7 +153,6 @@ def query(self, statement, params=None, query_config=None, **opts): normalized_response = self._cache.get(formatted_query) if normalized_response: - normalized_response.mark_as_cached() return normalized_response else: @@ -238,7 +233,9 @@ def render_value(value): headers = [k for k, v in self.fields.items()] lines = [render_line(headers)] + lines - render = '\n'.join(lines) + '\n\n' + '... time = %f' % self.profiling['response_time'] + render = '\n'.join(lines) + if self.profiling and 'response_time' in self.profiling.keys(): + render += '\n\n' + '... 
time = %f' % self.profiling['response_time'] return render def __str__(self): diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index f66afe9..198c3aa 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -1,27 +1,61 @@ import psycopg2 +import psycopg2.extensions from ..base import DataSource +from ..base import LongitudeQueryResponse class DefaultPostgresDataSource(DataSource): default_config = { + 'host': 'localhost', + 'port': 5432, + 'db': '', 'user': 'postgres', - 'password': '', - 'domain': 'localhost', - 'port': 5432 - + 'password': '' } def __init__(self, config=None, cache_class=None): + self._conn = None + self._cursor = None super().__init__(config, cache_class=cache_class) + def __del__(self): + if self._cursor: + self._cursor.close() + if self._conn: + self._conn.close() + def setup(self): + self._conn = psycopg2.connect( + host=self.get_config('host'), + port=self.get_config('port'), + database=self.get_config('db'), + user=self.get_config('user'), + password=self.get_config('password') + ) + + self._cursor = self._conn.cursor() super().setup() def is_ready(self): - return super().is_ready + return super().is_ready and self._conn and self._cursor def execute_query(self, formatted_query, query_config, **opts): - pass + self._cursor.execute(formatted_query) + data = None + if self._cursor.description: + data = { + 'fields': self._cursor.description, + 'rows': self._cursor.fetchall() + } + self._conn.commit() + return data + + @staticmethod + def _type_as_string(type_id): + return psycopg2.extensions.string_types[type_id] def parse_response(self, response): - pass + if response: + fields_names = {n.name: self._type_as_string(n.type_code).name for n in response['fields']} + return LongitudeQueryResponse(rows=response['rows'], fields=fields_names) + return None diff --git a/src/samples/.gitignore b/src/samples/.gitignore index 730f397..90d70b6 100644 --- 
a/src/samples/.gitignore +++ b/src/samples/.gitignore @@ -1 +1,2 @@ carto_sample_config.py +postgres_sample_config.py \ No newline at end of file diff --git a/src/samples/postgres_sample.py b/src/samples/postgres_sample.py new file mode 100644 index 0000000..e496ef3 --- /dev/null +++ b/src/samples/postgres_sample.py @@ -0,0 +1,44 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'postgresql_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! 
+""" +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.data_sources.base import LongitudeRetriesExceeded +from src.core.data_sources.postgres.default import DefaultPostgresDataSource +from src.samples.postgres_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS + +if __name__ == "__main__": + config = { + 'host': POSTGRES_HOST or 'localhost', + 'port': POSTGRES_PORT or 5432, + 'db': POSTGRES_DB or 'longitude', + 'user': POSTGRES_USER or 'longitude', + 'password': POSTGRES_PASS or 'longitude' + } + + ds = DefaultPostgresDataSource(config) + ds.setup() + if ds.is_ready: + try: + ds.query("drop table if exists users") + ds.query( + 'create table users(id serial PRIMARY KEY, name varchar(50) UNIQUE NOT NULL, password varchar(50))') + ds.query("insert into users(name, password) values('longitude', 'password')") + data = ds.query('select * from users') + print(data) + + except LongitudeRetriesExceeded: + print("Too many retries and no success...") + else: + print("Data source is not properly configured.") From fe2078cd5ac54b2a42e261aeb5b5eda41a76a5b8 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Tue, 29 Jan 2019 19:09:54 +0100 Subject: [PATCH 21/47] psycopg2 package removed in favour of psycopg2-binary distribution. Fixed tests. 
--- Pipfile | 2 +- Pipfile.lock | 64 ++++++++++----------- src/core/tests/test_data_source_base.py | 10 ---- src/core/tests/test_data_source_postgres.py | 4 +- 4 files changed, 35 insertions(+), 45 deletions(-) diff --git a/Pipfile b/Pipfile index 0cb374a..9df5c80 100644 --- a/Pipfile +++ b/Pipfile @@ -12,7 +12,7 @@ pytest = "*" [packages] carto = "*" redis = "*" -psycopg2 = "*" +psycopg2-binary = "*" [requires] python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 3ac68c1..909a484 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "60239ef9e6f8cc68705e00f948a573488a5d1bf6f99013e77a28b3739f243547" + "sha256": "488fbc4fb0af683f3f8a07d5fdb2b0da5e7cc0943de60f0ea3f6f4d24cc3f9d9" }, "pipfile-spec": 6, "requires": { @@ -50,38 +50,38 @@ ], "version": "==2.8" }, - "psycopg2": { + "psycopg2-binary": { "hashes": [ - "sha256:02445ebbb3a11a3fe8202c413d5e6faf38bb75b4e336203ee144ca2c46529f94", - "sha256:0e9873e60f98f0c52339abf8f0339d1e22bfe5aae0bcf7aabd40c055175035ec", - "sha256:1148a5eb29073280bf9057c7fc45468592c1bb75a28f6df1591adb93c8cb63d0", - "sha256:259a8324e109d4922b0fcd046e223e289830e2568d6f4132a3702439e5fd532b", - "sha256:28dffa9ed4595429e61bacac41d3f9671bb613d1442ff43bcbec63d4f73ed5e8", - "sha256:314a74302d4737a3865d40ea50e430ce1543c921ba10f39d562e807cfe2edf2a", - "sha256:36b60201b6d215d7658a71493fdf6bd5e60ad9a0cffed39906627ff9f4f3afd3", - "sha256:3f9d532bce54c4234161176ff3b8688ff337575ca441ea27597e112dfcd0ee0c", - "sha256:5d222983847b40af989ad96c07fc3f07e47925e463baa5de716be8f805b41d9b", - "sha256:6757a6d2fc58f7d8f5d471ad180a0bd7b4dd3c7d681f051504fbea7ae29c8d6f", - "sha256:6a0e0f1e74edb0ab57d89680e59e7bfefad2bfbdf7c80eb38304d897d43674bb", - "sha256:6ca703ccdf734e886a1cf53eb702261110f6a8b0ed74bcad15f1399f74d3f189", - "sha256:8513b953d8f443c446aa79a4cc8a898bd415fc5e29349054f03a7d696d495542", - "sha256:9262a5ce2038570cb81b4d6413720484cb1bc52c064b2f36228d735b1f98b794", - 
"sha256:97441f851d862a0c844d981cbee7ee62566c322ebb3d68f86d66aa99d483985b", - "sha256:a07feade155eb8e69b54dd6774cf6acf2d936660c61d8123b8b6b1f9247b67d6", - "sha256:a9b9c02c91b1e3ec1f1886b2d0a90a0ea07cc529cb7e6e472b556bc20ce658f3", - "sha256:ae88216f94728d691b945983140bf40d51a1ff6c7fe57def93949bf9339ed54a", - "sha256:b360ffd17659491f1a6ad7c928350e229c7b7bd83a2b922b6ee541245c7a776f", - "sha256:b4221957ceccf14b2abdabef42d806e791350be10e21b260d7c9ce49012cc19e", - "sha256:b90758e49d5e6b152a460d10b92f8a6ccf318fcc0ee814dcf53f3a6fc5328789", - "sha256:c669ea986190ed05fb289d0c100cc88064351f2b85177cbfd3564c4f4847d18c", - "sha256:d1b61999d15c79cf7f4f7cc9021477aef35277fc52452cf50fd13b713c84424d", - "sha256:de7bb043d1adaaf46e38d47e7a5f703bb3dab01376111e522b07d25e1a79c1e1", - "sha256:e393568e288d884b94d263f2669215197840d097c7e5b0acd1a51c1ea7d1aba8", - "sha256:ed7e0849337bd37d89f2c2b0216a0de863399ee5d363d31b1e5330a99044737b", - "sha256:f153f71c3164665d269a5d03c7fa76ba675c7a8de9dc09a4e2c2cdc9936a7b41", - "sha256:f1fb5a8427af099beb7f65093cbdb52e021b8e6dbdfaf020402a623f4181baf5", - "sha256:f36b333e9f86a2fba960c72b90c34be6ca71819e300f7b1fc3d2b0f0b2c546cd", - "sha256:f4526d078aedd5187d0508aa5f9a01eae6a48a470ed678406da94b4cd6524b7e" + "sha256:19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", + "sha256:2b69cf4b0fa2716fd977aa4e1fd39af6110eb47b2bb30b4e5a469d8fbecfc102", + "sha256:2e952fa17ba48cbc2dc063ddeec37d7dc4ea0ef7db0ac1eda8906365a8543f31", + "sha256:348b49dd737ff74cfb5e663e18cb069b44c64f77ec0523b5794efafbfa7df0b8", + "sha256:3d72a5fdc5f00ca85160915eb9a973cf9a0ab8148f6eda40708bf672c55ac1d1", + "sha256:4957452f7868f43f32c090dadb4188e9c74a4687323c87a882e943c2bd4780c3", + "sha256:5138cec2ee1e53a671e11cc519505eb08aaaaf390c508f25b09605763d48de4b", + "sha256:587098ca4fc46c95736459d171102336af12f0d415b3b865972a79c03f06259f", + "sha256:5b79368bcdb1da4a05f931b62760bea0955ee2c81531d8e84625df2defd3f709", + "sha256:5cf43807392247d9bc99737160da32d3fa619e0bfd85ba24d1c78db205f472a4", 
+ "sha256:676d1a80b1eebc0cacae8dd09b2fde24213173bf65650d22b038c5ed4039f392", + "sha256:6b0211ecda389101a7d1d3df2eba0cf7ffbdd2480ca6f1d2257c7bd739e84110", + "sha256:79cde4660de6f0bb523c229763bd8ad9a93ac6760b72c369cf1213955c430934", + "sha256:7aba9786ac32c2a6d5fb446002ed936b47d5e1f10c466ef7e48f66eb9f9ebe3b", + "sha256:7c8159352244e11bdd422226aa17651110b600d175220c451a9acf795e7414e0", + "sha256:945f2eedf4fc6b2432697eb90bb98cc467de5147869e57405bfc31fa0b824741", + "sha256:96b4e902cde37a7fc6ab306b3ac089a3949e6ce3d824eeca5b19dc0bedb9f6e2", + "sha256:9a7bccb1212e63f309eb9fab47b6eaef796f59850f169a25695b248ca1bf681b", + "sha256:a3bfcac727538ec11af304b5eccadbac952d4cca1a551a29b8fe554e3ad535dc", + "sha256:b19e9f1b85c5d6136f5a0549abdc55dcbd63aba18b4f10d0d063eb65ef2c68b4", + "sha256:b664011bb14ca1f2287c17185e222f2098f7b4c857961dbcf9badb28786dbbf4", + "sha256:bde7959ef012b628868d69c474ec4920252656d0800835ed999ba5e4f57e3e2e", + "sha256:cb095a0657d792c8de9f7c9a0452385a309dfb1bbbb3357d6b1e216353ade6ca", + "sha256:d16d42a1b9772152c1fe606f679b2316551f7e1a1ce273e7f808e82a136cdb3d", + "sha256:d444b1545430ffc1e7a24ce5a9be122ccd3b135a7b7e695c5862c5aff0b11159", + "sha256:d93ccc7bf409ec0a23f2ac70977507e0b8a8d8c54e5ee46109af2f0ec9e411f3", + "sha256:df6444f952ca849016902662e1a47abf4fa0678d75f92fd9dd27f20525f809cd", + "sha256:e63850d8c52ba2b502662bf3c02603175c2397a9acc756090e444ce49508d41e", + "sha256:ec43358c105794bc2b6fd34c68d27f92bea7102393c01889e93f4b6a70975728", + "sha256:f4c6926d9c03dadce7a3b378b40d2fea912c1344ef9b29869f984fb3d2a2420b" ], "index": "pypi", "version": "==2.7.7" diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index 5f32c61..a759470 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -73,16 +73,6 @@ class PoorlyImplementedCache: with self.assertRaises(TypeError): DataSource({}, cache_class=PoorlyImplementedCache) - @mock.patch('src.core.data_sources.base.is_write_query') - def 
test_write_queries_do_not_use_cache(self, is_write_mock): - ds = DataSource({}, cache_class=self._cache_class) - ds.setup() - self.assertTrue(ds.is_ready) - - is_write_mock.return_value = True - with self.assertRaises(LongitudeWrongQueryException): - ds.query('some_query') - @mock.patch('src.core.data_sources.base.is_write_query') def test_cache_hit(self, is_write_mock): ds = DataSource({}, cache_class=self._cache_class) diff --git a/src/core/tests/test_data_source_postgres.py b/src/core/tests/test_data_source_postgres.py index 849b865..11143cb 100644 --- a/src/core/tests/test_data_source_postgres.py +++ b/src/core/tests/test_data_source_postgres.py @@ -1,2 +1,2 @@ -from ..data_sources.postgres import PostgresDataSource - +from unittest import TestCase +from ..data_sources.postgres.default import DefaultPostgresDataSource From a50754fa3e9726424fd0d63349c318c4e41d7fef Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 30 Jan 2019 11:59:49 +0100 Subject: [PATCH 22/47] Write query comprobation removed. Closes #33. Use of cache is now configurable in a per-query basis using a bool parameter in the query() method. 
--- src/core/caches/base.py | 13 +++--------- src/core/data_sources/base.py | 21 +++++++++----------- src/core/data_sources/util.py | 10 ---------- src/core/tests/test_cache_base.py | 14 ++----------- src/core/tests/test_data_source_base.py | 8 ++------ src/samples/carto_sample_with_ram_cache.py | 3 +-- src/samples/carto_sample_with_redis_cache.py | 3 +-- 7 files changed, 18 insertions(+), 54 deletions(-) delete mode 100644 src/core/data_sources/util.py diff --git a/src/core/caches/base.py b/src/core/caches/base.py index 9b65903..67f6df5 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -1,7 +1,6 @@ import hashlib import logging import pickle -from ..data_sources.util import is_write_query class LongitudeCache: @@ -32,17 +31,11 @@ def is_ready(self): raise NotImplementedError def get(self, formatted_query): - if is_write_query(formatted_query): - return None - else: - payload = self.execute_get(self.generate_key(formatted_query)) - return self.deserialize_payload(payload) + payload = self.execute_get(self.generate_key(formatted_query)) + return self.deserialize_payload(payload) def put(self, formatted_query, payload): - if is_write_query(formatted_query): - return None - else: - return self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) + return self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) def execute_get(self, key): """ diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 902d40c..65bb2b8 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -2,7 +2,6 @@ from typing import Type from ..caches.base import LongitudeCache -from .util import is_write_query class LongitudeBaseException(Exception): @@ -22,16 +21,14 @@ class LongitudeWrongQueryException(LongitudeBaseException): class DataSourceQueryConfig: - def __init__(self, enable_writing=False, retries=0, custom=None, use_cache=True): - self.use_cache = use_cache - 
self.enable_writing = enable_writing + def __init__(self, retries=0, custom=None): self.retries = retries # Depending on the specific interface (i.e.: CARTO, Postgres...), we might also need to specify per-query values self.custom = custom or {} def copy(self): - return DataSourceQueryConfig(self.enable_writing, self.retries, self.custom) + return DataSourceQueryConfig(self.retries, self.custom) class DataSource: @@ -40,7 +37,7 @@ class DataSource: def __init__(self, config=None, cache_class: Type[LongitudeCache] = None): self.logger = logging.getLogger(self.__class__.__module__) self._default_query_config = DataSourceQueryConfig() - self.use_cache = True + self._use_cache = True self._cache = None if config is None: @@ -123,18 +120,19 @@ def get_config(self, key: str): return None def enable_cache(self): - self.use_cache = True + self._use_cache = True def disable_cache(self): - self.use_cache = False + self._use_cache = False - def query(self, statement, params=None, query_config=None, **opts): + def query(self, statement, params=None, use_cache=True, query_config=None, **opts): """ This method has to be called to interact with the data source. Each children class will have to implement its own .execute_query(...) with the specific behavior for each interface. :param statement: Unformatted SQL query :param params: Values to be passed to the query when formatting it + :param use_cache: Bool to indicate if this specific query should use cache or not (default: True) :param query_config: Specific query configuration. If None, the default one will be used. 
:param opts: :return: Result of querying the database @@ -145,11 +143,10 @@ def query(self, statement, params=None, query_config=None, **opts): if query_config is None: query_config = self._default_query_config - query_is_writing = is_write_query(statement) formatted_query = statement.format(**params) normalized_response = None - if self._cache and self.use_cache and query_config.use_cache and not query_is_writing: + if self._cache and self._use_cache and use_cache: normalized_response = self._cache.get(formatted_query) if normalized_response: @@ -162,7 +159,7 @@ def query(self, statement, params=None, query_config=None, **opts): query_config=query_config, **opts) normalized_response = self.parse_response(response) - if self._cache and self.use_cache and query_config.use_cache: + if self._cache and self._use_cache and use_cache: self._cache.put(formatted_query, normalized_response) return normalized_response diff --git a/src/core/data_sources/util.py b/src/core/data_sources/util.py deleted file mode 100644 index ba4aea7..0000000 --- a/src/core/data_sources/util.py +++ /dev/null @@ -1,10 +0,0 @@ -import re - - -def is_write_query(sql_statement): - """ - Check if a query string is a write query - """ - write_cmds = 'drop|delete|insert|update|grant|execute|perform|create|begin|commit|alter' - is_write = re.search(write_cmds, sql_statement.lower()) - return is_write diff --git a/src/core/tests/test_cache_base.py b/src/core/tests/test_cache_base.py index 9bb4bda..24290b1 100644 --- a/src/core/tests/test_cache_base.py +++ b/src/core/tests/test_cache_base.py @@ -24,20 +24,10 @@ def test_generate_key(self): unique_key = set([LongitudeCache.generate_key('SOME_QUERY_OVER_AND_OVER') for _ in range(100)]) self.assertEqual(1, len(unique_key)) - @mock.patch('src.core.caches.base.is_write_query') - def test_get_and_put_returns_none_for_write_queries(self, is_write_mock): - is_write_mock.return_value = True - cache = LongitudeCache() - self.assertIsNone(cache.get('some_query')) - 
self.assertIsNone(cache.put('some_query', payload='whatever')) - self.assertEqual(2, is_write_mock.call_count) - - @mock.patch('src.core.caches.base.is_write_query') - def test_get_nor_put_are_implemented_in_base_class(self, is_write_mock): - is_write_mock.return_value = False + def test_get_nor_put_are_implemented_in_base_class(self): cache = LongitudeCache() with self.assertRaises(NotImplementedError): cache.get('some_query') with self.assertRaises(NotImplementedError): cache.put('some_query', payload='whatever') - self.assertEqual(2, is_write_mock.call_count) + diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index a759470..c72437d 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -73,23 +73,19 @@ class PoorlyImplementedCache: with self.assertRaises(TypeError): DataSource({}, cache_class=PoorlyImplementedCache) - @mock.patch('src.core.data_sources.base.is_write_query') - def test_cache_hit(self, is_write_mock): + def test_cache_hit(self): ds = DataSource({}, cache_class=self._cache_class) ds.setup() # At high level, ds.query will return a normalized LongitudeQueryResponse # In this test we are interested in triggering that call to the parse function that would return such object, # but we do not care, in the abstract class, about what content is generated there. 
- is_write_mock.return_value = False self.assertTrue(ds.query('some_query_in_cache').comes_from_cache) - @mock.patch('src.core.data_sources.base.is_write_query') @mock.patch('src.core.data_sources.base.DataSource.parse_response') @mock.patch('src.core.data_sources.base.DataSource.execute_query') - def test_cache_miss(self, execute_query_mock, parse_response_mock, is_write_mock): + def test_cache_miss(self, execute_query_mock, parse_response_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() - is_write_mock.return_value = False execute_query_mock.return_value = 'some response from the server' parse_response_mock.return_value = 'normalized response from data source' self.assertEqual('normalized response from data source', ds.query('some_query_not_in_cache')) diff --git a/src/samples/carto_sample_with_ram_cache.py b/src/samples/carto_sample_with_ram_cache.py index 21eaae1..afc7d93 100644 --- a/src/samples/carto_sample_with_ram_cache.py +++ b/src/samples/carto_sample_with_ram_cache.py @@ -60,9 +60,8 @@ # Or disable specific queries via query_config (nothing gets read or written) query_config = ds.copy_default_query_config() - query_config.use_cache = False start = time.time() - data = ds.query(REPEATED_QUERY, query_config=query_config) + data = ds.query(REPEATED_QUERY, query_config=query_config, use_cache=False) elapsed = time.time() - start print('It took %s with disabled cache (per-query)' % str(elapsed)) print('Uses cache? 
' + str(data.comes_from_cache)) diff --git a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py index 1c75a7b..5167409 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/src/samples/carto_sample_with_redis_cache.py @@ -70,9 +70,8 @@ # Or disable specific queries via query_config (nothing gets read or written) query_config = ds.copy_default_query_config() - query_config.use_cache = False start = time.time() - data = ds.query(REPEATED_QUERY, query_config=query_config) + data = ds.query(REPEATED_QUERY, query_config=query_config, use_cache=False) elapsed = time.time() - start print('It took %s with disabled cache (per-query)' % str(elapsed)) print('Uses cache? ' + str(data.comes_from_cache)) From edff45b1187ec11dbace5d466ecb823218db8172 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 30 Jan 2019 12:27:38 +0100 Subject: [PATCH 23/47] Previews for query response removed. --- src/core/data_sources/base.py | 34 ------------------- .../raw_text/query_response_render_bottom.txt | 13 ------- .../raw_text/query_response_render_top.txt | 13 ------- src/core/tests/test_data_source_base.py | 19 +---------- 4 files changed, 1 insertion(+), 78 deletions(-) delete mode 100644 src/core/tests/raw_text/query_response_render_bottom.txt delete mode 100644 src/core/tests/raw_text/query_response_render_top.txt diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 65bb2b8..aa988f7 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -203,37 +203,3 @@ def comes_from_cache(self): def mark_as_cached(self): self._from_cache = True - - def preview_top(self): - return self._preview(10) - - def preview_bottom(self): - return self._preview(-10) - - def _preview(self, limit): - def render_line(values): - def render_value(value): - value = str(value) - if len(value) > 20: - value = value[:14] + ' (...)' - return value - - values = [render_value(v) + '\t' for v in values] - return '| ' 
+ '| '.join(values) + '\t|' - - if limit > 0: - preview_list = self.rows[:limit] - else: - preview_list = self.rows[limit:] - - lines = [render_line(l) for l in preview_list] - headers = [k for k, v in self.fields.items()] - - lines = [render_line(headers)] + lines - render = '\n'.join(lines) - if self.profiling and 'response_time' in self.profiling.keys(): - render += '\n\n' + '... time = %f' % self.profiling['response_time'] - return render - - def __str__(self): - return self.preview_top() diff --git a/src/core/tests/raw_text/query_response_render_bottom.txt b/src/core/tests/raw_text/query_response_render_bottom.txt deleted file mode 100644 index 3323432..0000000 --- a/src/core/tests/raw_text/query_response_render_bottom.txt +++ /dev/null @@ -1,13 +0,0 @@ -| As | Bs | -| A10 | B10 | -| A11 | B11 | -| A12 | B12 | -| A13 | B13 | -| A14 | B14 | -| A15 | B15 | -| A16 | B16 | -| A17 | B17 | -| A18 | B18 | -| A19 | B19 | - -... time = 42.000000 \ No newline at end of file diff --git a/src/core/tests/raw_text/query_response_render_top.txt b/src/core/tests/raw_text/query_response_render_top.txt deleted file mode 100644 index 81587b1..0000000 --- a/src/core/tests/raw_text/query_response_render_top.txt +++ /dev/null @@ -1,13 +0,0 @@ -| As | Bs | -| A0 | B0 | -| A1 | B1 | -| A2 | B2 | -| A3 | B3 | -| A4 | B4 | -| A5 | B5 | -| A6 | B6 | -| A7 | B7 | -| A8 | B8 | -| A9 | B9 | - -... 
time = 42.000000 \ No newline at end of file diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index c72437d..b6fefba 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -2,7 +2,7 @@ from unittest import TestCase, mock from ..caches.base import LongitudeCache -from ..data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse, LongitudeWrongQueryException +from ..data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse def load_raw_text(filename): @@ -11,23 +11,6 @@ def load_raw_text(filename): return f.read() -class TestLongitudeQueryResponse(TestCase): - def test_preview(self): - qr = LongitudeQueryResponse( - rows=[['A' + str(v), 'B' + str(v)] for v in range(20)], - fields={'As': {'type': 'string'}, 'Bs': {'type': 'string'}}, - profiling={'response_time': 42.0} - ) - - render_top = qr.preview_top() - expected_render_top = load_raw_text('query_response_render_top.txt') - self.assertEqual(expected_render_top, render_top) - - render_bottom = qr.preview_bottom() - expected_render_bottom = load_raw_text('query_response_render_bottom.txt') - self.assertEqual(expected_render_bottom, render_bottom) - - class TestDataSourceQueryConfig(TestCase): def test_copy(self): a = DataSourceQueryConfig() From 07054fc62f2117c162523a94a2e680bbbb07ab40 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 30 Jan 2019 13:32:00 +0100 Subject: [PATCH 24/47] Configuration logic extracted to simple class. Now it is common for data sources and cache classes. 
--- src/core/caches/base.py | 8 ++- src/core/caches/redis.py | 26 ++++----- src/core/common/__init__.py | 1 + src/core/common/config.py | 48 ++++++++++++++++ src/core/common/exceptions.py | 18 ++++++ src/core/data_sources/base.py | 58 ++------------------ src/core/data_sources/carto.py | 2 +- src/core/data_sources/postgres/default.py | 2 +- src/core/tests/test_cache_redis.py | 4 +- src/core/tests/test_config.py | 34 ++++++++++++ src/core/tests/test_data_source_base.py | 24 +------- src/core/tests/test_data_source_carto.py | 13 +++-- src/samples/carto_sample_with_redis_cache.py | 4 +- 13 files changed, 137 insertions(+), 105 deletions(-) create mode 100644 src/core/common/__init__.py create mode 100644 src/core/common/config.py create mode 100644 src/core/common/exceptions.py create mode 100644 src/core/tests/test_config.py diff --git a/src/core/caches/base.py b/src/core/caches/base.py index 67f6df5..bb3ec40 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -2,12 +2,14 @@ import logging import pickle +from ..common.config import LongitudeConfigurable -class LongitudeCache: - default_config = {} + +class LongitudeCache(LongitudeConfigurable): + _default_config = {} def __init__(self, config=None): - self._config = config or self.default_config + super().__init__(config=config) self.logger = logging.getLogger(self.__class__.__module__) @staticmethod diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 8821347..8f9554b 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -2,25 +2,22 @@ from .base import LongitudeCache -class RedisCacheConfig: - def __init__(self, host='localhost', port=6379, db=0, password=None): - self.host = host - self.port = port - self.db = db - self.password = password - - class RedisCache(LongitudeCache): - _default_config = RedisCacheConfig() + _default_config = { + 'host': 'localhost', + 'port': 6379, + 'db': 0, + 'password': None + } _values = None def setup(self): self._values = 
redis.Redis( - host=self._config.host, - port=self._config.port, - db=self._config.db, - password=self._config.password + host=self.get_config('host'), + port=self.get_config('port'), + db=self.get_config('db'), + password=self.get_config('password') ) @property @@ -31,7 +28,8 @@ def is_ready(self): except TimeoutError: return False except redis.exceptions.ConnectionError: - self.logger.error('Cannot connect to Redis server at %s:%d.' % (self._config.host, self._config.port)) + self.logger.error( + 'Cannot connect to Redis server at %s:%d.' % (self.get_config('host'), self.get_config('port'))) return False except redis.exceptions.ResponseError as e: msg = str(e) diff --git a/src/core/common/__init__.py b/src/core/common/__init__.py new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/src/core/common/__init__.py @@ -0,0 +1 @@ + diff --git a/src/core/common/config.py b/src/core/common/config.py new file mode 100644 index 0000000..1223443 --- /dev/null +++ b/src/core/common/config.py @@ -0,0 +1,48 @@ +import logging + +from .exceptions import LongitudeConfigError + + +class LongitudeConfigurable: + """ + Any subclass will have a nice get_config(key) method to retrieve configuration values + """ + _default_config = {} + _config = {} + + def __init__(self, config=None): + if config is not None and not isinstance(config, dict): + raise TypeError('Config object must be a dictionary') + + self._config = config or {} + self.logger = logging.getLogger(__class__.__module__) + default_keys = set(self._default_config.keys()) + config_keys = set(config.keys()) if config is not None else set([]) + unexpected_config_keys = list(config_keys.difference(default_keys)) + using_defaults_for = list(default_keys.difference(config_keys)) + + unexpected_config_keys.sort() + using_defaults_for.sort() + + for k in unexpected_config_keys: + self.logger.warning("%s is an unexpected config value" % k) + + for k in using_defaults_for: + self.logger.info("%s key is using default value" 
% k) + + def get_config(self, key): + """ + Getter for configuration values + :param key: Key in the configuration dictionary + :return: Current value of the chosen key + """ + + if key not in self._default_config.keys(): + raise LongitudeConfigError("%s is not a valid config value. Check your defaults as reference.") + try: + return self._config[key] + except (TypeError, KeyError): + try: + return self._default_config[key] + except KeyError: + return None diff --git a/src/core/common/exceptions.py b/src/core/common/exceptions.py new file mode 100644 index 0000000..335282b --- /dev/null +++ b/src/core/common/exceptions.py @@ -0,0 +1,18 @@ +class LongitudeBaseException(Exception): + pass + + +class LongitudeRetriesExceeded(LongitudeBaseException): + pass + + +class LongitudeQueryCannotBeExecutedException(LongitudeBaseException): + pass + + +class LongitudeWrongQueryException(LongitudeBaseException): + pass + + +class LongitudeConfigError(LongitudeBaseException): + pass diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index aa988f7..e5d73e6 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -2,22 +2,8 @@ from typing import Type from ..caches.base import LongitudeCache - - -class LongitudeBaseException(Exception): - pass - - -class LongitudeRetriesExceeded(LongitudeBaseException): - pass - - -class LongitudeQueryCannotBeExecutedException(LongitudeBaseException): - pass - - -class LongitudeWrongQueryException(LongitudeBaseException): - pass +from ..common.config import LongitudeConfigurable +from ..common.exceptions import LongitudeRetriesExceeded, LongitudeQueryCannotBeExecutedException class DataSourceQueryConfig: @@ -31,43 +17,21 @@ def copy(self): return DataSourceQueryConfig(self.retries, self.custom) -class DataSource: - default_config = {} +class DataSource(LongitudeConfigurable): def __init__(self, config=None, cache_class: Type[LongitudeCache] = None): + super().__init__(config=config) self.logger = 
logging.getLogger(self.__class__.__module__) self._default_query_config = DataSourceQueryConfig() self._use_cache = True self._cache = None - if config is None: - config = {} - - if not isinstance(config, dict): - raise TypeError('Config object must be a dictionary') - if cache_class: if not issubclass(cache_class, LongitudeCache): raise TypeError('Cache must derive from LongitudeCache or be None') else: self._cache = cache_class(config=config.get('cache')) - default_keys = set(self.default_config.keys()) - config_keys = set(config.keys()) - unexpected_config_keys = list(config_keys.difference(default_keys)) - using_defaults_for = list(default_keys.difference(config_keys)) - - unexpected_config_keys.sort() - using_defaults_for.sort() - - for k in unexpected_config_keys: - self.logger.warning("%s is an unexpected config value" % k) - - for k in using_defaults_for: - self.logger.info("%s key is using default value" % k) - - self._config = config - def setup(self): if self._cache: self._cache.setup() @@ -105,20 +69,6 @@ def is_ready(self): """ return not self._cache or self._cache.is_ready - def get_config(self, key: str): - """ - Getter for configuration values - :param key: Key in the configuration dictionary - :return: Current value of the chosen key - """ - try: - return self._config[key] - except KeyError: - try: - return self.default_config[key] - except KeyError: - return None - def enable_cache(self): self._use_cache = True diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index 6b51736..2dc6806 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -8,7 +8,7 @@ class CartoDataSource(DataSource): SUBDOMAIN_URL_PATTERN = "https://%s.carto.com" ON_PREMISE_URL_PATTERN = "https://%s/user/%s" - default_config = { + _default_config = { 'api_version': 'v2', 'uses_batch': False, 'on_premise_domain': '', diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index 
198c3aa..b69e727 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -5,7 +5,7 @@ class DefaultPostgresDataSource(DataSource): - default_config = { + _default_config = { 'host': 'localhost', 'port': 5432, 'db': '', diff --git a/src/core/tests/test_cache_redis.py b/src/core/tests/test_cache_redis.py index defd7cf..7640bd5 100644 --- a/src/core/tests/test_cache_redis.py +++ b/src/core/tests/test_cache_redis.py @@ -1,6 +1,6 @@ import redis.exceptions from unittest import TestCase, mock -from ..caches.redis import RedisCache, RedisCacheConfig +from ..caches.redis import RedisCache @mock.patch('src.core.caches.redis.redis.Redis') @@ -8,7 +8,7 @@ class TestRedisCache(TestCase): cache = None def setUp(self): - self.cache = RedisCache(config=RedisCacheConfig(host='some_host', port=666, db=0, password='some_pass')) + self.cache = RedisCache(config={'host': 'some_host', 'port': 666, 'db': 0, 'password': 'some_pass'}) def test_is_ready_if_redis_returns_ping(self, redis_mock): redis_mock.return_value.ping.return_value = True diff --git a/src/core/tests/test_config.py b/src/core/tests/test_config.py new file mode 100644 index 0000000..8023a83 --- /dev/null +++ b/src/core/tests/test_config.py @@ -0,0 +1,34 @@ +from unittest import TestCase + +from src.core.common.exceptions import LongitudeConfigError +from src.core.common.config import LongitudeConfigurable + + +class TestConfig(TestCase): + def test_config(self): + # Config must be a dictionary + with self.assertRaises(TypeError): + LongitudeConfigurable(config=[]) + with self.assertRaises(TypeError): + LongitudeConfigurable(config="") + with self.assertRaises(TypeError): + LongitudeConfigurable(config=0) + + # Any values can go in the configuration dictionary but not expected ones trigger a warning + config = {"some_config_value": 0, "some_another_config_value": "tomato"} + with self.assertLogs(level='WARNING') as log_test: + ds = LongitudeConfigurable(config) + 
self.assertEqual(log_test.output, + [ + 'WARNING:src.core.common.config:some_another_config_value is an unexpected config value', + 'WARNING:src.core.common.config:some_config_value is an unexpected config value']) + + # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. + ds._default_config['some_config_value'] = 42 + ds._default_config['some_none_value'] = None + self.assertEqual(0, ds.get_config('some_config_value')) + self.assertEqual(None, ds.get_config('some_none_value')) + + # We do not allow trying to get a config value out of the default keys + with self.assertRaises(LongitudeConfigError): + self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index b6fefba..3a57c4f 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -1,6 +1,7 @@ import os from unittest import TestCase, mock +from src.core.common.exceptions import LongitudeConfigError from ..caches.base import LongitudeCache from ..data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse @@ -74,28 +75,7 @@ def test_cache_miss(self, execute_query_mock, parse_response_mock): self.assertEqual('normalized response from data source', ds.query('some_query_not_in_cache')) parse_response_mock.assert_called_once_with('some response from the server') - def test_config(self): - # Config must be a dictionary - with self.assertRaises(TypeError): - DataSource([]) - with self.assertRaises(TypeError): - DataSource("") - with self.assertRaises(TypeError): - DataSource(0) - - # Any values can go in the configuration dictionary but not expected ones trigger a warning - config = {"some_config_value": 0, "some_another_config_value": "tomato"} - with self.assertLogs(level='WARNING') as log_test: - ds = DataSource(config) - self.assertEqual(log_test.output, - [ 
- 'WARNING:src.core.data_sources.base:some_another_config_value is an unexpected config value', - 'WARNING:src.core.data_sources.base:some_config_value is an unexpected config value']) - - # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. - self.assertEqual(0, ds.get_config('some_config_value')) - self.assertEqual("tomato", ds.get_config('some_another_config_value')) - self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) + def test_abstract_methods_are_not_implemented(self): ds = DataSource({}) diff --git a/src/core/tests/test_data_source_carto.py b/src/core/tests/test_data_source_carto.py index 39c9892..76710f1 100644 --- a/src/core/tests/test_data_source_carto.py +++ b/src/core/tests/test_data_source_carto.py @@ -11,13 +11,14 @@ class TestCartoDataSource(TestCase): def test_default_configuration_loads(self): with self.assertLogs(level='INFO') as log_test: carto_ds = CartoDataSource() + module_name = 'src.core.common.config' self.assertEqual(log_test.output, - ['INFO:src.core.data_sources.carto:api_key key is using default value', - 'INFO:src.core.data_sources.carto:api_version key is using default value', - 'INFO:src.core.data_sources.carto:cache key is using default value', - 'INFO:src.core.data_sources.carto:on_premise_domain key is using default value', - 'INFO:src.core.data_sources.carto:user key is using default value', - 'INFO:src.core.data_sources.carto:uses_batch key is using default value'] + ['INFO:%s:api_key key is using default value' % module_name, + 'INFO:%s:api_version key is using default value' % module_name, + 'INFO:%s:cache key is using default value' % module_name, + 'INFO:%s:on_premise_domain key is using default value' % module_name, + 'INFO:%s:user key is using default value' % module_name, + 'INFO:%s:uses_batch key is using default value' % module_name] ) self.assertEqual('', carto_ds.get_config('api_key')) diff --git 
a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py index 5167409..5be8152 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/src/samples/carto_sample_with_redis_cache.py @@ -25,7 +25,7 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.caches.redis import RedisCache, RedisCacheConfig +from src.core.caches.redis import RedisCache from src.core.data_sources.base import LongitudeRetriesExceeded from src.core.data_sources.carto import CartoDataSource from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME @@ -34,7 +34,7 @@ config = { 'api_key': CARTO_API_KEY, 'user': CARTO_USER, - 'cache': RedisCacheConfig(password='longitude') + 'cache': {'password': 'longitude'} } ds = CartoDataSource(config, cache_class=RedisCache) From 2dd854644395d636b13d67c9688f14c5f570dc00 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 30 Jan 2019 15:37:24 +0100 Subject: [PATCH 25/47] Cache can be safely disabled temporarily using the DisableCache context manager. Basic profiling times in the postgres data source. Added 'needs_commit' parameter to query method to avoid default committing. 
--- src/core/common/config.py | 5 +--- src/core/common/helpers.py | 16 +++++++++++ src/core/data_sources/base.py | 13 ++++++--- src/core/data_sources/carto.py | 2 +- src/core/data_sources/postgres/default.py | 28 ++++++++++++++------ src/core/tests/test_data_source_base.py | 3 --- src/samples/carto_sample.py | 3 ++- src/samples/carto_sample_with_redis_cache.py | 19 ++++++------- src/samples/postgres_sample.py | 18 ++++++++----- 9 files changed, 70 insertions(+), 37 deletions(-) create mode 100644 src/core/common/helpers.py diff --git a/src/core/common/config.py b/src/core/common/config.py index 1223443..255859b 100644 --- a/src/core/common/config.py +++ b/src/core/common/config.py @@ -42,7 +42,4 @@ def get_config(self, key): try: return self._config[key] except (TypeError, KeyError): - try: - return self._default_config[key] - except KeyError: - return None + return self._default_config[key] diff --git a/src/core/common/helpers.py b/src/core/common/helpers.py new file mode 100644 index 0000000..5855a80 --- /dev/null +++ b/src/core/common/helpers.py @@ -0,0 +1,16 @@ +from src.core.data_sources.base import DataSource + + +class DisabledCache: + data_source = None + + def __init__(self, ds): + if ds and not isinstance(ds, DataSource): + raise TypeError('DisabledCache can only be applied to DataSource subclasses.') + self.data_source = ds + + def __enter__(self): + self.data_source.disable_cache() + + def __exit__(self, *args): + self.data_source.enable_cache() diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index e5d73e6..6b283d3 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -1,4 +1,5 @@ import logging +from time import time from typing import Type from ..caches.base import LongitudeCache @@ -75,14 +76,15 @@ def enable_cache(self): def disable_cache(self): self._use_cache = False - def query(self, statement, params=None, use_cache=True, query_config=None, **opts): + def query(self, statement, params=None, 
use_cache=True, needs_commit=False, query_config=None, **opts): """ This method has to be called to interact with the data source. Each children class will have to implement its own .execute_query(...) with the specific behavior for each interface. :param statement: Unformatted SQL query :param params: Values to be passed to the query when formatting it - :param use_cache: Bool to indicate if this specific query should use cache or not (default: True) + :param use_cache: Boolean to indicate if this specific query should use cache or not (default: True) + :param needs_commit: Boolean to indicate if this specific query needs to commit to db (default: False) :param query_config: Specific query configuration. If None, the default one will be used. :param opts: :return: Result of querying the database @@ -97,7 +99,10 @@ def query(self, statement, params=None, use_cache=True, query_config=None, **opt normalized_response = None if self._cache and self._use_cache and use_cache: + start = time() normalized_response = self._cache.get(formatted_query) + if normalized_response: + normalized_response.profiling['cache_time'] = time() - start if normalized_response: normalized_response.mark_as_cached() @@ -106,6 +111,7 @@ def query(self, statement, params=None, use_cache=True, query_config=None, **opt for r in range(self.tries): try: response = self.execute_query(formatted_query=formatted_query, + needs_commit=needs_commit, query_config=query_config, **opts) normalized_response = self.parse_response(response) @@ -117,11 +123,12 @@ def query(self, statement, params=None, use_cache=True, query_config=None, **opt self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) raise LongitudeRetriesExceeded - def execute_query(self, formatted_query, query_config, **opts): + def execute_query(self, formatted_query, needs_commit, query_config, **opts): """ :raise LongitudeQueryCannotBeExecutedException :param formatted_query: + :param needs_commit: :param query_config: :param opts: :return: diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index 2dc6806..f63a8d9 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -53,7 +53,7 @@ def is_ready(self): else: return False - def execute_query(self, formatted_query, query_config, **opts): + def execute_query(self, formatted_query, needs_commit, query_config, **opts): parse_json = query_config.custom['parse_json'] do_post = query_config.custom['do_post'] format_ = query_config.custom['format'] diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index b69e727..dfedcfd 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -2,6 +2,7 @@ import psycopg2.extensions from ..base import DataSource from ..base import LongitudeQueryResponse +from time import time class DefaultPostgresDataSource(DataSource): @@ -39,15 +40,26 @@ def setup(self): def is_ready(self): return super().is_ready and self._conn and self._cursor - def execute_query(self, formatted_query, query_config, **opts): + def execute_query(self, formatted_query, needs_commit, query_config, **opts): + data = { + 'fields': [], + 'rows': [], + 'profiling': {} + } + + start = time() self._cursor.execute(formatted_query) - data = None + data['profiling']['execute_time'] = time() - start + if self._cursor.description: - data = { - 'fields': self._cursor.description, - 'rows': self._cursor.fetchall() - } - self._conn.commit() + data['fields'] = self._cursor.description + data['rows'] = self._cursor.fetchall() + + if needs_commit: + start = time() + 
self._conn.commit() + data['profiling']['commit_time'] = time() - start + return data @staticmethod @@ -57,5 +69,5 @@ def _type_as_string(type_id): def parse_response(self, response): if response: fields_names = {n.name: self._type_as_string(n.type_code).name for n in response['fields']} - return LongitudeQueryResponse(rows=response['rows'], fields=fields_names) + return LongitudeQueryResponse(rows=response['rows'], fields=fields_names, profiling=response['profiling']) return None diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index 3a57c4f..8488ca9 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -1,7 +1,6 @@ import os from unittest import TestCase, mock -from src.core.common.exceptions import LongitudeConfigError from ..caches.base import LongitudeCache from ..data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse @@ -75,8 +74,6 @@ def test_cache_miss(self, execute_query_mock, parse_response_mock): self.assertEqual('normalized response from data source', ds.query('some_query_not_in_cache')) parse_response_mock.assert_called_once_with('some response from the server') - - def test_abstract_methods_are_not_implemented(self): ds = DataSource({}) diff --git a/src/samples/carto_sample.py b/src/samples/carto_sample.py index b866763..655529b 100644 --- a/src/samples/carto_sample.py +++ b/src/samples/carto_sample.py @@ -29,7 +29,8 @@ if ds.is_ready: try: data = ds.query('select * from %s limit 30' % CARTO_TABLE_NAME) - print(data) + [print(r) for r in data.rows] + print(data.profiling) except LongitudeRetriesExceeded: print("Too many retries and no success...") else: diff --git a/src/samples/carto_sample_with_redis_cache.py b/src/samples/carto_sample_with_redis_cache.py index 5be8152..d3dfbe4 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/src/samples/carto_sample_with_redis_cache.py @@ -25,8 +25,9 @@ import sys 
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.common.helpers import DisabledCache from src.core.caches.redis import RedisCache -from src.core.data_sources.base import LongitudeRetriesExceeded +from src.core.common.exceptions import LongitudeRetriesExceeded from src.core.data_sources.carto import CartoDataSource from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME @@ -56,17 +57,13 @@ print("It took %s with cache" % elapsed_with_cache) print('Uses cache? ' + str(cached_data.comes_from_cache)) - # Data is the same... - assert str(data) == str(cached_data) - # You can also disable the cache for a while (nothing gets read or written) - ds.disable_cache() - start = time.time() - data = ds.query(REPEATED_QUERY) - elapsed = time.time() - start - print('It took %s with disabled cache' % str(elapsed)) - print('Uses cache? ' + str(data.comes_from_cache)) - ds.enable_cache() + with DisabledCache(ds): + start = time.time() + data = ds.query(REPEATED_QUERY) + elapsed = time.time() - start + print('It took %s with disabled cache' % str(elapsed)) + print('Uses cache? 
' + str(data.comes_from_cache)) # Or disable specific queries via query_config (nothing gets read or written) query_config = ds.copy_default_query_config() diff --git a/src/samples/postgres_sample.py b/src/samples/postgres_sample.py index e496ef3..08f4a1a 100644 --- a/src/samples/postgres_sample.py +++ b/src/samples/postgres_sample.py @@ -31,12 +31,18 @@ ds.setup() if ds.is_ready: try: - ds.query("drop table if exists users") - ds.query( - 'create table users(id serial PRIMARY KEY, name varchar(50) UNIQUE NOT NULL, password varchar(50))') - ds.query("insert into users(name, password) values('longitude', 'password')") - data = ds.query('select * from users') - print(data) + r0 = ds.query("drop table if exists users") + r1 = ds.query( + 'create table users(id serial PRIMARY KEY, name varchar(50) UNIQUE NOT NULL, password varchar(50))', + needs_commit=True) + print(r1.profiling) + + r2 = ds.query("insert into users(name, password) values('longitude', 'password')", needs_commit=True) + print(r2.profiling) + + r3 = ds.query('select * from users') + print(r3.rows) + print(r3.profiling) except LongitudeRetriesExceeded: print("Too many retries and no success...") From 60d468036ca972e47d1b3974848bbdfe9c127f2b Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Wed, 30 Jan 2019 17:22:49 +0100 Subject: [PATCH 26/47] Common response class extracted to its own module. Rows data is now stored including field name, following the CARTO style. 
Closes #35 --- README.md | 3 ++- src/core/common/query_response.py | 15 +++++++++++++++ src/core/data_sources/base.py | 14 -------------- src/core/data_sources/carto.py | 8 +++++--- src/core/data_sources/postgres/default.py | 8 +++++--- src/core/tests/test_data_source_base.py | 3 ++- src/core/tests/test_data_source_carto.py | 2 +- src/samples/postgres_sample.py | 6 ++++-- 8 files changed, 34 insertions(+), 25 deletions(-) create mode 100644 src/core/common/query_response.py diff --git a/README.md b/README.md index 8d33f6a..4604675 100644 --- a/README.md +++ b/README.md @@ -10,7 +10,7 @@ A **new** bunch of middleware functions to build applications on top of CARTO. - [ ] Protected parametrized queries (i.e. avoiding injection) - [ ] Bind/dynamic parameters in queries (server-side render) - [ ] Postgres data source - - [ ] psycopg2 + - [x] psycopg2 - [ ] SQLAlchemy - [ ] Asyncpg - [x] Cache @@ -25,6 +25,7 @@ A **new** bunch of middleware functions to build applications on top of CARTO. - [x] Redis Cache - [x] Tests - [ ] Documentation + - [x] Sample scripts - [x] Unit tests - [x] Sample scripts diff --git a/src/core/common/query_response.py b/src/core/common/query_response.py new file mode 100644 index 0000000..13da0e2 --- /dev/null +++ b/src/core/common/query_response.py @@ -0,0 +1,15 @@ + +class LongitudeQueryResponse: + def __init__(self, rows=None, fields=None, profiling=None): + self.rows = rows or [] + self.fields = fields or {} + self.profiling = profiling or {} + self._from_cache = False + + + @property + def comes_from_cache(self): + return self._from_cache + + def mark_as_cached(self): + self._from_cache = True diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 6b283d3..65c4d0a 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -146,17 +146,3 @@ def flush_cache(self): if self._cache and self._cache.is_ready: self._cache.flush() - -class LongitudeQueryResponse: - def __init__(self, rows=None, 
fields=None, profiling=None): - self.rows = rows or [] - self.fields = fields or {} - self.profiling = profiling or {} - self._from_cache = False - - @property - def comes_from_cache(self): - return self._from_cache - - def mark_as_cached(self): - self._from_cache = True diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index f63a8d9..4feb75a 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -1,6 +1,7 @@ from carto.exceptions import CartoException -from .base import DataSource, LongitudeQueryCannotBeExecutedException, LongitudeQueryResponse +from .base import DataSource, LongitudeQueryCannotBeExecutedException +from ..common.query_response import LongitudeQueryResponse from carto.auth import APIKeyAuthClient from carto.sql import BatchSQLClient, SQLClient @@ -65,9 +66,10 @@ def execute_query(self, formatted_query, needs_commit, query_config, **opts): def parse_response(self, response): return LongitudeQueryResponse( - rows=[[v for k, v in dictionary.items()] for dictionary in response['rows']], + rows=response['rows'], fields=response['fields'], profiling={ - 'response_time': response['time'] + 'response_time': response['time'], + 'total_rows': response['total_rows'] } ) diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index dfedcfd..fab651c 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -1,7 +1,7 @@ import psycopg2 import psycopg2.extensions from ..base import DataSource -from ..base import LongitudeQueryResponse +from ...common.query_response import LongitudeQueryResponse from time import time @@ -68,6 +68,8 @@ def _type_as_string(type_id): def parse_response(self, response): if response: - fields_names = {n.name: self._type_as_string(n.type_code).name for n in response['fields']} - return LongitudeQueryResponse(rows=response['rows'], fields=fields_names, profiling=response['profiling']) + 
raw_fields = response['fields'] + fields_names = {n.name: {'type': self._type_as_string(n.type_code).name} for n in raw_fields} + rows = [{raw_fields[i].name: f for i, f in enumerate(row_data)} for row_data in response['rows']] + return LongitudeQueryResponse(rows=rows, fields=fields_names, profiling=response['profiling']) return None diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index 8488ca9..d96a21d 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -2,7 +2,8 @@ from unittest import TestCase, mock from ..caches.base import LongitudeCache -from ..data_sources.base import DataSource, DataSourceQueryConfig, LongitudeQueryResponse +from ..data_sources.base import DataSource, DataSourceQueryConfig +from ..common.query_response import LongitudeQueryResponse def load_raw_text(filename): diff --git a/src/core/tests/test_data_source_carto.py b/src/core/tests/test_data_source_carto.py index 76710f1..6bc973d 100644 --- a/src/core/tests/test_data_source_carto.py +++ b/src/core/tests/test_data_source_carto.py @@ -54,7 +54,7 @@ def test_setup_can_accept_on_premise_domain(self): def test_succesful_query(self): ds = CartoDataSource() ds._sql_client = mock.MagicMock() - ds._sql_client.send.return_value = {'rows': [], 'time': 42.0, 'fields': {}} + ds._sql_client.send.return_value = {'rows': [], 'time': 42.0, 'fields': {}, 'total_rows': 0} result = ds.query('some query') ds._sql_client.send.assert_called_with('some query', do_post=False, format='json', parse_json=True) self.assertEqual([], result.rows) diff --git a/src/samples/postgres_sample.py b/src/samples/postgres_sample.py index 08f4a1a..2e285d3 100644 --- a/src/samples/postgres_sample.py +++ b/src/samples/postgres_sample.py @@ -37,8 +37,10 @@ needs_commit=True) print(r1.profiling) - r2 = ds.query("insert into users(name, password) values('longitude', 'password')", needs_commit=True) - print(r2.profiling) + for i in range(10): 
+ r2 = ds.query("insert into users(name, password) values('longitude%d', 'password%d')" % (i, i), + needs_commit=True) + print(r2.profiling) r3 = ds.query('select * from users') print(r3.rows) From 4211956df116a29a6d545c62ee2deead608f4c92 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 31 Jan 2019 10:04:07 +0100 Subject: [PATCH 27/47] Imports reordered (isort) and setup.py updated with current structure. --- Pipfile | 1 + Pipfile.lock | 8 +++---- setup.py | 25 +++++++++++++-------- src/core/caches/redis.py | 1 + src/core/data_sources/base.py | 4 ++-- src/core/data_sources/carto.py | 6 ++--- src/core/data_sources/postgres/default.py | 6 +++-- src/core/tests/test_cache_base.py | 2 +- src/core/tests/test_cache_ram.py | 1 + src/core/tests/test_cache_redis.py | 4 +++- src/core/tests/test_config.py | 2 +- src/core/tests/test_data_source_base.py | 2 +- src/core/tests/test_data_source_postgres.py | 1 + 13 files changed, 39 insertions(+), 24 deletions(-) diff --git a/Pipfile b/Pipfile index 9df5c80..c7fe959 100644 --- a/Pipfile +++ b/Pipfile @@ -8,6 +8,7 @@ pylint = "*" coverage = "*" pytest-cov = "*" pytest = "*" +setuptools = "*" [packages] carto = "*" diff --git a/Pipfile.lock b/Pipfile.lock index 909a484..21c8317 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "488fbc4fb0af683f3f8a07d5fdb2b0da5e7cc0943de60f0ea3f6f4d24cc3f9d9" + "sha256": "fac903367b3690ee97a0f096d12b493b861d326e596282f78d3210a9f5a11c3b" }, "pipfile-spec": 6, "requires": { @@ -269,11 +269,11 @@ }, "pytest": { "hashes": [ - "sha256:41568ea7ecb4a68d7f63837cf65b92ce8d0105e43196ff2b26622995bb3dc4b2", - "sha256:c3c573a29d7c9547fb90217ece8a8843aa0c1328a797e200290dc3d0b4b823be" + "sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", + "sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d" ], "index": "pypi", - "version": "==4.1.1" + "version": "==4.2.0" }, "pytest-cov": { "hashes": [ diff --git a/setup.py b/setup.py 
index e96d70e..5ff8757 100644 --- a/setup.py +++ b/setup.py @@ -1,23 +1,20 @@ # Always prefer setuptools over distutils -from setuptools import setup, find_packages # To use a consistent encoding from codecs import open from os import path +from setuptools import find_packages, setup + here = path.abspath(path.dirname(__file__)) # Get the long description from the README file with open(path.join(here, 'README.md'), encoding='utf-8') as f: long_description = f.read() -# get the requirements -with open('requirements.txt') as f: - required = f.read().splitlines() - setup( name='geographica-longitude', - version='0.3.0', + version='1.0.0', description='Longitude', long_description=long_description, @@ -29,6 +26,11 @@ author='Geographica', author_email='pypi@geographica.gs', + project_urls={ + "Company": 'https://geographica.gs', + "Source Code": "https://github.com/GeographicaGS/Longitude" + }, + package_dir={'': 'src'}, # Choose your license license='MIT', @@ -57,14 +59,19 @@ 'Programming Language :: Python :: 3.3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6' + 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7' ], # What does your project relate to? 
keywords='carto longitude', - packages=find_packages(exclude=['contrib', 'docs', 'tests']), + packages=find_packages(where='src', exclude=['test*']), - install_requires=[required], + install_requires=[ + 'carto==1.4.0', + 'redis==3.1.0', + 'psycopg2-binary==2.7.7' + ], ) diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 8f9554b..8c35a79 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -1,4 +1,5 @@ import redis + from .base import LongitudeCache diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 65c4d0a..a135aa0 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -4,7 +4,8 @@ from ..caches.base import LongitudeCache from ..common.config import LongitudeConfigurable -from ..common.exceptions import LongitudeRetriesExceeded, LongitudeQueryCannotBeExecutedException +from ..common.exceptions import (LongitudeQueryCannotBeExecutedException, + LongitudeRetriesExceeded) class DataSourceQueryConfig: @@ -145,4 +146,3 @@ def parse_response(self, response): def flush_cache(self): if self._cache and self._cache.is_ready: self._cache.flush() - diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index 4feb75a..a422b09 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -1,9 +1,9 @@ +from carto.auth import APIKeyAuthClient from carto.exceptions import CartoException +from carto.sql import BatchSQLClient, SQLClient -from .base import DataSource, LongitudeQueryCannotBeExecutedException from ..common.query_response import LongitudeQueryResponse -from carto.auth import APIKeyAuthClient -from carto.sql import BatchSQLClient, SQLClient +from .base import DataSource, LongitudeQueryCannotBeExecutedException class CartoDataSource(DataSource): diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index fab651c..7fbd65b 100644 --- a/src/core/data_sources/postgres/default.py +++ 
b/src/core/data_sources/postgres/default.py @@ -1,8 +1,10 @@ +from time import time + import psycopg2 import psycopg2.extensions -from ..base import DataSource + from ...common.query_response import LongitudeQueryResponse -from time import time +from ..base import DataSource class DefaultPostgresDataSource(DataSource): diff --git a/src/core/tests/test_cache_base.py b/src/core/tests/test_cache_base.py index 24290b1..b1b532b 100644 --- a/src/core/tests/test_cache_base.py +++ b/src/core/tests/test_cache_base.py @@ -1,4 +1,5 @@ from unittest import TestCase, mock + from ..caches.base import LongitudeCache @@ -30,4 +31,3 @@ def test_get_nor_put_are_implemented_in_base_class(self): cache.get('some_query') with self.assertRaises(NotImplementedError): cache.put('some_query', payload='whatever') - diff --git a/src/core/tests/test_cache_ram.py b/src/core/tests/test_cache_ram.py index b533c41..8bb0b77 100644 --- a/src/core/tests/test_cache_ram.py +++ b/src/core/tests/test_cache_ram.py @@ -1,4 +1,5 @@ from unittest import TestCase, mock + from ..caches.ram import RamCache diff --git a/src/core/tests/test_cache_redis.py b/src/core/tests/test_cache_redis.py index 7640bd5..9470a73 100644 --- a/src/core/tests/test_cache_redis.py +++ b/src/core/tests/test_cache_redis.py @@ -1,5 +1,7 @@ -import redis.exceptions from unittest import TestCase, mock + +import redis.exceptions + from ..caches.redis import RedisCache diff --git a/src/core/tests/test_config.py b/src/core/tests/test_config.py index 8023a83..0266f1b 100644 --- a/src/core/tests/test_config.py +++ b/src/core/tests/test_config.py @@ -1,7 +1,7 @@ from unittest import TestCase -from src.core.common.exceptions import LongitudeConfigError from src.core.common.config import LongitudeConfigurable +from src.core.common.exceptions import LongitudeConfigError class TestConfig(TestCase): diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index d96a21d..195a6c5 100644 --- 
a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -2,8 +2,8 @@ from unittest import TestCase, mock from ..caches.base import LongitudeCache -from ..data_sources.base import DataSource, DataSourceQueryConfig from ..common.query_response import LongitudeQueryResponse +from ..data_sources.base import DataSource, DataSourceQueryConfig def load_raw_text(filename): diff --git a/src/core/tests/test_data_source_postgres.py b/src/core/tests/test_data_source_postgres.py index 11143cb..b1209be 100644 --- a/src/core/tests/test_data_source_postgres.py +++ b/src/core/tests/test_data_source_postgres.py @@ -1,2 +1,3 @@ from unittest import TestCase + from ..data_sources.postgres.default import DefaultPostgresDataSource From e449a3cee1b35d09e6764270baf317a22f29a32b Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 31 Jan 2019 12:00:49 +0100 Subject: [PATCH 28/47] Updated pytest dependency. Added some pytest plugins. --- Pipfile | 3 ++ Pipfile.lock | 116 ++++++++++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 114 insertions(+), 5 deletions(-) diff --git a/Pipfile b/Pipfile index 9df5c80..227dbd6 100644 --- a/Pipfile +++ b/Pipfile @@ -13,6 +13,9 @@ pytest = "*" carto = "*" redis = "*" psycopg2-binary = "*" +pytest-xdist = "*" +pytest-sugar = "*" +pytest-instafail = "*" [requires] python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock index 909a484..e417ad7 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "488fbc4fb0af683f3f8a07d5fdb2b0da5e7cc0943de60f0ea3f6f4d24cc3f9d9" + "sha256": "e26640798fbe1cda18166782acc3e67d94de719b0ec80d7824497a719408d3c4" }, "pipfile-spec": 6, "requires": { @@ -16,6 +16,27 @@ ] }, "default": { + "apipkg": { + "hashes": [ + "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", + "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" + ], + "version": "==1.5" + }, + "atomicwrites": { + "hashes": [ + 
"sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", + "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" + ], + "version": "==1.2.1" + }, + "attrs": { + "hashes": [ + "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", + "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb" + ], + "version": "==18.2.0" + }, "carto": { "hashes": [ "sha256:9a54ece9d8f940bc3de3cb742e189c4ea681494d5ec251fec469319a39093dbc" @@ -37,6 +58,13 @@ ], "version": "==3.0.4" }, + "execnet": { + "hashes": [ + "sha256:a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", + "sha256:fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83" + ], + "version": "==1.5.0" + }, "future": { "hashes": [ "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" @@ -50,6 +78,28 @@ ], "version": "==2.8" }, + "more-itertools": { + "hashes": [ + "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", + "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", + "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" + ], + "version": "==5.0.0" + }, + "packaging": { + "hashes": [ + "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", + "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" + ], + "version": "==19.0" + }, + "pluggy": { + "hashes": [ + "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", + "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" + ], + "version": "==0.8.1" + }, "psycopg2-binary": { "hashes": [ "sha256:19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", @@ -86,12 +136,63 @@ "index": "pypi", "version": "==2.7.7" }, + "py": { + "hashes": [ + "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", + "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" + ], + "version": 
"==1.7.0" + }, + "pyparsing": { + "hashes": [ + "sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a", + "sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3" + ], + "version": "==2.3.1" + }, "pyrestcli": { "hashes": [ "sha256:4e98b5cfba0a300acc78a7a4b7c91826edf56b12b588aa316cae4bff8696c644" ], "version": "==0.6.8" }, + "pytest": { + "hashes": [ + "sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", + "sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d" + ], + "version": "==4.2.0" + }, + "pytest-forked": { + "hashes": [ + "sha256:260d03fbd38d5ce41a657759e8d19bc7c8cfa6d0dcfa36c0bc9742d33bc30742", + "sha256:8d05c2e6f33cd4422571b2b1bb309720c398b0549cff499e3e4cde661875ab54" + ], + "version": "==1.0.1" + }, + "pytest-instafail": { + "hashes": [ + "sha256:162bd7c5c196e3b2fe2a5285b69362ee3d9f768d5451413ef914be38df74e3de" + ], + "index": "pypi", + "version": "==0.4.0" + }, + "pytest-sugar": { + "hashes": [ + "sha256:26cf8289fe10880cbbc130bd77398c4e6a8b936d8393b116a5c16121d95ab283", + "sha256:fcd87a74b2bce5386d244b49ad60549bfbc4602527797fac167da147983f58ab" + ], + "index": "pypi", + "version": "==0.9.2" + }, + "pytest-xdist": { + "hashes": [ + "sha256:4a201bb3ee60f5dd6bb40c5209d4e491cecc4d5bafd656cfb10f86178786e568", + "sha256:d03d1ff1b008458ed04fa73e642d840ac69b4107c168e06b71037c62d7813dd4" + ], + "index": "pypi", + "version": "==1.26.1" + }, "python-dateutil": { "hashes": [ "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", @@ -121,6 +222,12 @@ ], "version": "==1.12.0" }, + "termcolor": { + "hashes": [ + "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b" + ], + "version": "==1.1.0" + }, "urllib3": { "hashes": [ "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", @@ -269,11 +376,10 @@ }, "pytest": { "hashes": [ - "sha256:41568ea7ecb4a68d7f63837cf65b92ce8d0105e43196ff2b26622995bb3dc4b2", - 
"sha256:c3c573a29d7c9547fb90217ece8a8843aa0c1328a797e200290dc3d0b4b823be" + "sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", + "sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d" ], - "index": "pypi", - "version": "==4.1.1" + "version": "==4.2.0" }, "pytest-cov": { "hashes": [ From 26e214fff1cd78af73756df6aed28b2ca89e3955 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 31 Jan 2019 13:46:16 +0100 Subject: [PATCH 29/47] Fix. Queries were being passed already formatted to the specific sub-classes. This avoided each database connector to deal with parametrized queries. This had implications on how the cache keys were being generated. Now, keys are generated with the concatenation of the query template and the parameters. --- src/core/caches/base.py | 21 +++++++++--------- src/core/common/query_response.py | 1 + src/core/data_sources/base.py | 15 ++++++------- src/core/data_sources/carto.py | 9 +++++++- src/core/data_sources/postgres/default.py | 4 ++-- src/samples/postgres_sample.py | 27 ++++++++++++++++++----- 6 files changed, 50 insertions(+), 27 deletions(-) diff --git a/src/core/caches/base.py b/src/core/caches/base.py index bb3ec40..5227314 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -13,17 +13,18 @@ def __init__(self, config=None): self.logger = logging.getLogger(self.__class__.__module__) @staticmethod - def generate_key(formatted_query): + def generate_key(query_template, params): """ - This is the default key generation algorithm, based in a digest from the sha256 hash of the query. + This is the default key generation algorithm, based in a digest from the sha256 hash of the query and parameters Override this method to provide your own key generation in case you need a specific way to store your cache. 
- :param formatted_query: Final query as it should be asked to the database - :return: An (most likely) unique hash, generated from the query text + :param query_template: Query template (including placeholders) as it should be asked to the database + :param params: Dictionary of values to be replaced in the placeholders in a safe manner + :return: A (most likely) unique hash, generated from the query text """ - - return hashlib.sha256(formatted_query.encode('utf-8')).hexdigest() + query_payload = query_template + str(params) + return hashlib.sha256(query_payload.encode('utf-8')).hexdigest() def setup(self): raise NotImplementedError @@ -32,12 +33,12 @@ def setup(self): def is_ready(self): raise NotImplementedError - def get(self, formatted_query): - payload = self.execute_get(self.generate_key(formatted_query)) + def get(self, formatted_query, params): + payload = self.execute_get(self.generate_key(formatted_query, params)) return self.deserialize_payload(payload) - def put(self, formatted_query, payload): - return self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) + def put(self, formatted_query, params, payload): + return self.execute_put(self.generate_key(formatted_query, params), self.serialize_payload(payload)) def execute_get(self, key): """ diff --git a/src/core/common/query_response.py b/src/core/common/query_response.py index 13da0e2..c1395d6 100644 --- a/src/core/common/query_response.py +++ b/src/core/common/query_response.py @@ -13,3 +13,4 @@ def comes_from_cache(self): def mark_as_cached(self): self._from_cache = True + diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 65c4d0a..f092e0c 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -76,12 +76,12 @@ def enable_cache(self): def disable_cache(self): self._use_cache = False - def query(self, statement, params=None, use_cache=True, needs_commit=False, query_config=None, **opts): + def query(self, 
query_template, params=None, use_cache=True, needs_commit=False, query_config=None, **opts): """ This method has to be called to interact with the data source. Each children class will have to implement its own .execute_query(...) with the specific behavior for each interface. - :param statement: Unformatted SQL query + :param query_template: Unformatted SQL query :param params: Values to be passed to the query when formatting it :param use_cache: Boolean to indicate if this specific query should use cache or not (default: True) :param needs_commit: Boolean to indicate if this specific query needs to commit to db (default: False) @@ -95,12 +95,10 @@ def query(self, statement, params=None, use_cache=True, needs_commit=False, quer if query_config is None: query_config = self._default_query_config - formatted_query = statement.format(**params) - normalized_response = None if self._cache and self._use_cache and use_cache: start = time() - normalized_response = self._cache.get(formatted_query) + normalized_response = self._cache.get(query_template, params) if normalized_response: normalized_response.profiling['cache_time'] = time() - start @@ -110,20 +108,21 @@ def query(self, statement, params=None, use_cache=True, needs_commit=False, quer else: for r in range(self.tries): try: - response = self.execute_query(formatted_query=formatted_query, + response = self.execute_query(query_template=query_template, + params=params, needs_commit=needs_commit, query_config=query_config, **opts) normalized_response = self.parse_response(response) if self._cache and self._use_cache and use_cache: - self._cache.put(formatted_query, normalized_response) + self._cache.put(query_template, params, normalized_response) return normalized_response except LongitudeQueryCannotBeExecutedException: self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) raise LongitudeRetriesExceeded - def execute_query(self, formatted_query, needs_commit, query_config, **opts): + def execute_query(self, query_template, params, needs_commit, query_config, **opts): """ :raise LongitudeQueryCannotBeExecutedException diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index 4feb75a..dcc4332 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -54,7 +54,14 @@ def is_ready(self): else: return False - def execute_query(self, formatted_query, needs_commit, query_config, **opts): + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + # TODO: Here we are parsing the parameters and taking responsability for it. We do not make any safe parsing as + # this will be used in a backend-to-backend context and we build our own queries. + # --- + # There is an open issue in CARTO about having separated parameters and binding them in the server: + # https://github.com/CartoDB/Geographica-Product-Coordination/issues/57 + formatted_query = query_template.format(**params) + parse_json = query_config.custom['parse_json'] do_post = query_config.custom['do_post'] format_ = query_config.custom['format'] diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index fab651c..c13f80a 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -40,7 +40,7 @@ def setup(self): def is_ready(self): return super().is_ready and self._conn and self._cursor - def execute_query(self, formatted_query, needs_commit, query_config, **opts): + def execute_query(self, query_template, params, needs_commit, query_config, **opts): data = { 'fields': [], 'rows': [], @@ -48,7 +48,7 @@ def execute_query(self, formatted_query, needs_commit, query_config, **opts): } start = time() - self._cursor.execute(formatted_query) + self._cursor.execute(query_template, params) 
data['profiling']['execute_time'] = time() - start if self._cursor.description: diff --git a/src/samples/postgres_sample.py b/src/samples/postgres_sample.py index 2e285d3..61272b3 100644 --- a/src/samples/postgres_sample.py +++ b/src/samples/postgres_sample.py @@ -14,6 +14,7 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.caches.ram import RamCache from src.core.data_sources.base import LongitudeRetriesExceeded from src.core.data_sources.postgres.default import DefaultPostgresDataSource from src.samples.postgres_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS @@ -27,25 +28,39 @@ 'password': POSTGRES_PASS or 'longitude' } - ds = DefaultPostgresDataSource(config) + ds = DefaultPostgresDataSource(config, cache_class=RamCache) ds.setup() if ds.is_ready: try: - r0 = ds.query("drop table if exists users") + + r0 = ds.query("drop table if exists users", use_cache=False) r1 = ds.query( 'create table users(id serial PRIMARY KEY, name varchar(50) UNIQUE NOT NULL, password varchar(50))', - needs_commit=True) + needs_commit=True, + use_cache=False + ) print(r1.profiling) for i in range(10): - r2 = ds.query("insert into users(name, password) values('longitude%d', 'password%d')" % (i, i), - needs_commit=True) + r2 = ds.query("insert into users(name, password) values(%(user)s, %(password)s)", + needs_commit=True, + use_cache=False, + params={ + 'user': 'longitude_user_' + str(i), + 'password': 'unsafe_password_' + str(i) + + }) print(r2.profiling) - r3 = ds.query('select * from users') + r3 = ds.query('select * from users', use_cache=True) + print(r3.rows) print(r3.profiling) + r4 = ds.query('select * from users', use_cache=True) + print(r4.profiling) + print('It is %f times faster using cache' % (r4.profiling['execute_time'] / r4.profiling['cache_time'])) + except LongitudeRetriesExceeded: print("Too many retries and no success...") else: From 5a2b6105c0a7b8651165bcd0bfe41a194f45920c 
Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 31 Jan 2019 13:46:16 +0100 Subject: [PATCH 30/47] Fix. Queries were being passed already formatted to the specific sub-classes. This avoided each database connector to deal with parametrized queries. This had implications on how the cache keys were being generated. Now, keys are generated with the concatenation of the query template and the parameters. --- src/core/caches/base.py | 25 ++++++++++-------- src/core/common/query_response.py | 1 + src/core/data_sources/base.py | 15 +++++------ src/core/data_sources/carto.py | 9 ++++++- src/core/data_sources/postgres/default.py | 4 +-- src/core/data_sources/postgres/sqlalchemy.py | 5 ++++ src/core/tests/test_cache_base.py | 6 ++--- src/core/tests/test_data_source_base.py | 6 ++--- src/samples/postgres_sample.py | 27 +++++++++++++++----- 9 files changed, 65 insertions(+), 33 deletions(-) create mode 100644 src/core/data_sources/postgres/sqlalchemy.py diff --git a/src/core/caches/base.py b/src/core/caches/base.py index bb3ec40..9f8fa1b 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -13,17 +13,18 @@ def __init__(self, config=None): self.logger = logging.getLogger(self.__class__.__module__) @staticmethod - def generate_key(formatted_query): + def generate_key(query_template, params): """ - This is the default key generation algorithm, based in a digest from the sha256 hash of the query. + This is the default key generation algorithm, based in a digest from the sha256 hash of the query and parameters Override this method to provide your own key generation in case you need a specific way to store your cache. 
- :param formatted_query: Final query as it should be asked to the database - :return: An (most likely) unique hash, generated from the query text + :param query_template: Query template (including placeholders) as it should be asked to the database + :param params: Dictionary of values to be replaced in the placeholders in a safe manner + :return: A (most likely) unique hash, generated from the query text """ - - return hashlib.sha256(formatted_query.encode('utf-8')).hexdigest() + query_payload = query_template + str(params) + return hashlib.sha256(query_payload.encode('utf-8')).hexdigest() def setup(self): raise NotImplementedError @@ -32,12 +33,16 @@ def setup(self): def is_ready(self): raise NotImplementedError - def get(self, formatted_query): - payload = self.execute_get(self.generate_key(formatted_query)) + def get(self, query_template, query_params=None): + if query_params is None: + query_params = {} + payload = self.execute_get(self.generate_key(query_template, query_params)) return self.deserialize_payload(payload) - def put(self, formatted_query, payload): - return self.execute_put(self.generate_key(formatted_query), self.serialize_payload(payload)) + def put(self, query_template, payload, query_params=None): + if query_params is None: + query_params = {} + return self.execute_put(self.generate_key(query_template, query_params), self.serialize_payload(payload)) def execute_get(self, key): """ diff --git a/src/core/common/query_response.py b/src/core/common/query_response.py index 13da0e2..c1395d6 100644 --- a/src/core/common/query_response.py +++ b/src/core/common/query_response.py @@ -13,3 +13,4 @@ def comes_from_cache(self): def mark_as_cached(self): self._from_cache = True + diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 65c4d0a..f092e0c 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -76,12 +76,12 @@ def enable_cache(self): def disable_cache(self): self._use_cache = False - def 
query(self, statement, params=None, use_cache=True, needs_commit=False, query_config=None, **opts): + def query(self, query_template, params=None, use_cache=True, needs_commit=False, query_config=None, **opts): """ This method has to be called to interact with the data source. Each children class will have to implement its own .execute_query(...) with the specific behavior for each interface. - :param statement: Unformatted SQL query + :param query_template: Unformatted SQL query :param params: Values to be passed to the query when formatting it :param use_cache: Boolean to indicate if this specific query should use cache or not (default: True) :param needs_commit: Boolean to indicate if this specific query needs to commit to db (default: False) @@ -95,12 +95,10 @@ def query(self, statement, params=None, use_cache=True, needs_commit=False, quer if query_config is None: query_config = self._default_query_config - formatted_query = statement.format(**params) - normalized_response = None if self._cache and self._use_cache and use_cache: start = time() - normalized_response = self._cache.get(formatted_query) + normalized_response = self._cache.get(query_template, params) if normalized_response: normalized_response.profiling['cache_time'] = time() - start @@ -110,20 +108,21 @@ def query(self, statement, params=None, use_cache=True, needs_commit=False, quer else: for r in range(self.tries): try: - response = self.execute_query(formatted_query=formatted_query, + response = self.execute_query(query_template=query_template, + params=params, needs_commit=needs_commit, query_config=query_config, **opts) normalized_response = self.parse_response(response) if self._cache and self._use_cache and use_cache: - self._cache.put(formatted_query, normalized_response) + self._cache.put(query_template, params, normalized_response) return normalized_response except LongitudeQueryCannotBeExecutedException: self.logger.error('Query could not be executed. 
Retries left: %d' % (self.tries - r)) raise LongitudeRetriesExceeded - def execute_query(self, formatted_query, needs_commit, query_config, **opts): + def execute_query(self, query_template, params, needs_commit, query_config, **opts): """ :raise LongitudeQueryCannotBeExecutedException diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index 4feb75a..dcc4332 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -54,7 +54,14 @@ def is_ready(self): else: return False - def execute_query(self, formatted_query, needs_commit, query_config, **opts): + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + # TODO: Here we are parsing the parameters and taking responsability for it. We do not make any safe parsing as + # this will be used in a backend-to-backend context and we build our own queries. + # --- + # There is an open issue in CARTO about having separated parameters and binding them in the server: + # https://github.com/CartoDB/Geographica-Product-Coordination/issues/57 + formatted_query = query_template.format(**params) + parse_json = query_config.custom['parse_json'] do_post = query_config.custom['do_post'] format_ = query_config.custom['format'] diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index fab651c..c13f80a 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -40,7 +40,7 @@ def setup(self): def is_ready(self): return super().is_ready and self._conn and self._cursor - def execute_query(self, formatted_query, needs_commit, query_config, **opts): + def execute_query(self, query_template, params, needs_commit, query_config, **opts): data = { 'fields': [], 'rows': [], @@ -48,7 +48,7 @@ def execute_query(self, formatted_query, needs_commit, query_config, **opts): } start = time() - self._cursor.execute(formatted_query) + self._cursor.execute(query_template, params) 
data['profiling']['execute_time'] = time() - start if self._cursor.description: diff --git a/src/core/data_sources/postgres/sqlalchemy.py b/src/core/data_sources/postgres/sqlalchemy.py new file mode 100644 index 0000000..7a6fedf --- /dev/null +++ b/src/core/data_sources/postgres/sqlalchemy.py @@ -0,0 +1,5 @@ +from core.data_sources.base import DataSource + + +class SQLAlchemyDataSource(DataSource): + pass diff --git a/src/core/tests/test_cache_base.py b/src/core/tests/test_cache_base.py index 24290b1..ea74752 100644 --- a/src/core/tests/test_cache_base.py +++ b/src/core/tests/test_cache_base.py @@ -14,20 +14,20 @@ def test_generate_key(self): random_queries = set([QUERY_PATTERN % ''.join(random.choices(string.ascii_uppercase + string.digits, k=20)) for _ in range(queries_population)]) - keys = set([LongitudeCache.generate_key(q) for q in random_queries]) + keys = set([LongitudeCache.generate_key(q, {}) for q in random_queries]) # By using sets we ensure uniqueness. self.assertEqual(len(random_queries), len(keys)) # Also, function must return ALWAYS the same value for the same query, regarless of how many times the # function is called - unique_key = set([LongitudeCache.generate_key('SOME_QUERY_OVER_AND_OVER') for _ in range(100)]) + unique_key = set([LongitudeCache.generate_key('SOME_QUERY_OVER_AND_OVER', {}) for _ in range(100)]) self.assertEqual(1, len(unique_key)) def test_get_nor_put_are_implemented_in_base_class(self): cache = LongitudeCache() with self.assertRaises(NotImplementedError): - cache.get('some_query') + cache.get('some_query', {}) with self.assertRaises(NotImplementedError): cache.put('some_query', payload='whatever') diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index d96a21d..4b97769 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -28,8 +28,8 @@ def setUp(self): class FakeCache(LongitudeCache): @staticmethod - def generate_key(formatted_query): 
- if formatted_query == 'some_query_in_cache': + def generate_key(query_template, query_parameters): + if query_template == 'some_query_in_cache': return 'hit' return 'miss' @@ -79,7 +79,7 @@ def test_abstract_methods_are_not_implemented(self): ds = DataSource({}) with self.assertRaises(NotImplementedError): - ds.query(statement='whatever') + ds.query(query_template='whatever') def test_is_ready(self): class FakeReadyCache(LongitudeCache): diff --git a/src/samples/postgres_sample.py b/src/samples/postgres_sample.py index 2e285d3..61272b3 100644 --- a/src/samples/postgres_sample.py +++ b/src/samples/postgres_sample.py @@ -14,6 +14,7 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.caches.ram import RamCache from src.core.data_sources.base import LongitudeRetriesExceeded from src.core.data_sources.postgres.default import DefaultPostgresDataSource from src.samples.postgres_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS @@ -27,25 +28,39 @@ 'password': POSTGRES_PASS or 'longitude' } - ds = DefaultPostgresDataSource(config) + ds = DefaultPostgresDataSource(config, cache_class=RamCache) ds.setup() if ds.is_ready: try: - r0 = ds.query("drop table if exists users") + + r0 = ds.query("drop table if exists users", use_cache=False) r1 = ds.query( 'create table users(id serial PRIMARY KEY, name varchar(50) UNIQUE NOT NULL, password varchar(50))', - needs_commit=True) + needs_commit=True, + use_cache=False + ) print(r1.profiling) for i in range(10): - r2 = ds.query("insert into users(name, password) values('longitude%d', 'password%d')" % (i, i), - needs_commit=True) + r2 = ds.query("insert into users(name, password) values(%(user)s, %(password)s)", + needs_commit=True, + use_cache=False, + params={ + 'user': 'longitude_user_' + str(i), + 'password': 'unsafe_password_' + str(i) + + }) print(r2.profiling) - r3 = ds.query('select * from users') + r3 = ds.query('select * from users', 
use_cache=True) + print(r3.rows) print(r3.profiling) + r4 = ds.query('select * from users', use_cache=True) + print(r4.profiling) + print('It is %f times faster using cache' % (r4.profiling['execute_time'] / r4.profiling['cache_time'])) + except LongitudeRetriesExceeded: print("Too many retries and no success...") else: From 86a159b856ff4ad6f41657d5ac1bd1b3ace7c187 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 31 Jan 2019 21:17:41 +0100 Subject: [PATCH 31/47] Added SQLAlchemy and dependencies updated. --- =1.3 | 2 ++ Pipfile | 4 +++ Pipfile.lock | 83 ++++++++++++++++++++++++++++------------------------ 3 files changed, 50 insertions(+), 39 deletions(-) create mode 100644 =1.3 diff --git a/=1.3 b/=1.3 new file mode 100644 index 0000000..4726fb3 --- /dev/null +++ b/=1.3 @@ -0,0 +1,2 @@ +Installing sqlalchemy... +Installing dependencies from Pipfile.lock (c782e8)... diff --git a/Pipfile b/Pipfile index 227dbd6..70c1b5f 100644 --- a/Pipfile +++ b/Pipfile @@ -16,6 +16,10 @@ psycopg2-binary = "*" pytest-xdist = "*" pytest-sugar = "*" pytest-instafail = "*" +sqlalchemy = "*" [requires] python_version = "3.7" + +[pipenv] +allow_prereleases = true diff --git a/Pipfile.lock b/Pipfile.lock index e417ad7..ac343ac 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "e26640798fbe1cda18166782acc3e67d94de719b0ec80d7824497a719408d3c4" + "sha256": "58ce606962b65ef7f4f36a63dd07b7e093d262d95ac8ceac0f501519bbc782e8" }, "pipfile-spec": 6, "requires": { @@ -222,6 +222,13 @@ ], "version": "==1.12.0" }, + "sqlalchemy": { + "hashes": [ + "sha256:c08cee353acaa05dd4ddf8ae0b0844ae779ed88e0b0784a2c9e0c0f9118eb64c" + ], + "index": "pypi", + "version": "==1.3.0b2" + }, "termcolor": { "hashes": [ "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b" @@ -239,10 +246,10 @@ "develop": { "astroid": { "hashes": [ - "sha256:35b032003d6a863f5dcd7ec11abd5cd5893428beaa31ab164982403bcb311f22", - 
"sha256:6a5d668d7dc69110de01cdf7aeec69a679ef486862a0850cc0fd5571505b6b7e" + "sha256:7f5a9f32ba7acd09c3c437946a9fc779494fc4dc6110958fe440dda30ffa4db0", + "sha256:dd357d91d582bc775ad635ac6c35e0a5d305678650df23bd6b20138429b9765d" ], - "version": "==2.1.0" + "version": "==2.2.0.dev0" }, "atomicwrites": { "hashes": [ @@ -260,40 +267,38 @@ }, "coverage": { "hashes": [ - "sha256:09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", - "sha256:0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", - "sha256:0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", - "sha256:10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", - "sha256:1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", - "sha256:1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", - "sha256:2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", - "sha256:447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", - "sha256:46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", - "sha256:4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", - "sha256:510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", - "sha256:5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", - "sha256:5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", - "sha256:5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", - "sha256:6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", - "sha256:6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", - "sha256:77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", - "sha256:828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", - "sha256:85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", - "sha256:8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", - "sha256:a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", - 
"sha256:aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", - "sha256:ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", - "sha256:b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", - "sha256:bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", - "sha256:c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", - "sha256:d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", - "sha256:d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", - "sha256:da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", - "sha256:ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", - "sha256:ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9" + "sha256:029c69deaeeeae1b15bc6c59f0ffa28aa8473721c614a23f2c2976dec245cd12", + "sha256:02abbbebc6e9d5abe13cd28b5e963dedb6ffb51c146c916d17b18f141acd9947", + "sha256:1bbfe5b82a3921d285e999c6d256c1e16b31c554c29da62d326f86c173d30337", + "sha256:210c02f923df33a8d0e461c86fdcbbb17228ff4f6d92609fc06370a98d283c2d", + "sha256:2d0807ba935f540d20b49d5bf1c0237b90ce81e133402feda906e540003f2f7a", + "sha256:35d7a013874a7c927ce997350d314144ffc5465faf787bb4e46e6c4f381ef562", + "sha256:3636f9d0dcb01aed4180ef2e57a4e34bb4cac3ecd203c2a23db8526d86ab2fb4", + "sha256:42f4be770af2455a75e4640f033a82c62f3fb0d7a074123266e143269d7010ef", + "sha256:48440b25ba6cda72d4c638f3a9efa827b5b87b489c96ab5f4ff597d976413156", + "sha256:4dac8dfd1acf6a3ac657475dfdc66c621f291b1b7422a939cc33c13ac5356473", + "sha256:4e8474771c69c2991d5eab65764289a7dd450bbea050bc0ebb42b678d8222b42", + "sha256:551f10ddfeff56a1325e5a34eff304c5892aa981fd810babb98bfee77ee2fb17", + "sha256:5b104982f1809c1577912519eb249f17d9d7e66304ad026666cb60a5ef73309c", + "sha256:5c62aef73dfc87bfcca32cee149a1a7a602bc74bac72223236b0023543511c88", + "sha256:633151f8d1ad9467b9f7e90854a7f46ed8f2919e8bc7d98d737833e8938fc081", + "sha256:772207b9e2d5bf3f9d283b88915723e4e92d9a62c83f44ec92b9bd0cd685541b", 
+ "sha256:7d5e02f647cd727afc2659ec14d4d1cc0508c47e6cfb07aea33d7aa9ca94d288", + "sha256:a9798a4111abb0f94584000ba2a2c74841f2cfe5f9254709756367aabbae0541", + "sha256:b38ea741ab9e35bfa7015c93c93bbd6a1623428f97a67083fc8ebd366238b91f", + "sha256:b6a5478c904236543c0347db8a05fac6fc0bd574c870e7970faa88e1d9890044", + "sha256:c6248bfc1de36a3844685a2e10ba17c18119ba6252547f921062a323fb31bff1", + "sha256:c705ab445936457359b1424ef25ccc0098b0491b26064677c39f1d14a539f056", + "sha256:d95a363d663ceee647291131dbd213af258df24f41350246842481ec3709bd33", + "sha256:e27265eb80cdc5dab55a40ef6f890e04ecc618649ad3da5265f128b141f93f78", + "sha256:ebc276c9cb5d917bd2ae959f84ffc279acafa9c9b50b0fa436ebb70bbe2166ea", + "sha256:f4d229866d030863d0fe3bf297d6d11e6133ca15bbb41ed2534a8b9a3d6bd061", + "sha256:f95675bd88b51474d4fe5165f3266f419ce754ffadfb97f10323931fa9ac95e5", + "sha256:f95bc54fb6d61b9f9ff09c4ae8ff6a3f5edc937cda3ca36fc937302a7c152bf1", + "sha256:fd0f6be53de40683584e5331c341e65a679dbe5ec489a0697cec7c2ef1a48cda" ], "index": "pypi", - "version": "==4.5.2" + "version": "==5.0a4" }, "isort": { "hashes": [ @@ -368,11 +373,11 @@ }, "pylint": { "hashes": [ - "sha256:689de29ae747642ab230c6d37be2b969bf75663176658851f456619aacf27492", - "sha256:771467c434d0d9f081741fec1d64dfb011ed26e65e12a28fe06ca2f61c4d556c" + "sha256:238df538ea18c9004981202e5bbbd56c47039fe8230c45d3b1f255d97181b716", + "sha256:3c031c10a276587ba5e73b3189c33749973d66473f77ecb53715e27cd2650348" ], "index": "pypi", - "version": "==2.2.2" + "version": "==2.3.0.dev1" }, "pytest": { "hashes": [ From d295ad14b24b1196d84796ffa8bccf181afe648c Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Thu, 31 Jan 2019 22:37:29 +0100 Subject: [PATCH 32/47] get_config() now returns default and custom configurations merged if no key is provided. 
Create table from model in SQLAlchemy seems to work (wrapped in data source object) --- src/core/common/config.py | 8 ++- src/core/data_sources/README.md | 61 ++++++++++++++++++++ src/core/data_sources/postgres/sqlalchemy.py | 51 +++++++++++++++- src/samples/sqlalchemy_sample.py | 50 ++++++++++++++++ src/samples/sqlalchemy_sample_config.py | 5 ++ 5 files changed, 172 insertions(+), 3 deletions(-) create mode 100644 src/core/data_sources/README.md create mode 100644 src/samples/sqlalchemy_sample.py create mode 100644 src/samples/sqlalchemy_sample_config.py diff --git a/src/core/common/config.py b/src/core/common/config.py index 255859b..169a724 100644 --- a/src/core/common/config.py +++ b/src/core/common/config.py @@ -30,12 +30,16 @@ def __init__(self, config=None): for k in using_defaults_for: self.logger.info("%s key is using default value" % k) - def get_config(self, key): + def get_config(self, key=None): """ Getter for configuration values - :param key: Key in the configuration dictionary + :param key: Key in the configuration dictionary. If no key is provided, the full config is returned. :return: Current value of the chosen key """ + if key is None: + config_template = dict(self._default_config) + config_template.update(self._config) + return config_template if key not in self._default_config.keys(): raise LongitudeConfigError("%s is not a valid config value. Check your defaults as reference.") diff --git a/src/core/data_sources/README.md b/src/core/data_sources/README.md new file mode 100644 index 0000000..e9e7290 --- /dev/null +++ b/src/core/data_sources/README.md @@ -0,0 +1,61 @@ +# How to add your own Data Source + +**IMPORTANT**: this is useful information if you are **developing the Longitude library**. If you are just using it, this information is irrelevant for you. + +If you want to add data source classes to Longitude you must create a new class that inherits from DataSource. 
+
+* That class must implement ```execute_query()``` and ```parse_response()```
+* It can also override:
+    * ```__init__```: if it needs instance attributes to be defined
+    * ```setup()```: if it needs some process to be done **before** executing queries
+    * ```is_ready``` (*property*): if you need a ```setup()``` here you return if setup was successful.
+* It can also define configuration fields using the ```_default_config``` dictionary
+    * Keep in mind that if you do not add a default key/value for a config, it will not be allowed as configuration value.
+    * Only existent keys in the default dictionary are allowed as configuration values.
+
+### Template
+
+Feel free to copy/paste this code and customize it:
+
+```python
+
+from core.data_sources.base import DataSource
+
+
+class MyDatabaseTechnologyDataSource(DataSource):
+    _default_config = {
+        "...": "..."
+    }
+
+    def __init__(self, config=None, cache_class=None):
+        # TODO: Here you can define instance values like cursors, connections, etc...
+        super().__init__(config, cache_class=cache_class)
+
+    def setup(self):
+        # TODO: Write how your database connection is established, how to log...
+        super().setup()
+
+    @property
+    def is_ready(self):
+        # TODO: Write how, after setup, you can know if queries can be executed (return True) or not (return False)
+        return False
+
+    def execute_query(self, query_template, params, needs_commit, query_config, **opts):
+        # TODO: Write how the database query is executed and return the response or None
+        pass
+
+    def parse_response(self, response):
+        # TODO: Write how the database query response is converted into a LongitudeQueryResponse object
+        pass
+
+```
+
+### Do I need to override the methods always?
+
+No. If your data source is, for example, some REST API (or any service without session or permanent connection), you do not need any preparation. You can just execute queries and parse responses.
+
+Sometimes the setup thing is needed for performance (i.e. 
instead of connecting/disconnecting a database in each query.) + +### Must I implement the execute and parse methods always? + +Yes. Those are the interface methods and are mandatory. \ No newline at end of file diff --git a/src/core/data_sources/postgres/sqlalchemy.py b/src/core/data_sources/postgres/sqlalchemy.py index 7a6fedf..3a205d7 100644 --- a/src/core/data_sources/postgres/sqlalchemy.py +++ b/src/core/data_sources/postgres/sqlalchemy.py @@ -1,5 +1,54 @@ +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy.orm import sessionmaker + from core.data_sources.base import DataSource class SQLAlchemyDataSource(DataSource): - pass + _default_config = { + 'host': 'localhost', + 'port': 5432, + 'db': '', + 'user': 'postgres', + 'password': '' + } + + _Base = None + + @property + def base_class(self): + if self._Base is None: + self._Base = declarative_base() + return self._Base + + def create_all(self): + self.base_class.metadata.create_all(self._engine) + + def __init__(self, config=None, cache_class=None): + # https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html + + self._engine = None + self._session = None + + super().__init__(config, cache_class=cache_class) + + def setup(self): + connection_string_template = 'postgresql://%(user)s:%(password)s@%(host)s:%(port)d/%(db)s' + self._engine = create_engine(connection_string_template % self.get_config(), echo=True) + self._session = sessionmaker(bind=self._engine) + + super().setup() + + @property + def is_ready(self): + # TODO: Write how, after setup, you can know if queries can be executed (return True) or not (return False) + return self._engine is not None and self._session is not None + + def execute_query(self, query_template, params, needs_commit, query_config, **opts): + # TODO: Write how the database query is executed and return the response or None + pass + + def parse_response(self, response): + # TODO: Write how the database query response 
is converted into a LongitudeQueryResponse object + pass diff --git a/src/samples/sqlalchemy_sample.py b/src/samples/sqlalchemy_sample.py new file mode 100644 index 0000000..b56f866 --- /dev/null +++ b/src/samples/sqlalchemy_sample.py @@ -0,0 +1,50 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create a 'sqlalchemy_sample_config.py' file at this folder with the needed fields (look at the import) +That file will be ignored in git, so do not worry about pushing credentials anywhere (but BE CAREFUL!) +DO NOT REPLACE THIS WITH HARD CODED CREDENTIALS EVER AND ALWAYS REVIEW YOUR COMMITS! 
+""" +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) +from src.core.data_sources.base import LongitudeRetriesExceeded +from src.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource +from src.samples.sqlalchemy_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS + +from sqlalchemy import Column, Integer, String + +if __name__ == "__main__": + config = { + 'host': POSTGRES_HOST or 'localhost', + 'port': POSTGRES_PORT or 5432, + 'db': POSTGRES_DB or 'longitude', + 'user': POSTGRES_USER or 'longitude', + 'password': POSTGRES_PASS or 'longitude' + } + + ds = SQLAlchemyDataSource(config) + ds.setup() + if ds.is_ready: + + class User(ds.base_class): + __tablename__ = 'users' + + id = Column(Integer, primary_key=True) + name = Column(String) + fullname = Column(String) + password = Column(String) + + def __repr__(self): + return "" % (self.name, self.fullname, self.password) + + + ds.create_all() + else: + print("Data source is not properly configured.") diff --git a/src/samples/sqlalchemy_sample_config.py b/src/samples/sqlalchemy_sample_config.py new file mode 100644 index 0000000..b5f95fb --- /dev/null +++ b/src/samples/sqlalchemy_sample_config.py @@ -0,0 +1,5 @@ +POSTGRES_HOST = 'localhost' +POSTGRES_PORT = 5432 +POSTGRES_USER = 'longitude' +POSTGRES_PASS = 'longitude' +POSTGRES_DB = 'postgres' From 120336ebe4f878cf44962ca277c10538f0ef6722 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Fri, 1 Feb 2019 09:07:45 +0100 Subject: [PATCH 33/47] 100% coverage for config and helpers. 86% total coverage. 
--- src/core/data_sources/postgres/sqlalchemy.py | 5 +- src/core/tests/test_config.py | 15 ++++++ src/core/tests/test_data_source_sqlalchemy.py | 49 +++++++++++++++++++ src/core/tests/test_helpers.py | 20 ++++++++ src/samples/sqlalchemy_sample.py | 1 - 5 files changed, 85 insertions(+), 5 deletions(-) create mode 100644 src/core/tests/test_data_source_sqlalchemy.py create mode 100644 src/core/tests/test_helpers.py diff --git a/src/core/data_sources/postgres/sqlalchemy.py b/src/core/data_sources/postgres/sqlalchemy.py index 3a205d7..f4d7808 100644 --- a/src/core/data_sources/postgres/sqlalchemy.py +++ b/src/core/data_sources/postgres/sqlalchemy.py @@ -2,7 +2,7 @@ from sqlalchemy.ext.declarative import declarative_base from sqlalchemy.orm import sessionmaker -from core.data_sources.base import DataSource +from src.core.data_sources.base import DataSource class SQLAlchemyDataSource(DataSource): @@ -42,13 +42,10 @@ def setup(self): @property def is_ready(self): - # TODO: Write how, after setup, you can know if queries can be executed (return True) or not (return False) return self._engine is not None and self._session is not None def execute_query(self, query_template, params, needs_commit, query_config, **opts): - # TODO: Write how the database query is executed and return the response or None pass def parse_response(self, response): - # TODO: Write how the database query response is converted into a LongitudeQueryResponse object pass diff --git a/src/core/tests/test_config.py b/src/core/tests/test_config.py index 0266f1b..39eb30d 100644 --- a/src/core/tests/test_config.py +++ b/src/core/tests/test_config.py @@ -32,3 +32,18 @@ def test_config(self): # We do not allow trying to get a config value out of the default keys with self.assertRaises(LongitudeConfigError): self.assertIsNone(ds.get_config('some_random_value_that_does_not_exist_in_config_or_defaults')) + + def test_get_config_root(self): + class SomeConfigurableClass(LongitudeConfigurable): + _default_config = { 
+ 'a': None, + 'b': 'this will not be overwritten' + } + + ds = SomeConfigurableClass(config={'a': 'custom_value'}) + c = ds.get_config() + expected_config = { + 'a': 'custom_value', + 'b': 'this will not be overwritten' + } + self.assertEqual(expected_config, c) diff --git a/src/core/tests/test_data_source_sqlalchemy.py b/src/core/tests/test_data_source_sqlalchemy.py new file mode 100644 index 0000000..5a845fd --- /dev/null +++ b/src/core/tests/test_data_source_sqlalchemy.py @@ -0,0 +1,49 @@ +from unittest import TestCase, mock +from ..data_sources.postgres.sqlalchemy import SQLAlchemyDataSource + +TESTED_MODULE_PATH = 'src.core.data_sources.postgres.sqlalchemy.%s' + + +class TestSQLAlchemyDataSource(TestCase): + + def test_default_configuration_loads(self): + with self.assertLogs(level='INFO') as log_test: + carto_ds = SQLAlchemyDataSource() + module_name = 'src.core.common.config' + self.assertEqual(log_test.output, + ['INFO:%s:db key is using default value' % module_name, + 'INFO:%s:host key is using default value' % module_name, + 'INFO:%s:password key is using default value' % module_name, + 'INFO:%s:port key is using default value' % module_name, + 'INFO:%s:user key is using default value' % module_name + ] + ) + + self.assertEqual('', carto_ds.get_config('db')) + self.assertEqual('localhost', carto_ds.get_config('host')) + self.assertEqual('', carto_ds.get_config('password')) + self.assertEqual(5432, carto_ds.get_config('port')) + self.assertEqual('postgres', carto_ds.get_config('user')) + + @mock.patch(TESTED_MODULE_PATH % 'declarative_base') + def test_base_class(self, alchemy_base_mock): + alchemy_base_mock.return_value = object() + carto_ds = SQLAlchemyDataSource() + self.assertIsNotNone(carto_ds.base_class) # Here, first time instance is created + self.assertIsNotNone(carto_ds.base_class) # Here, instance is recovered + alchemy_base_mock.assert_called_once() # Base class is only created once by our wrapper + + @mock.patch(TESTED_MODULE_PATH % 
'SQLAlchemyDataSource.base_class') + def test_create_all(self, base_class_mock): + base_class_mock.metadata.create_all = mock.MagicMock() + carto_ds = SQLAlchemyDataSource() + carto_ds.create_all() + base_class_mock.metadata.create_all.assert_called_once() + + def test_setup(self): + carto_ds = SQLAlchemyDataSource() + with mock.patch(TESTED_MODULE_PATH % 'create_engine') as fake_create_engine, \ + mock.patch(TESTED_MODULE_PATH % 'sessionmaker') as fake_sessionmaker: + carto_ds.setup() + fake_sessionmaker.assert_called_once() + fake_create_engine.assert_called_once() diff --git a/src/core/tests/test_helpers.py b/src/core/tests/test_helpers.py new file mode 100644 index 0000000..78ad2f1 --- /dev/null +++ b/src/core/tests/test_helpers.py @@ -0,0 +1,20 @@ +from unittest import TestCase, mock +from src.core.common.helpers import DisabledCache +from src.core.data_sources.base import DataSource + + +class TestHelpers(TestCase): + + @mock.patch('src.core.data_sources.base.DataSource', spec=DataSource) + def test_disable_cache_context_manager_triggers_cache(self, fake_data_source): + fake_data_source.enable_cache.return_value = None + fake_data_source.disable_cache.return_value = None + with DisabledCache(fake_data_source): + fake_data_source.disable_cache.assert_called_once() + fake_data_source.enable_cache.assert_called_once() + + @mock.patch('src.core.data_sources.base.DataSource') + def test_disable_cache_context_manager_must_receive_a_data_source(self, fake_data_source): + with self.assertRaises(TypeError): + with DisabledCache(fake_data_source): + print('This text should never be printed') diff --git a/src/samples/sqlalchemy_sample.py b/src/samples/sqlalchemy_sample.py index b56f866..33ffd5d 100644 --- a/src/samples/sqlalchemy_sample.py +++ b/src/samples/sqlalchemy_sample.py @@ -14,7 +14,6 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.data_sources.base import LongitudeRetriesExceeded from 
src.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource from src.samples.sqlalchemy_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS From 6feda0853fe347eaa8f6b152f4e1466c1e0dfc9f Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Fri, 1 Feb 2019 13:34:02 +0100 Subject: [PATCH 34/47] Sample use of SQL Alchemy using Expression Language --- src/core/data_sources/postgres/sqlalchemy.py | 23 ++++++--- src/samples/sqlalchemy_sample.py | 52 ++++++++++++++------ 2 files changed, 54 insertions(+), 21 deletions(-) diff --git a/src/core/data_sources/postgres/sqlalchemy.py b/src/core/data_sources/postgres/sqlalchemy.py index f4d7808..45c52fc 100644 --- a/src/core/data_sources/postgres/sqlalchemy.py +++ b/src/core/data_sources/postgres/sqlalchemy.py @@ -1,6 +1,6 @@ from sqlalchemy import create_engine +from sqlalchemy.exc import ResourceClosedError from sqlalchemy.ext.declarative import declarative_base -from sqlalchemy.orm import sessionmaker from src.core.data_sources.base import DataSource @@ -29,23 +29,34 @@ def __init__(self, config=None, cache_class=None): # https://docs.sqlalchemy.org/en/latest/dialects/postgresql.html self._engine = None - self._session = None + self._connection = None super().__init__(config, cache_class=cache_class) + def __del__(self): + if self._connection: + self._connection.close() + def setup(self): connection_string_template = 'postgresql://%(user)s:%(password)s@%(host)s:%(port)d/%(db)s' self._engine = create_engine(connection_string_template % self.get_config(), echo=True) - self._session = sessionmaker(bind=self._engine) + self._connection = self._engine.connect() super().setup() @property def is_ready(self): - return self._engine is not None and self._session is not None + return self._engine is not None and self._connection is not None def execute_query(self, query_template, params, needs_commit, query_config, **opts): - pass + return self._connection.execute(query_template, params) 
def parse_response(self, response): - pass + + try: + raw_result = response.fetchall() + response.close() + except ResourceClosedError: + raw_result = None + + return raw_result diff --git a/src/samples/sqlalchemy_sample.py b/src/samples/sqlalchemy_sample.py index 33ffd5d..49cf88f 100644 --- a/src/samples/sqlalchemy_sample.py +++ b/src/samples/sqlalchemy_sample.py @@ -13,11 +13,35 @@ import os import sys +from sqlalchemy import text + sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) from src.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource from src.samples.sqlalchemy_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS -from sqlalchemy import Column, Integer, String + +def prepare_sample_table(engine): + """ This is just one way to provide table to show how queries work. + You can generate your queries as you prefer by using any of the SQL Alchemy APIs + """ + + class Avenger(ds.base_class): + from sqlalchemy import Column, Integer, String + __tablename__ = 'avengers' + + id = Column(Integer, primary_key=True) + name = Column(String) + fullname = Column(String) + password = Column(String) + + def __repr__(self): + return "" % (self.name, self.fullname, self.password) + + if Avenger.__table__.exists(engine): + Avenger.__table__.drop(engine) + Avenger.__table__.create(engine) + return Avenger.__table__ + if __name__ == "__main__": config = { @@ -31,19 +55,17 @@ ds = SQLAlchemyDataSource(config) ds.setup() if ds.is_ready: - - class User(ds.base_class): - __tablename__ = 'users' - - id = Column(Integer, primary_key=True) - name = Column(String) - fullname = Column(String) - password = Column(String) - - def __repr__(self): - return "" % (self.name, self.fullname, self.password) - - - ds.create_all() + table = prepare_sample_table(ds._engine) + q = table.insert() + params = [ + {'name': 'tony', 'fullname': 'Tony Stark Jr.', 'password': 'smartestavenger'}, + {'name': 'hulk', 'fullname': 'Dr. 
Phd. Bruce Banner', 'password': 'smartestavenger'}, + {'name': 'cap', 'fullname': 'Capt. Steve Rogers', 'password': 'igotthatreference'} + ] + ds.query(q, params, use_cache=False) + q = table.select('password = :password') + params = {'password': 'igotthatreference'} + r = ds.query(q, params) + print(r) else: print("Data source is not properly configured.") From 333e2756f3abae6fdf49120323ec140000e3146e Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Fri, 1 Feb 2019 14:34:10 +0100 Subject: [PATCH 35/47] Bugfix. Cache object was putting parameters instead of payloads. --- src/core/caches/base.py | 5 ++++- src/core/tests/test_cache_base.py | 3 ++- src/core/tests/test_cache_ram.py | 10 ++++++---- src/core/tests/test_cache_redis.py | 5 +++-- src/core/tests/test_data_source_base.py | 4 ++-- src/core/tests/test_data_source_sqlalchemy.py | 4 +--- 6 files changed, 18 insertions(+), 13 deletions(-) diff --git a/src/core/caches/base.py b/src/core/caches/base.py index 9f8fa1b..827f08f 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -2,6 +2,7 @@ import logging import pickle +from src.core.common.query_response import LongitudeQueryResponse from ..common.config import LongitudeConfigurable @@ -23,7 +24,7 @@ def generate_key(query_template, params): :param params: Dictionary of values to be replaced in the placeholders in a safe manner :return: A (most likely) unique hash, generated from the query text """ - query_payload = query_template + str(params) + query_payload = str(query_template) + str(params) return hashlib.sha256(query_payload.encode('utf-8')).hexdigest() def setup(self): @@ -42,6 +43,8 @@ def get(self, query_template, query_params=None): def put(self, query_template, payload, query_params=None): if query_params is None: query_params = {} + if not isinstance(payload, LongitudeQueryResponse): + raise TypeError('Payloads must be instances of LongitudeQueryResponse!') return self.execute_put(self.generate_key(query_template, query_params), 
self.serialize_payload(payload)) def execute_get(self, key): diff --git a/src/core/tests/test_cache_base.py b/src/core/tests/test_cache_base.py index 1f8058b..2eabab2 100644 --- a/src/core/tests/test_cache_base.py +++ b/src/core/tests/test_cache_base.py @@ -1,5 +1,6 @@ from unittest import TestCase, mock +from src.core.common.query_response import LongitudeQueryResponse from ..caches.base import LongitudeCache @@ -30,4 +31,4 @@ def test_get_nor_put_are_implemented_in_base_class(self): with self.assertRaises(NotImplementedError): cache.get('some_query', {}) with self.assertRaises(NotImplementedError): - cache.put('some_query', payload='whatever') + cache.put('some_query', payload=LongitudeQueryResponse()) diff --git a/src/core/tests/test_cache_ram.py b/src/core/tests/test_cache_ram.py index 8bb0b77..4800b3d 100644 --- a/src/core/tests/test_cache_ram.py +++ b/src/core/tests/test_cache_ram.py @@ -1,5 +1,6 @@ from unittest import TestCase, mock +from src.core.common.query_response import LongitudeQueryResponse from ..caches.ram import RamCache @@ -21,9 +22,10 @@ def test_serialization_does_nothing(self): def test_read_write_flush_cycle(self): self.assertIsNone(self.cache.get('fake_key')) - self.assertFalse(self.cache.put('key', 'value')) - self.assertEqual('value', self.cache.get('key')) - self.assertTrue(self.cache.put('key', 'another value')) - self.assertEqual('another value', self.cache.get('key')) + payload = LongitudeQueryResponse() + payload.profiling['value'] = 42 + self.assertFalse(self.cache.put('key', payload)) + self.assertEqual(42, self.cache.get('key').profiling['value']) + self.cache.flush() self.assertIsNone(self.cache.get('key')) diff --git a/src/core/tests/test_cache_redis.py b/src/core/tests/test_cache_redis.py index 9470a73..c234524 100644 --- a/src/core/tests/test_cache_redis.py +++ b/src/core/tests/test_cache_redis.py @@ -2,6 +2,7 @@ import redis.exceptions +from src.core.common.query_response import LongitudeQueryResponse from ..caches.redis 
import RedisCache @@ -22,9 +23,9 @@ def test_is_ready_if_redis_returns_ping(self, redis_mock): redis_mock.return_value.get.assert_called_once() redis_mock.return_value.set.return_value = None - self.assertFalse(self.cache.put('some_key', 'some_payload')) + self.assertFalse(self.cache.put('some_key', LongitudeQueryResponse())) redis_mock.return_value.exists.return_value = 1 - self.assertTrue(self.cache.put('some_key', 'some_payload')) + self.assertTrue(self.cache.put('some_key', LongitudeQueryResponse())) self.assertEqual(2, redis_mock.return_value.set.call_count) redis_mock.return_value.flushall.return_value = None diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index d150d05..a003fac 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -71,8 +71,8 @@ def test_cache_miss(self, execute_query_mock, parse_response_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() execute_query_mock.return_value = 'some response from the server' - parse_response_mock.return_value = 'normalized response from data source' - self.assertEqual('normalized response from data source', ds.query('some_query_not_in_cache')) + parse_response_mock.return_value = LongitudeQueryResponse(profiling={'value': 42}) + self.assertEqual(42, ds.query('some_query_not_in_cache').profiling['value']) parse_response_mock.assert_called_once_with('some response from the server') def test_abstract_methods_are_not_implemented(self): diff --git a/src/core/tests/test_data_source_sqlalchemy.py b/src/core/tests/test_data_source_sqlalchemy.py index 5a845fd..6a5c716 100644 --- a/src/core/tests/test_data_source_sqlalchemy.py +++ b/src/core/tests/test_data_source_sqlalchemy.py @@ -42,8 +42,6 @@ def test_create_all(self, base_class_mock): def test_setup(self): carto_ds = SQLAlchemyDataSource() - with mock.patch(TESTED_MODULE_PATH % 'create_engine') as fake_create_engine, \ - mock.patch(TESTED_MODULE_PATH % 
'sessionmaker') as fake_sessionmaker: + with mock.patch(TESTED_MODULE_PATH % 'create_engine') as fake_create_engine: carto_ds.setup() - fake_sessionmaker.assert_called_once() fake_create_engine.assert_called_once() From 10039b03996ebf19cace86b268c214552fc0a602 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Fri, 1 Feb 2019 14:36:02 +0100 Subject: [PATCH 36/47] SQLAlchemy data source full sample. --- src/core/data_sources/base.py | 2 +- src/core/data_sources/postgres/common.py | 5 +++ src/core/data_sources/postgres/default.py | 6 ++-- src/core/data_sources/postgres/sqlalchemy.py | 37 ++++++++++++++------ src/samples/sqlalchemy_sample.py | 21 +++++++++-- 5 files changed, 53 insertions(+), 18 deletions(-) create mode 100644 src/core/data_sources/postgres/common.py diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 8c0727e..11127c3 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -116,7 +116,7 @@ def query(self, query_template, params=None, use_cache=True, needs_commit=False, **opts) normalized_response = self.parse_response(response) if self._cache and self._use_cache and use_cache: - self._cache.put(query_template, params, normalized_response) + self._cache.put(query_template, payload=normalized_response, query_params=params) return normalized_response except LongitudeQueryCannotBeExecutedException: diff --git a/src/core/data_sources/postgres/common.py b/src/core/data_sources/postgres/common.py new file mode 100644 index 0000000..b86f37f --- /dev/null +++ b/src/core/data_sources/postgres/common.py @@ -0,0 +1,5 @@ +from psycopg2.extensions import string_types + + +def psycopg2_type_as_string(type_id): + return string_types[type_id] diff --git a/src/core/data_sources/postgres/default.py b/src/core/data_sources/postgres/default.py index 293e331..df51576 100644 --- a/src/core/data_sources/postgres/default.py +++ b/src/core/data_sources/postgres/default.py @@ -2,6 +2,7 @@ import psycopg2 import 
psycopg2.extensions +from .common import psycopg2_type_as_string from ...common.query_response import LongitudeQueryResponse from ..base import DataSource @@ -64,14 +65,11 @@ def execute_query(self, query_template, params, needs_commit, query_config, **op return data - @staticmethod - def _type_as_string(type_id): - return psycopg2.extensions.string_types[type_id] def parse_response(self, response): if response: raw_fields = response['fields'] - fields_names = {n.name: {'type': self._type_as_string(n.type_code).name} for n in raw_fields} + fields_names = {n.name: {'type': psycopg2_type_as_string(n.type_code).name} for n in raw_fields} rows = [{raw_fields[i].name: f for i, f in enumerate(row_data)} for row_data in response['rows']] return LongitudeQueryResponse(rows=rows, fields=fields_names, profiling=response['profiling']) return None diff --git a/src/core/data_sources/postgres/sqlalchemy.py b/src/core/data_sources/postgres/sqlalchemy.py index 45c52fc..a4231a0 100644 --- a/src/core/data_sources/postgres/sqlalchemy.py +++ b/src/core/data_sources/postgres/sqlalchemy.py @@ -1,7 +1,9 @@ +from time import time + from sqlalchemy import create_engine -from sqlalchemy.exc import ResourceClosedError from sqlalchemy.ext.declarative import declarative_base - +from .common import psycopg2_type_as_string +from src.core.common.query_response import LongitudeQueryResponse from src.core.data_sources.base import DataSource @@ -49,14 +51,29 @@ def is_ready(self): return self._engine is not None and self._connection is not None def execute_query(self, query_template, params, needs_commit, query_config, **opts): - return self._connection.execute(query_template, params) + data = { + 'fields': [], + 'rows': [], + 'profiling': {} + } - def parse_response(self, response): + start = time() + response = self._connection.execute(query_template, params) + data['profiling']['execute_time'] = time() - start + + if response.returns_rows: + data['fields'] = response.cursor.description + 
data['rows'] = response.fetchall() - try: - raw_result = response.fetchall() - response.close() - except ResourceClosedError: - raw_result = None + # TODO: Check auto-commit feature. How do we want to implement this here? + + return data + + def parse_response(self, response): - return raw_result + if response: + raw_fields = response['fields'] + fields_names = {n.name: {'type': psycopg2_type_as_string(n.type_code).name} for n in raw_fields} + rows = [{raw_fields[i].name: f for i, f in enumerate(row_data)} for row_data in response['rows']] + return LongitudeQueryResponse(rows=rows, fields=fields_names, profiling=response['profiling']) + return None diff --git a/src/samples/sqlalchemy_sample.py b/src/samples/sqlalchemy_sample.py index 49cf88f..530f571 100644 --- a/src/samples/sqlalchemy_sample.py +++ b/src/samples/sqlalchemy_sample.py @@ -15,6 +15,8 @@ from sqlalchemy import text +from src.core.caches.ram import RamCache + sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) from src.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource from src.samples.sqlalchemy_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS @@ -52,10 +54,13 @@ def __repr__(self): 'password': POSTGRES_PASS or 'longitude' } - ds = SQLAlchemyDataSource(config) + ds = SQLAlchemyDataSource(config, cache_class=RamCache) ds.setup() if ds.is_ready: + # We prepare a table to play around table = prepare_sample_table(ds._engine) + + # Demo insert. Notice how values are passed as parameters instead of just pasted into some string q = table.insert() params = [ {'name': 'tony', 'fullname': 'Tony Stark Jr.', 'password': 'smartestavenger'}, @@ -63,9 +68,19 @@ def __repr__(self): {'name': 'cap', 'fullname': 'Capt. Steve Rogers', 'password': 'igotthatreference'} ] ds.query(q, params, use_cache=False) + + # Demo select. Again, the search is done by a parametrized query. In this case, direct text is used as + # where clause. 
q = table.select('password = :password') params = {'password': 'igotthatreference'} - r = ds.query(q, params) - print(r) + r = ds.query(q, params, use_cache=True) + print(r.fields) + print(r.rows) + print("Cached? " + str(r.comes_from_cache)) + + # Just repeat to check the cache working + r = ds.query(q, params, use_cache=True) + print(r.rows) + print("Cached? " + str(r.comes_from_cache)) else: print("Data source is not properly configured.") From a17482e10bf28cc99f579e7c746a1d68c589b5f8 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Fri, 1 Feb 2019 16:34:28 +0100 Subject: [PATCH 37/47] Configuration object class created. --- src/core/common/config.py | 34 +++++++++++++++++++ ...st_environment_configuration_dictionary.py | 32 +++++++++++++++++ 2 files changed, 66 insertions(+) create mode 100644 src/core/tests/test_environment_configuration_dictionary.py diff --git a/src/core/common/config.py b/src/core/common/config.py index 169a724..6a21a19 100644 --- a/src/core/common/config.py +++ b/src/core/common/config.py @@ -1,8 +1,42 @@ import logging +import os from .exceptions import LongitudeConfigError +class EnvironmentConfiguration: + + def __init__(self, d): + self._original_config = d + self._parsed_config = dict(d) + + self._parse_env_vars_references(self._parsed_config) + + def __getitem__(self, key): + return self._parsed_config[key] + + @staticmethod + def _parse_env_vars_references(dictionary): + """ + Modifies a dictionary like this: + * Recursively + * If a value is a string starting with '=', it gets substituted by the corresponding environment variable + :param dictionary: Dictionary that will be modified. 
+ :return: Nothing + """ + + for k in dictionary.keys(): + if isinstance(dictionary[k], dict): + EnvironmentConfiguration._parse_env_vars_references(dictionary[k]) + elif isinstance(dictionary[k], str) and dictionary[k].startswith('='): + env_var = dictionary[k][1:] # We remove the '=' + value = os.environ.get(env_var) + if value: + dictionary[k] = value + else: + dictionary[k] += ' [NOT FOUND]' + + class LongitudeConfigurable: """ Any subclass will have a nice get_config(key) method to retrieve configuration values diff --git a/src/core/tests/test_environment_configuration_dictionary.py b/src/core/tests/test_environment_configuration_dictionary.py new file mode 100644 index 0000000..96a1002 --- /dev/null +++ b/src/core/tests/test_environment_configuration_dictionary.py @@ -0,0 +1,32 @@ +from unittest import TestCase, mock +from src.core.common.config import EnvironmentConfiguration + +fake_environment = { + 'PATATUELA_RULES': 'my_root_value' +} + + +class TestConfigurationDictionary(TestCase): + + @mock.patch.dict('src.core.common.config.os.environ', fake_environment) + def test_base(self): + d = EnvironmentConfiguration({ + 'root_patatuela': '=PATATUELA_RULES', + 'patata': 'patata value', + 'potato': 'potato value', + 'potatoes': [ + 'potato A', 'poteito B' + ], + 'potato_sack': { + 'colour': 'meh', + 'taste': 'buah', + 'texture': { + 'external': 'oh no', + 'internal': 'omg', + 'bumpiness': '=SOME_VALUE_FOR_BUMPINESS' + } + } + }) + + self.assertEqual('my_root_value', d['root_patatuela']) + self.assertEqual('=SOME_VALUE_FOR_BUMPINESS [NOT FOUND]', d['potato_sack']['texture']['bumpiness']) From 7af7f581b06e5b23c1a2a72383bef5260b1af13e Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Sat, 2 Feb 2019 15:43:55 +0100 Subject: [PATCH 38/47] Example for configuration file using a Carto and Postgres data sources combined --- .env.sample | 14 +++++ src/core/data_sources/base.py | 3 +- src/core/data_sources/carto.py | 4 +- src/samples/mixed_datasources.py | 91 
++++++++++++++++++++++++++++++++ src/samples/sqlalchemy_sample.py | 2 + 5 files changed, 112 insertions(+), 2 deletions(-) create mode 100644 .env.sample create mode 100644 src/samples/mixed_datasources.py diff --git a/.env.sample b/.env.sample new file mode 100644 index 0000000..41a60f7 --- /dev/null +++ b/.env.sample @@ -0,0 +1,14 @@ +# Remote db +CARTO_API_KEY= +CARTO_USER= +CARTO_TABLE= + +# Local db +POSTGRES_HOST=localhost +POSTGRES_PORT=5432 +POSTGRES_DB=longitude +POSTGRES_USER=longitude +POSTGRES_PASS=longitude + +# Cache +REDIS_PASSWORD=longitude \ No newline at end of file diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 11127c3..0e12779 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -114,6 +114,7 @@ def query(self, query_template, params=None, use_cache=True, needs_commit=False, needs_commit=needs_commit, query_config=query_config, **opts) + normalized_response = self.parse_response(response) if self._cache and self._use_cache and use_cache: self._cache.put(query_template, payload=normalized_response, query_params=params) @@ -121,7 +122,7 @@ def query(self, query_template, params=None, use_cache=True, needs_commit=False, return normalized_response except LongitudeQueryCannotBeExecutedException: self.logger.error('Query could not be executed. Retries left: %d' % (self.tries - r)) - raise LongitudeRetriesExceeded + raise LongitudeRetriesExceeded def execute_query(self, query_template, params, needs_commit, query_config, **opts): """ diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index b9f7f94..77cc8ac 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -58,9 +58,11 @@ def execute_query(self, query_template, params, needs_commit, query_config, **op # TODO: Here we are parsing the parameters and taking responsability for it. 
We do not make any safe parsing as # this will be used in a backend-to-backend context and we build our own queries. # --- + # We can use the .mogrify method in psycopg2 to render a query as it is going to be executed + # --- # There is an open issue in CARTO about having separated parameters and binding them in the server: # https://github.com/CartoDB/Geographica-Product-Coordination/issues/57 - formatted_query = query_template.format(**params) + formatted_query = query_template % params parse_json = query_config.custom['parse_json'] do_post = query_config.custom['do_post'] diff --git a/src/samples/mixed_datasources.py b/src/samples/mixed_datasources.py new file mode 100644 index 0000000..b963e5e --- /dev/null +++ b/src/samples/mixed_datasources.py @@ -0,0 +1,91 @@ +""" +██╗ ██╗ ██████╗ ██╗ ██╗ ████████╗ ██████╗ ██╗ ██╗███████╗███████╗ ████████╗██╗ ██╗██╗███████╗ +██║ ██║██╔═══██╗██║ ██║ ╚══██╔══╝██╔═══██╗ ██║ ██║██╔════╝██╔════╝ ╚══██╔══╝██║ ██║██║██╔════╝██╗ +███████║██║ ██║██║ █╗ ██║ ██║ ██║ ██║ ██║ ██║███████╗█████╗ ██║ ███████║██║███████╗╚═╝ +██╔══██║██║ ██║██║███╗██║ ██║ ██║ ██║ ██║ ██║╚════██║██╔══╝ ██║ ██╔══██║██║╚════██║██╗ +██║ ██║╚██████╔╝╚███╔███╔╝ ██║ ╚██████╔╝ ╚██████╔╝███████║███████╗ ██║ ██║ ██║██║███████║╚═╝ +╚═╝ ╚═╝ ╚═════╝ ╚══╝╚══╝ ╚═╝ ╚═════╝ ╚═════╝ ╚══════╝╚══════╝ ╚═╝ ╚═╝ ╚═╝╚═╝╚══════╝ + +You must create the environment variables needed and the county_population in your Carto account. +(We just use the cartodb_id field, so it does not matter what you put there) +This is an example that you can run using the provided docker-compose configuration. + +A fast method is: + +1. copy .env.sample -> .env +2. edit .env adding the carto credentials and table +3. (re)activate your pipenv shell; it will load the variables in that shell + +We are focusing here on the configuration process so there is no error flow control nor fancy query construction. +For such features, check specific samples. 
+ +""" +import os +import sys + +sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) + +from src.core.caches.redis import RedisCache +from src.core.data_sources.postgres.default import DefaultPostgresDataSource +from src.core.data_sources.carto import CartoDataSource +from src.core.common.config import EnvironmentConfiguration + + +def import_table_values_from_carto(limit): + # First, we read from CARTO our 'county_population' + carto_data = carto.query('select * from county_population limit %d' % limit, use_cache=True) + + # Then, we create a local table + postgres.query("drop table if exists county_population", use_cache=False) + postgres.query( + 'create table county_population(id serial PRIMARY KEY, cartodb_id integer UNIQUE NOT NULL, the_geom text)', + needs_commit=True, + use_cache=False + ) + + # Now we want to copy row by row these values using simple inserts: + + # Using psycopg2 directly, we must build our queries and parameters carefully + # i.e. when doing multiple values inserts: + values_template = ('(%s,%s),' * limit)[:-1] + params = [None] * limit * 2 # We reserve places for all values (=limit) for all columns (=2)... + params[::2] = [r['cartodb_id'] for r in carto_data.rows] # ... 
and we alternate id and geom in the values + params[1::2] = [r['the_geom'] for r in carto_data.rows] # This way is both efficient (not using copy) and safe + + postgres.query( + 'insert into county_population (cartodb_id, the_geom) values %s' % values_template, + params=params, + needs_commit=True) + + +if __name__ == "__main__": + + # This is the global config object + # We are going to retrieve some values from a table in Carto, create a local table and copy the values + # doing simple inserts (to show how to do queries) + + config = EnvironmentConfiguration({ + 'carto_main': { + 'api_key': "=CARTO_API_KEY", + 'user': "=CARTO_USER", + + 'cache': { + 'password': '=REDIS_PASSWORD' + } + }, + 'postgres_main': { + 'host': "=POSTGRES_HOST", + 'port': "=POSTGRES_PORT", + 'db': "=POSTGRES_DB", + 'user': "=POSTGRES_USER", + 'password': "=POSTGRES_PASS" + } + }) + + carto = CartoDataSource(config['carto_main'], cache_class=RedisCache) + postgres = DefaultPostgresDataSource(config['postgres_main']) + carto.setup() + postgres.setup() + + if carto.is_ready and postgres.is_ready(): + import_table_values_from_carto(limit=30) diff --git a/src/samples/sqlalchemy_sample.py b/src/samples/sqlalchemy_sample.py index 530f571..0ec59ee 100644 --- a/src/samples/sqlalchemy_sample.py +++ b/src/samples/sqlalchemy_sample.py @@ -62,6 +62,8 @@ def __repr__(self): # Demo insert. Notice how values are passed as parameters instead of just pasted into some string q = table.insert() + + # With SQLAlchemy we can bind lists and subsequent rendered queries will be executed params = [ {'name': 'tony', 'fullname': 'Tony Stark Jr.', 'password': 'smartestavenger'}, {'name': 'hulk', 'fullname': 'Dr. Phd. 
Bruce Banner', 'password': 'smartestavenger'}, From 746788b5bfdbe5afdd2a5c75314441f8e1ba7ba2 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Sat, 2 Feb 2019 16:08:25 +0100 Subject: [PATCH 39/47] Expiration time added to cache interface --- src/core/caches/base.py | 8 +++++--- src/core/caches/ram.py | 4 +++- src/core/caches/redis.py | 4 +++- src/core/data_sources/base.py | 11 +++++++++-- src/core/tests/test_data_source_base.py | 2 +- src/samples/mixed_datasources.py | 4 ++-- 6 files changed, 23 insertions(+), 10 deletions(-) diff --git a/src/core/caches/base.py b/src/core/caches/base.py index 827f08f..e99176b 100644 --- a/src/core/caches/base.py +++ b/src/core/caches/base.py @@ -40,12 +40,14 @@ def get(self, query_template, query_params=None): payload = self.execute_get(self.generate_key(query_template, query_params)) return self.deserialize_payload(payload) - def put(self, query_template, payload, query_params=None): + def put(self, query_template, payload, query_params=None, expiration_time_s=None): if query_params is None: query_params = {} if not isinstance(payload, LongitudeQueryResponse): raise TypeError('Payloads must be instances of LongitudeQueryResponse!') - return self.execute_put(self.generate_key(query_template, query_params), self.serialize_payload(payload)) + return self.execute_put(self.generate_key(query_template, query_params), + self.serialize_payload(payload), + expiration_time_s=expiration_time_s) def execute_get(self, key): """ @@ -55,7 +57,7 @@ def execute_get(self, key): """ raise NotImplementedError - def execute_put(self, key, payload): + def execute_put(self, key, payload, expiration_time_s=None): """ Custom put action over the cache. 
diff --git a/src/core/caches/ram.py b/src/core/caches/ram.py index fec705d..ec5d1e4 100644 --- a/src/core/caches/ram.py +++ b/src/core/caches/ram.py @@ -17,7 +17,9 @@ def is_ready(self): def execute_get(self, key): return self._values.get(key) - def execute_put(self, key, payload): + def execute_put(self, key, payload, expiration_time_s=None): + if expiration_time_s: + self.logger.warning("RamCache does not support expiration time. Ignoring configuration.") is_overwrite = key in self._values.keys() self._values[key] = payload return is_overwrite diff --git a/src/core/caches/redis.py b/src/core/caches/redis.py index 8c35a79..dd287b7 100644 --- a/src/core/caches/redis.py +++ b/src/core/caches/redis.py @@ -44,9 +44,11 @@ def is_ready(self): def execute_get(self, key): return self._values.get(name=key) - def execute_put(self, key, payload): + def execute_put(self, key, payload, expiration_time_s=None): overwrite = self._values.exists(key) == 1 self._values.set(name=key, value=payload) + if expiration_time_s: + self._values.expire(name=key, time=expiration_time_s) return overwrite def flush(self): diff --git a/src/core/data_sources/base.py b/src/core/data_sources/base.py index 0e12779..78fde5f 100644 --- a/src/core/data_sources/base.py +++ b/src/core/data_sources/base.py @@ -77,7 +77,8 @@ def enable_cache(self): def disable_cache(self): self._use_cache = False - def query(self, query_template, params=None, use_cache=True, needs_commit=False, query_config=None, **opts): + def query(self, query_template, params=None, use_cache=True, expiration_time_s=None, needs_commit=False, + query_config=None, **opts): """ This method has to be called to interact with the data source. Each children class will have to implement its own .execute_query(...) with the specific behavior for each interface. 
@@ -85,6 +86,7 @@ def query(self, query_template, params=None, use_cache=True, needs_commit=False, :param query_template: Unformatted SQL query :param params: Values to be passed to the query when formatting it :param use_cache: Boolean to indicate if this specific query should use cache or not (default: True) + :param expiration_time_s: If using cache and cache supports expiration, amount of seconds for the payload to be stored :param needs_commit: Boolean to indicate if this specific query needs to commit to db (default: False) :param query_config: Specific query configuration. If None, the default one will be used. :param opts: @@ -117,7 +119,12 @@ def query(self, query_template, params=None, use_cache=True, needs_commit=False, normalized_response = self.parse_response(response) if self._cache and self._use_cache and use_cache: - self._cache.put(query_template, payload=normalized_response, query_params=params) + self._cache.put( + query_template, + payload=normalized_response, + query_params=params, + expiration_time_s=expiration_time_s + ) return normalized_response except LongitudeQueryCannotBeExecutedException: diff --git a/src/core/tests/test_data_source_base.py b/src/core/tests/test_data_source_base.py index a003fac..fbe89b9 100644 --- a/src/core/tests/test_data_source_base.py +++ b/src/core/tests/test_data_source_base.py @@ -45,7 +45,7 @@ def execute_get(self, key): return pickle.dumps(LongitudeQueryResponse()) return None - def execute_put(self, key, payload): + def execute_put(self, key, payload, expiration_time_s=None): return True self._cache_class = FakeCache diff --git a/src/samples/mixed_datasources.py b/src/samples/mixed_datasources.py index b963e5e..d93f1d6 100644 --- a/src/samples/mixed_datasources.py +++ b/src/samples/mixed_datasources.py @@ -33,8 +33,8 @@ def import_table_values_from_carto(limit): # First, we read from CARTO our 'county_population' - carto_data = carto.query('select * from county_population limit %d' % limit, use_cache=True) - 
+ carto_data = carto.query('select * from county_population limit %d' % limit, use_cache=True, expiration_time_s=3) + print(carto_data.comes_from_cache) # Then, we create a local table postgres.query("drop table if exists county_population", use_cache=False) postgres.query( From ba494a3c5723a7317f7af01493302d8699abd920 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Sat, 2 Feb 2019 16:17:25 +0100 Subject: [PATCH 40/47] Expanded explanation about issues when rendering binded parameters to carto queries. --- src/core/data_sources/carto.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/core/data_sources/carto.py b/src/core/data_sources/carto.py index 77cc8ac..69e7901 100644 --- a/src/core/data_sources/carto.py +++ b/src/core/data_sources/carto.py @@ -58,7 +58,10 @@ def execute_query(self, query_template, params, needs_commit, query_config, **op # TODO: Here we are parsing the parameters and taking responsability for it. We do not make any safe parsing as # this will be used in a backend-to-backend context and we build our own queries. # --- - # We can use the .mogrify method in psycopg2 to render a query as it is going to be executed + # This is also problematic as quoting is not done and relies in the query template + # --- + # Can we use the .mogrify method in psycopg2 to render a query as it is going to be executed ? -> NO + # -> .mogrify is a cursor method but in CARTO connections we lack a cursor. # --- # There is an open issue in CARTO about having separated parameters and binding them in the server: # https://github.com/CartoDB/Geographica-Product-Coordination/issues/57 From 78bc949f430b657decf50782246eba36d71c8ba0 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Sat, 2 Feb 2019 20:02:47 +0100 Subject: [PATCH 41/47] Source folder renamed as longitude. 
--- docker-compose.yml | 2 +- {src => longitude}/__init__.py | 0 {src => longitude}/core/__init__.py | 0 {src => longitude}/core/caches/__init__.py | 0 {src => longitude}/core/caches/base.py | 2 +- {src => longitude}/core/caches/ram.py | 0 {src => longitude}/core/caches/redis.py | 0 {src => longitude}/core/common/__init__.py | 0 {src => longitude}/core/common/config.py | 0 {src => longitude}/core/common/exceptions.py | 0 {src => longitude}/core/common/helpers.py | 2 +- {src => longitude}/core/common/query_response.py | 0 {src => longitude}/core/data_sources/README.md | 0 {src => longitude}/core/data_sources/__init__.py | 0 {src => longitude}/core/data_sources/base.py | 0 {src => longitude}/core/data_sources/carto.py | 0 .../core/data_sources/postgres/__init__.py | 0 .../core/data_sources/postgres/common.py | 0 .../core/data_sources/postgres/default.py | 0 .../core/data_sources/postgres/sqlalchemy.py | 4 ++-- {src => longitude}/core/tests/__init__.py | 0 {src => longitude}/core/tests/test_cache_base.py | 2 +- {src => longitude}/core/tests/test_cache_ram.py | 4 ++-- {src => longitude}/core/tests/test_cache_redis.py | 12 ++++++------ {src => longitude}/core/tests/test_config.py | 8 ++++---- .../core/tests/test_data_source_base.py | 4 ++-- .../core/tests/test_data_source_carto.py | 2 +- .../core/tests/test_data_source_postgres.py | 0 .../core/tests/test_data_source_sqlalchemy.py | 4 ++-- .../test_environment_configuration_dictionary.py | 4 ++-- {src => longitude}/core/tests/test_helpers.py | 8 ++++---- {src => longitude}/samples/.gitignore | 0 {src => longitude}/samples/README.md | 0 {src => longitude}/samples/__init__.py | 0 {src => longitude}/samples/carto_sample.py | 6 +++--- .../samples/carto_sample_with_ram_cache.py | 8 ++++---- .../samples/carto_sample_with_redis_cache.py | 10 +++++----- {src => longitude}/samples/load_sys_path.py | 0 {src => longitude}/samples/mixed_datasources.py | 10 ++++++---- {src => longitude}/samples/postgres_sample.py | 8 ++++---- {src => 
longitude}/samples/sqlalchemy_sample.py | 6 +++--- .../samples/sqlalchemy_sample_config.py | 0 setup.py | 4 ++-- 43 files changed, 56 insertions(+), 54 deletions(-) rename {src => longitude}/__init__.py (100%) rename {src => longitude}/core/__init__.py (100%) rename {src => longitude}/core/caches/__init__.py (100%) rename {src => longitude}/core/caches/base.py (97%) rename {src => longitude}/core/caches/ram.py (100%) rename {src => longitude}/core/caches/redis.py (100%) rename {src => longitude}/core/common/__init__.py (100%) rename {src => longitude}/core/common/config.py (100%) rename {src => longitude}/core/common/exceptions.py (100%) rename {src => longitude}/core/common/helpers.py (87%) rename {src => longitude}/core/common/query_response.py (100%) rename {src => longitude}/core/data_sources/README.md (100%) rename {src => longitude}/core/data_sources/__init__.py (100%) rename {src => longitude}/core/data_sources/base.py (100%) rename {src => longitude}/core/data_sources/carto.py (100%) rename {src => longitude}/core/data_sources/postgres/__init__.py (100%) rename {src => longitude}/core/data_sources/postgres/common.py (100%) rename {src => longitude}/core/data_sources/postgres/default.py (100%) rename {src => longitude}/core/data_sources/postgres/sqlalchemy.py (94%) rename {src => longitude}/core/tests/__init__.py (100%) rename {src => longitude}/core/tests/test_cache_base.py (95%) rename {src => longitude}/core/tests/test_cache_ram.py (85%) rename {src => longitude}/core/tests/test_cache_redis.py (83%) rename {src => longitude}/core/tests/test_config.py (82%) rename {src => longitude}/core/tests/test_data_source_base.py (95%) rename {src => longitude}/core/tests/test_data_source_carto.py (98%) rename {src => longitude}/core/tests/test_data_source_postgres.py (100%) rename {src => longitude}/core/tests/test_data_source_sqlalchemy.py (94%) rename {src => longitude}/core/tests/test_environment_configuration_dictionary.py (85%) rename {src => 
longitude}/core/tests/test_helpers.py (72%) rename {src => longitude}/samples/.gitignore (100%) rename {src => longitude}/samples/README.md (100%) rename {src => longitude}/samples/__init__.py (100%) rename {src => longitude}/samples/carto_sample.py (91%) rename {src => longitude}/samples/carto_sample_with_ram_cache.py (93%) rename {src => longitude}/samples/carto_sample_with_redis_cache.py (93%) rename {src => longitude}/samples/load_sys_path.py (100%) rename {src => longitude}/samples/mixed_datasources.py (91%) rename {src => longitude}/samples/postgres_sample.py (92%) rename {src => longitude}/samples/sqlalchemy_sample.py (94%) rename {src => longitude}/samples/sqlalchemy_sample_config.py (100%) diff --git a/docker-compose.yml b/docker-compose.yml index 02354c6..2b21bec 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -4,7 +4,7 @@ services: image: python:3.6.3-onbuild command: bash volumes: - - .:/usr/src/app + - .:/usr/longitude/app cache: image: redis:alpine diff --git a/src/__init__.py b/longitude/__init__.py similarity index 100% rename from src/__init__.py rename to longitude/__init__.py diff --git a/src/core/__init__.py b/longitude/core/__init__.py similarity index 100% rename from src/core/__init__.py rename to longitude/core/__init__.py diff --git a/src/core/caches/__init__.py b/longitude/core/caches/__init__.py similarity index 100% rename from src/core/caches/__init__.py rename to longitude/core/caches/__init__.py diff --git a/src/core/caches/base.py b/longitude/core/caches/base.py similarity index 97% rename from src/core/caches/base.py rename to longitude/core/caches/base.py index e99176b..c2f4c1d 100644 --- a/src/core/caches/base.py +++ b/longitude/core/caches/base.py @@ -2,7 +2,7 @@ import logging import pickle -from src.core.common.query_response import LongitudeQueryResponse +from longitude.core.common.query_response import LongitudeQueryResponse from ..common.config import LongitudeConfigurable diff --git a/src/core/caches/ram.py 
b/longitude/core/caches/ram.py similarity index 100% rename from src/core/caches/ram.py rename to longitude/core/caches/ram.py diff --git a/src/core/caches/redis.py b/longitude/core/caches/redis.py similarity index 100% rename from src/core/caches/redis.py rename to longitude/core/caches/redis.py diff --git a/src/core/common/__init__.py b/longitude/core/common/__init__.py similarity index 100% rename from src/core/common/__init__.py rename to longitude/core/common/__init__.py diff --git a/src/core/common/config.py b/longitude/core/common/config.py similarity index 100% rename from src/core/common/config.py rename to longitude/core/common/config.py diff --git a/src/core/common/exceptions.py b/longitude/core/common/exceptions.py similarity index 100% rename from src/core/common/exceptions.py rename to longitude/core/common/exceptions.py diff --git a/src/core/common/helpers.py b/longitude/core/common/helpers.py similarity index 87% rename from src/core/common/helpers.py rename to longitude/core/common/helpers.py index 5855a80..b10ac8d 100644 --- a/src/core/common/helpers.py +++ b/longitude/core/common/helpers.py @@ -1,4 +1,4 @@ -from src.core.data_sources.base import DataSource +from longitude.core.data_sources.base import DataSource class DisabledCache: diff --git a/src/core/common/query_response.py b/longitude/core/common/query_response.py similarity index 100% rename from src/core/common/query_response.py rename to longitude/core/common/query_response.py diff --git a/src/core/data_sources/README.md b/longitude/core/data_sources/README.md similarity index 100% rename from src/core/data_sources/README.md rename to longitude/core/data_sources/README.md diff --git a/src/core/data_sources/__init__.py b/longitude/core/data_sources/__init__.py similarity index 100% rename from src/core/data_sources/__init__.py rename to longitude/core/data_sources/__init__.py diff --git a/src/core/data_sources/base.py b/longitude/core/data_sources/base.py similarity index 100% rename from 
src/core/data_sources/base.py rename to longitude/core/data_sources/base.py diff --git a/src/core/data_sources/carto.py b/longitude/core/data_sources/carto.py similarity index 100% rename from src/core/data_sources/carto.py rename to longitude/core/data_sources/carto.py diff --git a/src/core/data_sources/postgres/__init__.py b/longitude/core/data_sources/postgres/__init__.py similarity index 100% rename from src/core/data_sources/postgres/__init__.py rename to longitude/core/data_sources/postgres/__init__.py diff --git a/src/core/data_sources/postgres/common.py b/longitude/core/data_sources/postgres/common.py similarity index 100% rename from src/core/data_sources/postgres/common.py rename to longitude/core/data_sources/postgres/common.py diff --git a/src/core/data_sources/postgres/default.py b/longitude/core/data_sources/postgres/default.py similarity index 100% rename from src/core/data_sources/postgres/default.py rename to longitude/core/data_sources/postgres/default.py diff --git a/src/core/data_sources/postgres/sqlalchemy.py b/longitude/core/data_sources/postgres/sqlalchemy.py similarity index 94% rename from src/core/data_sources/postgres/sqlalchemy.py rename to longitude/core/data_sources/postgres/sqlalchemy.py index a4231a0..a75807d 100644 --- a/src/core/data_sources/postgres/sqlalchemy.py +++ b/longitude/core/data_sources/postgres/sqlalchemy.py @@ -3,8 +3,8 @@ from sqlalchemy import create_engine from sqlalchemy.ext.declarative import declarative_base from .common import psycopg2_type_as_string -from src.core.common.query_response import LongitudeQueryResponse -from src.core.data_sources.base import DataSource +from longitude.core.common.query_response import LongitudeQueryResponse +from longitude.core.data_sources.base import DataSource class SQLAlchemyDataSource(DataSource): diff --git a/src/core/tests/__init__.py b/longitude/core/tests/__init__.py similarity index 100% rename from src/core/tests/__init__.py rename to longitude/core/tests/__init__.py 
diff --git a/src/core/tests/test_cache_base.py b/longitude/core/tests/test_cache_base.py similarity index 95% rename from src/core/tests/test_cache_base.py rename to longitude/core/tests/test_cache_base.py index 2eabab2..366e53d 100644 --- a/src/core/tests/test_cache_base.py +++ b/longitude/core/tests/test_cache_base.py @@ -1,6 +1,6 @@ from unittest import TestCase, mock -from src.core.common.query_response import LongitudeQueryResponse +from longitude.core.common.query_response import LongitudeQueryResponse from ..caches.base import LongitudeCache diff --git a/src/core/tests/test_cache_ram.py b/longitude/core/tests/test_cache_ram.py similarity index 85% rename from src/core/tests/test_cache_ram.py rename to longitude/core/tests/test_cache_ram.py index 4800b3d..8863176 100644 --- a/src/core/tests/test_cache_ram.py +++ b/longitude/core/tests/test_cache_ram.py @@ -1,6 +1,6 @@ from unittest import TestCase, mock -from src.core.common.query_response import LongitudeQueryResponse +from longitude.core.common.query_response import LongitudeQueryResponse from ..caches.ram import RamCache @@ -11,7 +11,7 @@ def setUp(self): self.cache = RamCache() def test_setup_must_clean_cache(self): - with mock.patch('src.core.caches.ram.RamCache.flush') as fake_flush: + with mock.patch('longitude.core.caches.ram.RamCache.flush') as fake_flush: self.cache.setup() fake_flush.assert_called_once() self.assertTrue(self.cache.is_ready) diff --git a/src/core/tests/test_cache_redis.py b/longitude/core/tests/test_cache_redis.py similarity index 83% rename from src/core/tests/test_cache_redis.py rename to longitude/core/tests/test_cache_redis.py index c234524..ab48a60 100644 --- a/src/core/tests/test_cache_redis.py +++ b/longitude/core/tests/test_cache_redis.py @@ -2,11 +2,11 @@ import redis.exceptions -from src.core.common.query_response import LongitudeQueryResponse +from longitude.core.common.query_response import LongitudeQueryResponse from ..caches.redis import RedisCache 
-@mock.patch('src.core.caches.redis.redis.Redis') +@mock.patch('longitude.core.caches.redis.redis.Redis') class TestRedisCache(TestCase): cache = None @@ -39,7 +39,7 @@ def test_is_not_ready_if_redis_fails_ping_because_of_connection_error(self, redi with self.assertLogs(level='ERROR') as log_test: self.assertFalse(self.cache.is_ready) expected_log = [ - 'ERROR:src.core.caches.redis:Cannot connect to Redis server at some_host:666.' + 'ERROR:longitude.core.caches.redis:Cannot connect to Redis server at some_host:666.' ] self.assertEqual(expected_log, log_test.output) @@ -54,18 +54,18 @@ def test_is_not_ready_because_no_password(self, redis_mock): self.cache.setup() with self.assertLogs(level='ERROR') as log_test: self.assertFalse(self.cache.is_ready) - self.assertEqual(['ERROR:src.core.caches.redis:Redis password required.'], log_test.output) + self.assertEqual(['ERROR:longitude.core.caches.redis:Redis password required.'], log_test.output) def test_is_not_ready_because_wrong_password(self, redis_mock): redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('invalid password') self.cache.setup() with self.assertLogs(level='ERROR') as log_test: self.assertFalse(self.cache.is_ready) - self.assertEqual(['ERROR:src.core.caches.redis:Redis password is wrong.'], log_test.output) + self.assertEqual(['ERROR:longitude.core.caches.redis:Redis password is wrong.'], log_test.output) def test_is_not_ready_because_of_generic_response_error(self, redis_mock): redis_mock.return_value.ping.side_effect = redis.exceptions.ResponseError('some error text') self.cache.setup() with self.assertLogs(level='ERROR') as log_test: self.assertFalse(self.cache.is_ready) - self.assertEqual(['ERROR:src.core.caches.redis:some error text'], log_test.output) + self.assertEqual(['ERROR:longitude.core.caches.redis:some error text'], log_test.output) diff --git a/src/core/tests/test_config.py b/longitude/core/tests/test_config.py similarity index 82% rename from 
src/core/tests/test_config.py rename to longitude/core/tests/test_config.py index 39eb30d..6a646a3 100644 --- a/src/core/tests/test_config.py +++ b/longitude/core/tests/test_config.py @@ -1,7 +1,7 @@ from unittest import TestCase -from src.core.common.config import LongitudeConfigurable -from src.core.common.exceptions import LongitudeConfigError +from longitude.core.common.config import LongitudeConfigurable +from longitude.core.common.exceptions import LongitudeConfigError class TestConfig(TestCase): @@ -20,8 +20,8 @@ def test_config(self): ds = LongitudeConfigurable(config) self.assertEqual(log_test.output, [ - 'WARNING:src.core.common.config:some_another_config_value is an unexpected config value', - 'WARNING:src.core.common.config:some_config_value is an unexpected config value']) + 'WARNING:longitude.core.common.config:some_another_config_value is an unexpected config value', + 'WARNING:longitude.core.common.config:some_config_value is an unexpected config value']) # Values in the config can be retrieved using get_config. If no default or config is defined, None is returned. ds._default_config['some_config_value'] = 42 diff --git a/src/core/tests/test_data_source_base.py b/longitude/core/tests/test_data_source_base.py similarity index 95% rename from src/core/tests/test_data_source_base.py rename to longitude/core/tests/test_data_source_base.py index fbe89b9..b68834a 100644 --- a/src/core/tests/test_data_source_base.py +++ b/longitude/core/tests/test_data_source_base.py @@ -65,8 +65,8 @@ def test_cache_hit(self): # but we do not care, in the abstract class, about what content is generated there. 
self.assertTrue(ds.query('some_query_in_cache').comes_from_cache) - @mock.patch('src.core.data_sources.base.DataSource.parse_response') - @mock.patch('src.core.data_sources.base.DataSource.execute_query') + @mock.patch('longitude.core.data_sources.base.DataSource.parse_response') + @mock.patch('longitude.core.data_sources.base.DataSource.execute_query') def test_cache_miss(self, execute_query_mock, parse_response_mock): ds = DataSource({}, cache_class=self._cache_class) ds.setup() diff --git a/src/core/tests/test_data_source_carto.py b/longitude/core/tests/test_data_source_carto.py similarity index 98% rename from src/core/tests/test_data_source_carto.py rename to longitude/core/tests/test_data_source_carto.py index 6bc973d..12c810f 100644 --- a/src/core/tests/test_data_source_carto.py +++ b/longitude/core/tests/test_data_source_carto.py @@ -11,7 +11,7 @@ class TestCartoDataSource(TestCase): def test_default_configuration_loads(self): with self.assertLogs(level='INFO') as log_test: carto_ds = CartoDataSource() - module_name = 'src.core.common.config' + module_name = 'longitude.core.common.config' self.assertEqual(log_test.output, ['INFO:%s:api_key key is using default value' % module_name, 'INFO:%s:api_version key is using default value' % module_name, diff --git a/src/core/tests/test_data_source_postgres.py b/longitude/core/tests/test_data_source_postgres.py similarity index 100% rename from src/core/tests/test_data_source_postgres.py rename to longitude/core/tests/test_data_source_postgres.py diff --git a/src/core/tests/test_data_source_sqlalchemy.py b/longitude/core/tests/test_data_source_sqlalchemy.py similarity index 94% rename from src/core/tests/test_data_source_sqlalchemy.py rename to longitude/core/tests/test_data_source_sqlalchemy.py index 6a5c716..707c252 100644 --- a/src/core/tests/test_data_source_sqlalchemy.py +++ b/longitude/core/tests/test_data_source_sqlalchemy.py @@ -1,7 +1,7 @@ from unittest import TestCase, mock from 
..data_sources.postgres.sqlalchemy import SQLAlchemyDataSource -TESTED_MODULE_PATH = 'src.core.data_sources.postgres.sqlalchemy.%s' +TESTED_MODULE_PATH = 'longitude.core.data_sources.postgres.sqlalchemy.%s' class TestSQLAlchemyDataSource(TestCase): @@ -9,7 +9,7 @@ class TestSQLAlchemyDataSource(TestCase): def test_default_configuration_loads(self): with self.assertLogs(level='INFO') as log_test: carto_ds = SQLAlchemyDataSource() - module_name = 'src.core.common.config' + module_name = 'longitude.core.common.config' self.assertEqual(log_test.output, ['INFO:%s:db key is using default value' % module_name, 'INFO:%s:host key is using default value' % module_name, diff --git a/src/core/tests/test_environment_configuration_dictionary.py b/longitude/core/tests/test_environment_configuration_dictionary.py similarity index 85% rename from src/core/tests/test_environment_configuration_dictionary.py rename to longitude/core/tests/test_environment_configuration_dictionary.py index 96a1002..b51da8f 100644 --- a/src/core/tests/test_environment_configuration_dictionary.py +++ b/longitude/core/tests/test_environment_configuration_dictionary.py @@ -1,5 +1,5 @@ from unittest import TestCase, mock -from src.core.common.config import EnvironmentConfiguration +from longitude.core.common.config import EnvironmentConfiguration fake_environment = { 'PATATUELA_RULES': 'my_root_value' @@ -8,7 +8,7 @@ class TestConfigurationDictionary(TestCase): - @mock.patch.dict('src.core.common.config.os.environ', fake_environment) + @mock.patch.dict('longitude.core.common.config.os.environ', fake_environment) def test_base(self): d = EnvironmentConfiguration({ 'root_patatuela': '=PATATUELA_RULES', diff --git a/src/core/tests/test_helpers.py b/longitude/core/tests/test_helpers.py similarity index 72% rename from src/core/tests/test_helpers.py rename to longitude/core/tests/test_helpers.py index 78ad2f1..10efa1d 100644 --- a/src/core/tests/test_helpers.py +++ b/longitude/core/tests/test_helpers.py @@ -1,11 
+1,11 @@ from unittest import TestCase, mock -from src.core.common.helpers import DisabledCache -from src.core.data_sources.base import DataSource +from longitude.core.common.helpers import DisabledCache +from longitude.core.data_sources.base import DataSource class TestHelpers(TestCase): - @mock.patch('src.core.data_sources.base.DataSource', spec=DataSource) + @mock.patch('longitude.core.data_sources.base.DataSource', spec=DataSource) def test_disable_cache_context_manager_triggers_cache(self, fake_data_source): fake_data_source.enable_cache.return_value = None fake_data_source.disable_cache.return_value = None @@ -13,7 +13,7 @@ def test_disable_cache_context_manager_triggers_cache(self, fake_data_source): fake_data_source.disable_cache.assert_called_once() fake_data_source.enable_cache.assert_called_once() - @mock.patch('src.core.data_sources.base.DataSource') + @mock.patch('longitude.core.data_sources.base.DataSource') def test_disable_cache_context_manager_must_receive_a_data_source(self, fake_data_source): with self.assertRaises(TypeError): with DisabledCache(fake_data_source): diff --git a/src/samples/.gitignore b/longitude/samples/.gitignore similarity index 100% rename from src/samples/.gitignore rename to longitude/samples/.gitignore diff --git a/src/samples/README.md b/longitude/samples/README.md similarity index 100% rename from src/samples/README.md rename to longitude/samples/README.md diff --git a/src/samples/__init__.py b/longitude/samples/__init__.py similarity index 100% rename from src/samples/__init__.py rename to longitude/samples/__init__.py diff --git a/src/samples/carto_sample.py b/longitude/samples/carto_sample.py similarity index 91% rename from src/samples/carto_sample.py rename to longitude/samples/carto_sample.py index 655529b..a07df9c 100644 --- a/src/samples/carto_sample.py +++ b/longitude/samples/carto_sample.py @@ -14,9 +14,9 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from 
src.core.data_sources.base import LongitudeRetriesExceeded -from src.core.data_sources.carto import CartoDataSource -from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME +from longitude.core.data_sources.base import LongitudeRetriesExceeded +from longitude.core.data_sources.carto import CartoDataSource +from longitude.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME if __name__ == "__main__": config = { diff --git a/src/samples/carto_sample_with_ram_cache.py b/longitude/samples/carto_sample_with_ram_cache.py similarity index 93% rename from src/samples/carto_sample_with_ram_cache.py rename to longitude/samples/carto_sample_with_ram_cache.py index afc7d93..7a20e1c 100644 --- a/src/samples/carto_sample_with_ram_cache.py +++ b/longitude/samples/carto_sample_with_ram_cache.py @@ -16,10 +16,10 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.caches.ram import RamCache -from src.core.data_sources.base import LongitudeRetriesExceeded -from src.core.data_sources.carto import CartoDataSource -from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME +from longitude.core.caches.ram import RamCache +from longitude.core.data_sources.base import LongitudeRetriesExceeded +from longitude.core.data_sources.carto import CartoDataSource +from longitude.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME if __name__ == "__main__": config = { diff --git a/src/samples/carto_sample_with_redis_cache.py b/longitude/samples/carto_sample_with_redis_cache.py similarity index 93% rename from src/samples/carto_sample_with_redis_cache.py rename to longitude/samples/carto_sample_with_redis_cache.py index d3dfbe4..807ee6b 100644 --- a/src/samples/carto_sample_with_redis_cache.py +++ b/longitude/samples/carto_sample_with_redis_cache.py @@ -25,11 +25,11 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', 
'..')) -from src.core.common.helpers import DisabledCache -from src.core.caches.redis import RedisCache -from src.core.common.exceptions import LongitudeRetriesExceeded -from src.core.data_sources.carto import CartoDataSource -from src.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME +from longitude.core.common.helpers import DisabledCache +from longitude.core.caches.redis import RedisCache +from longitude.core.common.exceptions import LongitudeRetriesExceeded +from longitude.core.data_sources.carto import CartoDataSource +from longitude.samples.carto_sample_config import CARTO_API_KEY, CARTO_USER, CARTO_TABLE_NAME if __name__ == "__main__": config = { diff --git a/src/samples/load_sys_path.py b/longitude/samples/load_sys_path.py similarity index 100% rename from src/samples/load_sys_path.py rename to longitude/samples/load_sys_path.py diff --git a/src/samples/mixed_datasources.py b/longitude/samples/mixed_datasources.py similarity index 91% rename from src/samples/mixed_datasources.py rename to longitude/samples/mixed_datasources.py index d93f1d6..91d88d1 100644 --- a/src/samples/mixed_datasources.py +++ b/longitude/samples/mixed_datasources.py @@ -25,14 +25,16 @@ sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.caches.redis import RedisCache -from src.core.data_sources.postgres.default import DefaultPostgresDataSource -from src.core.data_sources.carto import CartoDataSource -from src.core.common.config import EnvironmentConfiguration +from longitude.core.caches.redis import RedisCache +from longitude.core.data_sources.postgres.default import DefaultPostgresDataSource +from longitude.core.data_sources.carto import CartoDataSource +from longitude.core.common.config import EnvironmentConfiguration def import_table_values_from_carto(limit): # First, we read from CARTO our 'county_population' + # If you execute this script twice fast, you will make use of the cache. 
+ # After 3 seconds, the Carto query will be executed again if requested. carto_data = carto.query('select * from county_population limit %d' % limit, use_cache=True, expiration_time_s=3) print(carto_data.comes_from_cache) # Then, we create a local table diff --git a/src/samples/postgres_sample.py b/longitude/samples/postgres_sample.py similarity index 92% rename from src/samples/postgres_sample.py rename to longitude/samples/postgres_sample.py index 61272b3..351a083 100644 --- a/src/samples/postgres_sample.py +++ b/longitude/samples/postgres_sample.py @@ -14,10 +14,10 @@ import sys sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.caches.ram import RamCache -from src.core.data_sources.base import LongitudeRetriesExceeded -from src.core.data_sources.postgres.default import DefaultPostgresDataSource -from src.samples.postgres_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS +from longitude.core.caches.ram import RamCache +from longitude.core.data_sources.base import LongitudeRetriesExceeded +from longitude.core.data_sources.postgres.default import DefaultPostgresDataSource +from longitude.samples.postgres_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS if __name__ == "__main__": config = { diff --git a/src/samples/sqlalchemy_sample.py b/longitude/samples/sqlalchemy_sample.py similarity index 94% rename from src/samples/sqlalchemy_sample.py rename to longitude/samples/sqlalchemy_sample.py index 0ec59ee..e19c531 100644 --- a/src/samples/sqlalchemy_sample.py +++ b/longitude/samples/sqlalchemy_sample.py @@ -15,11 +15,11 @@ from sqlalchemy import text -from src.core.caches.ram import RamCache +from longitude.core.caches.ram import RamCache sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..')) -from src.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource -from src.samples.sqlalchemy_sample_config import POSTGRES_DB, 
POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS +from longitude.core.data_sources.postgres.sqlalchemy import SQLAlchemyDataSource +from longitude.samples.sqlalchemy_sample_config import POSTGRES_DB, POSTGRES_PORT, POSTGRES_HOST, POSTGRES_USER, POSTGRES_PASS def prepare_sample_table(engine): diff --git a/src/samples/sqlalchemy_sample_config.py b/longitude/samples/sqlalchemy_sample_config.py similarity index 100% rename from src/samples/sqlalchemy_sample_config.py rename to longitude/samples/sqlalchemy_sample_config.py diff --git a/setup.py b/setup.py index 5ff8757..5b34b00 100644 --- a/setup.py +++ b/setup.py @@ -30,7 +30,7 @@ "Company": 'https://geographica.gs', "Source Code": "https://github.com/GeographicaGS/Longitude" }, - package_dir={'': 'src'}, + package_dir={'': 'longitude'}, # Choose your license license='MIT', @@ -66,7 +66,7 @@ # What does your project relate to? keywords='carto longitude', - packages=find_packages(where='src', exclude=['test*']), + packages=find_packages(where='longitude', exclude=['test*']), install_requires=[ 'carto==1.4.0', From 6566bb22f01164438994912c94a5b98baec6f9d5 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Sat, 2 Feb 2019 21:03:55 +0100 Subject: [PATCH 42/47] Build + development environment switched to Poetry --- MANIFEST.in | 4 - Pipfile | 26 -- Pipfile.lock | 411 ------------------ README.md | 42 +- longitude/samples/.gitignore | 3 +- longitude/samples/load_sys_path.py | 0 longitude/samples/sqlalchemy_sample_config.py | 5 - poetry.lock | 409 +++++++++++++++++ pyproject.toml | 38 ++ setup.cfg | 2 - setup.py | 77 ---- 11 files changed, 473 insertions(+), 544 deletions(-) delete mode 100644 MANIFEST.in delete mode 100644 Pipfile delete mode 100644 Pipfile.lock delete mode 100644 longitude/samples/load_sys_path.py delete mode 100644 longitude/samples/sqlalchemy_sample_config.py create mode 100644 poetry.lock create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 setup.py diff --git 
a/MANIFEST.in b/MANIFEST.in deleted file mode 100644 index 6b9eb5e..0000000 --- a/MANIFEST.in +++ /dev/null @@ -1,4 +0,0 @@ -# Include the license file -include LICENSE -include requirements.txt -include README.md diff --git a/Pipfile b/Pipfile deleted file mode 100644 index 964c2b1..0000000 --- a/Pipfile +++ /dev/null @@ -1,26 +0,0 @@ -[[source]] -name = "pypi" -url = "https://pypi.org/simple" -verify_ssl = true - -[dev-packages] -pylint = "*" -coverage = "*" -pytest-cov = "*" -pytest = "*" -setuptools = "*" - -[packages] -carto = "*" -redis = "*" -psycopg2-binary = "*" -pytest-xdist = "*" -pytest-sugar = "*" -pytest-instafail = "*" -sqlalchemy = "*" - -[requires] -python_version = "3.7" - -[pipenv] -allow_prereleases = true diff --git a/Pipfile.lock b/Pipfile.lock deleted file mode 100644 index edcbba6..0000000 --- a/Pipfile.lock +++ /dev/null @@ -1,411 +0,0 @@ -{ - "_meta": { - "hash": { - "sha256": "c3f82a0ecd44cb5862f4629d883e56fd718686f5d2d5ff85119e535c0d2c62ce" - }, - "pipfile-spec": 6, - "requires": { - "python_version": "3.7" - }, - "sources": [ - { - "name": "pypi", - "url": "https://pypi.org/simple", - "verify_ssl": true - } - ] - }, - "default": { - "apipkg": { - "hashes": [ - "sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", - "sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c" - ], - "version": "==1.5" - }, - "atomicwrites": { - "hashes": [ - "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", - "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" - ], - "version": "==1.2.1" - }, - "attrs": { - "hashes": [ - "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", - "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb" - ], - "version": "==18.2.0" - }, - "carto": { - "hashes": [ - "sha256:9a54ece9d8f940bc3de3cb742e189c4ea681494d5ec251fec469319a39093dbc" - ], - "index": "pypi", - "version": "==1.4.0" - }, - "certifi": { - 
"hashes": [ - "sha256:47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", - "sha256:993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033" - ], - "version": "==2018.11.29" - }, - "chardet": { - "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" - ], - "version": "==3.0.4" - }, - "execnet": { - "hashes": [ - "sha256:a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", - "sha256:fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83" - ], - "version": "==1.5.0" - }, - "future": { - "hashes": [ - "sha256:67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8" - ], - "version": "==0.17.1" - }, - "idna": { - "hashes": [ - "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", - "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" - ], - "version": "==2.8" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "version": "==5.0.0" - }, - "packaging": { - "hashes": [ - "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", - "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" - ], - "version": "==19.0" - }, - "pluggy": { - "hashes": [ - "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", - "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" - ], - "version": "==0.8.1" - }, - "psycopg2-binary": { - "hashes": [ - "sha256:19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", - "sha256:2b69cf4b0fa2716fd977aa4e1fd39af6110eb47b2bb30b4e5a469d8fbecfc102", - "sha256:2e952fa17ba48cbc2dc063ddeec37d7dc4ea0ef7db0ac1eda8906365a8543f31", - 
"sha256:348b49dd737ff74cfb5e663e18cb069b44c64f77ec0523b5794efafbfa7df0b8", - "sha256:3d72a5fdc5f00ca85160915eb9a973cf9a0ab8148f6eda40708bf672c55ac1d1", - "sha256:4957452f7868f43f32c090dadb4188e9c74a4687323c87a882e943c2bd4780c3", - "sha256:5138cec2ee1e53a671e11cc519505eb08aaaaf390c508f25b09605763d48de4b", - "sha256:587098ca4fc46c95736459d171102336af12f0d415b3b865972a79c03f06259f", - "sha256:5b79368bcdb1da4a05f931b62760bea0955ee2c81531d8e84625df2defd3f709", - "sha256:5cf43807392247d9bc99737160da32d3fa619e0bfd85ba24d1c78db205f472a4", - "sha256:676d1a80b1eebc0cacae8dd09b2fde24213173bf65650d22b038c5ed4039f392", - "sha256:6b0211ecda389101a7d1d3df2eba0cf7ffbdd2480ca6f1d2257c7bd739e84110", - "sha256:79cde4660de6f0bb523c229763bd8ad9a93ac6760b72c369cf1213955c430934", - "sha256:7aba9786ac32c2a6d5fb446002ed936b47d5e1f10c466ef7e48f66eb9f9ebe3b", - "sha256:7c8159352244e11bdd422226aa17651110b600d175220c451a9acf795e7414e0", - "sha256:945f2eedf4fc6b2432697eb90bb98cc467de5147869e57405bfc31fa0b824741", - "sha256:96b4e902cde37a7fc6ab306b3ac089a3949e6ce3d824eeca5b19dc0bedb9f6e2", - "sha256:9a7bccb1212e63f309eb9fab47b6eaef796f59850f169a25695b248ca1bf681b", - "sha256:a3bfcac727538ec11af304b5eccadbac952d4cca1a551a29b8fe554e3ad535dc", - "sha256:b19e9f1b85c5d6136f5a0549abdc55dcbd63aba18b4f10d0d063eb65ef2c68b4", - "sha256:b664011bb14ca1f2287c17185e222f2098f7b4c857961dbcf9badb28786dbbf4", - "sha256:bde7959ef012b628868d69c474ec4920252656d0800835ed999ba5e4f57e3e2e", - "sha256:cb095a0657d792c8de9f7c9a0452385a309dfb1bbbb3357d6b1e216353ade6ca", - "sha256:d16d42a1b9772152c1fe606f679b2316551f7e1a1ce273e7f808e82a136cdb3d", - "sha256:d444b1545430ffc1e7a24ce5a9be122ccd3b135a7b7e695c5862c5aff0b11159", - "sha256:d93ccc7bf409ec0a23f2ac70977507e0b8a8d8c54e5ee46109af2f0ec9e411f3", - "sha256:df6444f952ca849016902662e1a47abf4fa0678d75f92fd9dd27f20525f809cd", - "sha256:e63850d8c52ba2b502662bf3c02603175c2397a9acc756090e444ce49508d41e", - "sha256:ec43358c105794bc2b6fd34c68d27f92bea7102393c01889e93f4b6a70975728", 
- "sha256:f4c6926d9c03dadce7a3b378b40d2fea912c1344ef9b29869f984fb3d2a2420b" - ], - "index": "pypi", - "version": "==2.7.7" - }, - "py": { - "hashes": [ - "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", - "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" - ], - "version": "==1.7.0" - }, - "pyparsing": { - "hashes": [ - "sha256:66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a", - "sha256:f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3" - ], - "version": "==2.3.1" - }, - "pyrestcli": { - "hashes": [ - "sha256:4e98b5cfba0a300acc78a7a4b7c91826edf56b12b588aa316cae4bff8696c644" - ], - "version": "==0.6.8" - }, - "pytest": { - "hashes": [ - "sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", - "sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d" - ], - "version": "==4.2.0" - }, - "pytest-forked": { - "hashes": [ - "sha256:260d03fbd38d5ce41a657759e8d19bc7c8cfa6d0dcfa36c0bc9742d33bc30742", - "sha256:8d05c2e6f33cd4422571b2b1bb309720c398b0549cff499e3e4cde661875ab54" - ], - "version": "==1.0.1" - }, - "pytest-instafail": { - "hashes": [ - "sha256:162bd7c5c196e3b2fe2a5285b69362ee3d9f768d5451413ef914be38df74e3de" - ], - "index": "pypi", - "version": "==0.4.0" - }, - "pytest-sugar": { - "hashes": [ - "sha256:26cf8289fe10880cbbc130bd77398c4e6a8b936d8393b116a5c16121d95ab283", - "sha256:fcd87a74b2bce5386d244b49ad60549bfbc4602527797fac167da147983f58ab" - ], - "index": "pypi", - "version": "==0.9.2" - }, - "pytest-xdist": { - "hashes": [ - "sha256:4a201bb3ee60f5dd6bb40c5209d4e491cecc4d5bafd656cfb10f86178786e568", - "sha256:d03d1ff1b008458ed04fa73e642d840ac69b4107c168e06b71037c62d7813dd4" - ], - "index": "pypi", - "version": "==1.26.1" - }, - "python-dateutil": { - "hashes": [ - "sha256:063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", - "sha256:88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02" - ], - "version": "==2.7.5" - }, - 
"redis": { - "hashes": [ - "sha256:74c892041cba46078ae1ef845241548baa3bd3634f9a6f0f952f006eb1619c71", - "sha256:7ba8612bbfd966dea8c62322543fed0095da2834dbd5a7c124afbc617a156aa7" - ], - "index": "pypi", - "version": "==3.1.0" - }, - "requests": { - "hashes": [ - "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", - "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" - ], - "version": "==2.21.0" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "sqlalchemy": { - "hashes": [ - "sha256:c08cee353acaa05dd4ddf8ae0b0844ae779ed88e0b0784a2c9e0c0f9118eb64c" - ], - "index": "pypi", - "version": "==1.3.0b2" - }, - "termcolor": { - "hashes": [ - "sha256:1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b" - ], - "version": "==1.1.0" - }, - "urllib3": { - "hashes": [ - "sha256:61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", - "sha256:de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22" - ], - "version": "==1.24.1" - } - }, - "develop": { - "astroid": { - "hashes": [ - "sha256:7f5a9f32ba7acd09c3c437946a9fc779494fc4dc6110958fe440dda30ffa4db0", - "sha256:dd357d91d582bc775ad635ac6c35e0a5d305678650df23bd6b20138429b9765d" - ], - "version": "==2.2.0.dev0" - }, - "atomicwrites": { - "hashes": [ - "sha256:0312ad34fcad8fac3704d441f7b317e50af620823353ec657a53e981f92920c0", - "sha256:ec9ae8adaae229e4f8446952d204a3e4b5fdd2d099f9be3aaf556120135fb3ee" - ], - "version": "==1.2.1" - }, - "attrs": { - "hashes": [ - "sha256:10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", - "sha256:ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb" - ], - "version": "==18.2.0" - }, - "coverage": { - "hashes": [ - "sha256:029c69deaeeeae1b15bc6c59f0ffa28aa8473721c614a23f2c2976dec245cd12", - 
"sha256:02abbbebc6e9d5abe13cd28b5e963dedb6ffb51c146c916d17b18f141acd9947", - "sha256:1bbfe5b82a3921d285e999c6d256c1e16b31c554c29da62d326f86c173d30337", - "sha256:210c02f923df33a8d0e461c86fdcbbb17228ff4f6d92609fc06370a98d283c2d", - "sha256:2d0807ba935f540d20b49d5bf1c0237b90ce81e133402feda906e540003f2f7a", - "sha256:35d7a013874a7c927ce997350d314144ffc5465faf787bb4e46e6c4f381ef562", - "sha256:3636f9d0dcb01aed4180ef2e57a4e34bb4cac3ecd203c2a23db8526d86ab2fb4", - "sha256:42f4be770af2455a75e4640f033a82c62f3fb0d7a074123266e143269d7010ef", - "sha256:48440b25ba6cda72d4c638f3a9efa827b5b87b489c96ab5f4ff597d976413156", - "sha256:4dac8dfd1acf6a3ac657475dfdc66c621f291b1b7422a939cc33c13ac5356473", - "sha256:4e8474771c69c2991d5eab65764289a7dd450bbea050bc0ebb42b678d8222b42", - "sha256:551f10ddfeff56a1325e5a34eff304c5892aa981fd810babb98bfee77ee2fb17", - "sha256:5b104982f1809c1577912519eb249f17d9d7e66304ad026666cb60a5ef73309c", - "sha256:5c62aef73dfc87bfcca32cee149a1a7a602bc74bac72223236b0023543511c88", - "sha256:633151f8d1ad9467b9f7e90854a7f46ed8f2919e8bc7d98d737833e8938fc081", - "sha256:772207b9e2d5bf3f9d283b88915723e4e92d9a62c83f44ec92b9bd0cd685541b", - "sha256:7d5e02f647cd727afc2659ec14d4d1cc0508c47e6cfb07aea33d7aa9ca94d288", - "sha256:a9798a4111abb0f94584000ba2a2c74841f2cfe5f9254709756367aabbae0541", - "sha256:b38ea741ab9e35bfa7015c93c93bbd6a1623428f97a67083fc8ebd366238b91f", - "sha256:b6a5478c904236543c0347db8a05fac6fc0bd574c870e7970faa88e1d9890044", - "sha256:c6248bfc1de36a3844685a2e10ba17c18119ba6252547f921062a323fb31bff1", - "sha256:c705ab445936457359b1424ef25ccc0098b0491b26064677c39f1d14a539f056", - "sha256:d95a363d663ceee647291131dbd213af258df24f41350246842481ec3709bd33", - "sha256:e27265eb80cdc5dab55a40ef6f890e04ecc618649ad3da5265f128b141f93f78", - "sha256:ebc276c9cb5d917bd2ae959f84ffc279acafa9c9b50b0fa436ebb70bbe2166ea", - "sha256:f4d229866d030863d0fe3bf297d6d11e6133ca15bbb41ed2534a8b9a3d6bd061", - "sha256:f95675bd88b51474d4fe5165f3266f419ce754ffadfb97f10323931fa9ac95e5", 
- "sha256:f95bc54fb6d61b9f9ff09c4ae8ff6a3f5edc937cda3ca36fc937302a7c152bf1", - "sha256:fd0f6be53de40683584e5331c341e65a679dbe5ec489a0697cec7c2ef1a48cda" - ], - "index": "pypi", - "version": "==5.0a4" - }, - "isort": { - "hashes": [ - "sha256:1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", - "sha256:b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", - "sha256:ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497" - ], - "version": "==4.3.4" - }, - "lazy-object-proxy": { - "hashes": [ - "sha256:0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33", - "sha256:1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39", - "sha256:209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019", - "sha256:27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088", - "sha256:27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b", - "sha256:2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e", - "sha256:2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6", - "sha256:320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b", - "sha256:50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5", - "sha256:5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff", - "sha256:61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd", - "sha256:6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7", - "sha256:7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff", - "sha256:7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d", - "sha256:7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2", - "sha256:7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35", - "sha256:81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4", - "sha256:933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514", - "sha256:94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252", - 
"sha256:ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109", - "sha256:bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f", - "sha256:cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c", - "sha256:d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92", - "sha256:ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577", - "sha256:e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d", - "sha256:e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d", - "sha256:e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f", - "sha256:eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a", - "sha256:f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b" - ], - "version": "==1.3.1" - }, - "mccabe": { - "hashes": [ - "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", - "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" - ], - "version": "==0.6.1" - }, - "more-itertools": { - "hashes": [ - "sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", - "sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", - "sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9" - ], - "version": "==5.0.0" - }, - "pluggy": { - "hashes": [ - "sha256:8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", - "sha256:980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a" - ], - "version": "==0.8.1" - }, - "py": { - "hashes": [ - "sha256:bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", - "sha256:e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6" - ], - "version": "==1.7.0" - }, - "pylint": { - "hashes": [ - "sha256:238df538ea18c9004981202e5bbbd56c47039fe8230c45d3b1f255d97181b716", - "sha256:3c031c10a276587ba5e73b3189c33749973d66473f77ecb53715e27cd2650348" - ], - "index": "pypi", - "version": "==2.3.0.dev1" - }, - "pytest": { - 
"hashes": [ - "sha256:65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", - "sha256:6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d" - ], - "version": "==4.2.0" - }, - "pytest-cov": { - "hashes": [ - "sha256:0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", - "sha256:230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f" - ], - "index": "pypi", - "version": "==2.6.1" - }, - "six": { - "hashes": [ - "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", - "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" - ], - "version": "==1.12.0" - }, - "wrapt": { - "hashes": [ - "sha256:4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533" - ], - "version": "==1.11.1" - } - } -} diff --git a/README.md b/README.md index 4604675..ee2bed5 100644 --- a/README.md +++ b/README.md @@ -7,12 +7,11 @@ A **new** bunch of middleware functions to build applications on top of CARTO. - [ ] Database model - [x] CARTO data source - [x] Basic parametrized queries (i.e. templated queries) - - [ ] Protected parametrized queries (i.e. avoiding injection) + - [x] Protected parametrized queries (i.e. avoiding injection) - [ ] Bind/dynamic parameters in queries (server-side render) - - [ ] Postgres data source + - [x] Postgres data source - [x] psycopg2 - - [ ] SQLAlchemy - - [ ] Asyncpg + - [x] SQLAlchemy - [x] Cache - [x] Base cache - [x] Put @@ -24,14 +23,14 @@ A **new** bunch of middleware functions to build applications on top of CARTO. - [x] Tests - [x] Redis Cache - [x] Tests - - [ ] Documentation + - [x] Documentation - [x] Sample scripts - [x] Unit tests - [x] Sample scripts -- [ ] Config +- [x] Config -- [ ] CI PyPi versioning +- [x] CI PyPi versioning - [ ] Data manipulation - [ ] Carto @@ -77,22 +76,31 @@ pip install -e git+https://github.com/GeographicaGS/Longitude#egg=longitude ## As developer... -Install pipenv in your development machine if you still do not have it. 
+### First time -Set up Python environment: +1. Install ```poetry``` using the [recommended process](https://github.com/sdispater/poetry#installation) + 1. poetry is installed globally as a tool + 1. It works along with virtualenvironments +1. Create a virtual environment for Python 3.x (check the current development version in ```pyproject.toml```) + 1. You can create it wherever you want but do not put it inside the project + 1. A nice place is ```$HOME/virtualenvs/longitude``` +1. Clone the ```longitude``` repo +1. `cd` to the repo and: + 1. Activate the virtual environment: `. ~/virtualenvs/longitude/bin/activate` + 1. Run `poetry install` +1. Configure your IDE to use the virtual environment -```shell -$ cd [path-to-longitude-folder] -$ pipenv install -``` +### Daily -To activate the virtual environment: `$ pipenv shell`. If the environment variables are defined in a `.env` file, they are loaded in this shell. +1. Remember to activate the virtual environment -## Sample scripts +### Why Poetry? -These are intended to be used with real databases (i.e. those in your profile) to check features of the library. +Because it handles development dependencies and packaging with a single file (```pyproject.toml```), which is [already standard](https://flit.readthedocs.io/en/latest/pyproject_toml.html). + +## Sample scripts -You will probably need to provide credentials/api keys/urls/username/... Check each script and it will be explained there. +These are intended to be used with real databases (i.e. those in your profile) to check features of the library. They must be run from the virtual environment. 
## Testing and coverage diff --git a/longitude/samples/.gitignore b/longitude/samples/.gitignore index 90d70b6..c4d4a8f 100644 --- a/longitude/samples/.gitignore +++ b/longitude/samples/.gitignore @@ -1,2 +1 @@ -carto_sample_config.py -postgres_sample_config.py \ No newline at end of file +*_sample_config.py diff --git a/longitude/samples/load_sys_path.py b/longitude/samples/load_sys_path.py deleted file mode 100644 index e69de29..0000000 diff --git a/longitude/samples/sqlalchemy_sample_config.py b/longitude/samples/sqlalchemy_sample_config.py deleted file mode 100644 index b5f95fb..0000000 --- a/longitude/samples/sqlalchemy_sample_config.py +++ /dev/null @@ -1,5 +0,0 @@ -POSTGRES_HOST = 'localhost' -POSTGRES_PORT = 5432 -POSTGRES_USER = 'longitude' -POSTGRES_PASS = 'longitude' -POSTGRES_DB = 'postgres' diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..5930678 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,409 @@ +[[package]] +category = "dev" +description = "apipkg: namespace control and lazy-import mechanism" +name = "apipkg" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.5" + +[[package]] +category = "dev" +description = "An abstract syntax tree for Python with inference support." +name = "astroid" +optional = false +python-versions = ">=3.4.*" +version = "2.1.0" + +[package.dependencies] +lazy-object-proxy = "*" +six = "*" +wrapt = "*" + +[[package]] +category = "dev" +description = "Atomic file writes." 
+name = "atomicwrites" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0" + +[[package]] +category = "dev" +description = "Classes Without Boilerplate" +name = "attrs" +optional = false +python-versions = "*" +version = "18.2.0" + +[[package]] +category = "main" +description = "SDK around CARTO's APIs" +name = "carto" +optional = false +python-versions = "*" +version = "1.4.0" + +[package.dependencies] +pyrestcli = ">=0.6.4" +requests = ">=2.7.0" + +[[package]] +category = "main" +description = "Python package for providing Mozilla's CA Bundle." +name = "certifi" +optional = false +python-versions = "*" +version = "2018.11.29" + +[[package]] +category = "main" +description = "Universal encoding detector for Python 2 and 3" +name = "chardet" +optional = false +python-versions = "*" +version = "3.0.4" + +[[package]] +category = "dev" +description = "Cross-platform colored terminal text." +marker = "sys_platform == \"win32\"" +name = "colorama" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.4.1" + +[[package]] +category = "dev" +description = "Code coverage measurement for Python" +name = "coverage" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, <4" +version = "4.5.2" + +[[package]] +category = "dev" +description = "execnet: rapid multi-Python deployment" +name = "execnet" +optional = false +python-versions = "*" +version = "1.5.0" + +[package.dependencies] +apipkg = ">=1.4" + +[[package]] +category = "main" +description = "Clean single-source support for Python 3 and 2" +name = "future" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +version = "0.17.1" + +[[package]] +category = "main" +description = "Internationalized Domain Names in Applications (IDNA)" +name = "idna" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.8" + +[[package]] +category = "dev" +description = "A Python 
utility / library to sort Python imports." +name = "isort" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "4.3.4" + +[[package]] +category = "dev" +description = "A fast and thorough lazy object proxy." +name = "lazy-object-proxy" +optional = false +python-versions = "*" +version = "1.3.1" + +[[package]] +category = "dev" +description = "McCabe checker, plugin for flake8" +name = "mccabe" +optional = false +python-versions = "*" +version = "0.6.1" + +[[package]] +category = "dev" +description = "More routines for operating on iterables, beyond itertools" +name = "more-itertools" +optional = false +python-versions = "*" +version = "5.0.0" + +[package.dependencies] +six = ">=1.0.0,<2.0.0" + +[[package]] +category = "dev" +description = "Core utilities for Python packages" +name = "packaging" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "19.0" + +[package.dependencies] +pyparsing = ">=2.0.2" +six = "*" + +[[package]] +category = "dev" +description = "plugin and hook calling mechanisms for python" +name = "pluggy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "0.8.1" + +[[package]] +category = "main" +description = "psycopg2 - Python-PostgreSQL Database Adapter" +name = "psycopg2-binary" +optional = false +python-versions = "*" +version = "2.7.7" + +[[package]] +category = "dev" +description = "library with cross-python path, ini-parsing, io, code, log facilities" +name = "py" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.7.0" + +[[package]] +category = "dev" +description = "python code static checker" +name = "pylint" +optional = false +python-versions = ">=3.4.*" +version = "2.2.2" + +[package.dependencies] +astroid = ">=2.0.0" +colorama = "*" +isort = ">=4.2.5" +mccabe = "*" + +[[package]] +category = "dev" +description = "Python parsing module" +name = "pyparsing" +optional = false 
+python-versions = "*" +version = "2.3.1" + +[[package]] +category = "main" +description = "Generic REST client for Python" +name = "pyrestcli" +optional = false +python-versions = "*" +version = "0.6.8" + +[package.dependencies] +future = ">=0.15.2" +python-dateutil = ">=2.5.3" +requests = ">=2.10.0" + +[[package]] +category = "dev" +description = "pytest: simple powerful testing with Python" +name = "pytest" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "4.2.0" + +[package.dependencies] +atomicwrites = ">=1.0" +attrs = ">=17.4.0" +colorama = "*" +more-itertools = ">=4.0.0" +pluggy = ">=0.7" +py = ">=1.5.0" +setuptools = "*" +six = ">=1.10.0" + +[[package]] +category = "dev" +description = "Pytest plugin for measuring coverage." +name = "pytest-cov" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.6.1" + +[package.dependencies] +coverage = ">=4.4" +pytest = ">=3.6" + +[[package]] +category = "dev" +description = "run tests in isolated forked subprocesses" +name = "pytest-forked" +optional = false +python-versions = "*" +version = "1.0.1" + +[package.dependencies] +pytest = ">=3.1.0" + +[[package]] +category = "dev" +description = "py.test plugin to show failures instantly" +name = "pytest-instafail" +optional = false +python-versions = "*" +version = "0.4.0" + +[package.dependencies] +pytest = ">=2.9" + +[[package]] +category = "dev" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
+name = "pytest-sugar" +optional = false +python-versions = "*" +version = "0.9.2" + +[package.dependencies] +packaging = ">=14.1" +pytest = ">=2.9" +termcolor = ">=1.1.0" + +[[package]] +category = "dev" +description = "pytest xdist plugin for distributed testing and loop-on-failing modes" +name = "pytest-xdist" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.26.1" + +[package.dependencies] +execnet = ">=1.1" +pytest = ">=3.6.0" +pytest-forked = "*" +six = "*" + +[[package]] +category = "main" +description = "Extensions to the standard Python datetime module" +name = "python-dateutil" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +version = "2.7.5" + +[package.dependencies] +six = ">=1.5" + +[[package]] +category = "main" +description = "Python client for Redis key-value store" +name = "redis" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "3.1.0" + +[[package]] +category = "main" +description = "Python HTTP for Humans." +name = "requests" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "2.21.0" + +[package.dependencies] +certifi = ">=2017.4.17" +chardet = ">=3.0.2,<3.1.0" +idna = ">=2.5,<2.9" +urllib3 = ">=1.21.1,<1.25" + +[[package]] +category = "main" +description = "Python 2 and 3 compatibility utilities" +name = "six" +optional = false +python-versions = ">=2.6, !=3.0.*, !=3.1.*" +version = "1.12.0" + +[[package]] +category = "main" +description = "Database Abstraction Library" +name = "sqlalchemy" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +version = "1.3.0b2" + +[[package]] +category = "dev" +description = "ANSII Color formatting for output in terminal." +name = "termcolor" +optional = false +python-versions = "*" +version = "1.1.0" + +[[package]] +category = "main" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+name = "urllib3" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, <4" +version = "1.24.1" + +[[package]] +category = "dev" +description = "Module for decorators, wrappers and monkey patching." +name = "wrapt" +optional = false +python-versions = "*" +version = "1.11.1" + +[metadata] +content-hash = "3c9f795821df26505ea9dcf7a11dbdcd2d00b89be698d4688f8fd23c28b24a62" +python-versions = "^3.7" + +[metadata.hashes] +apipkg = ["37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", "58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"] +astroid = ["35b032003d6a863f5dcd7ec11abd5cd5893428beaa31ab164982403bcb311f22", "6a5d668d7dc69110de01cdf7aeec69a679ef486862a0850cc0fd5571505b6b7e"] +atomicwrites = ["03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"] +attrs = ["10cbf6e27dbce8c30807caf056c8eb50917e0eaafe86347671b57254006c3e69", "ca4be454458f9dec299268d472aaa5a11f67a4ff70093396e1ceae9c76cf4bbb"] +carto = ["9a54ece9d8f940bc3de3cb742e189c4ea681494d5ec251fec469319a39093dbc"] +certifi = ["47f9c83ef4c0c621eaef743f133f09fa8a74a9b75f037e8624f83bd1b6626cb7", "993f830721089fef441cdfeb4b2c8c9df86f0c63239f06bd025a76a7daddb033"] +chardet = ["84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"] +colorama = ["05eed71e2e327246ad6b38c540c4a3117230b19679b875190486ddd2d721422d", "f8ac84de7840f5b9c4e3347b3c1eaa50f7e49c2b07596221daec5edaabbd7c48"] +coverage = ["06123b58a1410873e22134ca2d88bd36680479fe354955b3579fb8ff150e4d27", "09e47c529ff77bf042ecfe858fb55c3e3eb97aac2c87f0349ab5a7efd6b3939f", "0a1f9b0eb3aa15c990c328535655847b3420231af299386cfe5efc98f9c250fe", "0cc941b37b8c2ececfed341444a456912e740ecf515d560de58b9a76562d966d", "0d34245f824cc3140150ab7848d08b7e2ba67ada959d77619c986f2062e1f0e8", "10e8af18d1315de936d67775d3a814cc81d0747a1a0312d84e27ae5610e313b0", 
"1b4276550b86caa60606bd3572b52769860a81a70754a54acc8ba789ce74d607", "1e8a2627c48266c7b813975335cfdea58c706fe36f607c97d9392e61502dc79d", "258b21c5cafb0c3768861a6df3ab0cfb4d8b495eee5ec660e16f928bf7385390", "2b224052bfd801beb7478b03e8a66f3f25ea56ea488922e98903914ac9ac930b", "3ad59c84c502cd134b0088ca9038d100e8fb5081bbd5ccca4863f3804d81f61d", "447c450a093766744ab53bf1e7063ec82866f27bcb4f4c907da25ad293bba7e3", "46101fc20c6f6568561cdd15a54018bb42980954b79aa46da8ae6f008066a30e", "4710dc676bb4b779c4361b54eb308bc84d64a2fa3d78e5f7228921eccce5d815", "510986f9a280cd05189b42eee2b69fecdf5bf9651d4cd315ea21d24a964a3c36", "5535dda5739257effef56e49a1c51c71f1d37a6e5607bb25a5eee507c59580d1", "5a7524042014642b39b1fcae85fb37556c200e64ec90824ae9ecf7b667ccfc14", "5f55028169ef85e1fa8e4b8b1b91c0b3b0fa3297c4fb22990d46ff01d22c2d6c", "6694d5573e7790a0e8d3d177d7a416ca5f5c150742ee703f3c18df76260de794", "6831e1ac20ac52634da606b658b0b2712d26984999c9d93f0c6e59fe62ca741b", "71afc1f5cd72ab97330126b566bbf4e8661aab7449f08895d21a5d08c6b051ff", "7349c27128334f787ae63ab49d90bf6d47c7288c63a0a5dfaa319d4b4541dd2c", "77f0d9fa5e10d03aa4528436e33423bfa3718b86c646615f04616294c935f840", "828ad813c7cdc2e71dcf141912c685bfe4b548c0e6d9540db6418b807c345ddd", "859714036274a75e6e57c7bab0c47a4602d2a8cfaaa33bbdb68c8359b2ed4f5c", "85a06c61598b14b015d4df233d249cd5abfa61084ef5b9f64a48e997fd829a82", "869ef4a19f6e4c6987e18b315721b8b971f7048e6eaea29c066854242b4e98d9", "8cb4febad0f0b26c6f62e1628f2053954ad2c555d67660f28dfb1b0496711952", "977e2d9a646773cc7428cdd9a34b069d6ee254fadfb4d09b3f430e95472f3cf3", "99bd767c49c775b79fdcd2eabff405f1063d9d959039c0bdd720527a7738748a", "a5c58664b23b248b16b96253880b2868fb34358911400a7ba39d7f6399935389", "aaa0f296e503cda4bc07566f592cd7a28779d433f3a23c48082af425d6d5a78f", "ab235d9fe64833f12d1334d29b558aacedfbca2356dfb9691f2d0d38a8a7bfb4", "b3b0c8f660fae65eac74fbf003f3103769b90012ae7a460863010539bb7a80da", "bab8e6d510d2ea0f1d14f12642e3f35cefa47a9b2e4c7cea1852b52bc9c49647", 
"c45297bbdbc8bb79b02cf41417d63352b70bcb76f1bbb1ee7d47b3e89e42f95d", "d19bca47c8a01b92640c614a9147b081a1974f69168ecd494687c827109e8f42", "d64b4340a0c488a9e79b66ec9f9d77d02b99b772c8b8afd46c1294c1d39ca478", "da969da069a82bbb5300b59161d8d7c8d423bc4ccd3b410a9b4d8932aeefc14b", "ed02c7539705696ecb7dc9d476d861f3904a8d2b7e894bd418994920935d36bb", "ee5b8abc35b549012e03a7b1e86c09491457dba6c94112a2482b18589cc2bdb9"] +execnet = ["a7a84d5fa07a089186a329528f127c9d73b9de57f1a1131b82bb5320ee651f6a", "fc155a6b553c66c838d1a22dba1dc9f5f505c43285a878c6f74a79c024750b83"] +future = ["67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"] +idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] +isort = ["1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497"] +lazy-object-proxy = ["0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33", "1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39", "209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019", "27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088", "27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b", "2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e", "2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6", "320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b", "50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5", "5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff", "61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd", "6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7", "7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff", "7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d", 
"7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2", "7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35", "81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4", "933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514", "94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252", "ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109", "bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f", "cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c", "d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92", "ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577", "e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d", "e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d", "e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f", "eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a", "f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"] +mccabe = ["ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"] +more-itertools = ["38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", "c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", "fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"] +packaging = ["0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", "9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"] +pluggy = ["8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", "980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a"] +psycopg2-binary = ["19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", "2b69cf4b0fa2716fd977aa4e1fd39af6110eb47b2bb30b4e5a469d8fbecfc102", "2e952fa17ba48cbc2dc063ddeec37d7dc4ea0ef7db0ac1eda8906365a8543f31", "348b49dd737ff74cfb5e663e18cb069b44c64f77ec0523b5794efafbfa7df0b8", 
"3d72a5fdc5f00ca85160915eb9a973cf9a0ab8148f6eda40708bf672c55ac1d1", "4957452f7868f43f32c090dadb4188e9c74a4687323c87a882e943c2bd4780c3", "5138cec2ee1e53a671e11cc519505eb08aaaaf390c508f25b09605763d48de4b", "587098ca4fc46c95736459d171102336af12f0d415b3b865972a79c03f06259f", "5b79368bcdb1da4a05f931b62760bea0955ee2c81531d8e84625df2defd3f709", "5cf43807392247d9bc99737160da32d3fa619e0bfd85ba24d1c78db205f472a4", "676d1a80b1eebc0cacae8dd09b2fde24213173bf65650d22b038c5ed4039f392", "6b0211ecda389101a7d1d3df2eba0cf7ffbdd2480ca6f1d2257c7bd739e84110", "79cde4660de6f0bb523c229763bd8ad9a93ac6760b72c369cf1213955c430934", "7aba9786ac32c2a6d5fb446002ed936b47d5e1f10c466ef7e48f66eb9f9ebe3b", "7c8159352244e11bdd422226aa17651110b600d175220c451a9acf795e7414e0", "945f2eedf4fc6b2432697eb90bb98cc467de5147869e57405bfc31fa0b824741", "96b4e902cde37a7fc6ab306b3ac089a3949e6ce3d824eeca5b19dc0bedb9f6e2", "9a7bccb1212e63f309eb9fab47b6eaef796f59850f169a25695b248ca1bf681b", "a3bfcac727538ec11af304b5eccadbac952d4cca1a551a29b8fe554e3ad535dc", "b19e9f1b85c5d6136f5a0549abdc55dcbd63aba18b4f10d0d063eb65ef2c68b4", "b664011bb14ca1f2287c17185e222f2098f7b4c857961dbcf9badb28786dbbf4", "bde7959ef012b628868d69c474ec4920252656d0800835ed999ba5e4f57e3e2e", "cb095a0657d792c8de9f7c9a0452385a309dfb1bbbb3357d6b1e216353ade6ca", "d16d42a1b9772152c1fe606f679b2316551f7e1a1ce273e7f808e82a136cdb3d", "d444b1545430ffc1e7a24ce5a9be122ccd3b135a7b7e695c5862c5aff0b11159", "d93ccc7bf409ec0a23f2ac70977507e0b8a8d8c54e5ee46109af2f0ec9e411f3", "df6444f952ca849016902662e1a47abf4fa0678d75f92fd9dd27f20525f809cd", "e63850d8c52ba2b502662bf3c02603175c2397a9acc756090e444ce49508d41e", "ec43358c105794bc2b6fd34c68d27f92bea7102393c01889e93f4b6a70975728", "f4c6926d9c03dadce7a3b378b40d2fea912c1344ef9b29869f984fb3d2a2420b"] +py = ["bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", "e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"] +pylint = ["689de29ae747642ab230c6d37be2b969bf75663176658851f456619aacf27492", 
"771467c434d0d9f081741fec1d64dfb011ed26e65e12a28fe06ca2f61c4d556c"] +pyparsing = ["66c9268862641abcac4a96ba74506e594c884e3f57690a696d21ad8210ed667a", "f6c5ef0d7480ad048c054c37632c67fca55299990fff127850181659eea33fc3"] +pyrestcli = ["4e98b5cfba0a300acc78a7a4b7c91826edf56b12b588aa316cae4bff8696c644"] +pytest = ["65aeaa77ae87c7fc95de56285282546cfa9c886dc8e5dc78313db1c25e21bc07", "6ac6d467d9f053e95aaacd79f831dbecfe730f419c6c7022cb316b365cd9199d"] +pytest-cov = ["0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", "230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f"] +pytest-forked = ["260d03fbd38d5ce41a657759e8d19bc7c8cfa6d0dcfa36c0bc9742d33bc30742", "8d05c2e6f33cd4422571b2b1bb309720c398b0549cff499e3e4cde661875ab54"] +pytest-instafail = ["162bd7c5c196e3b2fe2a5285b69362ee3d9f768d5451413ef914be38df74e3de"] +pytest-sugar = ["26cf8289fe10880cbbc130bd77398c4e6a8b936d8393b116a5c16121d95ab283", "fcd87a74b2bce5386d244b49ad60549bfbc4602527797fac167da147983f58ab"] +pytest-xdist = ["4a201bb3ee60f5dd6bb40c5209d4e491cecc4d5bafd656cfb10f86178786e568", "d03d1ff1b008458ed04fa73e642d840ac69b4107c168e06b71037c62d7813dd4"] +python-dateutil = ["063df5763652e21de43de7d9e00ccf239f953a832941e37be541614732cdfc93", "88f9287c0174266bb0d8cedd395cfba9c58e87e5ad86b2ce58859bc11be3cf02"] +redis = ["74c892041cba46078ae1ef845241548baa3bd3634f9a6f0f952f006eb1619c71", "7ba8612bbfd966dea8c62322543fed0095da2834dbd5a7c124afbc617a156aa7"] +requests = ["502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", "7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b"] +six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] +sqlalchemy = ["c08cee353acaa05dd4ddf8ae0b0844ae779ed88e0b0784a2c9e0c0f9118eb64c"] +termcolor = ["1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"] +urllib3 = ["61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", 
"de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"] +wrapt = ["4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"] diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..a55575e --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,38 @@ +[tool.poetry] +name = "longitude" +version = "1.0.0" +description = "" +authors = [ +"Dani Ramirez ", +"Alberto Asuero ", +"Jose María Camacho " +] +license = "MIT" +readme = "README.md" +exclude = [ +"longitude/core/tests/**", +"longitude/core/**/README.md" +] + +[tool.poetry.dependencies] +carto = "1.4" +python = "^3.7" +redis = "3.1" +psycopg2-binary = "2.7.7" +sqlalchemy = "1.3.0b2" + +[tool.poetry.dev-dependencies] +pylint = "^2.2" +coverage = "^4.5" +pytest-cov = "^2.6" +pytest-xdist = "^1.26" +pytest-sugar = "^0.9.2" +pytest-instafail = "^0.4.0" + +[build-system] +requires = ["poetry>=0.12"] +build-backend = "poetry.masonry.api" + +[tool.flit.metadata.urls] +Company = "https://geographica.gs" +Repository = "https://github.com/GeographicaGS/Longitude" \ No newline at end of file diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index b88034e..0000000 --- a/setup.cfg +++ /dev/null @@ -1,2 +0,0 @@ -[metadata] -description-file = README.md diff --git a/setup.py b/setup.py deleted file mode 100644 index 5b34b00..0000000 --- a/setup.py +++ /dev/null @@ -1,77 +0,0 @@ -# Always prefer setuptools over distutils -# To use a consistent encoding -from codecs import open -from os import path - -from setuptools import find_packages, setup - -here = path.abspath(path.dirname(__file__)) - -# Get the long description from the README file -with open(path.join(here, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -setup( - name='geographica-longitude', - - version='1.0.0', - - description='Longitude', - long_description=long_description, - - # The project's main homepage. 
- url='https://github.com/GeographicaGS/Longitude', - - # Author details - author='Geographica', - author_email='pypi@geographica.gs', - - project_urls={ - "Company": 'https://geographica.gs', - "Source Code": "https://github.com/GeographicaGS/Longitude" - }, - package_dir={'': 'longitude'}, - # Choose your license - license='MIT', - - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers - classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 3 - Alpha', - - # Indicate who your project is intended for - 'Intended Audience :: Developers', - 'Intended Audience :: Information Technology', - 'Topic :: Database', - 'Topic :: Scientific/Engineering :: GIS', - 'Topic :: Scientific/Engineering :: Information Analysis', - 'Topic :: Software Development :: Libraries', - - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: MIT License', - - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7' - ], - - # What does your project relate to? - keywords='carto longitude', - - packages=find_packages(where='longitude', exclude=['test*']), - - install_requires=[ - 'carto==1.4.0', - 'redis==3.1.0', - 'psycopg2-binary==2.7.7' - ], - -) From 1c430640b0a68475946462bdd19a8b20f94c0d7c Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 4 Feb 2019 11:35:44 +0100 Subject: [PATCH 43/47] Python requirement lowered to 3.6 or bigger instead of 3.7. Fixed coverage script for longitude path. 
--- generate_core_coverage.sh | 2 +- longitude/core/tests/test_cache_base.py | 2 +- poetry.lock | 18 ++++++++++++++++-- pyproject.toml | 2 +- 4 files changed, 19 insertions(+), 5 deletions(-) diff --git a/generate_core_coverage.sh b/generate_core_coverage.sh index 5ca1975..fa2f0ce 100755 --- a/generate_core_coverage.sh +++ b/generate_core_coverage.sh @@ -1,3 +1,3 @@ #!/usr/bin/env bash -pytest --cov-report=html --cov=src.core src/core/tests/ +pytest --cov-report=html --cov=longitude.core longitude/core/tests/ sensible-browser coverage_html_report/index.html diff --git a/longitude/core/tests/test_cache_base.py b/longitude/core/tests/test_cache_base.py index 366e53d..2a1a3b2 100644 --- a/longitude/core/tests/test_cache_base.py +++ b/longitude/core/tests/test_cache_base.py @@ -1,4 +1,4 @@ -from unittest import TestCase, mock +from unittest import TestCase from longitude.core.common.query_response import LongitudeQueryResponse from ..caches.base import LongitudeCache diff --git a/poetry.lock b/poetry.lock index 5930678..63cc9ec 100644 --- a/poetry.lock +++ b/poetry.lock @@ -19,6 +19,10 @@ lazy-object-proxy = "*" six = "*" wrapt = "*" +[package.dependencies.typed-ast] +python = "<3.7" +version = "*" + [[package]] category = "dev" description = "Atomic file writes." @@ -349,6 +353,15 @@ optional = false python-versions = "*" version = "1.1.0" +[[package]] +category = "dev" +description = "a fork of Python 2 and 3 ast modules with type comment support" +marker = "python_version < \"3.7\" and implementation_name == \"cpython\"" +name = "typed-ast" +optional = false +python-versions = "*" +version = "1.3.0" + [[package]] category = "main" description = "HTTP library with thread-safe connection pooling, file post, and more." 
@@ -366,8 +379,8 @@ python-versions = "*" version = "1.11.1" [metadata] -content-hash = "3c9f795821df26505ea9dcf7a11dbdcd2d00b89be698d4688f8fd23c28b24a62" -python-versions = "^3.7" +content-hash = "76b703f4edaabfcaafad2e8359d9e067478bbd32932ac2cb4f97bf7f4e1c72df" +python-versions = "^3.6" [metadata.hashes] apipkg = ["37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6", "58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"] @@ -405,5 +418,6 @@ requests = ["502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", six = ["3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"] sqlalchemy = ["c08cee353acaa05dd4ddf8ae0b0844ae779ed88e0b0784a2c9e0c0f9118eb64c"] termcolor = ["1d6d69ce66211143803fbc56652b41d73b4a400a2891d7bf7a1cdf4c02de613b"] +typed-ast = ["05b0fa87c5eabbbfaa28727adf18dc758c8afa4df2b7b2bb82ff95f3dad5eb24", "3031f55fc790d8838d08190477fd3536c9d18019ed6ea94455721f4320f8ab31", "3e7fac848e8278daa323f5e496df9c009993a0258861683463a173021cb9ee11", "442c505d2476796780e66140f68fdaf97baa19cf96511a52d41b51581d7f864f", "44afe8609e5ec041dcbe61b391fc5466084f58c4c6554563223f8aff92b04200", "584e9ae9b2aaa59f3535c06c595a3bf0419b0feef3a3511ff42b2b4ee4222f13", "5f403aef76934191cc0d8b0a657e058f9cce418f0851db9a8af092ea5cc79d9c", "6b1ddb53b1a079e4eb206df51e5d5e00bdb0b8e0a7990420c56ae79d3f1baec8", "8fbd7856774b7b65eef67163bb802fbffa18a4ccf548d2c040f3b956084c4951", "ab1739be98c346b4c223a28f4f2beaadd0417044f0e58a3b0b719a7ecb7d0e48", "b57566f67c24c8bde48ba67daa7862ebfef15012780684f688af9734c93f26af", "b980201ee3af335779a716a0d870be94d55385275d7c8fed7e5f3d9daaffb8b4", "bf0ad5d68226cbe11cd91e4e9713a93ad26d6c6de46b94d2a51a8d29f8c1e2b1", "c5f540eb30d52c7048b5fe52511247ab6c0bc80c23339e2b8642584efd3bb556", "daaca1e06690cb612a3b96ff20c9905e9c342de8cb504587a1707b91d2596271", "dacce1b97aaddeae2abbba8ecdb40bc8c3ff688ce295b2849730dcc5e82141c5", 
"daf72be36ebbeee7b6aa946195e5b59f1add22f5308e855a947fbd26ddecfbf1", "db629063c0ea5fd631924e27ff92a5419beeeb342d2a20b20e52c7c1a3d6b535", "e5698741ac0fb21ae5b1d5e92774b5ceda498f98f7a53270e0b4d516d20f07a0"] urllib3 = ["61bf29cada3fc2fbefad4fdf059ea4bd1b4a86d2b6d15e1c7c0b582b9752fe39", "de9529817c93f27c8ccbfead6985011db27bd0ddfcdb2d86f3f663385c6a9c22"] wrapt = ["4aea003270831cceb8a90ff27c4031da6ead7ec1886023b80ce0dfe0adf61533"] diff --git a/pyproject.toml b/pyproject.toml index a55575e..b71128f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -16,7 +16,7 @@ exclude = [ [tool.poetry.dependencies] carto = "1.4" -python = "^3.7" +python = "^3.6" redis = "3.1" psycopg2-binary = "2.7.7" sqlalchemy = "1.3.0b2" From 74773c52f482ddc6c8453d008cfa9ad6d3e04efe Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Mon, 4 Feb 2019 16:07:58 +0100 Subject: [PATCH 44/47] First try with Jenkins + Poetry --- Dockerfile | 25 +++++ Jenkinsfile | 127 +++++++++++++++++++++++++ README.md | 12 ++- longitude/samples/mixed_datasources.py | 2 +- 4 files changed, 164 insertions(+), 2 deletions(-) create mode 100644 Dockerfile create mode 100644 Jenkinsfile diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000..67ad195 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,25 @@ +FROM python:3.6.6-slim + +ENV PYTHONUNBUFFERED=1 + +WORKDIR /usr/src/app +ENV PATH="$PATH:/usr/src/app" + + +# Install anything missing in the slim image, install dependencies +# Remove anything only needed for building +# This is run as one line so docker caches it as a single layer. + +COPY pyproject.toml . + +RUN set -x \ + && apt-get update \ + && apt-get install -y --no-install-recommends git gcc curl \ + && curl -sSL https://raw.githubusercontent.com/sdispater/poetry/master/get-poetry.py | python \ + && $HOME/.poetry/bin/poetry install \ + && apt-get remove -y --purge git gcc curl \ + && apt-get autoremove -y \ + && apt-get clean -y \ + && rm -rf /var/lib/apt/lists/* + +COPY . . 
diff --git a/Jenkinsfile b/Jenkinsfile new file mode 100644 index 0000000..d95bcc3 --- /dev/null +++ b/Jenkinsfile @@ -0,0 +1,127 @@ +#!/usr/bin/env groovy + +// Global Environment variables +FAILURE_EMAIL = "build@geographica.gs" +DESIRED_REPOSITORY = "https://github.com/GeographicaGS/Longitude.git" +PUBLISH_BRANCH = "publish" +REPO_NAME = "longitude" + +pipeline{ + agent { node { + label 'master' + } } + + options { + ansiColor('xterm') + } + + stages { + stage('Preparing for build') { + agent { node { + label 'master' + } } + steps { + prepareBuild() + } + } + stage ('Building') { + agent { node { + label 'docker' + } } + steps { + sh "docker build --pull=true -t geographica/${REPO_NAME}:${git_commit} ." + } + } + stage('Linter') + { + agent { node { + label 'docker' + } } + steps { + sh "docker run --rm geographica/${REPO_NAME}:${git_commit} /root/.poetry/bin/poetry run pylint --ignore=samples -E longitude" + } + } + stage('Testing') + { + agent { node { + label 'docker' + } } + steps { + sh "docker run --rm geographica/${REPO_NAME}:${git_commit} /root/.poetry/bin/poetry run pytest --cov=longitude.core longitude/core/tests/" + } + } + stage ('Publish') { + agent { node { + label 'docker' + } } + when { anyOf { + branch "${PUBLISH_BRANCH}" + } } + steps{ + // TODO: this must be "publish" but we keep "build" while testing the Jenkins pipeline + sh "docker run --rm geographica/${REPO_NAME}:${git_commit} /root/.poetry/bin/poetry build" + } + } + // TODO: Stage to check that module can be imported + } + post { + always { + deleteDir() /* clean up our workspace */ + } + unstable { + notifyStatus(currentBuild.currentResult) + } + failure { + notifyStatus(currentBuild.currentResult) + } + } +} + +def prepareBuild() { + script { + checkout scm + + sh "git rev-parse --short HEAD > .git/git_commit" + sh "git --no-pager show -s --format='%ae' HEAD > .git/git_committer_email" + + workspace = pwd() + branch_name = "${ env.BRANCH_NAME }".replaceAll("/", "_") + git_commit = 
readFile(".git/git_commit").replaceAll("\n", "").replaceAll("\r", "") + //git_commit = sh(returnStdout: true, script: "git describe").trim() + build_name = "${git_commit}" + job_name = "${ env.JOB_NAME }".replaceAll("%2F", "/") + committer_email = readFile(".git/git_committer_email").replaceAll("\n", "").replaceAll("\r", "") + GIT_URL = sh(returnStdout: true, script: "git config --get remote.origin.url").trim() + if ( GIT_URL != DESIRED_REPOSITORY ) { + error("This jenkinsfile is configured for '${ DESIRED_REPOSITORY }' but it was executed from '${ GIT_URL }'.") + } + } +} + +def notifyStatus(buildStatus) { + def status + def send_to + + try { + switch (branch_name) { + case 'master': + send_to = "${ committer_email }, ${ FAILURE_EMAIL }" + break + default: + send_to = "${ committer_email }" + break + } + } catch(Exception ex) { + send_to = "${ FAILURE_EMAIL }" + } + + echo "Sending error email to: ${ send_to }" + try { + mail to: "${ send_to }", + from: "Jenkins Geographica ", + subject: "[${ buildStatus }] ${currentBuild.fullDisplayName}", + body: "Something is wrong in '${currentBuild.fullDisplayName}'. \n\nSee ${env.BUILD_URL} for more details." + } catch(Exception ex) { + echo "Something was wrong sending error email :(" + } +} diff --git a/README.md b/README.md index ee2bed5..010ac70 100644 --- a/README.md +++ b/README.md @@ -66,7 +66,17 @@ A **new** bunch of middleware functions to build applications on top of CARTO. 
How to use: ```bash -pip install geographica-longitude +pip install longitude +``` + +Or: +```bash +pipenv install longitude +``` + +Or: +```bash +poetry add longitude ``` Or install from GitHub: diff --git a/longitude/samples/mixed_datasources.py b/longitude/samples/mixed_datasources.py index 91d88d1..6dfd402 100644 --- a/longitude/samples/mixed_datasources.py +++ b/longitude/samples/mixed_datasources.py @@ -89,5 +89,5 @@ def import_table_values_from_carto(limit): carto.setup() postgres.setup() - if carto.is_ready and postgres.is_ready(): + if carto.is_ready and postgres.is_ready: import_table_values_from_carto(limit=30) From ca0d96763b794b7b89f56f517680e41219bd6edc Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Tue, 5 Feb 2019 10:50:04 +0100 Subject: [PATCH 45/47] Updated data source base including some shortcut query methods and dataframe read/write abstract methods --- README.md | 2 +- longitude/core/data_sources/base.py | 44 +++++++++++++++++++++++++++++ 2 files changed, 45 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 010ac70..b365a78 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ A **new** bunch of middleware functions to build applications on top of CARTO. 
- [ ] Carto - [ ] DataFrame read/write - [ ] COPY - -[ ] Postgres + - [ ] Postgres - [ ] DataFrame read/write - [ ] COPY diff --git a/longitude/core/data_sources/base.py b/longitude/core/data_sources/base.py index 78fde5f..379421d 100644 --- a/longitude/core/data_sources/base.py +++ b/longitude/core/data_sources/base.py @@ -77,6 +77,50 @@ def enable_cache(self): def disable_cache(self): self._use_cache = False + def write_data_frame(self, data_frame, table_name): + """ + Writes a Pandas data frame in the specified table + :param data_frame: DataFrame to be written + :param table_name: String indicating target table + :return: + """ + raise NotImplementedError + + def read_data_frame(self, table_name): + """ + Reads the target table as a Pandas DataFrame + :param table_name: String indicating target table + :return: Data as DataFrame + """ + raise NotImplementedError + + def committed_query(self, query_template, params=None): + """ + This is a shortcut for INSERT queries and similar ones dealing with simple update operations. + + Makes a default non-cached query committing the result. If you need to specify more details such as cache or + query specific values, use .query(...) + + :param query_template: Unformatted SQL query + :param params: Values to be passed to the query when formatting it + :return: + """ + return self.query(query_template, params=params, use_cache=False, needs_commit=True) + + def cached_query(self, query_template, params=None, expiration_time_s=None): + """ + This is a shortcut for SELECT queries and similar ones requesting simple data. + + Makes a default cached query. This means that no commit is done and no specific config for the query is + available. If you need any of these, use .query(...) 
+ + :param query_template: Unformatted SQL query + :param params: Values to be passed to the query when formatting it + :param expiration_time_s: Amount of seconds for the payload to be stored (if cache supports this) + :return: Result of the query + """ + return self.query(query_template, params=params, expiration_time_s=expiration_time_s) + def query(self, query_template, params=None, use_cache=True, expiration_time_s=None, needs_commit=False, query_config=None, **opts): """ From 11029dea4dc5bf2794ccc6f69790d80fb31e39b0 Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Tue, 5 Feb 2019 11:26:27 +0100 Subject: [PATCH 46/47] Updated dependencies --- poetry.lock | 45 ++++++++++++++++++++++++++++++++++++++++++++- pyproject.toml | 8 +++++--- 2 files changed, 49 insertions(+), 4 deletions(-) diff --git a/poetry.lock b/poetry.lock index 63cc9ec..3181d91 100644 --- a/poetry.lock +++ b/poetry.lock @@ -127,6 +127,33 @@ optional = false python-versions = "*" version = "1.3.1" +[[package]] +category = "dev" +description = "A super-fast templating language that borrows the best ideas from the existing templating languages." +name = "mako" +optional = false +python-versions = "*" +version = "1.0.7" + +[package.dependencies] +MarkupSafe = ">=0.9.2" + +[[package]] +category = "dev" +description = "Python implementation of Markdown." +name = "markdown" +optional = false +python-versions = "*" +version = "2.4.1" + +[[package]] +category = "dev" +description = "Safely add untrusted strings to HTML/XML markup." +name = "markupsafe" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +version = "1.1.0" + [[package]] category = "dev" description = "McCabe checker, plugin for flake8" @@ -158,6 +185,18 @@ version = "19.0" pyparsing = ">=2.0.2" six = "*" +[[package]] +category = "dev" +description = "A simple program and library to auto generate API documentation for Python modules." 
+name = "pdoc" +optional = false +python-versions = "*" +version = "0.3.2" + +[package.dependencies] +mako = "*" +markdown = "<2.5" + [[package]] category = "dev" description = "plugin and hook calling mechanisms for python" @@ -379,7 +418,7 @@ python-versions = "*" version = "1.11.1" [metadata] -content-hash = "76b703f4edaabfcaafad2e8359d9e067478bbd32932ac2cb4f97bf7f4e1c72df" +content-hash = "4af8291bb558d197491f5c0dcef27141d04d8b452daad5c65d1d6b2c2246bcb6" python-versions = "^3.6" [metadata.hashes] @@ -397,9 +436,13 @@ future = ["67045236dcfd6816dc439556d009594abf643e5eb48992e36beac09c2ca659b8"] idna = ["c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"] isort = ["1153601da39a25b14ddc54955dbbacbb6b2d19135386699e2ad58517953b34af", "b9c40e9750f3d77e6e4d441d8b0266cf555e7cdabdcff33c4fd06366ca761ef8", "ec9ef8f4a9bc6f71eec99e1806bfa2de401650d996c59330782b89a5555c1497"] lazy-object-proxy = ["0ce34342b419bd8f018e6666bfef729aec3edf62345a53b537a4dcc115746a33", "1b668120716eb7ee21d8a38815e5eb3bb8211117d9a90b0f8e21722c0758cc39", "209615b0fe4624d79e50220ce3310ca1a9445fd8e6d3572a896e7f9146bbf019", "27bf62cb2b1a2068d443ff7097ee33393f8483b570b475db8ebf7e1cba64f088", "27ea6fd1c02dcc78172a82fc37fcc0992a94e4cecf53cb6d73f11749825bd98b", "2c1b21b44ac9beb0fc848d3993924147ba45c4ebc24be19825e57aabbe74a99e", "2df72ab12046a3496a92476020a1a0abf78b2a7db9ff4dc2036b8dd980203ae6", "320ffd3de9699d3892048baee45ebfbbf9388a7d65d832d7e580243ade426d2b", "50e3b9a464d5d08cc5227413db0d1c4707b6172e4d4d915c1c70e4de0bbff1f5", "5276db7ff62bb7b52f77f1f51ed58850e315154249aceb42e7f4c611f0f847ff", "61a6cf00dcb1a7f0c773ed4acc509cb636af2d6337a08f362413c76b2b47a8dd", "6ae6c4cb59f199d8827c5a07546b2ab7e85d262acaccaacd49b62f53f7c456f7", "7661d401d60d8bf15bb5da39e4dd72f5d764c5aff5a86ef52a042506e3e970ff", "7bd527f36a605c914efca5d3d014170b2cb184723e423d26b1fb2fd9108e264d", 
"7cb54db3535c8686ea12e9535eb087d32421184eacc6939ef15ef50f83a5e7e2", "7f3a2d740291f7f2c111d86a1c4851b70fb000a6c8883a59660d95ad57b9df35", "81304b7d8e9c824d058087dcb89144842c8e0dea6d281c031f59f0acf66963d4", "933947e8b4fbe617a51528b09851685138b49d511af0b6c0da2539115d6d4514", "94223d7f060301b3a8c09c9b3bc3294b56b2188e7d8179c762a1cda72c979252", "ab3ca49afcb47058393b0122428358d2fbe0408cf99f1b58b295cfeb4ed39109", "bd6292f565ca46dee4e737ebcc20742e3b5be2b01556dafe169f6c65d088875f", "cb924aa3e4a3fb644d0c463cad5bc2572649a6a3f68a7f8e4fbe44aaa6d77e4c", "d0fc7a286feac9077ec52a927fc9fe8fe2fabab95426722be4c953c9a8bede92", "ddc34786490a6e4ec0a855d401034cbd1242ef186c20d79d2166d6a4bd449577", "e34b155e36fa9da7e1b7c738ed7767fc9491a62ec6af70fe9da4a057759edc2d", "e5b9e8f6bda48460b7b143c3821b21b452cb3a835e6bbd5dd33aa0c8d3f5137d", "e81ebf6c5ee9684be8f2c87563880f93eedd56dd2b6146d8a725b50b7e5adb0f", "eb91be369f945f10d3a49f5f9be8b3d0b93a4c2be8f8a5b83b0571b8123e0a7a", "f460d1ceb0e4a5dcb2a652db0904224f367c9b3c1470d5a7683c0480e582468b"] +mako = ["4e02fde57bd4abb5ec400181e4c314f56ac3e49ba4fb8b0d50bba18cb27d25ae"] +markdown = ["812ec5249f45edc31330b7fb06e52aaf6ab2d83aa27047df7cb6837ef2d269b6", "866f5474c2361de7ccf806b438c0462380b4b90aacb9fdf59dfc4b166fb66389"] +markupsafe = ["048ef924c1623740e70204aa7143ec592504045ae4429b59c30054cb31e3c432", "130f844e7f5bdd8e9f3f42e7102ef1d49b2e6fdf0d7526df3f87281a532d8c8b", "19f637c2ac5ae9da8bfd98cef74d64b7e1bb8a63038a3505cd182c3fac5eb4d9", "1b8a7a87ad1b92bd887568ce54b23565f3fd7018c4180136e1cf412b405a47af", "1c25694ca680b6919de53a4bb3bdd0602beafc63ff001fea2f2fc16ec3a11834", "1f19ef5d3908110e1e891deefb5586aae1b49a7440db952454b4e281b41620cd", "1fa6058938190ebe8290e5cae6c351e14e7bb44505c4a7624555ce57fbbeba0d", "31cbb1359e8c25f9f48e156e59e2eaad51cd5242c05ed18a8de6dbe85184e4b7", "3e835d8841ae7863f64e40e19477f7eb398674da6a47f09871673742531e6f4b", "4e97332c9ce444b0c2c38dd22ddc61c743eb208d916e4265a2a3b575bdccb1d3", 
"525396ee324ee2da82919f2ee9c9e73b012f23e7640131dd1b53a90206a0f09c", "52b07fbc32032c21ad4ab060fec137b76eb804c4b9a1c7c7dc562549306afad2", "52ccb45e77a1085ec5461cde794e1aa037df79f473cbc69b974e73940655c8d7", "5c3fbebd7de20ce93103cb3183b47671f2885307df4a17a0ad56a1dd51273d36", "5e5851969aea17660e55f6a3be00037a25b96a9b44d2083651812c99d53b14d1", "5edfa27b2d3eefa2210fb2f5d539fbed81722b49f083b2c6566455eb7422fd7e", "7d263e5770efddf465a9e31b78362d84d015cc894ca2c131901a4445eaa61ee1", "83381342bfc22b3c8c06f2dd93a505413888694302de25add756254beee8449c", "857eebb2c1dc60e4219ec8e98dfa19553dae33608237e107db9c6078b1167856", "98e439297f78fca3a6169fd330fbe88d78b3bb72f967ad9961bcac0d7fdd1550", "bf54103892a83c64db58125b3f2a43df6d2cb2d28889f14c78519394feb41492", "d9ac82be533394d341b41d78aca7ed0e0f4ba5a2231602e2f05aa87f25c51672", "e982fe07ede9fada6ff6705af70514a52beb1b2c3d25d4e873e82114cf3c5401", "edce2ea7f3dfc981c4ddc97add8a61381d9642dc3273737e756517cc03e84dd6", "efdc45ef1afc238db84cb4963aa689c0408912a0239b0721cb172b4016eb31d6", "f137c02498f8b935892d5c0172560d7ab54bc45039de8805075e19079c639a9c", "f82e347a72f955b7017a39708a3667f106e6ad4d10b25f237396a7115d8ed5fd", "fb7c206e01ad85ce57feeaaa0bf784b97fa3cad0d4a5737bc5295785f5c613a1"] mccabe = ["ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", "dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"] more-itertools = ["38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4", "c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc", "fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"] packaging = ["0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", "9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3"] +pdoc = ["7835909580d5a14a06bd3de4416cf17f86a146ecb12eeb5cd83d9a93d03e6d27"] pluggy = ["8ddc32f03971bfdf900a81961a48ccf2fb677cf7715108f85295c67405798616", "980710797ff6a041e9a73a5787804f848996ecaa6f8a1b1e08224a5894f2074a"] psycopg2-binary = 
["19a2d1f3567b30f6c2bb3baea23f74f69d51f0c06c2e2082d0d9c28b0733a4c2", "2b69cf4b0fa2716fd977aa4e1fd39af6110eb47b2bb30b4e5a469d8fbecfc102", "2e952fa17ba48cbc2dc063ddeec37d7dc4ea0ef7db0ac1eda8906365a8543f31", "348b49dd737ff74cfb5e663e18cb069b44c64f77ec0523b5794efafbfa7df0b8", "3d72a5fdc5f00ca85160915eb9a973cf9a0ab8148f6eda40708bf672c55ac1d1", "4957452f7868f43f32c090dadb4188e9c74a4687323c87a882e943c2bd4780c3", "5138cec2ee1e53a671e11cc519505eb08aaaaf390c508f25b09605763d48de4b", "587098ca4fc46c95736459d171102336af12f0d415b3b865972a79c03f06259f", "5b79368bcdb1da4a05f931b62760bea0955ee2c81531d8e84625df2defd3f709", "5cf43807392247d9bc99737160da32d3fa619e0bfd85ba24d1c78db205f472a4", "676d1a80b1eebc0cacae8dd09b2fde24213173bf65650d22b038c5ed4039f392", "6b0211ecda389101a7d1d3df2eba0cf7ffbdd2480ca6f1d2257c7bd739e84110", "79cde4660de6f0bb523c229763bd8ad9a93ac6760b72c369cf1213955c430934", "7aba9786ac32c2a6d5fb446002ed936b47d5e1f10c466ef7e48f66eb9f9ebe3b", "7c8159352244e11bdd422226aa17651110b600d175220c451a9acf795e7414e0", "945f2eedf4fc6b2432697eb90bb98cc467de5147869e57405bfc31fa0b824741", "96b4e902cde37a7fc6ab306b3ac089a3949e6ce3d824eeca5b19dc0bedb9f6e2", "9a7bccb1212e63f309eb9fab47b6eaef796f59850f169a25695b248ca1bf681b", "a3bfcac727538ec11af304b5eccadbac952d4cca1a551a29b8fe554e3ad535dc", "b19e9f1b85c5d6136f5a0549abdc55dcbd63aba18b4f10d0d063eb65ef2c68b4", "b664011bb14ca1f2287c17185e222f2098f7b4c857961dbcf9badb28786dbbf4", "bde7959ef012b628868d69c474ec4920252656d0800835ed999ba5e4f57e3e2e", "cb095a0657d792c8de9f7c9a0452385a309dfb1bbbb3357d6b1e216353ade6ca", "d16d42a1b9772152c1fe606f679b2316551f7e1a1ce273e7f808e82a136cdb3d", "d444b1545430ffc1e7a24ce5a9be122ccd3b135a7b7e695c5862c5aff0b11159", "d93ccc7bf409ec0a23f2ac70977507e0b8a8d8c54e5ee46109af2f0ec9e411f3", "df6444f952ca849016902662e1a47abf4fa0678d75f92fd9dd27f20525f809cd", "e63850d8c52ba2b502662bf3c02603175c2397a9acc756090e444ce49508d41e", "ec43358c105794bc2b6fd34c68d27f92bea7102393c01889e93f4b6a70975728", 
"f4c6926d9c03dadce7a3b378b40d2fea912c1344ef9b29869f984fb3d2a2420b"] py = ["bf92637198836372b520efcba9e020c330123be8ce527e535d185ed4b6f45694", "e76826342cefe3c3d5f7e8ee4316b80d1dd8a300781612ddbc765c17ba25a6c6"] diff --git a/pyproject.toml b/pyproject.toml index b71128f..d0ce8ba 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,6 +22,7 @@ psycopg2-binary = "2.7.7" sqlalchemy = "1.3.0b2" [tool.poetry.dev-dependencies] +pdoc = "^0.3.2" pylint = "^2.2" coverage = "^4.5" pytest-cov = "^2.6" @@ -29,10 +30,11 @@ pytest-xdist = "^1.26" pytest-sugar = "^0.9.2" pytest-instafail = "^0.4.0" +[tool.flit.metadata.urls] +Company = "https://geographica.gs" +Repository = "https://github.com/GeographicaGS/Longitude" + [build-system] requires = ["poetry>=0.12"] build-backend = "poetry.masonry.api" -[tool.flit.metadata.urls] -Company = "https://geographica.gs" -Repository = "https://github.com/GeographicaGS/Longitude" \ No newline at end of file From ad0feb7b7622f6650ac8005e5b965c7daca50d6e Mon Sep 17 00:00:00 2001 From: Dani Ramirez Date: Tue, 5 Feb 2019 14:11:09 +0100 Subject: [PATCH 47/47] Config reworked as in ch31846. EnvironmentConfiguration is now a domain class that exposes a single get(key) method. It will parse environment variables in the form LONGITUDE__PARENT_OBJECT__CHILD_OBJECT__VALUE=42 as {'parent_object': {'child_object': {'value': 42 } } It also allows to recover the values using nested keys ('.' is the joiner): Config.get('parent_object.child_object.value') returns 42 (as integer). Also, if a value can be parsed as integer, it will be parsed. 
--- longitude/core/common/config.py | 86 ++++++++++++++----- .../{test_config.py => test_configurable.py} | 0 ...st_environment_configuration_dictionary.py | 52 ++++++----- longitude/samples/mixed_datasources.py | 32 ++----- 4 files changed, 100 insertions(+), 70 deletions(-) rename longitude/core/tests/{test_config.py => test_configurable.py} (100%) diff --git a/longitude/core/common/config.py b/longitude/core/common/config.py index 6a21a19..d7d2e88 100644 --- a/longitude/core/common/config.py +++ b/longitude/core/common/config.py @@ -5,36 +5,78 @@ class EnvironmentConfiguration: + prefix = 'LONGITUDE' + separator = '__' + config = None - def __init__(self, d): - self._original_config = d - self._parsed_config = dict(d) + @classmethod + def _load_environment_variables(cls): + """ + It loads environment variables into the internal dictionary. + + Load is done by grouping and nesting environment variables following this convention: + 1. Only variables starting with the prefix are taken (i.e. LONGITUDE) + 2. For each separator used, a new nested object is created inside its parent (i.e. SEPARATOR is '__') + 3. The prefix indicates the root object (i.e. LONGITUDE__ is the default root dictionary) + + :return: None + """ + cls.config = {} + for v in [k for k in os.environ.keys() if k.startswith(cls.prefix)]: + value_path = v.split(cls.separator)[1:] + cls._append_value(os.environ.get(v), value_path, cls.config) - self._parse_env_vars_references(self._parsed_config) + @classmethod + def get(cls, key=None): + """ + Returns a nested config value from the configuration. It allows getting values as a series of joined keys using + dot ('.') as separator. This will search for keys in nested dictionaries until a final value is found. - def __getitem__(self, key): - return self._parsed_config[key] + :param key: String in the form of 'parent.child.value...'. It must replicate the configuration nested structure. + :return: It returns an integer, a string or a nested dictionary. 
If none of these is found, it returns None. + """ + + # We do a lazy load in the first access + if cls.config is None: + cls._load_environment_variables() + + if key is not None: + return cls._get_nested_key(key, cls.config) + else: + return cls.config @staticmethod - def _parse_env_vars_references(dictionary): + def _get_nested_key(key, d): """ - Modifies a dictionary like this: - * Recursively - * If a value is a string starting with '=', it gets substituted by the corresponding environment variable - :param dictionary: Dictionary that will be modified. - :return: Nothing + + :param key: + :param d: + :return: """ + key_path = key.split('.') + root_key = key_path[0] - for k in dictionary.keys(): - if isinstance(dictionary[k], dict): - EnvironmentConfiguration._parse_env_vars_references(dictionary[k]) - elif isinstance(dictionary[k], str) and dictionary[k].startswith('='): - env_var = dictionary[k][1:] # We remove the '=' - value = os.environ.get(env_var) - if value: - dictionary[k] = value - else: - dictionary[k] += ' [NOT FOUND]' + if root_key in d.keys(): + if len(key_path) == 1: + return d[root_key] # If a single node is in the path, it is the final one + # If there are more than one nodes left, keep digging... 
+ return EnvironmentConfiguration._get_nested_key('.'.join(key_path[1:]), d[root_key]) + else: + return None # Nested key was not found in the config + + @staticmethod + def _append_value(value, value_path, d): + root_path = value_path[0].lower() + if len(value_path) == 1: + + try: + d[root_path] = int(value) + except ValueError: + d[root_path] = value + else: + if root_path not in d.keys(): + d[root_path] = {} + EnvironmentConfiguration._append_value(value, value_path[1:], d[root_path]) class LongitudeConfigurable: diff --git a/longitude/core/tests/test_config.py b/longitude/core/tests/test_configurable.py similarity index 100% rename from longitude/core/tests/test_config.py rename to longitude/core/tests/test_configurable.py diff --git a/longitude/core/tests/test_environment_configuration_dictionary.py b/longitude/core/tests/test_environment_configuration_dictionary.py index b51da8f..29f0fd7 100644 --- a/longitude/core/tests/test_environment_configuration_dictionary.py +++ b/longitude/core/tests/test_environment_configuration_dictionary.py @@ -1,32 +1,38 @@ from unittest import TestCase, mock -from longitude.core.common.config import EnvironmentConfiguration +from longitude.core.common.config import EnvironmentConfiguration as Config fake_environment = { - 'PATATUELA_RULES': 'my_root_value' + 'LONGITUDE__PARENT__CHILD__VALUE_A': '42', + 'LONGITUDE__PARENT__CHILD__VALUE_B': 'wut', + 'LONGITUDE__VALUE_A': '8008' } +@mock.patch.dict('longitude.core.common.config.os.environ', fake_environment) class TestConfigurationDictionary(TestCase): - @mock.patch.dict('longitude.core.common.config.os.environ', fake_environment) - def test_base(self): - d = EnvironmentConfiguration({ - 'root_patatuela': '=PATATUELA_RULES', - 'patata': 'patata value', - 'potato': 'potato value', - 'potatoes': [ - 'potato A', 'poteito B' - ], - 'potato_sack': { - 'colour': 'meh', - 'taste': 'buah', - 'texture': { - 'external': 'oh no', - 'internal': 'omg', - 'bumpiness': '=SOME_VALUE_FOR_BUMPINESS' 
- } - } - }) + def test_existing_values_return_strings_or_integers(self): + self.assertEqual(42, Config.get('parent.child.value_a')) + self.assertEqual('wut', Config.get('parent.child.value_b')) + self.assertEqual(8008, Config.get('value_a')) - self.assertEqual('my_root_value', d['root_patatuela']) - self.assertEqual('=SOME_VALUE_FOR_BUMPINESS [NOT FOUND]', d['potato_sack']['texture']['bumpiness']) + def test_non_existing_values_return_none(self): + self.assertEqual(None, Config.get('wrong_value')) + self.assertEqual(None, Config.get('wrong_parent.child.value')) + self.assertEqual(None, Config.get('parent.wrong_child.value')) + self.assertEqual(None, Config.get('parent.child.wrong_value')) + self.assertEqual(None, Config.get('parent.wrong_child')) + + def test_existing_nested_values_return_dictionaries(self): + fake_config = { + 'parent': + {'child': + { + 'value_a': 42, + 'value_b': 'wut' + } + }, + 'value_a': 8008 + } + self.assertEqual(fake_config, Config.get()) + self.assertEqual(fake_config['parent']['child'], Config.get('parent.child')) diff --git a/longitude/samples/mixed_datasources.py b/longitude/samples/mixed_datasources.py index 6dfd402..af31b36 100644 --- a/longitude/samples/mixed_datasources.py +++ b/longitude/samples/mixed_datasources.py @@ -28,7 +28,7 @@ from longitude.core.caches.redis import RedisCache from longitude.core.data_sources.postgres.default import DefaultPostgresDataSource from longitude.core.data_sources.carto import CartoDataSource -from longitude.core.common.config import EnvironmentConfiguration +from longitude.core.common.config import EnvironmentConfiguration as Config def import_table_values_from_carto(limit): @@ -59,33 +59,15 @@ def import_table_values_from_carto(limit): params=params, needs_commit=True) + res = postgres.query('select * from county_population') + print(res.rows) + if __name__ == "__main__": - # This is the global config object - # We are going to retrieve some values from a table in Carto, create a local table 
and copy the values - # doing simple inserts (to show how to do queries) - - config = EnvironmentConfiguration({ - 'carto_main': { - 'api_key': "=CARTO_API_KEY", - 'user': "=CARTO_USER", - - 'cache': { - 'password': '=REDIS_PASSWORD' - } - }, - 'postgres_main': { - 'host': "=POSTGRES_HOST", - 'port': "=POSTGRES_PORT", - 'db': "=POSTGRES_DB", - 'user': "=POSTGRES_USER", - 'password': "=POSTGRES_PASS" - } - }) - - carto = CartoDataSource(config['carto_main'], cache_class=RedisCache) - postgres = DefaultPostgresDataSource(config['postgres_main']) + print('REDIS password is %s' % Config.get('carto_main.cache.password')) + carto = CartoDataSource(Config.get('carto_main'), cache_class=RedisCache) + postgres = DefaultPostgresDataSource(Config.get('postgres_main')) carto.setup() postgres.setup()