From d6df6e9a2bb33a245b1868f44c066394004617ff Mon Sep 17 00:00:00 2001 From: Frank Harkins Date: Mon, 28 Aug 2023 17:32:43 +0100 Subject: [PATCH] [Refactor] Remove TinyDB (#468) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Summary Removes TinyDB in favour of reading/writing from TOML files directly. --- docs/project_overview.md | 2 +- ecosystem/daos/__init__.py | 2 +- ecosystem/daos/dao.py | 257 ++++++++++++++++++++++++++++ ecosystem/daos/jsondao.py | 284 ------------------------------- ecosystem/manager.py | 42 ++--- ecosystem/models/test_results.py | 2 +- requirements.txt | 1 - tests/daos/test_dao.py | 87 +++++----- tests/test_manager.py | 8 +- tests/test_members_file.py | 4 +- 10 files changed, 322 insertions(+), 367 deletions(-) create mode 100644 ecosystem/daos/dao.py delete mode 100644 ecosystem/daos/jsondao.py diff --git a/docs/project_overview.md b/docs/project_overview.md index d31920e5bc..e688409ba2 100644 --- a/docs/project_overview.md +++ b/docs/project_overview.md @@ -107,7 +107,7 @@ We store each member of the ecosystem as a TOML file under [`ecosystem/resources/members`](https://github.com/qiskit-community/ecosystem/blob/main/ecosystem/resources/members); these are the files you should edit when adding / updating members to the ecosystem. Access to this file is handled through the -[`JsonDAO`](https://github.com/qiskit-community/ecosystem/blob/main/ecosystem/daos/jsondao.py) +[`DAO`](https://github.com/qiskit-community/ecosystem/blob/main/ecosystem/daos/dao.py) class. The qiskit.org page pulls information from the compiled diff --git a/ecosystem/daos/__init__.py b/ecosystem/daos/__init__.py index 6fd131b443..606e2aa81e 100644 --- a/ecosystem/daos/__init__.py +++ b/ecosystem/daos/__init__.py @@ -1,3 +1,3 @@ """DAOs for ecosystem.""" -from .jsondao import JsonDAO +from .dao import DAO diff --git a/ecosystem/daos/dao.py b/ecosystem/daos/dao.py new file mode 100644 index 0000000000..0d3656a40a --- /dev/null +++ b/ecosystem/daos/dao.py @@ -0,0 +1,257 @@ +""" +DAO for json db. + +File structure: + + root_path + ├── members.json # compiled file; don't edit manually + └── members +    └── repo-name.toml +""" +import json +from pathlib import Path +import shutil +import toml + +from ecosystem.models import TestResult, StyleResult, CoverageResult, TestType +from ecosystem.models.repository import Repository + + +class TomlStorage: + """ + Read / write TOML files from a dict where keys are repo URLs, and values + are Repository objects. 
+ + Can use as a context manager like so: + + with TomlStorage() as data: # Data is read from TOML files + data[url] = new_repo # Mutate the data + # Changes are saved on exit + """ + + def __init__(self, root_path: str): + self.toml_dir = Path(root_path, "members") + self._data = None # for use with context manager + + def _url_to_path(self, url): + repo_name = url.strip("/").split("/")[-1] + return self.toml_dir / f"{repo_name}.toml" + + def read(self) -> dict: + """ + Search for TOML files and read into dict with types: + { url (str): repo (Repository) } + """ + data = {} + for path in self.toml_dir.glob("*"): + repo = Repository.from_dict(toml.load(path)) + data[repo.url] = repo + return data + + def write(self, data: dict): + """ + Dump everything to TOML files from dict of types + { key (any): repo (Repository) } + """ + # Erase existing TOML files + # (we erase everything to clean up any deleted repos) + if self.toml_dir.exists(): + shutil.rmtree(self.toml_dir) + + # Write to human-readable TOML + self.toml_dir.mkdir() + for repo in data.values(): + with open(self._url_to_path(repo.url), "w") as file: + toml.dump(repo.to_dict(), file) + + def __enter__(self) -> dict: + self._data = self.read() + return self._data + + def __exit__(self, _type, _value, exception): + if exception is not None: + raise exception + self.write(self._data) + + +class DAO: + """ + Data access object for repository database. + """ + + def __init__(self, path: str): + """ + Args: + path: path to store database in + """ + self.storage = TomlStorage(path) + self.labels_json_path = Path(path, "labels.json") + self.compiled_json_path = Path(path, "members.json") + + def write(self, repo: Repository): + """ + Update or insert repo (identified by URL). + """ + self.update_labels(repo.labels) + with self.storage as data: + data[repo.url] = repo + + def get_repos_by_tier(self, tier: str) -> list[Repository]: + """ + Returns all repositories in specified tier. + + Args: + tier: tier of the repo (MAIN, COMMUNITY, ...) + """ + matches = [repo for repo in self.storage.read().values() if repo.tier == tier] + return matches + + def delete(self, repo_url: str): + """Deletes repository from tier. + + Args: + repo_url: repository url + """ + with self.storage as data: + del data[repo_url] + + def get_by_url(self, url: str) -> Repository: + """ + Returns repository by URL. + """ + data = self.storage.read() + if url not in data: + raise KeyError(f"No repo with URL '{url}'") + return self.storage.read()[url] + + def update(self, repo_url: str, **kwargs): + """ + Update attributes of repository. + + Args: + repo_url (str): URL of repo + kwargs: Names of attributes and new values + + Example usage: + update("github.com/qiskit/qiskit, name="qiskit", stars=300) + """ + with self.storage as data: + for arg, value in kwargs.items(): + data[repo_url].__dict__[arg] = value + + def update_labels(self, labels: list[str]): + """ + Updates labels file for consumption by qiskit.org. 
+ """ + with open(self.labels_json_path, "r") as labels_file: + existing_labels = { + label["name"]: label["description"] for label in json.load(labels_file) + } + + merged = {**{l: "" for l in labels}, **existing_labels} + new_label_list = [ + {"name": name, "description": dsc} for name, dsc in merged.items() + ] + with open(self.labels_json_path, "w") as labels_file: + json.dump( + sorted(new_label_list, key=lambda x: x["name"]), labels_file, indent=4 + ) + + def compile_json(self): + """ + Dump database to JSON file for consumption by qiskit.org + Needs this structure: + + { tier: { # e.g. Main, Community + index: repo # `repo` is data from repo-name.toml + }} + + """ + data = self.storage.read() + + out = {} + for repo in data.values(): + if repo.tier not in out: + out[repo.tier] = {} + index = str(len(out[repo.tier])) + out[repo.tier][index] = repo.to_dict() + + with open(self.compiled_json_path, "w") as file: + json.dump(out, file, indent=4) + + def add_repo_test_result(self, repo_url: str, test_result: TestResult): + """ + Adds test result to repository. + Overwrites the latest test results and adds to historical test results. + + Args: + repo_url: url of the repo + test_result: TestResult from the tox -epy3.x + """ + repo = self.get_by_url(repo_url) + + # add new result and remove old from list + new_test_results = [ + tr for tr in repo.tests_results if tr.test_type != test_result.test_type + ] + [test_result] + + # add last working version + if test_result.test_type == TestType.STABLE_COMPATIBLE and test_result.passed: + last_stable_test_result = TestResult( + passed=True, + test_type=TestType.LAST_WORKING_VERSION, + package=test_result.package, + package_version=test_result.package_version, + logs_link=test_result.logs_link, + ) + new_test_results_with_latest = [ + tr + for tr in new_test_results + if tr.test_type != last_stable_test_result.test_type + ] + [last_stable_test_result] + new_test_results = new_test_results_with_latest + + repo.tests_results = sorted(new_test_results, key=lambda r: r.test_type) + + new_historical_test_results = [ + tr + for tr in repo.historical_test_results + if tr.test_type != test_result.test_type + or tr.qiskit_version != test_result.qiskit_version + ] + [test_result] + repo.historical_test_results = new_historical_test_results + self.write(repo) + + def add_repo_style_result(self, repo_url: str, style_result: StyleResult): + """ + Adds style result for repository. + + Args: + repo_url: url of the repo + style_result: StyleResult from the tox -elint + """ + repo = self.get_by_url(repo_url) + + new_style_results = [ + tr for tr in repo.styles_results if tr.style_type != style_result.style_type + ] + [style_result] + repo.styles_results = new_style_results + self.write(repo) + + def add_repo_coverage_result(self, repo_url: str, coverage_result: CoverageResult): + """ + Adds coverage result for repository. 
+ + Args: + repo_url: url of the repo + coverage_result: CoverageResult from the tox -ecoverage + """ + repo = self.get_by_url(repo_url) + + new_coverage_results = [ + tr + for tr in repo.coverages_results + if tr.coverage_type != coverage_result.coverage_type + ] + [coverage_result] + repo.coverages_results = new_coverage_results + self.write(repo) diff --git a/ecosystem/daos/jsondao.py b/ecosystem/daos/jsondao.py deleted file mode 100644 index 343e2bc104..0000000000 --- a/ecosystem/daos/jsondao.py +++ /dev/null @@ -1,284 +0,0 @@ -"""DAO for json db.""" -from typing import Optional, List -import os -import json -from pathlib import Path -import shutil -import toml - -from tinydb import TinyDB, Query - -from ecosystem.models import TestResult, StyleResult, CoverageResult, TestType -from ecosystem.models.repository import Repository - - -class EcosystemStorage: - """ - Tell TinyDB how to read/write from files - (see https://tinydb.readthedocs.io/en/latest/extend.html) - - File structure: - - root_path - ├── members.json # compiled file; don't edit manually - └── members -    └── repo-name.toml - - Database structure: - - { tier: { # e.g. Main, Community - index: repo # `repo` is data from repo-name.toml - }} - - """ - - def __init__(self, root_path): - self.toml_dir = Path(root_path, "members") - - def _url_to_path(self, url): - repo_name = url.strip("/").split("/")[-1] - return self.toml_dir / f"{repo_name}.toml" - - def read(self): - """ - Search for TOML files and add to DB - """ - if not self.toml_dir.is_dir(): - # For TinyDB initialization - return None - - data = {} - for path in self.toml_dir.glob("*"): - repo = toml.load(path) - tier = repo["tier"] - if tier not in data: - data[tier] = {} - index = len(data[tier].keys()) - data[tier][index] = repo - - return data - - def write(self, data): - """ - Write TOML files - """ - # Erase existing TOML files - # (we erase everything to clean up any deleted repos) - if self.toml_dir.exists(): - shutil.rmtree(self.toml_dir) - - # Write to human-readable TOML - self.toml_dir.mkdir() - for _, repos in data.items(): - for repo in repos.values(): - with open(self._url_to_path(repo["url"]), "w") as file: - toml.dump(repo, file) - - -class JsonDAO: - """JsonDAO for repo database.""" - - def __init__(self, path: str): - """JsonDAO for repository database. - - Args: - path: path to store database in - """ - self.path = path - self.database = TinyDB(self.path, storage=EcosystemStorage) - self.labels_json_path = os.path.join(self.path, "labels.json") - - def compile_json(self): - """ - Dump database to JSON file for consumption by qiskit.org - """ - # pylint: disable=protected-access - data = self.database._storage.read() - - compiled_json_path = Path(self.path, "members.json") - with open(compiled_json_path, "w") as file: - json.dump(data, file, indent=4) - - def insert(self, repo: Repository) -> int: - """Inserts repository into database. - - Args: - repo: Repository - - Return: int - """ - table = self.database.table(repo.tier) - self.update_labels(repo.labels) - return table.insert(repo.to_dict()) - - def get_repos_by_tier(self, tier: str) -> List[Repository]: - """Returns all repositories in specified tier. - - Args: - tier: tier of the repo (MAIN, COMMUNITY, ...) - - Return: Repository - """ - table = self.database.table(tier) - return [Repository.from_dict(r) for r in table.all()] - - def delete(self, repo_url: str, tier: str) -> List[int]: - """Deletes repository from tier. 
- - Args: - repo_url: repository url - tier: tier - - Return: List of int - """ - table = self.database.table(tier) - return table.remove(Query().url == repo_url) - - def get_by_url(self, url: str, tier: str) -> Optional[Repository]: - """Returns repository by URL. - - Args: - tier: tier of the repo (MAIN, COMMUNITY, ...) - - Return: Repository - """ - res = self.database.table(tier).get(Query().url == url) - return Repository.from_dict(res) if res else None - - def update_stars(self, url: str, tier: str, stars: int) -> List[int]: - """Updates repo with github stars.""" - table = self.database.table(tier) - return table.update({"stars": stars}, Query().url == url) - - def update_labels(self, labels: List[str]) -> List[int]: - """Updates labels db.""" - with open(self.labels_json_path, "r") as labels_file: - label_dscs = { - label["name"]: label["description"] for label in json.load(labels_file) - } - - merged = {**{l: "" for l in labels}, **label_dscs} - new_label_list = [ - {"name": name, "description": dsc} for name, dsc in merged.items() - ] - with open(self.labels_json_path, "w") as labels_file: - json.dump( - sorted(new_label_list, key=lambda x: x["name"]), labels_file, indent=4 - ) - - def add_repo_test_result( - self, repo_url: str, tier: str, test_result: TestResult - ) -> Optional[List[int]]: - """Adds test result for repository. - Overwrites the latest test results and adds to historical test results. - - Args: - repo_url: url of the repo - tier: tier of the repo (MAIN, COMMUNITY, ...) - test_result: TestResult from the tox -epy3.x - - Return: List of int - """ - table = self.database.table(tier) - repository = Query() - - fetched_repo_json = table.get(repository.url == repo_url) - if fetched_repo_json is not None: - repo = Repository.from_dict(fetched_repo_json) - - # add new result and remove old from list - new_test_results = [ - tr for tr in repo.tests_results if tr.test_type != test_result.test_type - ] + [test_result] - - # add last working version - if ( - test_result.test_type == TestType.STABLE_COMPATIBLE - and test_result.passed - ): - last_stable_test_result = TestResult( - passed=True, - test_type=TestType.LAST_WORKING_VERSION, - package=test_result.package, - package_version=test_result.package_version, - logs_link=test_result.logs_link, - ) - new_test_results_with_latest = [ - tr - for tr in new_test_results - if tr.test_type != last_stable_test_result.test_type - ] + [last_stable_test_result] - new_test_results = new_test_results_with_latest - - repo.tests_results = sorted(new_test_results, key=lambda r: r.test_type) - - new_historical_est_results = [ - tr - for tr in repo.historical_test_results - if tr.test_type != test_result.test_type - or tr.qiskit_version != test_result.qiskit_version - ] + [test_result] - repo.historical_test_results = new_historical_est_results - - return table.upsert(repo.to_dict(), repository.url == repo_url) - return None - - def add_repo_style_result( - self, repo_url: str, tier: str, style_result: StyleResult - ) -> Optional[List[int]]: - """Adds style result for repository. - - Args: - repo_url: url of the repo - tier: tier of the repo (MAIN, COMMUNITY, ...) 
- style_result: StyleResult from the tox -elint - - Return: List of int - """ - table = self.database.table(tier) - repository = Query() - - fetched_repo_json = table.get(repository.url == repo_url) - if fetched_repo_json is not None: - fetched_repo = Repository.from_dict(fetched_repo_json) - fetched_style_results = fetched_repo.styles_results - - new_style_results = [ - tr - for tr in fetched_style_results - if tr.style_type != style_result.style_type - ] + [style_result] - fetched_repo.styles_results = new_style_results - - return table.upsert(fetched_repo.to_dict(), repository.url == repo_url) - return None - - def add_repo_coverage_result( - self, repo_url: str, tier: str, coverage_result: CoverageResult - ) -> Optional[List[int]]: - """Adds style result for repository. - - Args: - repo_url: url of the repo - tier: tier of the repo (MAIN, COMMUNITY, ...) - coverage_result: CoverageResult from the tox -ecoverage - - Return: List of int - """ - table = self.database.table(tier) - repository = Query() - - fetched_repo_json = table.get(repository.url == repo_url) - if fetched_repo_json is not None: - fetched_repo = Repository.from_dict(fetched_repo_json) - fetched_coverage_results = fetched_repo.coverages_results - - new_coverage_results = [ - tr - for tr in fetched_coverage_results - if tr.coverage_type != coverage_result.coverage_type - ] + [coverage_result] - fetched_repo.coverages_results = new_coverage_results - - return table.upsert(fetched_repo.to_dict(), repository.url == repo_url) - return None diff --git a/ecosystem/manager.py b/ecosystem/manager.py index 8b6329783a..42691648f6 100644 --- a/ecosystem/manager.py +++ b/ecosystem/manager.py @@ -9,7 +9,7 @@ import requests from jinja2 import Environment, PackageLoader, select_autoescape -from ecosystem.daos import JsonDAO +from ecosystem.daos import DAO from ecosystem.models import TestResult, Tier, TestType from ecosystem.models.repository import Repository from ecosystem.models.test_results import StyleResult, CoverageResult, Package @@ -45,7 +45,7 @@ def __init__(self, root_path: Optional[str] = None): ) self.pylintrc_template = self.env.get_template(".pylintrc") self.coveragerc_template = self.env.get_template(".coveragerc") - self.dao = JsonDAO(path=self.resources_dir) + self.dao = DAO(path=self.resources_dir) self.logger = logger def recompile(self): @@ -157,7 +157,7 @@ def update_stars(self): json_data = json.loads(response.text) stars = json_data.get("stargazers_count") - self.dao.update_stars(project.url, tier, stars) + self.dao.update(project.url, stars=stars) self.logger.info("Updating star count for %s: %d", project.url, stars) @staticmethod @@ -214,9 +214,6 @@ def add_repo_2db( repo_labels: comma separated labels repo_tier: tier for repository repo_website: link to project website - - Returns: - JsonDAO: Integer """ new_repo = Repository( @@ -231,7 +228,7 @@ def add_repo_2db( tier=repo_tier or Tier.COMMUNITY, website=repo_website, ) - self.dao.insert(new_repo) + self.dao.write(new_repo) def _save_temp_test_result( self, @@ -282,7 +279,6 @@ def process_temp_test_results_files(self, folder_name: str) -> None: with open(path, "r") as json_temp_file: json_temp_file_data = json.load(json_temp_file) repo_url = json_temp_file_data.get("repo_url") - repo_tier = json_temp_file_data.get("tier") test_type = json_temp_file_data.get("type") test_result = json_temp_file_data.get("test_result") self.logger.info( @@ -293,19 +289,15 @@ def process_temp_test_results_files(self, folder_name: str) -> None: res = None if test_type == 
"TestResult": tres = TestResult.from_dict(test_result) - res = self.dao.add_repo_test_result( - repo_url=repo_url, tier=repo_tier, test_result=tres - ) + self.dao.add_repo_test_result(repo_url=repo_url, test_result=tres) elif test_type == "CoverageResult": cres = CoverageResult.from_dict(test_result) - res = self.dao.add_repo_coverage_result( - repo_url=repo_url, tier=repo_tier, coverage_result=cres + self.dao.add_repo_coverage_result( + repo_url=repo_url, coverage_result=cres ) elif test_type == "StyleResult": sres = StyleResult.from_dict(test_result) - res = self.dao.add_repo_style_result( - repo_url=repo_url, tier=repo_tier, style_result=sres - ) + self.dao.add_repo_style_result(repo_url=repo_url, style_result=sres) else: raise NotImplementedError( "Test type {} is not supported".format(test_type) @@ -353,7 +345,7 @@ def _run_python_tests( """ ecosystem_deps = ecosystem_deps or [] ecosystem_additional_commands = ecosystem_additional_commands or [] - repository = self.dao.get_by_url(repo_url, tier=tier) + repository = self.dao.get_by_url(repo_url) repo_configuration = ( repository.configuration if repository is not None else None ) @@ -428,7 +420,7 @@ def python_styles_check( output: log PASS We want to give the result of the test to the GitHub action """ - repository = self.dao.get_by_url(repo_url, tier=tier) + repository = self.dao.get_by_url(repo_url) repo_configuration = ( repository.configuration if repository is not None else None ) @@ -475,7 +467,7 @@ def python_coverage( output: log PASS We want to give the result of the test to the GitHub action """ - repository = self.dao.get_by_url(repo_url, tier=tier) + repository = self.dao.get_by_url(repo_url) repo_configuration = ( repository.configuration if repository is not None else None ) @@ -668,12 +660,12 @@ def fetch_and_update_main_tests_results(self): package_version=qiskit_version, test_type=test_type, ) - result = self.dao.add_repo_test_result( - repo_url=repo_to_url_mapping.get(repo), - tier=Tier.MAIN, - test_result=test_result, - ) - if result is None: + try: + self.dao.add_repo_test_result( + repo_url=repo_to_url_mapping.get(repo), + test_result=test_result, + ) + except KeyError: self.logger.warning( "Test result was not saved. 
There is not repo for url %s", repo, diff --git a/ecosystem/models/test_results.py b/ecosystem/models/test_results.py index 6e74f9d67c..958ba127d9 100644 --- a/ecosystem/models/test_results.py +++ b/ecosystem/models/test_results.py @@ -46,7 +46,7 @@ class TestResult(JsonSerializable): def __post_init__(self): self.qiskit_version = self.package_version - self.timestamp = datetime.datetime.now().timestamp() + self.timestamp = self.timestamp or datetime.datetime.now().timestamp() @classmethod def from_dict(cls, dictionary: dict): diff --git a/requirements.txt b/requirements.txt index 2f4ff2a04b..2958efd84f 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,4 @@ fire==0.4.0 -tinydb==4.5.1 Jinja2==3.0.1 requests==2.31.0 coloredlogs==15.0.1 diff --git a/tests/daos/test_dao.py b/tests/daos/test_dao.py index 9a7d7b5ff1..fc6440e086 100644 --- a/tests/daos/test_dao.py +++ b/tests/daos/test_dao.py @@ -5,7 +5,7 @@ from unittest import TestCase from pathlib import Path -from ecosystem.daos import JsonDAO +from ecosystem.daos import DAO from ecosystem.models import TestResult, TestType, Tier from ecosystem.models.repository import Repository from ecosystem.models.test_results import Package @@ -24,7 +24,7 @@ def get_main_repo() -> Repository: ) -class TestJsonDao(TestCase): +class TestDao(TestCase): """Tests repository related functions.""" def setUp(self) -> None: @@ -41,7 +41,7 @@ def tearDown(self) -> None: def _delete_members_json(self): """Deletes database file. - Function: JsonDao + Function: Dao -> delete """ if os.path.exists(self.members_path): @@ -53,7 +53,7 @@ def _delete_members_json(self): def _delete_labels_json(self): """Deletes labels file. - Function: JsonDao + Function: Dao -> delete """ if os.path.exists(self.labels_path): @@ -65,14 +65,14 @@ def test_start_update(self): """Test update start for repo.""" self._delete_members_json() main_repo = get_main_repo() - dao = JsonDAO(self.path) - dao.insert(main_repo) + dao = DAO(self.path) + dao.write(main_repo) - repo_from_db = dao.get_by_url(main_repo.url, main_repo.tier) + repo_from_db = dao.get_by_url(main_repo.url) self.assertIsNone(repo_from_db.stars) - dao.update_stars(main_repo.url, main_repo.tier, 42) - repo_from_db = dao.get_by_url(main_repo.url, main_repo.tier) + dao.update(main_repo.url, stars=42) + repo_from_db = dao.get_by_url(main_repo.url) self.assertEqual(repo_from_db.stars, 42) def test_repository_insert_and_delete(self): @@ -80,29 +80,28 @@ def test_repository_insert_and_delete(self): self._delete_members_json() main_repo = get_main_repo() - dao = JsonDAO(self.path) + dao = DAO(self.path) # insert entry - dao.insert(main_repo) + dao.write(main_repo) fetched_repo = dao.get_repos_by_tier(Tier.MAIN)[0] self.assertEqual(main_repo, fetched_repo) self.assertEqual(main_repo.labels, fetched_repo.labels) self.assertEqual(len(fetched_repo.tests_results), 0) # delete entry - dao.delete(repo_url=main_repo.url, tier=main_repo.tier) + dao.delete(repo_url=main_repo.url) self.assertEqual([], dao.get_repos_by_tier(Tier.MAIN)) def test_latest_results(self): """Tests append of latest passed test results.""" self._delete_members_json() - dao = JsonDAO(self.path) + dao = DAO(self.path) main_repo = get_main_repo() - dao.insert(main_repo) + dao.write(main_repo) dao.add_repo_test_result( repo_url=main_repo.url, - tier=main_repo.tier, test_result=TestResult( passed=True, test_type=TestType.STANDARD, @@ -110,12 +109,11 @@ def test_latest_results(self): package_version="0.18.1", ), ) - recovered_repo = dao.get_by_url(main_repo.url, 
tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual(len(recovered_repo.tests_results), 1) dao.add_repo_test_result( repo_url=main_repo.url, - tier=main_repo.tier, test_result=TestResult( passed=True, test_type=TestType.DEV_COMPATIBLE, @@ -123,12 +121,11 @@ def test_latest_results(self): package_version="0.18.1", ), ) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual(len(recovered_repo.tests_results), 2) dao.add_repo_test_result( repo_url=main_repo.url, - tier=main_repo.tier, test_result=TestResult( passed=False, test_type=TestType.STABLE_COMPATIBLE, @@ -136,13 +133,12 @@ def test_latest_results(self): package_version="0.18.1", ), ) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual(len(recovered_repo.tests_results), 3) # here latest passed should be added dao.add_repo_test_result( repo_url=main_repo.url, - tier=main_repo.tier, test_result=TestResult( passed=True, test_type=TestType.STABLE_COMPATIBLE, @@ -150,7 +146,7 @@ def test_latest_results(self): package_version="0.18.1", ), ) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual(len(recovered_repo.tests_results), 4) self.assertIn( TestResult( @@ -165,7 +161,6 @@ def test_latest_results(self): # here we check that last passed is updated dao.add_repo_test_result( repo_url=main_repo.url, - tier=main_repo.tier, test_result=TestResult( passed=True, test_type=TestType.STABLE_COMPATIBLE, @@ -173,7 +168,7 @@ def test_latest_results(self): package_version="0.20.0", ), ) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual(len(recovered_repo.tests_results), 4) self.assertIn( TestResult( @@ -187,17 +182,16 @@ def test_latest_results(self): def test_add_test_result(self): """Tests adding result to repo. 
- JsonDao + Dao -> add_repo_test_result """ self._delete_members_json() - dao = JsonDAO(self.path) + dao = DAO(self.path) main_repo = get_main_repo() - dao.insert(main_repo) - res = dao.add_repo_test_result( + dao.write(main_repo) + dao.add_repo_test_result( main_repo.url, - main_repo.tier, TestResult( passed=False, test_type=TestType.DEV_COMPATIBLE, @@ -205,7 +199,6 @@ def test_add_test_result(self): package_version="0.18.1", ), ) - self.assertEqual(len(res), 1) self.assertLabelsFile( [ {"description": "description for label 1", "name": "label 1"}, @@ -217,7 +210,7 @@ def test_add_test_result(self): ] ) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual( recovered_repo.tests_results, [ @@ -241,9 +234,8 @@ def test_add_test_result(self): ], ) - res = dao.add_repo_test_result( + dao.add_repo_test_result( main_repo.url, - main_repo.tier, TestResult( passed=True, test_type=TestType.DEV_COMPATIBLE, @@ -251,10 +243,8 @@ def test_add_test_result(self): package_version="0.18.2", ), ) - self.assertEqual(len(res), 1) - res = dao.add_repo_test_result( + dao.add_repo_test_result( main_repo.url, - main_repo.tier, TestResult( passed=False, test_type=TestType.STANDARD, @@ -262,8 +252,7 @@ def test_add_test_result(self): package_version="0.18.2", ), ) - self.assertEqual(len(res), 1) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) self.assertEqual( recovered_repo.tests_results, [ @@ -308,13 +297,12 @@ def test_add_test_result(self): def test_add_test_result_order(self): """Test order of test results.""" self._delete_members_json() - dao = JsonDAO(self.path) + dao = DAO(self.path) main_repo = get_main_repo() - dao.insert(main_repo) + dao.write(main_repo) dao.add_repo_test_result( main_repo.url, - main_repo.tier, TestResult( passed=False, test_type=TestType.STABLE_COMPATIBLE, @@ -324,7 +312,6 @@ def test_add_test_result_order(self): ) dao.add_repo_test_result( main_repo.url, - main_repo.tier, TestResult( passed=False, test_type=TestType.STANDARD, @@ -334,7 +321,6 @@ def test_add_test_result_order(self): ) dao.add_repo_test_result( main_repo.url, - main_repo.tier, TestResult( passed=False, test_type=TestType.DEV_COMPATIBLE, @@ -343,7 +329,7 @@ def test_add_test_result_order(self): ), ) - recovered_repo = dao.get_by_url(main_repo.url, tier=main_repo.tier) + recovered_repo = dao.get_by_url(main_repo.url) test_results = recovered_repo.tests_results self.assertEqual(test_results[0].test_type, TestType.DEV_COMPATIBLE) self.assertEqual(test_results[1].test_type, TestType.STABLE_COMPATIBLE) @@ -353,17 +339,22 @@ def test_compile_json(self): """ Recompiles the JSON file, then checks it matches the read data. 
""" - # pylint: disable=protected-access self._delete_members_json() - dao = JsonDAO(self.path) + dao = DAO(self.path) # Dump JSON file dao.compile_json() - # Open and check it matches data + # Open and check 1:1 correspondence + repo_list = dao.storage.read().values() with open(Path(self.path, "members.json")) as file: dumped_data = json.loads(file.read()) - self.assertEqual(dao.database._storage.read(), dumped_data) + + for tier in dumped_data.values(): + for repo in tier.values(): + self.assertIn(repo_list, Repository.from_dict(repo)) + for repo in repo_list: + self.assertIn(dumped_data[repo.tier].values(), repo.to_dict()) def assertLabelsFile(self, result): # pylint: disable=invalid-name """Asserts the content of labels.json matches the result dict""" diff --git a/tests/test_manager.py b/tests/test_manager.py index 051f9aa80a..4a786b417b 100644 --- a/tests/test_manager.py +++ b/tests/test_manager.py @@ -6,7 +6,7 @@ import responses -from ecosystem.daos import JsonDAO +from ecosystem.daos import DAO from ecosystem.manager import Manager from ecosystem.models import TestResult, Tier, TestType from ecosystem.models.repository import Repository @@ -200,11 +200,11 @@ def test_update_badges(self): commu_success = get_community_repo() commu_failed = get_community_fail_repo() - dao = JsonDAO(self.path) + dao = DAO(self.path) # insert entry - dao.insert(commu_success) - dao.insert(commu_failed) + dao.write(commu_success) + dao.write(commu_failed) manager = Manager(root_path=os.path.join(self.current_dir, "..")) manager.resources_dir = "../resources" diff --git a/tests/test_members_file.py b/tests/test_members_file.py index 5c758653ae..4e39c8c68f 100644 --- a/tests/test_members_file.py +++ b/tests/test_members_file.py @@ -3,7 +3,7 @@ import os from unittest import TestCase -from ecosystem.daos import JsonDAO +from ecosystem.daos import DAO from ecosystem.models import Tier from ecosystem.models.repository import Repository @@ -15,7 +15,7 @@ def setUp(self) -> None: current_directory = os.path.dirname(os.path.abspath(__file__)) self.path = "{}/../ecosystem/resources".format(current_directory) self.members_path = "{}/members.json".format(self.path) - self.dao = JsonDAO(self.path) + self.dao = DAO(self.path) def test_members_json(self): """Tests members json file for correctness."""