From a7f18560d35539a4dbc7b12b94e6132ef88b5401 Mon Sep 17 00:00:00 2001 From: Byron Himes Date: Tue, 23 May 2023 10:04:11 +0200 Subject: [PATCH] Add handover documentation (GSI-154) (#5) * Update template files and readme * Add description and design details * Tweak description for topic wording * Added info on email templates * Move some information into description --------- Co-authored-by: TheByronHimes --- .deprecated_files | 2 + .description.md | 18 +++ .design.md | 7 + .github/workflows/check_readme.yaml | 16 ++ .gitignore | 1 + .mandatory_files | 8 +- .readme_template.md | 109 ++++++++++++++ .static_files | 5 + README.md | 150 ++++++++++++------- docs/README.md | 3 - readme_generation.md | 47 ++++++ requirements-dev-common.txt | 18 ++- scripts/update_readme.py | 217 ++++++++++++++++++++++++++++ 13 files changed, 534 insertions(+), 67 deletions(-) create mode 100644 .description.md create mode 100644 .design.md create mode 100644 .github/workflows/check_readme.yaml create mode 100644 .readme_template.md delete mode 100644 docs/README.md create mode 100644 readme_generation.md create mode 100755 scripts/update_readme.py diff --git a/.deprecated_files b/.deprecated_files index d522224..888a8e8 100644 --- a/.deprecated_files +++ b/.deprecated_files @@ -12,3 +12,5 @@ scripts/check_mandatory_and_static_files.py scripts/update_static_files.py + +docs diff --git a/.description.md b/.description.md new file mode 100644 index 0000000..a75217e --- /dev/null +++ b/.description.md @@ -0,0 +1,18 @@ +The Notification Service is a microservice intended to provide general notification capabilities. At this time, notifications are only generated via Kafka events, and they are only issued via email. +However, the architecture of the service would allow for the addition of other submission options, such as REST APIs, as well as new notification channels, such as SMS, with relatively little work. + +To send an email notification using this service, publish a kafka event conforming to the Notification event schema to the topic configured under "notification_event_topic" (see configuration details below). Because email client authentication is handled by the notification service itself, nothing beyond publishing the event is required. + + +### Typical operation + +This service doesn't have a REST API. It is fully stateless and does not require a database. +It's a straightforward service running a Kafka consumer that listens for one kind of event. +Notification events are picked up by the consumer, validated against the Notification event schema, and sent to the Notifier module. +The Notifier looks at the notification event details and determines what to do with it. +Right now, this always means sending an email. +The information is sent to the SMTP client, where a secure connection is established and the email is dispatched. + +### Email Templates + +In the configuration there are two template requirements: a plaintext email template and an HTML email template. The point of these is to produce consistently formatted emails while keeping the requirements light for microservices trying to send notifications. The templates are both used to make the email. Template variables are denoted with "$", e.g. $recipient_name, and are required to match the notification schema field names defined [here](https://github.com/ghga-de/ghga-event-schemas/blob/8e535ac271e7f27b6132505aad8cf572decc7ab4/ghga_event_schemas/pydantic_.py#L304). 
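+As a minimal illustration (this snippet is not part of the service code, and `recipient_name`/`plaintext_body` are only example variables that would have to match the schema fields linked above), the `$`-style placeholders behave like Python's standard `string.Template` substitution:
+
+```python
+from string import Template
+
+# A minimal plaintext template using "$"-style variables:
+plaintext_template = Template("Dear $recipient_name,\n\n$plaintext_body\n")
+
+# Filling in values produces the final email body:
+email_body = plaintext_template.substitute(
+    recipient_name="Jane Doe",
+    plaintext_body="Your upload was successful.",
+)
+print(email_body)
+```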
Having both HTML and plaintext means everyone should be able to receive the emails without a problem, and most of the time they should look nice. Because email clients like Outlook, Gmail, etc. have differences in the way they render HTML emails, it is recommended that styling be kept to a minimum or that a pre-made template be used where these things have been taken into account. diff --git a/.design.md b/.design.md new file mode 100644 index 0000000..7624efb --- /dev/null +++ b/.design.md @@ -0,0 +1,7 @@ +This is a Python-based service following the Triple Hexagonal Architecture pattern. +It uses protocol/provider pairs and dependency injection mechanisms provided by the +[hexkit](https://github.com/ghga-de/hexkit) library. + + +### Testing +The only notable thing about the test setup is that it uses a local test server (tests/fixtures/server.py) via [aiosmtpd](https://aiosmtpd.readthedocs.io/en/latest/), which has largely replaced the deprecated smtpd module. There is a DummyServer, which has an 'expect_email()' method that is used similarly to the [expect_events()](https://github.com/ghga-de/hexkit/blob/7382c19b84136ea5b1652087ba1da4890267b1b5/hexkit/providers/akafka/testutils.py#L368) method from hexkit's Kafka testing module. It can perform a simple authentication check so error handling can be tested. When an email is sent to the test server, the connection is closed and the received/expected emails are compared to make sure that the header and body content is intact. This enables testing the flow of sending an email without actually issuing any real emails and without using real credentials. diff --git a/.github/workflows/check_readme.yaml b/.github/workflows/check_readme.yaml new file mode 100644 index 0000000..a052d1e --- /dev/null +++ b/.github/workflows/check_readme.yaml @@ -0,0 +1,16 @@ +name: Check if the readme is up to date. + +on: push + +jobs: + static-code-analysis: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - id: common + uses: ghga-de/gh-action-common@v2 + + - name: Check readme + run: | + ./scripts/update_readme.py --check diff --git a/.gitignore b/.gitignore index 231fdfa..28b8a9d 100644 --- a/.gitignore +++ b/.gitignore @@ -50,6 +50,7 @@ coverage.xml *.py,cover .hypothesis/ .pytest_cache/ +prof/ # Translations *.mo diff --git a/.mandatory_files b/.mandatory_files index bf48ac2..d7cf949 100644 --- a/.mandatory_files +++ b/.mandatory_files @@ -10,11 +10,6 @@ .devcontainer/docker-compose.yml .devcontainer/Dockerfile - -docs/README.md - -example_data - tests/__init__.py tests/fixtures/__init__.py @@ -28,3 +23,6 @@ README.md setup.py setup.cfg requirements-dev.txt + +.description.md +.design.md diff --git a/.readme_template.md b/.readme_template.md new file mode 100644 index 0000000..878dbdc --- /dev/null +++ b/.readme_template.md @@ -0,0 +1,109 @@ + +[![tests](https://github.com/ghga-de/$name/actions/workflows/unit_and_int_tests.yaml/badge.svg)](https://github.com/ghga-de/$name/actions/workflows/unit_and_int_tests.yaml) +[![Coverage Status](https://coveralls.io/repos/github/ghga-de/$name/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/$name?branch=main) + +# $title + +$summary + +## Description + +$description + +## Installation +We recommend using the provided Docker container.
+ +A pre-built version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/$name): +```bash +docker pull ghga/$name:$version +``` + +Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile): +```bash +# Execute in the repo's root dir: +docker build -t ghga/$name:$version . +``` + +For production-ready deployment, we recommend using Kubernetes; however, +for simple use cases, you could run the service with Docker +on a single server: +```bash +# The entrypoint is preconfigured: +docker run -p 8080:8080 ghga/$name:$version --help +``` + +If you prefer not to use containers, you may install the service from source: +```bash +# Execute in the repo's root dir: +pip install . + +# To run the service: +$shortname --help +``` + +## Configuration +### Parameters + +The service requires the following configuration parameters: +$config_description + +### Usage: + +A template YAML for configuring the service can be found at +[`./example-config.yaml`](./example-config.yaml). +Please adapt it, rename it to `.$shortname.yaml`, and place it into one of the following locations: +- in the current working directory where you execute the service (on unix: `./.$shortname.yaml`) +- in your home directory (on unix: `~/.$shortname.yaml`) + +The config yaml will be automatically parsed by the service. + +**Important: If you are using containers, the locations refer to paths within the container.** + +All parameters mentioned in the [`./example-config.yaml`](./example-config.yaml) +could also be set using environment variables or file secrets. + +For naming the environment variables, just prefix the parameter name with `${shortname}_`, +e.g. for the `host` parameter, set an environment variable named `${shortname}_host` +(you may use either upper or lower case; however, it is standard to define all env +variables in upper case). + +To use file secrets, please refer to the +[corresponding section](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support) +of the pydantic documentation. + +$openapi_doc + +## Architecture and Design: +$design_description + +## Development +For setting up the development environment, we rely on the +[devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of vscode +in combination with Docker Compose. + +To use it, you have to have Docker Compose as well as vscode with its "Remote - Containers" +extension (`ms-vscode-remote.remote-containers`) installed. +Then open this repository in vscode and run the command +`Remote-Containers: Reopen in Container` from the vscode "Command Palette". + +This will give you a full-fledged, pre-configured development environment including: +- infrastructural dependencies of the service (databases, etc.) +- all relevant vscode extensions pre-installed +- pre-configured linting and auto-formatting +- a pre-configured debugger +- automatic license-header insertion + +Moreover, inside the devcontainer, a convenience command `dev_install` is available. +It installs the service with all development dependencies and installs pre-commit. + +The installation is performed automatically when you build the devcontainer. However, +if you update dependencies in the [`./setup.cfg`](./setup.cfg) or the +[`./requirements-dev.txt`](./requirements-dev.txt), please run it again. + +## License +This repository is free to use and modify according to the +[Apache 2.0 License](./LICENSE).
+ +## Readme Generation +This readme is autogenerated, please see [`readme_generation.md`](./readme_generation.md) +for details. diff --git a/.static_files b/.static_files index c9b555f..a2f8ff5 100644 --- a/.static_files +++ b/.static_files @@ -20,6 +20,7 @@ scripts/get_package_name.py scripts/update_config_docs.py scripts/update_template_files.py scripts/update_openapi_docs.py +scripts/update_readme.py scripts/README.md .github/workflows/check_config_docs.yaml @@ -27,6 +28,7 @@ scripts/README.md .github/workflows/static_code_analysis.yaml .github/workflows/unit_and_int_tests.yaml .github/workflows/check_openapi_spec.yaml +.github/workflows/check_readme.yaml .github/workflows/cd.yaml example_data/README.md @@ -44,3 +46,6 @@ LICENSE requirements.txt requirements-dev-common.txt setup.py + +.readme_template.md +readme_generation.md diff --git a/README.md b/README.md index ba9d25e..87f58cb 100644 --- a/README.md +++ b/README.md @@ -1,42 +1,53 @@ -![tests](https://github.com/ghga-de/notification-service/actions/workflows/unit_and_int_tests.yaml/badge.svg) +[![tests](https://github.com/ghga-de/notification-service/actions/workflows/unit_and_int_tests.yaml/badge.svg)](https://github.com/ghga-de/notification-service/actions/workflows/unit_and_int_tests.yaml) [![Coverage Status](https://coveralls.io/repos/github/ghga-de/notification-service/badge.svg?branch=main)](https://coveralls.io/github/ghga-de/notification-service?branch=main) -# Notification-Service -This service is used to handle notifications, especially email notifications. -Other microservices in the ecosystem can publish the appropriate notification event and -this service will process it accordingly. The primary use is to send emails based on -notification event content. For instance, a service might want to notify a user that -their upload was successful. The service in question could publish an event with that -information, and the notification service would take care of the rest. +# Notification Service -## Documentation: +The Notification Service (NS) handles notification Kafka events. -An extensive documentation can be found [here](...) (coming soon). +## Description -## Quick Start -### Installation +The Notification Service is a microservice intended to provide general notification capabilities. At this time, notifications are only generated via Kafka events, and they are only issued via email. +However, the architecture of the service would allow for the addition of other submission options, such as REST APIs, as well as new notification channels, such as SMS, with relatively little work. + +To send an email notification using this service, publish a Kafka event conforming to the Notification event schema to the topic configured under "notification_event_topic" (see configuration details below). Because email client authentication is handled by the notification service itself, nothing beyond publishing the event is required. + + +### Typical operation + +This service doesn't have a REST API. It is fully stateless and does not require a database. +It's a straightforward service running a Kafka consumer that listens for one kind of event. +Notification events are picked up by the consumer, validated against the Notification event schema, and sent to the Notifier module. +The Notifier looks at the notification event details and determines what to do with it. +Right now, this always means sending an email. +The information is sent to the SMTP client, where a secure connection is established and the email is dispatched.
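+
+For orientation, a publishing service built on [hexkit](https://github.com/ghga-de/hexkit) might emit such a notification event roughly as sketched below. This is an illustrative sketch only, not code from this repository: the topic, event type, key, and payload field names are placeholders and must be checked against the configured values and the Notification event schema referenced in the Email Templates section below; the hexkit calls shown reflect its Kafka provider at the time of writing and may differ between versions.
+
+```python
+import asyncio
+
+from hexkit.providers.akafka import KafkaConfig, KafkaEventPublisher
+
+config = KafkaConfig(
+    service_name="my_service",  # hypothetical publishing service
+    service_instance_id="1",
+    kafka_servers=["localhost:9092"],
+)
+
+# Placeholder payload -- the field names must match the Notification event schema:
+notification = {
+    "recipient_email": "user@example.org",
+    "recipient_name": "Jane Doe",
+    "subject": "Upload complete",
+    "plaintext_body": "Your upload was successful.",
+}
+
+
+async def publish_notification() -> None:
+    """Publish a single notification event (illustrative only)."""
+    async with KafkaEventPublisher.construct(config=config) as publisher:
+        await publisher.publish(
+            payload=notification,
+            type_="notification",  # should match the configured notification_event_type
+            key="user@example.org",
+            topic="notifications",  # should match the configured notification_event_topic
+        )
+
+
+if __name__ == "__main__":
+    asyncio.run(publish_notification())
+```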
+ +### Email Templates + +In the configuration there are two template requirements: a plaintext email template and an HTML email template. The point of these is to produce consistently formatted emails while keeping the requirements light for microservices trying to send notifications. The templates are both used to make the email. Template variables are denoted with "$", e.g. $recipient_name, and are required to match the notification schema field names defined [here](https://github.com/ghga-de/ghga-event-schemas/blob/8e535ac271e7f27b6132505aad8cf572decc7ab4/ghga_event_schemas/pydantic_.py#L304). Having both HTML and plaintext means everyone should be able to receive the emails without a problem, and most of the time they should look nice. Because email clients like Outlook, Gmail, etc. have differences in the way they render HTML emails, it is recommended that styling be kept to a minimum or that a pre-made template be used where these things have been taken into account. + + +## Installation We recommend using the provided Docker container. A pre-built version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/notification-service): ```bash -# Please feel free to choose the version as needed: -docker pull ghga/notification-service: +docker pull ghga/notification-service:0.1.0 ``` Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile): ```bash # Execute in the repo's root dir: -# (Please feel free to adapt the name/tag.) -docker build -t ghga/notification-service:0.1.0 . +docker build -t ghga/notification-service:0.1.0 . ``` For production-ready deployment, we recommend using Kubernetes; however, for simple use cases, you could run the service with Docker on a single server: ```bash -# The entrypoint is pre-configured: -docker run -p 8080:8080 ghga/notification-service: +# The entrypoint is preconfigured: +docker run -p 8080:8080 ghga/notification-service:0.1.0 --help ``` If you prefer not to use containers, you may install the service from source: @@ -44,17 +55,48 @@ If you prefer not to use containers, you may install the service from source: # Execute in the repo's root dir: pip install . -# to run the service: -ns +# To run the service: +ns --help ``` -### Configuration: -The [`./example-config.yaml`](./example-config.yaml) gives an overview of the available configuration options. -Please adapt it and choose one of the following options for injecting it into the service: -- specify the path to via the `NS_CONFIG_YAML` env variable -- rename it to `.ns.yaml` and place it into one of the following locations: - - the current working directory were you are execute the service (on unix: `./.ns.yaml`) - - your home directory (on unix: `~/.ns.yaml`) +## Configuration +### Parameters + +The service requires the following configuration parameters: +- **`plaintext_email_template`** *(string)*: The plaintext template to use for email notifications. + +- **`html_email_template`** *(string)*: The HTML template to use for email notifications. + +- **`from_address`** *(string)*: The sender's address. + +- **`smtp_host`** *(string)*: The mail server host to connect to. + +- **`smtp_port`** *(integer)*: The port for the mail server connection. + +- **`login_user`** *(string)*: The login username or email. + +- **`login_password`** *(string)*: The login password. + +- **`notification_event_topic`** *(string)*: Name of the event topic used to track notification events.
+ +- **`notification_event_type`** *(string)*: The type to use for events containing content to be sent. + +- **`service_name`** *(string)*: Default: `ns`. + +- **`service_instance_id`** *(string)*: A string that uniquely identifies this instance across all instances of this service. A globally unique Kafka client ID will be created by concatenating the service_name and the service_instance_id. + +- **`kafka_servers`** *(array)*: A list of connection strings to connect to Kafka bootstrap servers. + + - **Items** *(string)* + + +### Usage: + +A template YAML for configuring the service can be found at +[`./example-config.yaml`](./example-config.yaml). +Please adapt it, rename it to `.ns.yaml`, and place it into one of the following locations: +- in the current working directory where you execute the service (on unix: `./.ns.yaml`) +- in your home directory (on unix: `~/.ns.yaml`) The config yaml will be automatically parsed by the service. @@ -63,8 +105,8 @@ The config yaml will be automatically parsed by the service. **Important: If you are using containers, the locations refer to paths within the container.** All parameters mentioned in the [`./example-config.yaml`](./example-config.yaml) could also be set using environment variables or file secrets. -For naming the environment variables, just prefix the parameter name with `NS_`, -e.g. for the `host` set an environment variable named `NS_HOST` +For naming the environment variables, just prefix the parameter name with `ns_`, +e.g. for the `host` parameter, set an environment variable named `ns_host` (you may use either upper or lower case; however, it is standard to define all env variables in upper case). @@ -72,45 +114,47 @@ To use file secrets, please refer to the [corresponding section](https://pydantic-docs.helpmanual.io/usage/settings/#secret-support) of the pydantic documentation. +## HTTP API +An OpenAPI specification for this service can be found [here](./openapi.yaml). + +## Architecture and Design: +This is a Python-based service following the Triple Hexagonal Architecture pattern. +It uses protocol/provider pairs and dependency injection mechanisms provided by the +[hexkit](https://github.com/ghga-de/hexkit) library. + + +### Testing +The only notable thing about the test setup is that it uses a local test server (tests/fixtures/server.py) via [aiosmtpd](https://aiosmtpd.readthedocs.io/en/latest/), which has largely replaced the deprecated smtpd module. There is a DummyServer, which has an 'expect_email()' method that is used similarly to the [expect_events()](https://github.com/ghga-de/hexkit/blob/7382c19b84136ea5b1652087ba1da4890267b1b5/hexkit/providers/akafka/testutils.py#L368) method from hexkit's Kafka testing module. It can perform a simple authentication check so error handling can be tested. When an email is sent to the test server, the connection is closed and the received/expected emails are compared to make sure that the header and body content is intact. This enables testing the flow of sending an email without actually issuing any real emails and without using real credentials. + ## Development For setting up the development environment, we rely on the [devcontainer feature](https://code.visualstudio.com/docs/remote/containers) of vscode in combination with Docker Compose. -To use it, you have to have Docker Compose as well as vscode with its "Remote - Containers" extension (`ms-vscode-remote.remote-containers`) installed. +To use it, you have to have Docker Compose as well as vscode with its "Remote - Containers" +extension (`ms-vscode-remote.remote-containers`) installed.
Then open this repository in vscode and run the command `Remote-Containers: Reopen in Container` from the vscode "Command Palette". This will give you a full-fledged, pre-configured development environment including: - infrastructural dependencies of the service (databases, etc.) - all relevant vscode extensions pre-installed -- pre-configured linting and auto-formatting +- pre-configured linting and auto-formatting - a pre-configured debugger - automatic license-header insertion -Moreover, inside the devcontainer, there are two convenience commands available -(please type them in the integrated terminal of vscode): -- `dev_install` - install the service with all development dependencies, -installs pre-commit, and applies any migration scripts to the test database -(please run that if you are starting the devcontainer for the first time -or if you added any python dependencies to the [`./setup.cfg`](./setup.cfg)) -- `dev_launcher` - starts the service with the development config yaml -(located in the `./.devcontainer/` dir) - -If you prefer not to use vscode, you could get a similar setup (without the editor specific features) -by running the following commands: -``` bash -# Execute in the repo's root dir: -cd ./.devcontainer - -# build and run the environment with docker-compose -docker-compose up +Moreover, inside the devcontainer, a convenience command `dev_install` is available. +It installs the service with all development dependencies and installs pre-commit. -# attach to the main container: -# (you can open multiple shell sessions like this) -docker exec -it devcontainer_app_1 /bin/bash -``` +The installation is performed automatically when you build the devcontainer. However, +if you update dependencies in the [`./setup.cfg`](./setup.cfg) or the +[`./requirements-dev.txt`](./requirements-dev.txt), please run it again. ## License -This repository is free to use and modify according to the [Apache 2.0 License](./LICENSE). +This repository is free to use and modify according to the +[Apache 2.0 License](./LICENSE). + +## Readme Generation +This readme is autogenerated, please see [`readme_generation.md`](./readme_generation.md) +for details. diff --git a/docs/README.md b/docs/README.md deleted file mode 100644 index a11c748..0000000 --- a/docs/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# Docs Folder -This folder is meant to contain the documentation of your microservice. -If needed, additional objects such as images, diagrams, etc. should be put into seperate subfolders. diff --git a/readme_generation.md b/readme_generation.md new file mode 100644 index 0000000..432153c --- /dev/null +++ b/readme_generation.md @@ -0,0 +1,47 @@ + + +# Readme Generation + +The README file is generated by collecting information from different sources as +outlined below. + +- name: The full name of the package is derived from the remote origin Git repository. +- title: A title case representation of the name. +- shortname: An abbreviation of the full name. This is derived from the name mentioned + in the [`./setup.cfg`](./setup.cfg). +- summary: A short 1-2 sentence summary derived from the description in the + [`./setup.cfg`](./setup.cfg). +- version: The package version derived from the version specified in the + [`./setup.cfg`](./setup.cfg). +- description: A markdown-formatted description of the features and use cases of this + service or package. Obtained from the [`./.description.md`](./.description.md).
+- design_description: A markdown-formatted description of the overall architecture and + design of the package. Obtained from the [`./.design.md`](./.design.md). +- config_description: A markdown-formatted description of all config parameters. + This is autogenerated from the [`./config_schema.json`](./config_schema.json). +- openapi_doc: A markdown-formatted description of the HTTP API. This is autogenerated + and links to the [`./openapi.yaml`](./openapi.yaml). If the openapi.yaml is not present, + this documentation is empty. + +The [`./.readme_template.md`](./.readme_template.md) serves as a template where the +above variables can be filled in using Python's `string.Template` utility from the +standard library. + +The [`./scripts/update_readme.py`](./scripts/update_readme.py) script can be used to collect all information and +fill it into the template to generate the README file. diff --git a/requirements-dev-common.txt b/requirements-dev-common.txt index 9a18c54..b253cb8 100644 --- a/requirements-dev-common.txt +++ b/requirements-dev-common.txt @@ -3,6 +3,8 @@ pytest==7.2.0 pytest-asyncio==0.20.3 pytest-cov==4.0.0 +pytest-profiling==1.7.0 +snakeviz==2.2.0 pre-commit==3.1.1 @@ -21,10 +23,14 @@ click==8.1.3 typer==0.7.0 httpx==0.23.3 +pytest-httpx==0.21.3 -mkdocs==1.4.2 -mkdocs-autorefs==0.4.1 -mkdocs-material==9.0.3 -mkdocs-material-extensions==1.1.1 -mkdocstrings==0.19.1 -mkdocstrings-python-legacy==0.2.3 + +# work around until this issue is solved: +# https://github.com/docker/docker-py/issues/3113 +urllib3==1.26.15 +requests==2.28.2 + +stringcase==1.2.0 +jsonschema2md==0.4.0 +setuptools==67.7.2 diff --git a/scripts/update_readme.py b/scripts/update_readme.py new file mode 100755 index 0000000..594aedf --- /dev/null +++ b/scripts/update_readme.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python3 + +# Copyright 2021 - 2023 Universität Tübingen, DKFZ, EMBL, and Universität zu Köln +# for the German Human Genome-Phenome Archive (GHGA) +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Generate documentation for this package using different sources.""" + +import json +import subprocess # nosec +import sys +from pathlib import Path +from string import Template + +import jsonschema2md +from pydantic import BaseModel, Field +from script_utils.cli import echo_failure, echo_success, run +from setuptools.config.setupcfg import read_configuration +from stringcase import spinalcase, titlecase + +ROOT_DIR = Path(__file__).parent.parent.resolve() +SETUP_CFG_PATH = ROOT_DIR / "setup.cfg" +DESCRIPTION_PATH = ROOT_DIR / ".description.md" +DESIGN_PATH = ROOT_DIR / ".design.md" +README_TEMPLATE_PATH = ROOT_DIR / ".readme_template.md" +CONFIG_SCHEMA_PATH = ROOT_DIR / "config_schema.json" +OPENAPI_YAML_REL_PATH = "./openapi.yaml" +README_PATH = ROOT_DIR / "README.md" + + +class PackageHeader(BaseModel): + """A basic summary of a package.""" + + shortname: str = Field( + ..., + description=( + "The abbreviation of the package name. Is identical to the package name." + ), + ) + version: str = Field(..., description="The version of the package.") + summary: str = Field( + ..., description="A short 1 or 2 sentence summary of the package." + ) + + +class PackageName(BaseModel): + """The name of a package and its different representations.""" + + name: str = Field(..., description="The full name of the package in spinal case.") + title: str = Field(..., description="The name of the package formatted as title.") + + +class PackageDetails(PackageHeader, PackageName): + """A container for details on a package used to build documentation.""" + + description: str = Field( + ..., description="A markdown-formatted description of the package." + ) + design_description: str = Field( + ..., + description=( + "A markdown-formatted description of overall architecture and design of" + + " the package." + ), + ) + config_description: str = Field( + ..., + description=( + "A markdown-formatted list of all configuration parameters of this package." + ), + ) + openapi_doc: str = Field( + ..., + description=( + "A markdown-formatted description rendering or linking to an OpenAPI" + " specification of the package." + ), + ) + + +def read_package_header() -> PackageHeader: + """Read basic information about the package from the setup.cfg.""" + + setup_config = read_configuration(SETUP_CFG_PATH) + setup_metadata = setup_config["metadata"] + return PackageHeader( + shortname=setup_metadata["name"], + version=setup_metadata["version"], + summary=setup_metadata["description"], + ) + + +def read_package_name() -> PackageName: + """Infer the package name from the name of the git origin.""" + + with subprocess.Popen( + args="basename -s .git `git config --get remote.origin.url`", + cwd=ROOT_DIR, + stdout=subprocess.PIPE, + shell=True, + ) as process: + stdout, _ = process.communicate() + + if not stdout: + raise RuntimeError("The name of the git origin could not be resolved.") + git_origin_name = stdout.decode("utf-8").strip() + + return PackageName( + name=spinalcase(git_origin_name), title=titlecase(git_origin_name) + ) + + +def read_package_description() -> str: + """Read the package description.""" + + return DESCRIPTION_PATH.read_text() + + +def read_design_description() -> str: + """Read the design description.""" + + return DESIGN_PATH.read_text() + + +def generate_config_docs() -> str: + """Generate markdown-formatted documentation for the configuration parameters + listed in the config schema.""" + + parser = jsonschema2md.Parser( + examples_as_yaml=False, + show_examples="all", + ) + with open(CONFIG_SCHEMA_PATH, "r", encoding="utf-8") as json_file: + config_schema = json.load(json_file) + + md_lines = parser.parse_schema(config_schema) + + # ignore everything before the properties header: + properties_index = md_lines.index("## Properties\n\n") + md_lines = md_lines[properties_index + 1 :] + + return "\n".join(md_lines) + + +def generate_openapi_docs() -> str: + """Generate markdown-formatted documentation linking to or rendering an OpenAPI + specification of the package. If no OpenAPI specification is present, return an + empty string.""" + + open_api_yaml_path = ROOT_DIR / OPENAPI_YAML_REL_PATH + + if not open_api_yaml_path.exists(): + return "" + + return ( + "## HTTP API\n" + + "An OpenAPI specification for this service can be found" + + f" [here]({OPENAPI_YAML_REL_PATH})."
+ ) + + +def get_package_details() -> PackageDetails: + """Get details required to build documentation for the package.""" + + header = read_package_header() + name = read_package_name() + description = read_package_description() + config_description = generate_config_docs() + return PackageDetails( + **header.dict(), + **name.dict(), + description=description, + config_description=config_description, + design_description=read_design_description(), + openapi_doc=generate_openapi_docs(), + ) + + +def generate_single_readme(*, details: PackageDetails) -> str: + """Generate a single markdown-formatted readme file for the package based on the + provided details.""" + + template_content = README_TEMPLATE_PATH.read_text() + template = Template(template_content) + return template.substitute(details.dict()) + + +def main(check: bool = False) -> None: + """Update the readme markdown.""" + + details = get_package_details() + readme_content = generate_single_readme(details=details) + + if check: + if README_PATH.read_text() != readme_content: + echo_failure("README.md is not up to date.") + sys.exit(1) + echo_success("README.md is up to date.") + return + + README_PATH.write_text(readme_content) + echo_success("Successfully updated README.md.") + + +if __name__ == "__main__": + run(main)