Pass work type of dataset as expected by frontend (#13)
Cito authored Jul 4, 2023
1 parent 0155fcc commit 9d308fb
Showing 12 changed files with 39 additions and 24 deletions.
2 changes: 1 addition & 1 deletion .devcontainer/.dev_config.yaml
@@ -14,7 +14,7 @@ db_name: "dev-db"
service_instance_id: 001
kafka_servers: ["kafka:9092"]

-download_access_url: "http://127.0.0.1:8080/download_access"
+download_access_url: "http://127.0.0.1:8080/download-access"

dataset_overview_event_topic: metadata
dataset_overview_event_type: metadata_dataset_overview
6 changes: 3 additions & 3 deletions README.md
@@ -52,21 +52,21 @@ We recommend using the provided Docker container.

A pre-built version is available at [docker hub](https://hub.docker.com/repository/docker/ghga/work-package-service):
```bash
-docker pull ghga/work-package-service:0.1.2
+docker pull ghga/work-package-service:0.1.3
```

Or you can build the container yourself from the [`./Dockerfile`](./Dockerfile):
```bash
# Execute in the repo's root dir:
-docker build -t ghga/work-package-service:0.1.2 .
+docker build -t ghga/work-package-service:0.1.3 .
```

For production-ready deployment, we recommend using Kubernetes. However, for
simple use cases, you could run the service with Docker on a single server:
```bash
# The entrypoint is preconfigured:
-docker run -p 8080:8080 ghga/work-package-service:0.1.2 --help
+docker run -p 8080:8080 ghga/work-package-service:0.1.3 --help
```

If you prefer not to use containers, you may install the service from source:
2 changes: 1 addition & 1 deletion config_schema.json
@@ -112,7 +112,7 @@
"download_access_url": {
"title": "Download Access Url",
"description": "URL pointing to the internal download access API.",
"example": "http://127.0.0.1/download_access",
"example": "http://127.0.0.1/download-access",
"env_names": [
"wps_download_access_url"
],
2 changes: 1 addition & 1 deletion example_config.yaml
@@ -19,7 +19,7 @@ datasets_collection: datasets
db_connection_str: '**********'
db_name: dev-db
docs_url: /docs
-download_access_url: http://127.0.0.1:8080/download_access
+download_access_url: http://127.0.0.1:8080/download-access
host: 127.0.0.1
kafka_servers:
- kafka:9092
7 changes: 6 additions & 1 deletion openapi.yaml
@@ -18,12 +18,17 @@ components:
        description: ID of the dataset
        title: Id
        type: string
+        stage:
+          allOf:
+          - $ref: '#/components/schemas/WorkType'
+          description: Current stage of this dataset.
        title:
          description: The title of the dataset.
          title: Title
          type: string
      required:
      - id
+      - stage
      - title
      - description
      - files
@@ -133,7 +138,7 @@
info:
  description: A service managing work packages for the GHGA CLI
  title: Work Package Service
-  version: 0.1.2
+  version: 0.1.3
openapi: 3.0.2
paths:
  /health:
2 changes: 1 addition & 1 deletion setup.cfg
@@ -36,7 +36,7 @@ include_package_data = True
packages = find:
install_requires =
    ghga-service-commons[api,auth,crypt]==0.4.1
-    ghga-event-schemas==0.13.1
+    ghga-event-schemas==0.13.2
    hexkit[akafka,mongodb]==0.10.0
    httpx==0.23.3
    typer==0.7.0
10 changes: 8 additions & 2 deletions tests/fixtures/datasets.py
@@ -16,16 +16,21 @@

"""Sample datasets for testing."""

-from ghga_event_schemas.pydantic_ import MetadataDatasetFile, MetadataDatasetOverview
+from ghga_event_schemas.pydantic_ import (
+    MetadataDatasetFile,
+    MetadataDatasetOverview,
+    MetadataDatasetStage,
+)

-from wps.core.models import Dataset, DatasetFile
+from wps.core.models import Dataset, DatasetFile, WorkType

__all__ = ["DATASET", "DATASET_OVERVIEW_EVENT"]


DATASET = Dataset(
    id="some-dataset-id",
    title="Test dataset 1",
+    stage=WorkType.DOWNLOAD,
    description="The first test dataset",
    files=[
        DatasetFile(id="file-id-1", extension=".json"),
@@ -37,6 +42,7 @@

DATASET_OVERVIEW_EVENT = MetadataDatasetOverview(
    accession="some-dataset-id",
+    stage=MetadataDatasetStage.DOWNLOAD,
    title="Test dataset 1",
    description="The first test dataset",
    files=[
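The two fixtures are deliberately mirror images: the inbound event carries `MetadataDatasetStage.DOWNLOAD`, and after translation the stored dataset should carry `WorkType.DOWNLOAD`. A hypothetical sanity check (not part of this commit) could assert that the pair agrees by enum member name, since that is exactly how the subscriber maps one to the other:

```python
# Hypothetical sanity check (not part of this commit): the event's stage
# and the expected dataset's stage must agree by enum member name.
from tests.fixtures.datasets import DATASET, DATASET_OVERVIEW_EVENT


def test_fixture_stages_agree():
    assert DATASET.stage.name == DATASET_OVERVIEW_EVENT.stage.name == "DOWNLOAD"
```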
2 changes: 1 addition & 1 deletion wps/__init__.py
@@ -15,4 +15,4 @@

"""Work Package Service"""

__version__ = "0.1.2"
__version__ = "0.1.3"
8 changes: 7 additions & 1 deletion wps/adapters/inbound/event_sub.py
@@ -22,7 +22,7 @@
from hexkit.protocols.eventsub import EventSubscriberProtocol
from pydantic import BaseSettings, Field

-from wps.core.models import Dataset, DatasetFile
+from wps.core.models import Dataset, DatasetFile, WorkType
from wps.ports.inbound.repository import WorkPackageRepositoryPort

__all__ = ["EventSubTranslatorConfig", "EventSubTranslator"]
@@ -77,6 +77,11 @@ async def _consume_validated(  # pylint: disable=unused-argument
            payload=payload,
            schema=event_schemas.MetadataDatasetOverview,
        )
+        try:
+            stage = WorkType[validated_payload.stage.name]
+        except KeyError:
+            # stage does not correspond to a work type, ignore event
+            return

        files = [
            DatasetFile(
@@ -89,6 +94,7 @@
        dataset = Dataset(
            id=validated_payload.accession,
            title=validated_payload.title,
+            stage=stage,
            description=validated_payload.description,
            files=files,
        )
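The mapping added in `_consume_validated` works by enum member *name*: `WorkType[validated_payload.stage.name]` looks up the member with the same name in the service's own enum and raises `KeyError` when there is no counterpart, in which case the event is skipped. A self-contained sketch of the pattern, where `SourceStage` is an illustrative stand-in for `MetadataDatasetStage` (including a hypothetical stage with no matching work type):

```python
# Self-contained sketch of the name-based enum mapping; SourceStage is an
# illustrative stand-in for MetadataDatasetStage, not the real schema.
from enum import Enum
from typing import Optional


class SourceStage(Enum):
    DOWNLOAD = "download"
    UPLOAD = "upload"
    ARCHIVE = "archive"  # hypothetical stage without a matching work type


class WorkType(str, Enum):
    DOWNLOAD = "download"
    UPLOAD = "upload"


def to_work_type(stage: SourceStage) -> Optional[WorkType]:
    """Map a dataset stage to a work type by enum member name."""
    try:
        # bracket lookup is by *name*: WorkType["DOWNLOAD"] -> WorkType.DOWNLOAD
        return WorkType[stage.name]
    except KeyError:
        return None  # no counterpart, caller ignores the event


assert to_work_type(SourceStage.DOWNLOAD) is WorkType.DOWNLOAD
assert to_work_type(SourceStage.ARCHIVE) is None
```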
2 changes: 1 addition & 1 deletion wps/adapters/outbound/http.py
@@ -34,7 +34,7 @@ class AccessCheckConfig(BaseSettings):

    download_access_url: str = Field(
        ...,
-        example="http://127.0.0.1/download_access",
+        example="http://127.0.0.1/download-access",
        description="URL pointing to the internal download access API.",
    )

15 changes: 8 additions & 7 deletions wps/core/models.py
@@ -42,6 +42,13 @@ class Config:  # pylint: disable=missing-class-docstring
        frozen = True


+class WorkType(str, Enum):
+    """The type of work that a work package describes."""
+
+    DOWNLOAD = "download"
+    UPLOAD = "upload"
+
+
class DatasetFile(BaseDto):
    """A file that is part of a dataset."""

@@ -53,20 +60,14 @@ class Dataset(BaseDto):
"""A model describing a dataset."""

id: str = Field(default=..., description="ID of the dataset")
stage: WorkType = Field(default=..., description="Current stage of this dataset.")
title: str = Field(default=..., description="The title of the dataset.")
description: Optional[str] = Field(
..., description="The description of the dataset."
)
files: list[DatasetFile] = Field(..., description="Files contained in the dataset.")


class WorkType(str, Enum):
"""The type of work that a work package describes."""

DOWNLOAD = "download"
UPLOAD = "upload"


class WorkOrderToken(BaseDto):
"""A model describing the payload of a work order token."""

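Moving `WorkType` above `Dataset` is required, not cosmetic: the new `stage: WorkType` annotation is evaluated while the `Dataset` class body executes, so the enum must already be defined. And because `WorkType` also subclasses `str`, pydantic coerces plain strings from event payloads or the database into enum members. A minimal sketch using plain pydantic rather than the service's `BaseDto`:

```python
# Minimal sketch using plain pydantic rather than the service's BaseDto:
# the enum must be defined before the model that annotates a field with it.
from enum import Enum

from pydantic import BaseModel, Field


class WorkType(str, Enum):
    """The type of work that a work package describes."""

    DOWNLOAD = "download"
    UPLOAD = "upload"


class Dataset(BaseModel):
    id: str = Field(..., description="ID of the dataset")
    stage: WorkType = Field(..., description="Current stage of this dataset.")
    title: str = Field(..., description="The title of the dataset.")


# str-backed enums let pydantic coerce raw strings into members:
dataset = Dataset(id="d1", stage="download", title="Demo")
assert dataset.stage is WorkType.DOWNLOAD
```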
5 changes: 1 addition & 4 deletions wps/core/repository.py
@@ -237,16 +237,13 @@ async def work_order_token(

    async def register_dataset(self, dataset: Dataset) -> None:
        """Register a dataset with all of its files."""
-        # write the dataset to the database
-        await self._dataset_dao.insert(dataset)
-        # await self._dataset_dao.upsert(dataset)
+        await self._dataset_dao.upsert(dataset)

    async def get_dataset(self, dataset_id: str) -> Dataset:
        """Get a registered dataset using the given ID.
        If the dataset does not exist, a DatasetNotFoundError will be raised.
        """
-        # get the dataset from the database
        try:
            return await self._dataset_dao.get_by_id(dataset_id)
        except ResourceNotFoundError as error:
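Replacing `insert` with `upsert` makes `register_dataset` idempotent: if Kafka redelivers a dataset overview event, or a later event moves a dataset to another stage, the stored record is overwritten instead of raising a duplicate-key error. An in-memory stand-in for the DAO (the real service uses hexkit's MongoDB DAO) illustrates the difference:

```python
# In-memory stand-in for the dataset DAO (not the real hexkit MongoDB DAO);
# shows why upsert is the safe choice under event redelivery.
class InMemoryDatasetDao:
    def __init__(self) -> None:
        self._docs: dict[str, dict] = {}

    async def insert(self, dto: dict) -> None:
        # insert fails on a duplicate ID ...
        if dto["id"] in self._docs:
            raise ValueError(f"dataset {dto['id']} already registered")
        self._docs[dto["id"]] = dto

    async def upsert(self, dto: dict) -> None:
        # ... while upsert inserts or replaces, so consuming the same
        # overview event twice leaves one consistent record behind.
        self._docs[dto["id"]] = dto
```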
