Merge pull request #68 from openstreetmap-polska/dev
Release to main
Zaczero authored Apr 3, 2024
2 parents ef77dcd + 5afe91f commit 321a0e7
Showing 66 changed files with 2,994 additions and 1,773 deletions.
1 change: 1 addition & 0 deletions .github/workflows/deploy.yaml
@@ -104,6 +104,7 @@ jobs:
 git reset --hard "origin/$tag"
 echo "Restarting containers"
 docker compose down --remove-orphans
+TAG="$tag" docker compose --env-file "envs/compose/$tag.env" up -d
 echo "Pruning dangling images"
3 changes: 1 addition & 2 deletions .gitignore
@@ -202,5 +202,4 @@ pyrightconfig.json
 data/*
 cert/*

-cython_lib/*.c
-cython_lib/*.html
+.migrate.json
61 changes: 61 additions & 0 deletions alembic_/env.py
@@ -0,0 +1,61 @@
import asyncio
from logging.config import fileConfig

from alembic import context
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import create_async_engine

from config import POSTGRES_URL
from models.db import * # noqa: F403
from models.db.base import Base

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def do_run_migrations(connection: Connection) -> None:
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )

    with context.begin_transaction():
        context.run_migrations()


async def run_async_migrations() -> None:
    """In this scenario we need to create an Engine
    and associate a connection with the context.
    """

    connectable = create_async_engine(POSTGRES_URL)

    async with connectable.connect() as connection:
        await connection.run_sync(do_run_migrations)

    await connectable.dispose()


def run_migrations_online() -> None:
    """Run migrations in 'online' mode."""

    asyncio.run(run_async_migrations())


run_migrations_online()
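
For context, this async env.py is what Alembic executes when migrations run. A minimal sketch of driving it programmatically, assuming a standard alembic.ini that points at the alembic_ directory (the ini path and target revision are assumptions, not part of this diff):

from alembic import command
from alembic.config import Config

alembic_cfg = Config('alembic.ini')   # assumed config path
command.upgrade(alembic_cfg, 'head')  # invokes run_migrations_online() above

The shell equivalent would simply be `alembic upgrade head`.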
27 changes: 27 additions & 0 deletions alembic_/script.py.mako
@@ -0,0 +1,27 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from typing import Sequence, Union

from alembic import op
import sqlalchemy as sa
from sqlalchemy import Text
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision: str = ${repr(up_revision)}
down_revision: Union[str, None] = ${repr(down_revision)}
branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)}
depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)}


def upgrade() -> None:
    ${upgrades if upgrades else "pass"}


def downgrade() -> None:
    ${downgrades if downgrades else "pass"}
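
This Mako template is what `alembic revision` renders into concrete revision modules such as the one below. As a hedged usage sketch, again assuming a default alembic.ini, generating a new autogenerated revision could look like:

from alembic import command
from alembic.config import Config

alembic_cfg = Config('alembic.ini')  # assumed config path
# Renders script.py.mako into a new file under the versions directory.
command.revision(alembic_cfg, message='describe the change here', autogenerate=True)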
89 changes: 89 additions & 0 deletions (new Alembic revision file; filename not shown)
@@ -0,0 +1,89 @@
"""Initial migration
Revision ID: 9f60c90e8a21
Revises:
Create Date: 2024-04-02 18:19:53.332510+00:00
"""

from collections.abc import Sequence

import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql

import models.geometry

# revision identifiers, used by Alembic.
revision: str = '9f60c90e8a21'
down_revision: str | None = None
branch_labels: str | Sequence[str] | None = None
depends_on: str | Sequence[str] | None = None


def upgrade() -> None:
    op.execute('CREATE EXTENSION IF NOT EXISTS postgis;')

    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table(
        'aed',
        sa.Column('id', sa.BigInteger(), nullable=False),
        sa.Column('version', sa.BigInteger(), nullable=False),
        sa.Column('tags', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column('position', models.geometry.PointType(), nullable=False),
        sa.Column('country_codes', sa.ARRAY(sa.Unicode(length=8), dimensions=1), nullable=True),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('aed_country_codes_idx', 'aed', ['country_codes'], unique=False, postgresql_using='gin')
    op.create_index('aed_position_idx', 'aed', ['position'], unique=False, postgresql_using='gist')
    op.create_table(
        'country',
        sa.Column('code', sa.Unicode(length=8), nullable=False),
        sa.Column('names', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.Column('geometry', models.geometry.PolygonType(), nullable=False),
        sa.Column('label_position', models.geometry.PointType(), nullable=False),
        sa.PrimaryKeyConstraint('code'),
    )
    op.create_index('country_geometry_idx', 'country', ['geometry'], unique=False, postgresql_using='gist')
    op.create_table(
        'photo',
        sa.Column('id', sa.Unicode(length=32), nullable=False),
        sa.Column('node_id', sa.BigInteger(), nullable=False),
        sa.Column('user_id', sa.BigInteger(), nullable=False),
        sa.Column(
            'created_at',
            postgresql.TIMESTAMP(timezone=True),
            server_default=sa.text('statement_timestamp()'),
            nullable=False,
        ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_table(
        'state',
        sa.Column('key', sa.Unicode(), nullable=False),
        sa.Column('data', postgresql.JSONB(astext_type=sa.Text()), nullable=False),
        sa.PrimaryKeyConstraint('key'),
    )
    op.create_table(
        'photo_report',
        sa.Column('id', sa.Unicode(length=32), nullable=False),
        sa.Column('photo_id', sa.Unicode(length=32), nullable=False),
        sa.Column(
            'created_at',
            postgresql.TIMESTAMP(timezone=True),
            server_default=sa.text('statement_timestamp()'),
            nullable=False,
        ),
        sa.ForeignKeyConstraint(
            ['photo_id'],
            ['photo.id'],
        ),
        sa.PrimaryKeyConstraint('id'),
    )
    op.create_index('photo_report_created_at_idx', 'photo_report', ['created_at'], unique=False)
    op.create_index('photo_report_photo_id_idx', 'photo_report', ['photo_id'], unique=True)
    # ### end Alembic commands ###


def downgrade() -> None:
    pass
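
Note that the generated downgrade() is left as pass, so this initial revision cannot be rolled back as committed. Purely for illustration, a hedged sketch of a symmetric downgrade (not part of this diff; it deliberately leaves the postgis extension installed) might be:

def downgrade() -> None:
    # Hypothetical reverse of upgrade(): drop dependents before their targets.
    op.drop_index('photo_report_photo_id_idx', table_name='photo_report')
    op.drop_index('photo_report_created_at_idx', table_name='photo_report')
    op.drop_table('photo_report')
    op.drop_table('state')
    op.drop_table('photo')
    op.drop_index('country_geometry_idx', table_name='country')
    op.drop_table('country')
    op.drop_index('aed_position_idx', table_name='aed')
    op.drop_index('aed_country_codes_idx', table_name='aed')
    op.drop_table('aed')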
46 changes: 24 additions & 22 deletions api/v1/countries.py
@@ -4,60 +4,59 @@
 from anyio import create_task_group
 from fastapi import APIRouter, Path
 from sentry_sdk import start_span
-from shapely.geometry import mapping
+from shapely import get_coordinates

-from middlewares.cache_middleware import configure_cache
+from middlewares.cache_control_middleware import cache_control
 from middlewares.skip_serialization import skip_serialization
-from models.country import Country
-from states.aed_state import AEDState
-from states.country_state import CountryState
-from utils import simple_point_mapping
+from models.db.country import Country
+from services.aed_service import AEDService
+from services.country_service import CountryService

 router = APIRouter(prefix='/countries')


 @router.get('/names')
-@configure_cache(timedelta(hours=1), stale=timedelta(days=7))
+@cache_control(timedelta(hours=1), stale=timedelta(days=7))
 @skip_serialization()
 async def get_names(language: str | None = None):
-    countries = await CountryState.get_all_countries()
+    countries = await CountryService.get_all()
     country_count_map: dict[str, int] = {}

     with start_span(description='Counting AEDs'):

         async def count_task(country: Country) -> None:
-            count = await AEDState.count_aeds_by_country_code(country.code)
-            country_count_map[country.name] = count
+            count = await AEDService.count_by_country_code(country.code)
+            country_count_map[country.code] = count

         async with create_task_group() as tg:
             for country in countries:
                 tg.start_soon(count_task, country)

-    def limit_country_names(names: dict[str, str]):
-        if language and (name := names.get(language)):
-            return {language: name}
-        return names
+    def limit_country_names(names: dict[str, str]) -> dict[str, str]:
+        return {language: name} if (language and (name := names.get(language))) else names

-    return [
+    result = [
         {
             'country_code': country.code,
             'country_names': limit_country_names(country.names),
-            'feature_count': country_count_map[country.name],
+            'feature_count': country_count_map[country.code],
             'data_path': f'/api/v1/countries/{country.code}.geojson',
         }
         for country in countries
-    ] + [
+    ]
+    result.append(
         {
             'country_code': 'WORLD',
             'country_names': {'default': 'World'},
             'feature_count': sum(country_count_map.values()),
             'data_path': '/api/v1/countries/WORLD.geojson',
         }
-    ]
+    )
+    return result


 @router.get('/{country_code}.geojson')
-@configure_cache(timedelta(hours=1), stale=timedelta(seconds=0))
+@cache_control(timedelta(hours=1), stale=timedelta(seconds=0))
 @skip_serialization(
     {
         'Content-Disposition': 'attachment',
@@ -66,16 +65,19 @@ def limit_country_names(names: dict[str, str]):
 )
 async def get_geojson(country_code: Annotated[str, Path(min_length=2, max_length=5)]):
     if country_code == 'WORLD':
-        aeds = await AEDState.get_all_aeds()
+        aeds = await AEDService.get_all()
     else:
-        aeds = await AEDState.get_aeds_by_country_code(country_code)
+        aeds = await AEDService.get_by_country_code(country_code)

     return {
         'type': 'FeatureCollection',
         'features': [
             {
                 'type': 'Feature',
-                'geometry': simple_point_mapping(aed.position),
+                'geometry': {
+                    'type': 'Point',
+                    'coordinates': get_coordinates(aed.position)[0].tolist(),
+                },
                 'properties': {
                     '@osm_type': 'node',
                     '@osm_id': aed.id,
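
A quick aside on the new geometry serialization in get_geojson: Shapely 2's get_coordinates returns an (N, 2) NumPy array of (x, y) pairs, so get_coordinates(point)[0].tolist() yields a plain [lon, lat] list suitable for GeoJSON. A self-contained sketch with made-up coordinates:

from shapely import Point, get_coordinates

position = Point(21.0122, 52.2297)  # example lon/lat, not taken from the diff
coords = get_coordinates(position)  # array([[21.0122, 52.2297]])
geometry = {'type': 'Point', 'coordinates': coords[0].tolist()}
print(geometry)  # {'type': 'Point', 'coordinates': [21.0122, 52.2297]}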
26 changes: 12 additions & 14 deletions api/v1/node.py
@@ -2,15 +2,15 @@
 from datetime import datetime, timedelta
 from urllib.parse import quote_plus

-from fastapi import APIRouter, HTTPException
+from fastapi import APIRouter, Response
 from pytz import timezone
 from shapely import get_coordinates
 from tzfpy import get_tz

-from middlewares.cache_middleware import configure_cache
+from middlewares.cache_control_middleware import cache_control
 from middlewares.skip_serialization import skip_serialization
-from states.aed_state import AEDState
-from states.photo_state import PhotoState
+from services.aed_service import AEDService
+from services.photo_service import PhotoService
 from utils import get_wikimedia_commons_url

 router = APIRouter()
@@ -40,11 +40,11 @@ async def _get_image_data(tags: dict[str, str]) -> dict:
         image_url
         and (photo_id_match := photo_id_re.search(image_url))
         and (photo_id := photo_id_match.group('id'))
-        and (photo_info := await PhotoState.get_photo_by_id(photo_id))
+        and (await PhotoService.get_by_id(photo_id)) is not None
     ):
         return {
-            '@photo_id': photo_info.id,
-            '@photo_url': f'/api/v1/photos/view/{photo_info.id}.webp',
+            '@photo_id': photo_id,
+            '@photo_url': f'/api/v1/photos/view/{photo_id}.webp',
             '@photo_source': None,
         }

@@ -72,18 +72,16 @@


 @router.get('/node/{node_id}')
-@configure_cache(timedelta(minutes=1), stale=timedelta(minutes=5))
+@cache_control(timedelta(minutes=1), stale=timedelta(minutes=5))
 @skip_serialization()
 async def get_node(node_id: int):
-    aed = await AEDState.get_aed_by_id(node_id)
-
+    aed = await AEDService.get_by_id(node_id)
     if aed is None:
-        raise HTTPException(404, f'Node {node_id} not found')
-
-    x, y = get_coordinates(aed.position)[0].tolist()
+        return Response(f'Node {node_id} not found', 404)

     photo_dict = await _get_image_data(aed.tags)

+    x, y = get_coordinates(aed.position)[0].tolist()
     timezone_name, timezone_offset = _get_timezone(x, y)
     timezone_dict = {
         '@timezone_name': timezone_name,
@@ -100,7 +98,7 @@ async def get_node(node_id: int):
         **photo_dict,
         **timezone_dict,
         'type': 'node',
-        'id': aed.id,
+        'id': node_id,
         'lat': y,
         'lon': x,
         'tags': aed.tags,
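
The hunks above call a _get_timezone(x, y) helper whose body is not included in this excerpt. Judging only from the visible imports (pytz, tzfpy) and the call site, a hedged reconstruction might look roughly like the following; the actual implementation in the repository may differ:

from datetime import datetime

from pytz import timezone
from tzfpy import get_tz


def _get_timezone(x: float, y: float) -> tuple[str | None, str | None]:
    # Hypothetical sketch: tzfpy's get_tz takes (longitude, latitude).
    timezone_name = get_tz(x, y)
    timezone_offset = None

    if timezone_name:
        offset = datetime.now(tz=timezone(timezone_name)).strftime('%z')  # e.g. '+0200'
        timezone_offset = f'UTC{offset[:3]}:{offset[3:]}'  # e.g. 'UTC+02:00'

    return timezone_name, timezone_offset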
(Diffs for the remaining changed files are not shown.)
