Merge pull request #570 from AlmaLinux/issues_562,561,560
Issues #562,#561,#560:
Stepan Oksanichenko authored Jul 7, 2022
2 parents b63f201 + 6802d4b commit 9e59f9b
Showing 5 changed files with 79 additions and 15 deletions.
40 changes: 31 additions & 9 deletions src/backend/api/handlers.py
@@ -78,6 +78,21 @@ async def _get_nearest_mirrors_by_network_data(
    The function returns mirrors which are in the same subnet or have the
    same ASN as the request's IP
    """

    def _is_additional_mirrors_suitable(
            mirror_data: MirrorData,
            main_list_of_mirrors: list[MirrorData]
    ) -> bool:
        """
        An additional mirror is a mirror that is fresh (not outdated),
        not flapping, and public, because all suitable private mirrors
        have already been found using ASN or subnet data
        """
        return mirror_data.status == 'ok' and \
            not mirror_data.private and \
            mirror_data not in main_list_of_mirrors

match = get_geo_data_by_ip(ip_address)
asn = get_asn_by_ip(ip_address)
suitable_mirrors = []
@@ -105,15 +120,22 @@ async def _get_nearest_mirrors_by_network_data(
if 1 <= len(suitable_mirrors) < LENGTH_CLOUD_MIRRORS_LIST\
and match is not None:
continent, country, _, _, latitude, longitude = match
suitable_mirrors.extend(
mirror['mirror'] for mirror in
sort_mirrors_by_distance_and_country(
request_geo_data=(latitude, longitude),
mirrors=[mirror for mirror in mirrors
if mirror not in suitable_mirrors],
country=country,
)[:LENGTH_CLOUD_MIRRORS_LIST - len(suitable_mirrors)]
not_sorted_additional_mirrors = [
mirror for mirror in mirrors if _is_additional_mirrors_suitable(
mirror_data=mirror,
main_list_of_mirrors=suitable_mirrors,
)
]
sorted_additional_mirrors = sort_mirrors_by_distance_and_country(
request_geo_data=(latitude, longitude),
mirrors=not_sorted_additional_mirrors,
country=country,
)
randomized_additional_mirrors = randomize_mirrors_within_distance(
mirrors=sorted_additional_mirrors,
country=country,
)[:LENGTH_CLOUD_MIRRORS_LIST - len(suitable_mirrors)]
suitable_mirrors.extend(randomized_additional_mirrors)
return suitable_mirrors
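
For orientation, here is a minimal, self-contained sketch of the selection flow this hunk introduces. The MirrorData stand-in, the plain squared-distance sort, the cap-then-shuffle order, and the 'expired' status string are simplifications and assumptions; the project's own sort_mirrors_by_distance_and_country and randomize_mirrors_within_distance helpers do the real work.

from dataclasses import dataclass
from random import shuffle

LENGTH_CLOUD_MIRRORS_LIST = 5  # assumed value for illustration


@dataclass
class MirrorData:
    name: str
    status: str
    private: bool
    location: tuple[float, float]  # (latitude, longitude)


def is_additional_mirror_suitable(mirror, main_list):
    # fresh (not outdated), public, and not already picked via ASN/subnet data
    return (
        mirror.status == 'ok'
        and not mirror.private
        and mirror not in main_list
    )


def pick_additional_mirrors(mirrors, suitable_mirrors, request_geo_data):
    candidates = [
        m for m in mirrors
        if is_additional_mirror_suitable(m, suitable_mirrors)
    ]
    # sort by rough squared distance to the requester (haversine omitted)
    candidates.sort(
        key=lambda m: (m.location[0] - request_geo_data[0]) ** 2
        + (m.location[1] - request_geo_data[1]) ** 2
    )
    # shuffle the nearest candidates so equally close mirrors share the load
    nearest = candidates[:LENGTH_CLOUD_MIRRORS_LIST - len(suitable_mirrors)]
    shuffle(nearest)
    return suitable_mirrors + nearest


ams = MirrorData('ams', 'ok', False, (52.4, 4.9))
private = MirrorData('corp', 'ok', True, (52.3, 4.8))
stale = MirrorData('old', 'expired', False, (48.9, 2.4))
print(pick_additional_mirrors([ams, private, stale], [], (50.1, 8.7)))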


@@ -186,7 +208,7 @@ async def _get_nearest_mirrors(
if not suitable_mirrors:
suitable_mirrors = await _get_nearest_mirrors_by_geo_data(
ip_address=ip_address,
without_private_mirrors=without_private_mirrors,
without_private_mirrors=True,
)
await set_mirrors_to_cache(
ip_address,
4 changes: 2 additions & 2 deletions src/backend/api/mirrors_update.py
@@ -93,7 +93,7 @@ async def set_repo_status(
) as resp:
timestamp_response = await resp.text()
except (asyncio.exceptions.TimeoutError, HTTPError):
logger.error(
logger.warning(
'Mirror "%s" has no timestamp file by url "%s"',
mirror_info.name,
timestamp_url,
@@ -107,7 +107,7 @@ async def set_repo_status(
try:
mirror_last_updated = float(timestamp_response)
except ValueError:
logger.info(
logger.warning(
'Mirror "%s" has broken timestamp file by url "%s"',
mirror_info.name,
timestamp_url,
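
For context on what these warnings guard, a hedged sketch of the freshness check around them: the mirror's TIME file is expected to hold a Unix epoch, and a mirror whose file is missing or unparsable is merely logged and marked stale rather than treated as a hard error. The 'expired' status value and the 24-hour threshold below are assumptions for illustration.

import time
from typing import Optional

ALLOWED_OUTDATE_SECONDS = 24 * 60 * 60  # assumed threshold


def repo_status_from_timestamp(timestamp_response: Optional[str]) -> str:
    if timestamp_response is None:
        return 'expired'          # the TIME file could not be fetched
    try:
        mirror_last_updated = float(timestamp_response)
    except ValueError:
        return 'expired'          # the TIME file exists but is not a number
    if time.time() - mirror_last_updated > ALLOWED_OUTDATE_SECONDS:
        return 'expired'          # the mirror has not synced recently enough
    return 'ok'


print(repo_status_from_timestamp(str(time.time())))  # -> ok
print(repo_status_from_timestamp('garbage'))         # -> expired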
31 changes: 31 additions & 0 deletions src/backend/api/redis.py
@@ -66,6 +66,37 @@ async def set_mirrors_to_cache(
)


async def get_subnets_from_cache(
        key: str,
) -> dict:
    """
    Get the cached subnets of the Azure/AWS clouds
    """
    async with redis_context() as redis_engine:
        subnets_string = await redis_engine.get(str(key))
        if subnets_string is not None:
            subnets_json = json.loads(
                subnets_string,
            )
            return subnets_json


async def set_subnets_to_cache(
        key: str,
        subnets: dict,
) -> None:
    """
    Save the subnets of the Azure/AWS clouds to the cache
    """
    async with redis_context() as redis_engine:
        subnets = json.dumps(subnets)
        await redis_engine.set(
            str(key),
            subnets,
            24 * 60 * 60,
        )


async def get_geolocation_from_cache(
key: str
) -> Union[tuple[float, float], tuple[None, None]]:
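These two helpers store the cloud subnet lists as JSON strings with a 24-hour expiry. A rough sketch of the resulting cache-aside pattern, written directly against redis.asyncio for illustration; the fetch_azure_subnets stand-in and the locally running Redis are assumptions, and the project goes through its own redis_context() wrapper instead.

import asyncio
import json

import redis.asyncio as redis

CACHE_TTL = 24 * 60 * 60  # one day, matching the expiry used above


async def fetch_azure_subnets() -> dict:
    # stand-in for the real download of the Azure service-tags JSON
    return {'westeurope': ['1.2.3.0/24']}


async def get_azure_subnets_cached(engine: redis.Redis) -> dict:
    cached = await engine.get('azure_subnets')
    if cached is not None:
        return json.loads(cached)          # cache hit: skip the HTTP call
    subnets = await fetch_azure_subnets()  # cache miss: fetch and store
    await engine.set('azure_subnets', json.dumps(subnets), ex=CACHE_TTL)
    return subnets


async def main():
    engine = redis.Redis()
    print(await get_azure_subnets_cached(engine))
    await engine.close()


asyncio.run(main())
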
17 changes: 14 additions & 3 deletions src/backend/api/utils.py
@@ -43,7 +43,9 @@
from haversine import haversine
from api.redis import (
get_geolocation_from_cache,
set_geolocation_to_cache
set_geolocation_to_cache,
get_subnets_from_cache,
set_subnets_to_cache,
)

logger = get_logger(__name__)
@@ -152,7 +154,8 @@ def get_geo_data_by_ip(
db = GeoIPEngine.get_instance()
try:
city = db.city(ip)
except AddressNotFoundError:
# ValueError will be raised in case of incorrect IP
except (AddressNotFoundError, ValueError):
return
try:
city_name = city.city.name
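
The new except clause matters because geoip2 raises ValueError, not AddressNotFoundError, when the input is not a syntactically valid IP address at all. A small sketch of the two failure modes; the .mmdb path is a placeholder.

import geoip2.database
from geoip2.errors import AddressNotFoundError

with geoip2.database.Reader('/path/to/GeoLite2-City.mmdb') as reader:
    for candidate in ('8.8.8.8', '10.0.0.1', 'not-an-ip'):
        try:
            city = reader.city(candidate)
            print(candidate, city.country.iso_code)
        except AddressNotFoundError:
            print(candidate, 'is a valid IP but not in the database')
        except ValueError:
            print(candidate, 'is not a valid IP address at all')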
@@ -209,7 +212,7 @@ async def get_azure_subnets_json(http_session: ClientSession) -> Optional[dict]:
response_json = await resp.json(
content_type='application/octet-stream',
)
except (ClientConnectorError, TimeoutError) as err:
except (ClientConnectorError, asyncio.exceptions.TimeoutError) as err:
logger.error(
'Cannot get json with Azure subnets by url "%s" because "%s"',
link_to_json_url,
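
The widened except clause matters because, on Python versions before 3.11, asyncio.exceptions.TimeoutError is a different class from the builtin TimeoutError, so the old except (ClientConnectorError, TimeoutError) silently missed request timeouts. A quick demonstration:

import asyncio
import sys


async def slow():
    await asyncio.sleep(10)


async def main():
    try:
        await asyncio.wait_for(slow(), timeout=0.1)
    except asyncio.exceptions.TimeoutError:
        print('caught the asyncio timeout')
    print(
        'asyncio.TimeoutError is builtin TimeoutError:',
        asyncio.TimeoutError is TimeoutError,  # False before 3.11, True from 3.11
        'on Python', sys.version_info[:2],
    )


asyncio.run(main())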
@@ -239,6 +242,9 @@ async def get_aws_subnets_json(http_session: ClientSession) -> Optional[dict]:


async def get_azure_subnets(http_session: ClientSession):
subnets = await get_subnets_from_cache('azure_subnets')
if subnets is not None:
return subnets
data_json = await get_azure_subnets_json(http_session=http_session)
subnets = dict()
if data_json is None:
@@ -249,10 +255,14 @@ async def get_azure_subnets(http_session: ClientSession):
properties = value['properties']
subnets[properties['region'].lower()] = \
properties['addressPrefixes']
await set_subnets_to_cache('azure_subnets', subnets)
return subnets


async def get_aws_subnets(http_session: ClientSession):
subnets = await get_subnets_from_cache('aws_subnets')
if subnets is not None:
return subnets
data_json = await get_aws_subnets_json(http_session=http_session)
subnets = defaultdict(list)
if data_json is None:
@@ -261,6 +271,7 @@ async def get_aws_subnets(http_session: ClientSession):
subnets[v4_prefix['region'].lower()].append(v4_prefix['ip_prefix'])
for v6_prefix in data_json['ipv6_prefixes']:
subnets[v6_prefix['region'].lower()].append(v6_prefix['ipv6_prefix'])
await set_subnets_to_cache('aws_subnets', subnets)
return subnets


2 changes: 1 addition & 1 deletion src/backend/yaml_snippets
