Merge pull request #948 from syucream/fix/advanced-search-export-v2
Make advanced search result export v2 compatible with entry import v2
userlocalhost authored Sep 25, 2023
2 parents 76df42d + d958f2f commit dec4b76
Showing 13 changed files with 296 additions and 107 deletions.
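Note on the change itself: the v1 search-result export (_yaml_export in dashboard/tasks.py) writes YAML grouped under entity names with each entry's attrs as a name-to-value mapping, while entry import v2 expects a list of {entity, entries} blocks with attrs as a list of {name, value} pairs. The new _yaml_export_v2 and export_search_result_v2 below produce the latter shape. A rough sketch of the two structures with made-up names; the v1 shape is partly inferred, since the relevant part of _yaml_export is collapsed in this diff:

# v1 (_yaml_export): attrs is a dict, entries grouped under the entity name
# (shape inferred from the collapsed v1 code, illustrative only)
v1_shape = {
    "Server": [
        {"name": "srv001", "attrs": {"ip_addr": "10.0.0.1"}},
    ],
}

# v2 (_yaml_export_v2): a list of {"entity", "entries"} blocks with attrs as a
# list of {"name", "value"} pairs, the layout that entry import v2 accepts
v2_shape = [
    {
        "entity": "Server",
        "entries": [
            {"name": "srv001", "attrs": [{"name": "ip_addr", "value": "10.0.0.1"}]},
        ],
    },
]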
1 change: 1 addition & 0 deletions airone/lib/http.py
@@ -168,6 +168,7 @@ def render(request, template, context={}):
"EXPORT_V2": JobOperation.EXPORT_ENTRY_V2.value,
"RESTORE": JobOperation.RESTORE_ENTRY.value,
"EXPORT_SEARCH_RESULT": JobOperation.EXPORT_SEARCH_RESULT.value,
"EXPORT_SEARCH_RESULT_V2": JobOperation.EXPORT_SEARCH_RESULT_V2.value,
"CREATE_ENTITY": JobOperation.CREATE_ENTITY.value,
"EDIT_ENTITY": JobOperation.EDIT_ENTITY.value,
"DELETE_ENTITY": JobOperation.DELETE_ENTITY.value,
1 change: 1 addition & 0 deletions api_v1/job/views.py
@@ -34,6 +34,7 @@ def get(self, request, format=None):
"export": JobOperation.EXPORT_ENTRY.value,
"export_v2": JobOperation.EXPORT_ENTRY_V2.value,
"export_search_result": JobOperation.EXPORT_SEARCH_RESULT.value,
"export_search_result_v2": JobOperation.EXPORT_SEARCH_RESULT_V2.value,
"restore": JobOperation.RESTORE_ENTRY.value,
"create_entity": JobOperation.CREATE_ENTITY.value,
"edit_entity": JobOperation.EDIT_ENTITY.value,
1 change: 1 addition & 0 deletions api_v1/tests/job/test_api.py
@@ -54,6 +54,7 @@ def test_get_jobs(self):
"export": JobOperation.EXPORT_ENTRY.value,
"export_search_result": JobOperation.EXPORT_SEARCH_RESULT.value,
"export_v2": JobOperation.EXPORT_ENTRY_V2.value,
"export_search_result_v2": JobOperation.EXPORT_SEARCH_RESULT_V2.value,
"restore": JobOperation.RESTORE_ENTRY.value,
"create_entity": JobOperation.CREATE_ENTITY.value,
"edit_entity": JobOperation.EDIT_ENTITY.value,
27 changes: 13 additions & 14 deletions dashboard/tasks.py
@@ -1,6 +1,7 @@
import csv
import io
import json
from typing import Any, Optional

import yaml
from django.conf import settings
@@ -12,7 +13,7 @@
from job.models import Job


def _csv_export(job, values, recv_data, has_referral):
def _csv_export(job: Job, values, recv_data: dict, has_referral: bool) -> Optional[io.StringIO]:
output = io.StringIO(newline="")
writer = csv.writer(output)

@@ -29,7 +30,7 @@ def _csv_export(job, values, recv_data, has_referral):

# Abort processing when job is canceled
if index % Job.STATUS_CHECK_FREQUENCY == 0 and job.is_canceled():
return
return None

# Append the data which specifies Entity name to which target Entry belongs
line_data.append(entry_info["entity"]["name"])
@@ -47,7 +48,7 @@ def _csv_export(job, values, recv_data, has_referral):
if (value is not None) and ("type" in value):
vtype = value["type"]

vval = None
vval: Any = None
if (value is not None) and ("value" in value):
vval = value["value"]

@@ -101,10 +102,10 @@ def _csv_export(job, values, recv_data, has_referral):
return output


def _yaml_export(job, values, recv_data, has_referral):
def _yaml_export(job: Job, values, recv_data: dict, has_referral: bool) -> Optional[io.StringIO]:
output = io.StringIO()

def _get_attr_value(atype, value):
def _get_attr_value(atype: int, value: dict):
if atype & AttrTypeValue["array"]:
return [_get_attr_value(atype ^ AttrTypeValue["array"], x) for x in value]

@@ -123,16 +124,16 @@ def _get_attr_value(atype, value):
else:
return value

resp_data = {}
resp_data: dict = {}
for index, entry_info in enumerate(values):
data = {
data: dict = {
"name": entry_info["entry"]["name"],
"attrs": {},
}

# Abort processing when job is canceled
if index % Job.STATUS_CHECK_FREQUENCY == 0 and job.is_canceled():
return
return None

for attrinfo in recv_data["attrinfo"]:
if attrinfo["name"] in entry_info["attrs"]:
@@ -175,10 +176,9 @@ def export_search_result(self, job_id):
recv_data = json.loads(job.params)

# "has_referral" may be absent from the request; treat a missing value as False
has_referral = recv_data.get("has_referral", False)
referral_name = recv_data.get("referral_name")
entry_name = recv_data.get("entry_name")

has_referral: bool = recv_data.get("has_referral", False)
referral_name: Optional[str] = recv_data.get("referral_name")
entry_name: Optional[str] = recv_data.get("entry_name")
if has_referral and referral_name is None:
referral_name = ""

@@ -191,10 +191,9 @@
referral_name,
)

io_stream = None
io_stream: Optional[io.StringIO] = None
if recv_data["export_style"] == "yaml":
io_stream = _yaml_export(job, resp["ret_values"], recv_data, has_referral)

elif recv_data["export_style"] == "csv":
io_stream = _csv_export(job, resp["ret_values"], recv_data, has_referral)

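The dashboard/tasks.py changes are typing cleanups rather than behavior changes: the bare return on the cancel path becomes return None, and both exporters are annotated to return Optional[io.StringIO], so callers and mypy can tell a canceled job apart from a produced stream. A minimal sketch of the consuming side, condensed from the export_search_result code above (variable names are illustrative):

io_stream: Optional[io.StringIO] = _csv_export(job, resp["ret_values"], recv_data, has_referral)
if io_stream:
    # None means the export was aborted because the job was canceled
    job.set_cache(io_stream.getvalue())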
10 changes: 4 additions & 6 deletions entry/api_v2/serializers.py
@@ -1126,11 +1126,9 @@ def save(self, **kwargs):
raise ValidationError("Same export processing is under execution")

# create a job to export search result and run it
job = Job.new_export_search_result(
user,
**{
"text": "search_results.%s" % self.validated_data["export_style"],
"params": self.validated_data,
},
job = Job.new_export_search_result_v2(
user=user,
text="search_results.%s" % self.validated_data["export_style"],
params=self.validated_data,
)
job.run()
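This hunk switches the export job from the v1 constructor (new_export_search_result, called with a splatted dict) to the v2 one with explicit keyword arguments. The new_export_search_result_v2 constructor itself is defined in job/models.py, one of the 13 changed files not shown in this excerpt. Condensed from the hunks in this commit, the hand-off between the serializer and the new Celery task looks roughly like this (not new code, just the two ends placed side by side):

# serializer side (entry/api_v2/serializers.py): persist the validated data on a job
job = Job.new_export_search_result_v2(
    user=user,
    text="search_results.%s" % validated_data["export_style"],  # "yaml" or "csv"
    params=validated_data,
)
job.run()

# task side (entry/tasks.py, export_search_result_v2): re-parse and re-validate job.params
serializer = AdvancedSearchResultExportSerializer(data=json.loads(job.params))
serializer.is_valid(raise_exception=True)
params = serializer.validated_data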
2 changes: 1 addition & 1 deletion entry/models.py
@@ -1739,7 +1739,7 @@ def export(self, user):

return {"name": self.name, "attrs": attrinfo}

def export_v2(self, user, with_entity: bool = False):
def export_v2(self, user, with_entity: bool = False) -> dict:
attrinfo = []

# This calling of complement_attrs is needed to take into account the case of the Attributes
156 changes: 155 additions & 1 deletion entry/tasks.py
@@ -2,9 +2,10 @@
import io
import json
from datetime import datetime
from typing import Optional
from typing import List, Optional

import yaml
from django.conf import settings
from rest_framework.exceptions import ValidationError

import custom_view
@@ -19,14 +20,18 @@
from airone.lib.job import may_schedule_until_job_is_ready
from airone.lib.log import Logger
from airone.lib.types import AttrTypeValue
from dashboard.tasks import _csv_export
from entity.models import Entity, EntityAttr
from entry.api_v2.serializers import (
AdvancedSearchResultExportSerializer,
EntryCreateSerializer,
EntryImportEntitySerializer,
EntryUpdateSerializer,
)
from entry.models import Attribute, Entry
from group.models import Group
from job.models import Job
from role.models import Role
from user.models import User


@@ -240,6 +245,112 @@ def _do_import_entries_v2(job: Job):
job.update(status=Job.STATUS["DONE"], text=text)


def _yaml_export_v2(job: Job, values, recv_data: dict, has_referral: bool) -> Optional[io.StringIO]:
output = io.StringIO()

def _get_attr_value(atype: int, value: dict):
if atype & AttrTypeValue["array"]:
return [_get_attr_value(atype ^ AttrTypeValue["array"], x) for x in value]

if atype == AttrTypeValue["named_object"]:
[(key, val)] = value.items()
entry = (
Entry.objects.filter(id=val["id"]).first()
if isinstance(val.get("id"), int)
else None
)
if entry:
return {
key: {
"entity": entry.schema.name,
"name": val["name"],
}
}
elif len(key) > 0:
return {
key: None,
}
else:
return {}

if atype == AttrTypeValue["object"]:
entry = (
Entry.objects.filter(id=value["id"]).first()
if isinstance(value.get("id"), int)
else None
)
if entry:
return {
"entity": entry.schema.name,
"name": value["name"],
}
else:
return None

elif atype == AttrTypeValue["group"]:
if isinstance(value.get("id"), int) and Group.objects.filter(id=value["id"]).exists():
return value["name"]
else:
return None

elif atype == AttrTypeValue["role"]:
if isinstance(value.get("id"), int) and Role.objects.filter(id=value["id"]).exists():
return value["name"]
else:
return None

else:
return value

resp_data: List[dict] = []
for index, entry_info in enumerate(values):
data: dict = {
"name": entry_info["entry"]["name"],
"attrs": [],
}

# Abort processing when job is canceled
if index % Job.STATUS_CHECK_FREQUENCY == 0 and job.is_canceled():
return None

for attrinfo in recv_data["attrinfo"]:
if attrinfo["name"] in entry_info["attrs"]:
_adata = entry_info["attrs"][attrinfo["name"]]
if "value" not in _adata:
continue

data["attrs"].append(
{
"name": attrinfo["name"],
"value": _get_attr_value(_adata["type"], _adata["value"]),
}
)

if has_referral is not False:
data["referrals"] = [
{
"entity": x["schema"]["name"],
"entry": x["name"],
}
for x in entry_info["referrals"]
]

found = next(filter(lambda x: x["entity"] == entry_info["entity"]["name"], resp_data), None)
if found:
found["entries"].append(data)
else:
resp_data.append(
{
"entity": entry_info["entity"]["name"],
"entries": [data],
}
)

output.write(yaml.dump(resp_data, default_flow_style=False, allow_unicode=True))

return output


@app.task(bind=True)
def create_entry_attrs(self, job_id):
job = Job.objects.get(id=job_id)
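Compared with the v1 helper, the key difference in _yaml_export_v2 is how entry references are rendered: its nested _get_attr_value helper looks up the referenced Entry and emits the entity name together with the entry name, which import v2 needs to resolve the reference, and it falls back to None when the referenced entry no longer exists. Conceptually, for an object-type attribute (ids and names below are made up):

# value as it arrives in the search result:
value = {"id": 123, "name": "vlan100"}

# _get_attr_value(AttrTypeValue["object"], value) yields, assuming the entry
# with id=123 still exists and belongs to an entity named "Network":
#   {"entity": "Network", "name": "vlan100"}
# and None if no such entry exists; named_object values get the same
# {"entity", "name"} form nested under their key.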
Expand Down Expand Up @@ -672,6 +783,49 @@ def data2str(data):
job.update(Job.STATUS["DONE"])


@app.task(bind=True)
def export_search_result_v2(self, job_id: int):
job: Job = Job.objects.get(id=job_id)

if not job.proceed_if_ready():
return
job.update(Job.STATUS["PROCESSING"])

user = job.user
serializer = AdvancedSearchResultExportSerializer(data=json.loads(job.params))
serializer.is_valid(raise_exception=True)
params: dict = serializer.validated_data

has_referral: bool = params.get("has_referral", False)
referral_name: Optional[str] = params.get("referral_name")
entry_name: Optional[str] = params.get("entry_name")
if has_referral and referral_name is None:
referral_name = ""

resp = Entry.search_entries(
user,
params["entities"],
params["attrinfo"],
settings.ES_CONFIG["MAXIMUM_RESULTS_NUM"],
entry_name,
referral_name,
)

output: Optional[io.StringIO] = None
if params["export_style"] == "yaml":
output = _yaml_export_v2(job, resp["ret_values"], params, has_referral)
elif params["export_style"] == "csv":
# NOTE: reuse the v1 CSV export logic for now; ideally v2 gets its own implementation
output = _csv_export(job, resp["ret_values"], params, has_referral)

if output:
job.set_cache(output.getvalue())

# Update the job status to DONE unless the job has been canceled
if not job.is_canceled():
job.update(Job.STATUS["DONE"])


@app.task(bind=True)
def register_referrals(self, job_id):
job = Job.objects.get(id=job_id)
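The compatibility that the commit title describes can be stated as a concrete property: YAML emitted by export_search_result_v2 parses into the list-of-entity-blocks structure that entry import v2 consumes. A self-contained illustration with made-up data (this is not a test shipped in this commit):

import yaml

sample = """
- entity: Server
  entries:
  - name: srv001
    attrs:
    - name: ip_addr
      value: 10.0.0.1
    referrals:
    - entity: Rack
      entry: rack-01
"""

data = yaml.safe_load(sample)
assert isinstance(data, list)
for block in data:
    assert {"entity", "entries"} <= set(block)
    for entry in block["entries"]:
        assert "name" in entry
        assert isinstance(entry["attrs"], list)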
(Diffs for the remaining 6 of the 13 changed files are not shown here.)