Topic json upload rework (#1914)
Co-authored-by: Ralf Peschke <[email protected]>
reiterl and r-peschke authored Sep 29, 2023
1 parent 39bdb82 commit 732341a
Showing 7 changed files with 441 additions and 167 deletions.
31 changes: 17 additions & 14 deletions openslides_backend/action/actions/agenda_item/agenda_creation.py
@@ -82,18 +82,21 @@ def check_dependant_action_execution_agenda_item(
def get_dependent_action_data_agenda_item(
self, instance: Dict[str, Any], CreateActionClass: Type[Action]
) -> List[Dict[str, Any]]:
agenda_item_action_data = {
"content_object_id": fqid_from_collection_and_id(
self.model.collection, instance["id"]
),
}
for extra_field in agenda_creation_properties.keys():
if extra_field == f"{AGENDA_PREFIX}create":
# This field should not be provided to the AgendaItemCreate action.
continue
prefix_len = len(AGENDA_PREFIX)
extra_field_without_prefix = extra_field[prefix_len:]
value = instance.pop(extra_field, None)
if value is not None:
agenda_item_action_data[extra_field_without_prefix] = value
agenda_item_action_data = self.remove_agenda_prefix_from_fieldnames(instance)
agenda_item_action_data["content_object_id"] = fqid_from_collection_and_id(
self.model.collection, instance["id"]
)
return [agenda_item_action_data]

@staticmethod
def remove_agenda_prefix_from_fieldnames(
instance: Dict[str, Any]
) -> Dict[str, Any]:
prefix_len = len(AGENDA_PREFIX)
extra_field = f"{AGENDA_PREFIX}create" # This field should not be provided to the AgendaItemCreate action.
agenda_item = {
field[prefix_len:]: value
for field in agenda_creation_properties.keys()
if field != extra_field and (value := instance.pop(field, None)) is not None
}
return agenda_item
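
A minimal usage sketch of the new static helper (illustrative, not part of the diff), assuming AGENDA_PREFIX is "agenda_" and that agenda_type and agenda_duration appear in agenda_creation_properties:

from openslides_backend.action.actions.agenda_item.agenda_creation import (
    CreateActionWithAgendaItemMixin,
)

instance = {"title": "Budget", "agenda_type": "internal", "agenda_duration": 30}
agenda_item = CreateActionWithAgendaItemMixin.remove_agenda_prefix_from_fieldnames(instance)
# agenda_item == {"type": "internal", "duration": 30}
# instance was mutated in place: the prefixed keys were popped, leaving {"title": "Budget"}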
116 changes: 99 additions & 17 deletions openslides_backend/action/actions/topic/import_.py
@@ -1,19 +1,28 @@
from typing import Any, Dict
from typing import Any, Dict, List, cast

from ....models.models import ImportPreview
from ....permissions.permissions import Permissions
from ....shared.exceptions import ActionException
from ....shared.filters import FilterOperator
from ....shared.patterns import fqid_from_collection_and_id
from ....shared.schema import required_id_schema
from ...mixins.import_mixins import ImportMixin, ImportState
from ...mixins.import_mixins import (
ImportMixin,
ImportRow,
ImportState,
Lookup,
ResultType,
)
from ...util.default_schema import DefaultSchema
from ...util.register import register_action
from ..agenda_item.agenda_creation import CreateActionWithAgendaItemMixin
from ..agenda_item.update import AgendaItemUpdate
from .create import TopicCreate
from .mixins import DuplicateCheckMixin
from .update import TopicUpdate


@register_action("topic.import")
class TopicImport(DuplicateCheckMixin, ImportMixin):
class TopicImport(ImportMixin):
"""
Action to import a result from the import_preview.
"""
@@ -27,25 +36,84 @@ class TopicImport(DuplicateCheckMixin, ImportMixin):
)
permission = Permissions.AgendaItem.CAN_MANAGE
import_name = "topic"
agenda_item_fields = ["agenda_comment", "agenda_duration", "agenda_type"]

def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
instance = super().update_instance(instance)

# handle abort in on_success
if not instance["import"]:
return {}

instance = super().update_instance(instance)

meeting_id = self.get_meeting_id(instance)
self.init_duplicate_set(meeting_id)
action_payload = [
entry["data"]
for entry in self.result.get("rows", [])
if (entry["state"] in (ImportState.NEW, ImportState.WARNING))
and not self.check_for_duplicate(entry["data"]["title"])
]
self.execute_other_action(TopicCreate, action_payload)
self.error = False
return instance
self.setup_lookups(self.result.get("rows", []), meeting_id)

self.rows = [self.validate_entry(row) for row in self.result["rows"]]

if self.import_state != ImportState.ERROR:
create_action_payload: List[Dict[str, Any]] = []
update_action_payload: List[Dict[str, Any]] = []
update_agenda_item_payload: List[Dict[str, Any]] = []
self.flatten_object_fields(["title"])
for row in self.rows:
entry = row["data"]
if row["state"] == ImportState.NEW:
create_action_payload.append(entry)
else:
agenda_item = CreateActionWithAgendaItemMixin.remove_agenda_prefix_from_fieldnames(
entry
)
if agenda_item:
agenda_item["id"] = self.topic_lookup.get_field_by_name(
entry["title"], "agenda_item_id"
)
update_agenda_item_payload.append(agenda_item)
entry.pop("meeting_id", None)
update_action_payload.append(entry)
if create_action_payload:
self.execute_other_action(TopicCreate, create_action_payload)
if update_action_payload:
self.execute_other_action(TopicUpdate, update_action_payload)
if update_agenda_item_payload:
self.execute_other_action(AgendaItemUpdate, update_agenda_item_payload)

return {}

def validate_entry(self, row: ImportRow) -> ImportRow:
entry = row["data"]
title = cast(str, self.get_value_from_union_str_object(entry.get("title")))
check_result = self.topic_lookup.check_duplicate(title)
id_ = cast(int, self.topic_lookup.get_field_by_name(title, "id"))

if check_result == ResultType.FOUND_ID and id_ != 0:
if "id" not in entry:
raise ActionException(
f"Invalid JsonUpload data: A data row with state '{ImportState.DONE}' must have an 'id'"
)
elif entry["id"] != id_:
row["state"] = ImportState.ERROR
entry["title"]["info"] = ImportState.ERROR
row["messages"].append(
f"Error: topic '{title}' found in different id ({id_} instead of {entry['id']})"
)
elif check_result == ResultType.FOUND_MORE_IDS:
row["state"] = ImportState.ERROR
entry["title"]["info"] = ImportState.ERROR
row["messages"].append(f"Error: topic '{title}' is duplicated in import.")
elif check_result == ResultType.NOT_FOUND_ANYMORE:
row["messages"].append(
f"Error: topic {entry['title']['id']} not found anymore for updating topic '{title}'."
)
row["state"] = ImportState.ERROR
elif check_result == ResultType.NOT_FOUND:
pass  # not found is fine here: the row will simply create a new topic

if row["state"] == ImportState.ERROR and self.import_state == ImportState.DONE:
self.import_state = ImportState.ERROR
return {
"state": row["state"],
"data": row["data"],
"messages": row.get("messages", []),
}

def get_meeting_id(self, instance: Dict[str, Any]) -> int:
store_id = instance["id"]
@@ -57,3 +125,17 @@ def get_meeting_id(self, instance: Dict[str, Any]) -> int:
if worker.get("name") == TopicImport.import_name:
return next(iter(worker.get("result", {})["rows"]))["data"]["meeting_id"]
raise ActionException("Import data cannot be found.")

def setup_lookups(self, data: List[Dict[str, Any]], meeting_id: int) -> None:
self.topic_lookup = Lookup(
self.datastore,
"topic",
[
(title, entry["data"])
for entry in data
if (title := entry["data"].get("title", {}).get("value"))
],
field="title",
mapped_fields=["agenda_item_id"],
global_and_filter=FilterOperator("meeting_id", "=", meeting_id),
)
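
A standalone sketch of the row dispatch that the reworked update_instance performs (illustrative, not part of the diff; plain strings stand in for the ImportState members and a dict stands in for self.topic_lookup):

from typing import Any, Dict, List

rows: List[Dict[str, Any]] = [
    {"state": "new", "data": {"title": "Brand new topic", "meeting_id": 1}},
    {"state": "done", "data": {"id": 42, "title": "Existing topic",
                               "meeting_id": 1, "agenda_duration": 15}},
]
agenda_item_id_by_title = {"Existing topic": 7}  # stand-in for the topic_lookup

create_payload: List[Dict[str, Any]] = []
update_payload: List[Dict[str, Any]] = []
agenda_update_payload: List[Dict[str, Any]] = []
for row in rows:
    entry = dict(row["data"])
    if row["state"] == "new":
        create_payload.append(entry)  # -> topic.create
    else:
        # agenda_* fields are split off into an agenda_item.update payload
        agenda_item = (
            {"duration": entry.pop("agenda_duration")}
            if "agenda_duration" in entry
            else {}
        )
        if agenda_item:
            agenda_item["id"] = agenda_item_id_by_title[entry["title"]]
            agenda_update_payload.append(agenda_item)  # -> agenda_item.update
        entry.pop("meeting_id", None)
        update_payload.append(entry)  # -> topic.update

print(create_payload, update_payload, agenda_update_payload)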
64 changes: 42 additions & 22 deletions openslides_backend/action/actions/topic/json_upload.py
@@ -1,20 +1,17 @@
from typing import Any, Dict

import fastjsonschema
from typing import Any, Dict, List

from ....models.models import Topic
from ....permissions.permissions import Permissions
from ....shared.filters import FilterOperator
from ....shared.schema import required_id_schema
from ...mixins.import_mixins import ImportState, JsonUploadMixin
from ...mixins.import_mixins import ImportState, JsonUploadMixin, Lookup, ResultType
from ...util.default_schema import DefaultSchema
from ...util.register import register_action
from ..agenda_item.agenda_creation import agenda_creation_properties
from .create import TopicCreate
from .mixins import DuplicateCheckMixin


@register_action("topic.json_upload")
class TopicJsonUpload(DuplicateCheckMixin, JsonUploadMixin):
class TopicJsonUpload(JsonUploadMixin):
"""
Action that allows uploading a JSON file. It is used as the first step of an import.
"""
@@ -46,14 +43,16 @@ class TopicJsonUpload(DuplicateCheckMixin, JsonUploadMixin):
"meeting_id": required_id_schema,
}
)
permission = Permissions.AgendaItem.CAN_MANAGE
headers = [
{"property": "title", "type": "string"},
{"property": "title", "type": "string", "is_object": True},
{"property": "text", "type": "string"},
{"property": "agenda_comment", "type": "string"},
{"property": "agenda_type", "type": "string"},
{"property": "agenda_duration", "type": "integer"},
]
permission = Permissions.AgendaItem.CAN_MANAGE
row_state: ImportState
topic_lookup: Lookup

def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
data = instance.pop("data")
@@ -62,15 +61,18 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
for entry in data:
entry["meeting_id"] = instance["meeting_id"]

# validate and check for duplicates
self.init_duplicate_set(instance["meeting_id"])
# setup and validate entries
self.setup_lookups(data, instance["meeting_id"])
self.rows = [self.validate_entry(entry) for entry in data]

# generate statistics
itemCount = len(self.rows)
state_to_count = {state: 0 for state in ImportState}
for entry in self.rows:
state_to_count[entry["state"]] += 1
for row in self.rows:
state_to_count[row["state"]] += 1
state_to_count[ImportState.WARNING] += self.count_warnings_in_payload(
row.get("data", {}).values()
)

self.statistics = [
{"name": "total", "value": itemCount},
@@ -80,6 +82,7 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
{"name": "warning", "value": state_to_count[ImportState.WARNING]},
]

# finalize
self.set_state(
state_to_count[ImportState.ERROR], state_to_count[ImportState.WARNING]
)
@@ -88,14 +91,31 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:

def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]:
state, messages = None, []
try:
TopicCreate.schema_validator(entry)
if self.check_for_duplicate(entry["title"]):
state = ImportState.WARNING
messages.append("Duplicate")
else:
state = ImportState.NEW
except fastjsonschema.JsonSchemaException as exception:
check_result = self.topic_lookup.check_duplicate(title := entry["title"])
id_ = self.topic_lookup.get_field_by_name(title, "id")
if check_result == ResultType.FOUND_ID:
state = ImportState.DONE
messages.append("Existing topic will be updated.")
entry["id"] = id_
entry["title"] = {
"value": title,
"info": ImportState.WARNING,
"id": id_,
}
elif check_result == ResultType.NOT_FOUND:
state = ImportState.NEW
entry["title"] = {"value": title, "info": ImportState.NEW}
elif check_result == ResultType.FOUND_MORE_IDS:
state = ImportState.ERROR
messages.append(exception.message)
messages.append(f"Duplicated topic name '{title}'.")
entry["title"] = {"value": title, "info": ImportState.ERROR}
return {"state": state, "messages": messages, "data": entry}

def setup_lookups(self, data: List[Dict[str, Any]], meeting_id: int) -> None:
self.topic_lookup = Lookup(
self.datastore,
"topic",
[(title, entry) for entry in data if (title := entry.get("title"))],
field="title",
global_and_filter=FilterOperator("meeting_id", "=", meeting_id),
)
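
For reference, a rough sketch of the three row shapes this validate_entry now produces (values are made up; the lowercase strings standing in for ImportState members are an assumption):

found_one = {   # ResultType.FOUND_ID: an existing topic will be updated
    "state": "done",
    "messages": ["Existing topic will be updated."],
    "data": {"title": {"value": "Budget", "info": "warning", "id": 42},
             "id": 42, "meeting_id": 1},
}
not_found = {   # ResultType.NOT_FOUND: a new topic will be created
    "state": "new",
    "messages": [],
    "data": {"title": {"value": "New item", "info": "new"}, "meeting_id": 1},
}
duplicated = {  # ResultType.FOUND_MORE_IDS: ambiguous title within the import
    "state": "error",
    "messages": ["Duplicated topic name 'Budget'."],
    "data": {"title": {"value": "Budget", "info": "error"}, "meeting_id": 1},
}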
8 changes: 7 additions & 1 deletion openslides_backend/action/mixins/import_mixins.py
@@ -56,6 +56,7 @@ def __init__(
name_entries: List[Tuple[SearchFieldType, Dict[str, Any]]],
field: SearchFieldType = "name",
mapped_fields: Optional[List[str]] = None,
global_and_filter: Optional[Filter] = None,
) -> None:
if mapped_fields is None:
mapped_fields = []
@@ -86,9 +87,14 @@ def __init__(
for name_tpl, _ in name_entries
]
if or_filters:
if global_and_filter:
filter_: Filter = And(global_and_filter, Or(*or_filters))
else:
filter_ = Or(*or_filters)

for entry in datastore.filter(
collection,
Or(*or_filters),
filter_,
mapped_fields,
lock_result=False,
).values():
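
A small sketch of the filter the Lookup can now build with the new global_and_filter argument (not part of the diff; it assumes And and Or are exported from openslides_backend.shared.filters alongside FilterOperator):

from openslides_backend.shared.filters import And, FilterOperator, Or

or_filters = [FilterOperator("title", "=", t) for t in ("Budget", "Elections")]
global_and_filter = FilterOperator("meeting_id", "=", 1)
filter_ = And(global_and_filter, Or(*or_filters))
# The meeting restriction is AND-ed around the per-name OR, so topics with the
# same title in other meetings no longer match the lookup.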