Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Topic json upload rework #1914

Merged
merged 29 commits into from
Sep 29, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
b60401e
intermediate commit with renames
r-peschke Aug 21, 2023
1b1fb6f
Base account import on Lookup class and fix some smaller problems in …
r-peschke Aug 29, 2023
2aa703b
json_upload ready for client test, import with state errors
r-peschke Aug 31, 2023
39e4e62
Fix some user.create bugs concerning saml_id
r-peschke Sep 1, 2023
7030304
fixed bugs in account.json_upload
r-peschke Sep 1, 2023
c52562a
Pre ready for review, still with some errors in test
r-peschke Sep 5, 2023
e75f560
fixed all errors
r-peschke Sep 6, 2023
5d5d6a5
add validate_instance
r-peschke Sep 6, 2023
aabd690
add 3 fields default_number, default_structure_level and default_vote…
r-peschke Sep 7, 2023
4265a54
intermediate
r-peschke Sep 8, 2023
0f0319f
intermediate state, used to create PullRequest
r-peschke Sep 18, 2023
a845288
more coverage
r-peschke Sep 18, 2023
97f6087
ready for review
r-peschke Sep 18, 2023
66742a7
Format decimal-type values
r-peschke Sep 19, 2023
6334649
Reuse HeaderEntry as typed dict
r-peschke Sep 21, 2023
9165e2e
use import_preview-collection for import instead of action_worker
r-peschke Sep 21, 2023
147ab66
Rework of the topic import
reiterl Sep 11, 2023
7a9fdd1
More rework of topic json upload
reiterl Sep 13, 2023
9123198
Update of topic json upload
reiterl Sep 14, 2023
f2af4f7
Update topic json upload
reiterl Sep 18, 2023
9c76642
Add topic import tests with json upload included
reiterl Sep 18, 2023
f0a06e3
Use Lookup class in topic import, title is object
reiterl Sep 19, 2023
793ef83
Update warning msg in topic json upload
reiterl Sep 20, 2023
bd77d48
topic.import: update topic and update agenda item
reiterl Sep 21, 2023
fc61abe
intermediate, action_worker collection => import_preview collection
r-peschke Sep 25, 2023
b1ad284
topic import fixed
r-peschke Sep 25, 2023
6481a44
remove unnecessary code (no test idea;-)) and better test coverage
r-peschke Sep 27, 2023
0ca9a2f
fix codereview remarks
r-peschke Sep 28, 2023
49a817f
fix some rebase problems
r-peschke Sep 29, 2023
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
31 changes: 17 additions & 14 deletions openslides_backend/action/actions/agenda_item/agenda_creation.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,18 +82,21 @@ def check_dependant_action_execution_agenda_item(
def get_dependent_action_data_agenda_item(
self, instance: Dict[str, Any], CreateActionClass: Type[Action]
) -> List[Dict[str, Any]]:
agenda_item_action_data = {
"content_object_id": fqid_from_collection_and_id(
self.model.collection, instance["id"]
),
}
for extra_field in agenda_creation_properties.keys():
if extra_field == f"{AGENDA_PREFIX}create":
# This field should not be provided to the AgendaItemCreate action.
continue
prefix_len = len(AGENDA_PREFIX)
extra_field_without_prefix = extra_field[prefix_len:]
value = instance.pop(extra_field, None)
if value is not None:
agenda_item_action_data[extra_field_without_prefix] = value
agenda_item_action_data = self.remove_agenda_prefix_from_fieldnames(instance)
agenda_item_action_data["content_object_id"] = fqid_from_collection_and_id(
self.model.collection, instance["id"]
)
return [agenda_item_action_data]

@staticmethod
def remove_agenda_prefix_from_fieldnames(
    instance: Dict[str, Any]
) -> Dict[str, Any]:
    """Pop all agenda-prefixed fields out of *instance* and return them unprefixed.

    Mutates *instance*: every property from ``agenda_creation_properties``
    (except ``agenda_create``) whose value is not ``None`` is removed from it
    and re-keyed without the ``AGENDA_PREFIX`` in the returned dict.
    """
    skip_field = f"{AGENDA_PREFIX}create"  # must not be handed to AgendaItemCreate
    cut = len(AGENDA_PREFIX)
    stripped: Dict[str, Any] = {}
    for prop in agenda_creation_properties.keys():
        if prop == skip_field:
            continue
        payload = instance.pop(prop, None)
        if payload is not None:
            stripped[prop[cut:]] = payload
    return stripped
116 changes: 99 additions & 17 deletions openslides_backend/action/actions/topic/import_.py
Original file line number Diff line number Diff line change
@@ -1,19 +1,28 @@
from typing import Any, Dict
from typing import Any, Dict, List, cast

from ....models.models import ImportPreview
from ....permissions.permissions import Permissions
from ....shared.exceptions import ActionException
from ....shared.filters import FilterOperator
from ....shared.patterns import fqid_from_collection_and_id
from ....shared.schema import required_id_schema
from ...mixins.import_mixins import ImportMixin, ImportState
from ...mixins.import_mixins import (
ImportMixin,
ImportRow,
ImportState,
Lookup,
ResultType,
)
from ...util.default_schema import DefaultSchema
from ...util.register import register_action
from ..agenda_item.agenda_creation import CreateActionWithAgendaItemMixin
from ..agenda_item.update import AgendaItemUpdate
from .create import TopicCreate
from .mixins import DuplicateCheckMixin
from .update import TopicUpdate


@register_action("topic.import")
class TopicImport(DuplicateCheckMixin, ImportMixin):
class TopicImport(ImportMixin):
"""
Action to import a result from the import_preview.
"""
Expand All @@ -27,25 +36,84 @@ class TopicImport(DuplicateCheckMixin, ImportMixin):
)
permission = Permissions.AgendaItem.CAN_MANAGE
import_name = "topic"
agenda_item_fields = ["agenda_comment", "agenda_duration", "agenda_type"]

def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
instance = super().update_instance(instance)

# handle abort in on_success
if not instance["import"]:
return {}

instance = super().update_instance(instance)

meeting_id = self.get_meeting_id(instance)
self.init_duplicate_set(meeting_id)
action_payload = [
entry["data"]
for entry in self.result.get("rows", [])
if (entry["state"] in (ImportState.NEW, ImportState.WARNING))
and not self.check_for_duplicate(entry["data"]["title"])
]
self.execute_other_action(TopicCreate, action_payload)
self.error = False
return instance
self.setup_lookups(self.result.get("rows", []), meeting_id)

self.rows = [self.validate_entry(row) for row in self.result["rows"]]

if self.import_state != ImportState.ERROR:
create_action_payload: List[Dict[str, Any]] = []
update_action_payload: List[Dict[str, Any]] = []
update_agenda_item_payload: List[Dict[str, Any]] = []
self.flatten_object_fields(["title"])
for row in self.rows:
entry = row["data"]
if row["state"] == ImportState.NEW:
create_action_payload.append(entry)
else:
agenda_item = CreateActionWithAgendaItemMixin.remove_agenda_prefix_from_fieldnames(
entry
)
if agenda_item:
agenda_item["id"] = self.topic_lookup.get_field_by_name(
entry["title"], "agenda_item_id"
)
update_agenda_item_payload.append(agenda_item)
entry.pop("meeting_id", None)
update_action_payload.append(entry)
if create_action_payload:
self.execute_other_action(TopicCreate, create_action_payload)
if update_action_payload:
self.execute_other_action(TopicUpdate, update_action_payload)
if update_agenda_item_payload:
self.execute_other_action(AgendaItemUpdate, update_agenda_item_payload)

return {}

def validate_entry(self, row: ImportRow) -> ImportRow:
    """Cross-check one import row against the topic lookup built in setup_lookups.

    Marks the row (and its "title" object field) with ImportState.ERROR when
    the lookup result contradicts the row data, escalates self.import_state
    to ERROR on the first failing row, and returns a normalized row dict with
    "state", "data" and "messages" keys.
    """
    entry = row["data"]
    # "title" is a union str/object field; extract the plain string value.
    title = cast(str, self.get_value_from_union_str_object(entry.get("title")))
    check_result = self.topic_lookup.check_duplicate(title)
    id_ = cast(int, self.topic_lookup.get_field_by_name(title, "id"))

    if check_result == ResultType.FOUND_ID and id_ != 0:
        # Exactly one existing topic carries this title: the row must hold
        # the same id that was resolved during json_upload.
        if "id" not in entry:
            raise ActionException(
                f"Invalid JsonUpload data: A data row with state '{ImportState.DONE}' must have an 'id'"
            )
        elif entry["id"] != id_:
            # Title now resolves to a different topic than at upload time.
            row["state"] = ImportState.ERROR
            entry["title"]["info"] = ImportState.ERROR
            row["messages"].append(
                f"Error: topic '{title}' found in different id ({id_} instead of {entry['id']})"
            )
    elif check_result == ResultType.FOUND_MORE_IDS:
        # Same title occurs more than once — ambiguous target.
        row["state"] = ImportState.ERROR
        entry["title"]["info"] = ImportState.ERROR
        row["messages"].append(f"Error: topic '{title}' is duplicated in import.")
    elif check_result == ResultType.NOT_FOUND_ANYMORE:
        # Topic existed during json_upload but was deleted in the meantime.
        row["messages"].append(
            f"Error: topic {entry['title']['id']} not found anymore for updating topic '{title}'."
        )
        row["state"] = ImportState.ERROR
    elif check_result == ResultType.NOT_FOUND:
        pass  # cannot create an error !

    # The first erroneous row flips the overall import state to ERROR.
    if row["state"] == ImportState.ERROR and self.import_state == ImportState.DONE:
        self.import_state = ImportState.ERROR
    return {
        "state": row["state"],
        "data": row["data"],
        "messages": row.get("messages", []),
    }

def get_meeting_id(self, instance: Dict[str, Any]) -> int:
store_id = instance["id"]
Expand All @@ -57,3 +125,17 @@ def get_meeting_id(self, instance: Dict[str, Any]) -> int:
if worker.get("name") == TopicImport.import_name:
return next(iter(worker.get("result", {})["rows"]))["data"]["meeting_id"]
raise ActionException("Import data cannot be found.")

def setup_lookups(self, data: List[Dict[str, Any]], meeting_id: int) -> None:
    """Build the title-keyed topic Lookup for the rows of this import.

    Only rows whose "title" object carries a truthy "value" participate;
    the lookup is scoped to the given meeting and also resolves each
    topic's agenda_item_id for later agenda-item updates.
    """
    name_entries = []
    for row in data:
        row_data = row["data"]
        title = row_data.get("title", {}).get("value")
        if title:
            name_entries.append((title, row_data))
    self.topic_lookup = Lookup(
        self.datastore,
        "topic",
        name_entries,
        field="title",
        mapped_fields=["agenda_item_id"],
        global_and_filter=FilterOperator("meeting_id", "=", meeting_id),
    )
64 changes: 42 additions & 22 deletions openslides_backend/action/actions/topic/json_upload.py
Original file line number Diff line number Diff line change
@@ -1,20 +1,17 @@
from typing import Any, Dict

import fastjsonschema
from typing import Any, Dict, List

from ....models.models import Topic
from ....permissions.permissions import Permissions
from ....shared.filters import FilterOperator
from ....shared.schema import required_id_schema
from ...mixins.import_mixins import ImportState, JsonUploadMixin
from ...mixins.import_mixins import ImportState, JsonUploadMixin, Lookup, ResultType
from ...util.default_schema import DefaultSchema
from ...util.register import register_action
from ..agenda_item.agenda_creation import agenda_creation_properties
from .create import TopicCreate
from .mixins import DuplicateCheckMixin


@register_action("topic.json_upload")
class TopicJsonUpload(DuplicateCheckMixin, JsonUploadMixin):
class TopicJsonUpload(JsonUploadMixin):
"""
Action to allow to upload a json. It is used as first step of an import.
"""
Expand Down Expand Up @@ -46,14 +43,16 @@ class TopicJsonUpload(DuplicateCheckMixin, JsonUploadMixin):
"meeting_id": required_id_schema,
}
)
permission = Permissions.AgendaItem.CAN_MANAGE
headers = [
{"property": "title", "type": "string"},
{"property": "title", "type": "string", "is_object": True},
{"property": "text", "type": "string"},
{"property": "agenda_comment", "type": "string"},
{"property": "agenda_type", "type": "string"},
{"property": "agenda_duration", "type": "integer"},
]
permission = Permissions.AgendaItem.CAN_MANAGE
row_state: ImportState
topic_lookup: Lookup

def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
data = instance.pop("data")
Expand All @@ -62,15 +61,18 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
for entry in data:
entry["meeting_id"] = instance["meeting_id"]

# validate and check for duplicates
self.init_duplicate_set(instance["meeting_id"])
# setup and validate entries
self.setup_lookups(data, instance["meeting_id"])
self.rows = [self.validate_entry(entry) for entry in data]

# generate statistics
itemCount = len(self.rows)
state_to_count = {state: 0 for state in ImportState}
for entry in self.rows:
state_to_count[entry["state"]] += 1
for row in self.rows:
state_to_count[row["state"]] += 1
state_to_count[ImportState.WARNING] += self.count_warnings_in_payload(
row.get("data", {}).values()
)

self.statistics = [
{"name": "total", "value": itemCount},
Expand All @@ -80,6 +82,7 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:
{"name": "warning", "value": state_to_count[ImportState.WARNING]},
]

# finalize
self.set_state(
state_to_count[ImportState.ERROR], state_to_count[ImportState.WARNING]
)
Expand All @@ -88,14 +91,31 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]:

def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]:
state, messages = None, []
try:
TopicCreate.schema_validator(entry)
if self.check_for_duplicate(entry["title"]):
state = ImportState.WARNING
messages.append("Duplicate")
else:
state = ImportState.NEW
except fastjsonschema.JsonSchemaException as exception:
check_result = self.topic_lookup.check_duplicate(title := entry["title"])
id_ = self.topic_lookup.get_field_by_name(title, "id")
if check_result == ResultType.FOUND_ID:
state = ImportState.DONE
messages.append("Existing topic will be updated.")
entry["id"] = id_
entry["title"] = {
"value": title,
"info": ImportState.WARNING,
"id": id_,
}
elif check_result == ResultType.NOT_FOUND:
state = ImportState.NEW
entry["title"] = {"value": title, "info": ImportState.NEW}
elif check_result == ResultType.FOUND_MORE_IDS:
state = ImportState.ERROR
messages.append(exception.message)
messages.append(f"Duplicated topic name '{title}'.")
entry["title"] = {"value": title, "info": ImportState.ERROR}
return {"state": state, "messages": messages, "data": entry}

def setup_lookups(self, data: List[Dict[str, Any]], meeting_id: int) -> None:
    """Create the Lookup used to detect existing or duplicated topic titles.

    Entries without a truthy "title" are skipped; the lookup is restricted
    to topics of the given meeting.
    """
    entries = []
    for payload in data:
        title = payload.get("title")
        if title:
            entries.append((title, payload))
    self.topic_lookup = Lookup(
        self.datastore,
        "topic",
        entries,
        field="title",
        global_and_filter=FilterOperator("meeting_id", "=", meeting_id),
    )
8 changes: 7 additions & 1 deletion openslides_backend/action/mixins/import_mixins.py
Original file line number Diff line number Diff line change
Expand Up @@ -56,6 +56,7 @@ def __init__(
name_entries: List[Tuple[SearchFieldType, Dict[str, Any]]],
field: SearchFieldType = "name",
mapped_fields: Optional[List[str]] = None,
global_and_filter: Optional[Filter] = None,
) -> None:
if mapped_fields is None:
mapped_fields = []
Expand Down Expand Up @@ -86,9 +87,14 @@ def __init__(
for name_tpl, _ in name_entries
]
if or_filters:
if global_and_filter:
filter_: Filter = And(global_and_filter, Or(*or_filters))
else:
filter_ = Or(*or_filters)

for entry in datastore.filter(
collection,
Or(*or_filters),
filter_,
mapped_fields,
lock_result=False,
).values():
Expand Down
Loading