Commit
Finish json_upload
luisa-beerboom committed Nov 14, 2023
1 parent 6d96b80 commit 1fbfbf4
Showing 2 changed files with 488 additions and 40 deletions.
82 changes: 46 additions & 36 deletions openslides_backend/action/actions/motion/json_upload.py
@@ -278,7 +278,7 @@ def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]:
                             {"value": user, "info": ImportState.WARNING}
                         )
                         message_set.add(
-                            f"At least one {fieldname} has been named multiple times"
+                            f"At least one {fieldname} has been referenced multiple times"
                         )
                     else:
                         username_set.add(user)
@@ -362,53 +362,61 @@ def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]:
                 tags = [tags]
             entry_list = []
             message_set = set()
+            tags_set: Set[str] = set()
             for tag in tags:
                 if isinstance(tag, str):
-                    check_result = self.tags_lookup.check_duplicate(tag)
-                    tag_id = cast(int, self.tags_lookup.get_field_by_name(tag, "id"))
-                    if check_result == ResultType.FOUND_ID and tag_id != 0:
-                        entry_list.append(
-                            {
-                                "value": tag,
-                                "info": ImportState.DONE,
-                                "id": tag_id,
-                            }
+                    if tag in tags_set:
+                        entry_list.append({"value": tag, "info": ImportState.WARNING})
+                        message_set.add(
+                            "At least one tag has been referenced multiple times"
                         )
-                    elif check_result == ResultType.NOT_FOUND or tag_id == 0:
-                        entry_list.append(
-                            {
-                                "value": tag,
-                                "info": ImportState.WARNING,
-                            }
-                        )
-                        message_set.add("Could not find at least one tag")
-                    elif check_result == ResultType.FOUND_MORE_IDS:
-                        entry_list.append(
-                            {
-                                "value": tag,
-                                "info": ImportState.WARNING,
-                            }
-                        )
-                        message_set.add("Found multiple tags with the same name")
-            entry["tags"] = entry_list
-            messages.extend([message for message in message_set])
+                    else:
+                        tags_set.add(tag)
+                        found_tags = self.tags_lookup.get_matching_data_by_name(tag)
+                        if len(found_tags) == 1 and found_tags[0].get("id") != 0:
+                            tag_id = cast(int, found_tags[0].get("id"))
+                            entry_list.append(
+                                {
+                                    "value": tag,
+                                    "info": ImportState.DONE,
+                                    "id": tag_id,
+                                }
+                            )
+                        elif len(found_tags) <= 1:
+                            entry_list.append(
+                                {
+                                    "value": tag,
+                                    "info": ImportState.WARNING,
+                                }
+                            )
+                            message_set.add("Could not find at least one tag")
+                        else:
+                            entry_list.append(
+                                {
+                                    "value": tag,
+                                    "info": ImportState.WARNING,
+                                }
+                            )
+                            message_set.add("Found multiple tags with the same name")
+            entry["tags"] = entry_list
+            messages.extend([message for message in message_set])
 
         if (block := entry.get("block")) and type(block) == str:
-            check_result = self.block_lookup.check_duplicate(block)
-            block_id = cast(int, self.block_lookup.get_field_by_name(block, "id"))
-            if check_result == ResultType.FOUND_ID and block_id != 0:
+            found_blocks = self.block_lookup.get_matching_data_by_name(block)
+            if len(found_blocks) == 1 and found_blocks[0].get("id") != 0:
+                block_id = cast(int, found_blocks[0].get("id"))
                 entry["block"] = {
                     "value": block,
                     "info": ImportState.DONE,
                     "id": block_id,
                 }
-            elif check_result == ResultType.NOT_FOUND or block_id == 0:
+            elif len(found_blocks) <= 1:
                 entry["block"] = {
                     "value": block,
                     "info": ImportState.WARNING,
                 }
-                messages.append("Couldn't find motion block")
-            elif check_result == ResultType.FOUND_MORE_IDS:
+                messages.append("Could not find motion block")
+            else:
                 entry["block"] = {
                     "value": block,
                     "info": ImportState.WARNING,
@@ -496,7 +504,8 @@ def setup_lookups(self, data: Iterable[Dict[str, Any]], meeting_id: int) -> None
             self.datastore,
             "motion_block",
             [(title, entry) for entry in data if (title := entry.get("block"))],
-            field="title",
+            collection_field="title",
+            field="block",
             mapped_fields=[],
             global_and_filter=FilterOperator("meeting_id", "=", meeting_id),
         )
@@ -583,7 +592,8 @@ def setup_lookups(self, data: Iterable[Dict[str, Any]], meeting_id: int) -> None
                 for entry in data
                 for name in self._get_field_array(entry, "tags")
             ],
-            field="name",
+            field="tags",
+            collection_field="name",
             mapped_fields=[],
             global_and_filter=FilterOperator("meeting_id", "=", meeting_id),
         )
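Editor's note: both setup_lookups call sites now split one keyword into two. Judging by the diff alone, collection_field names the datastore field being matched ("title" for motion blocks, "name" for tags), while field carries the import column the lookup reports against ("block", "tags"). A hedged, self-contained sketch of such a lookup; the real Lookup class's signature is assumed here, not confirmed by this commit:

from typing import Any, Dict, Iterable, List, Tuple

class NameLookup:
    """Hypothetical stand-in for the Lookup built in setup_lookups.

    collection_field: which datastore field to match on ("title", "name").
    field: which import column this lookup serves ("block", "tags").
    """

    def __init__(
        self,
        rows: List[Dict[str, Any]],
        name_entries: Iterable[Tuple[str, Dict[str, Any]]],
        collection_field: str,
        field: str,
    ) -> None:
        self.field = field
        wanted = {name for name, _ in name_entries}
        self.by_name: Dict[str, List[Dict[str, Any]]] = {}
        for row in rows:
            key = row.get(collection_field)
            if key in wanted:
                # keep every row with this name so callers can detect ambiguity
                self.by_name.setdefault(key, []).append(row)

    def get_matching_data_by_name(self, name: str) -> List[Dict[str, Any]]:
        return self.by_name.get(name, [])

blocks = [{"id": 1, "title": "A"}, {"id": 2, "title": "A"}, {"id": 3, "title": "B"}]
lookup = NameLookup(blocks, [("A", {}), ("B", {})], collection_field="title", field="block")
print(lookup.get_matching_data_by_name("A"))  # two rows -> ambiguous name
print(lookup.get_matching_data_by_name("B"))  # one row -> clean match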
