From 16c015769034416ec57c60f7b622c8011bac5bea Mon Sep 17 00:00:00 2001 From: reiterl Date: Wed, 27 Sep 2023 14:42:14 +0200 Subject: [PATCH 1/5] Make meeting_user actions backend internal (#1895) --- .../action/actions/meeting_user/create.py | 12 +- .../action/actions/meeting_user/delete.py | 5 +- .../action/actions/meeting_user/mixin.py | 106 +-------- .../action/actions/meeting_user/set_data.py | 20 +- .../action/actions/meeting_user/update.py | 12 +- .../system/action/meeting_user/test_create.py | 80 ------- .../meeting_user/test_create_delegation.py | 213 ------------------ ...egation.py => test_set_data_delegation.py} | 2 +- .../system/action/meeting_user/test_update.py | 58 ----- tests/system/action/user/test_create.py | 8 +- tests/system/action/user/test_update.py | 19 +- 11 files changed, 48 insertions(+), 487 deletions(-) delete mode 100644 tests/system/action/meeting_user/test_create_delegation.py rename tests/system/action/meeting_user/{test_update_delegation.py => test_set_data_delegation.py} (99%) diff --git a/openslides_backend/action/actions/meeting_user/create.py b/openslides_backend/action/actions/meeting_user/create.py index 4cf628071..24154c999 100644 --- a/openslides_backend/action/actions/meeting_user/create.py +++ b/openslides_backend/action/actions/meeting_user/create.py @@ -5,16 +5,17 @@ from openslides_backend.shared.typing import HistoryInformation from ....models.models import MeetingUser -from ....permissions.permissions import Permissions from ...generics.create import CreateAction from ...mixins.meeting_user_helper import get_meeting_user_filter +from ...util.action_type import ActionType from ...util.default_schema import DefaultSchema from ...util.register import register_action -from .mixin import MeetingUserMixin +from .history_mixin import MeetingUserHistoryMixin +from .mixin import meeting_user_standard_fields -@register_action("meeting_user.create") -class MeetingUserCreate(MeetingUserMixin, CreateAction): +@register_action("meeting_user.create", action_type=ActionType.BACKEND_INTERNAL) +class MeetingUserCreate(MeetingUserHistoryMixin, CreateAction): """ Action to create a meeting user. """ @@ -25,10 +26,9 @@ class MeetingUserCreate(MeetingUserMixin, CreateAction): optional_properties=[ "about_me", "group_ids", - *MeetingUserMixin.standard_fields, + *meeting_user_standard_fields, ], ) - permission = Permissions.User.CAN_MANAGE def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: if self.datastore.exists( diff --git a/openslides_backend/action/actions/meeting_user/delete.py b/openslides_backend/action/actions/meeting_user/delete.py index f9c09a6d6..1e4088646 100644 --- a/openslides_backend/action/actions/meeting_user/delete.py +++ b/openslides_backend/action/actions/meeting_user/delete.py @@ -4,13 +4,13 @@ from openslides_backend.shared.typing import HistoryInformation from ....models.models import MeetingUser -from ....permissions.permissions import Permissions from ...generics.delete import DeleteAction +from ...util.action_type import ActionType from ...util.default_schema import DefaultSchema from ...util.register import register_action -@register_action("meeting_user.delete") +@register_action("meeting_user.delete", action_type=ActionType.BACKEND_INTERNAL) class MeetingUserDelete(DeleteAction): """ Action to delete a meeting user. 
@@ -18,7 +18,6 @@ class MeetingUserDelete(DeleteAction): model = MeetingUser() schema = DefaultSchema(MeetingUser()).get_delete_schema() - permission = Permissions.User.CAN_MANAGE def get_history_information(self) -> Optional[HistoryInformation]: users = self.get_instances_with_fields(["user_id", "meeting_id"]) diff --git a/openslides_backend/action/actions/meeting_user/mixin.py b/openslides_backend/action/actions/meeting_user/mixin.py index 79e618d3b..3dc91280b 100644 --- a/openslides_backend/action/actions/meeting_user/mixin.py +++ b/openslides_backend/action/actions/meeting_user/mixin.py @@ -1,105 +1,21 @@ -from typing import Any, Dict, List, Tuple, cast +from typing import Any, Dict, List, cast -from openslides_backend.permissions.management_levels import ( - CommitteeManagementLevel, - OrganizationManagementLevel, -) -from openslides_backend.permissions.permissions import Permissions - -from ....shared.exceptions import ActionException, MissingPermission, PermissionDenied +from ....shared.exceptions import ActionException from ....shared.patterns import fqid_from_collection_and_id from .history_mixin import MeetingUserHistoryMixin +meeting_user_standard_fields = [ + "comment", + "number", + "structure_level", + "vote_weight", + "personal_note_ids", +] -class MeetingUserMixin(MeetingUserHistoryMixin): - standard_fields = [ - "comment", - "number", - "structure_level", - "vote_weight", - "personal_note_ids", - "speaker_ids", - "supported_motion_ids", - "motion_submitter_ids", - "assignment_candidate_ids", - "vote_delegated_to_id", - "vote_delegations_from_ids", - "chat_message_ids", - ] - - def check_permissions(self, instance: Dict[str, Any]) -> None: - """standard_fields have to be checked for user.can_manage, which is always sufficient and - even needed, if there is no data at all exempt the required fields. - Special fields like about_me and group_ids could be managed also with other permissions. 
- Details see https://github.com/OpenSlides/OpenSlides/wiki/meeting_user.create""" - if any(field in self.standard_fields for field in instance.keys()) or not any( - field in ["about_me", "group_ids"] for field in instance - ): - return super().check_permissions(instance) - - def get_user_and_meeting_id() -> Tuple[int, int]: - fields = ["user_id", "meeting_id"] - if any(field not in instance for field in fields): - mu = self.datastore.get( - fqid_from_collection_and_id("meeting_user", instance["id"]), - ["user_id", "meeting_id"], - lock_result=False, - ) - else: - mu = instance - return cast(Tuple[int, int], tuple(mu[field] for field in fields)) - - def get_request_user_data() -> Dict[str, Any]: - return self.datastore.get( - fqid_from_collection_and_id("user", self.user_id), - ["organization_management_level", "committee_management_ids"], - lock_result=False, - ) - - def get_committee_id() -> int: - return self.datastore.get( - fqid_from_collection_and_id("meeting", meeting_id), - ["committee_id"], - lock_result=False, - )["committee_id"] - - def raise_own_exception() -> bool: - try: - super(MeetingUserMixin, self).check_permissions(instance) - return False - except PermissionDenied: - return True - - user_id, meeting_id = get_user_and_meeting_id() - if "about_me" in instance: - if self.user_id != user_id: - if raise_own_exception(): - raise PermissionDenied( - f"The user needs Permission user.can_manage in meeting {meeting_id} to set 'about me', if it is not his own" - ) - else: - return - - if "group_ids" in instance: - user = get_request_user_data() - if ( - OrganizationManagementLevel(user.get("organization_management_level")) - < OrganizationManagementLevel.CAN_MANAGE_USERS - ): - committee_id = get_committee_id() - if ( - committee_id not in user.get("committee_management_ids", []) - and raise_own_exception() - ): - raise MissingPermission( - { - OrganizationManagementLevel.CAN_MANAGE_USERS: 1, - CommitteeManagementLevel.CAN_MANAGE: committee_id, - Permissions.User.CAN_MANAGE: meeting_id, - } - ) +class MeetingUserMixin(MeetingUserHistoryMixin): def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: + instance = super().update_instance(instance) meeting_user_self = self.datastore.get( fqid_from_collection_and_id("meeting_user", instance["id"]), [ diff --git a/openslides_backend/action/actions/meeting_user/set_data.py b/openslides_backend/action/actions/meeting_user/set_data.py index d1b1fd2a2..bd12299e1 100644 --- a/openslides_backend/action/actions/meeting_user/set_data.py +++ b/openslides_backend/action/actions/meeting_user/set_data.py @@ -9,12 +9,15 @@ from ...util.default_schema import DefaultSchema from ...util.register import register_action from .helper_mixin import MeetingUserHelperMixin -from .mixin import MeetingUserHistoryMixin +from .mixin import MeetingUserMixin @register_action("meeting_user.set_data", action_type=ActionType.BACKEND_INTERNAL) class MeetingUserSetData( - MeetingUserHistoryMixin, ExtendHistoryMixin, MeetingUserHelperMixin, UpdateAction + MeetingUserMixin, + ExtendHistoryMixin, + MeetingUserHelperMixin, + UpdateAction, ): """ Action to create, update or delete a meeting_user. 
@@ -39,7 +42,7 @@ class MeetingUserSetData( extend_history_to = "user_id" def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: - meeting_id = instance.pop("meeting_id", None) + meeting_id = instance.get("meeting_id") user_id = instance.pop("user_id", None) if instance.get("id"): fqid = fqid_from_collection_and_id("meeting_user", instance["id"]) @@ -56,14 +59,9 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: ), "Not permitted to change user_id." elif meeting_id and user_id: instance["id"] = self.create_or_get_meeting_user(meeting_id, user_id) - if ( - instance.get("vote_delegated_to_id") - and instance["vote_delegated_to_id"] == instance["id"] - or instance.get("vote_delegations_from_ids") - and instance["id"] in instance["vote_delegations_from_ids"] - ): - raise ActionException("Self vote delegation is not allowed.") - + # MeetingUserMixin needs the meeting_id in "create" case + instance = super().update_instance(instance) + instance.pop("meeting_id", None) return instance def get_meeting_id(self, instance: Dict[str, Any]) -> int: diff --git a/openslides_backend/action/actions/meeting_user/update.py b/openslides_backend/action/actions/meeting_user/update.py index 8b88bc680..a6fe844c3 100644 --- a/openslides_backend/action/actions/meeting_user/update.py +++ b/openslides_backend/action/actions/meeting_user/update.py @@ -1,15 +1,16 @@ from openslides_backend.action.mixins.extend_history_mixin import ExtendHistoryMixin from ....models.models import MeetingUser -from ....permissions.permissions import Permissions from ...generics.update import UpdateAction +from ...util.action_type import ActionType from ...util.default_schema import DefaultSchema from ...util.register import register_action -from .mixin import MeetingUserMixin +from .history_mixin import MeetingUserHistoryMixin +from .mixin import meeting_user_standard_fields -@register_action("meeting_user.update") -class MeetingUserUpdate(MeetingUserMixin, UpdateAction, ExtendHistoryMixin): +@register_action("meeting_user.update", action_type=ActionType.BACKEND_INTERNAL) +class MeetingUserUpdate(MeetingUserHistoryMixin, UpdateAction, ExtendHistoryMixin): """ Action to update a meeting_user. 
""" @@ -19,8 +20,7 @@ class MeetingUserUpdate(MeetingUserMixin, UpdateAction, ExtendHistoryMixin): optional_properties=[ "about_me", "group_ids", - *MeetingUserMixin.standard_fields, + *meeting_user_standard_fields, ], ) - permission = Permissions.User.CAN_MANAGE extend_history_to = "user_id" diff --git a/tests/system/action/meeting_user/test_create.py b/tests/system/action/meeting_user/test_create.py index 9347417bd..e33b20d15 100644 --- a/tests/system/action/meeting_user/test_create.py +++ b/tests/system/action/meeting_user/test_create.py @@ -12,13 +12,6 @@ def test_create(self) -> None: "group_ids": [21], }, "personal_note/11": {"star": True, "meeting_id": 10}, - "speaker/12": {"meeting_id": 10}, - "chat_message/13": {"meeting_id": 10}, - "motion/14": {"meeting_id": 10}, - "motion_submitter/15": {"meeting_id": 10}, - "assignment_candidate/16": {"meeting_id": 10}, - "projection/17": {"meeting_id": 10}, - "vote/20": {"meeting_id": 10}, "group/21": {"meeting_id": 10}, } ) @@ -31,82 +24,9 @@ def test_create(self) -> None: "about_me": "A very long description.", "vote_weight": "1.500000", "personal_note_ids": [11], - "speaker_ids": [12], - "supported_motion_ids": [14], - "motion_submitter_ids": [15], - "assignment_candidate_ids": [16], - "chat_message_ids": [13], "group_ids": [21], } response = self.request("meeting_user.create", test_dict) self.assert_status_code(response, 200) self.assert_model_exists("meeting_user/1", test_dict) self.assert_model_exists("user/1", {"committee_ids": [1]}) - - def test_create_no_permission(self) -> None: - self.set_models( - { - "meeting/10": {"is_active_in_organization_id": 1}, - "user/1": {"organization_management_level": None}, - } - ) - response = self.request("meeting_user.create", {"meeting_id": 10, "user_id": 1}) - self.assert_status_code(response, 403) - - def test_create_permission_self_change_about_me(self) -> None: - self.set_models( - { - "meeting/10": {"is_active_in_organization_id": 1}, - "user/1": {"organization_management_level": None}, - } - ) - response = self.request( - "meeting_user.create", {"meeting_id": 10, "user_id": 1, "about_me": "test"} - ) - self.assert_status_code(response, 200) - self.assert_model_exists( - "meeting_user/1", {"meeting_id": 10, "user_id": 1, "about_me": "test"} - ) - self.assert_model_exists("meeting/10", {"meeting_user_ids": [1]}) - self.assert_model_exists("user/1", {"meeting_user_ids": [1]}) - - def test_create_no_permission_change_some_fields(self) -> None: - self.set_models( - { - "meeting/10": {"is_active_in_organization_id": 1}, - "user/1": {"organization_management_level": None}, - } - ) - response = self.request( - "meeting_user.create", - {"meeting_id": 10, "user_id": 1, "about_me": "test", "number": "XXIII"}, - ) - self.assert_status_code(response, 403) - - def test_create_permission_create_some_fields_with_user_can_manage(self) -> None: - self.set_models( - { - "meeting/10": { - "is_active_in_organization_id": 1, - "meeting_user_ids": [10], - }, - "user/1": {"organization_management_level": None}, - "meeting_user/10": {"user_id": 1, "meeting_id": 10, "group_ids": [21]}, - "group/21": { - "meeting_user_ids": [10], - "permissions": ["user.can_manage"], - }, - "user/2": {"username": "user2"}, - } - ) - response = self.request( - "meeting_user.create", - {"meeting_id": 10, "user_id": 2, "about_me": "test", "number": "XXIV"}, - ) - self.assert_status_code(response, 200) - self.assert_model_exists( - "meeting_user/11", - {"meeting_id": 10, "user_id": 2, "about_me": "test", "number": "XXIV"}, - ) - 
self.assert_model_exists("meeting/10", {"meeting_user_ids": [10, 11]}) - self.assert_model_exists("user/2", {"meeting_user_ids": [11]}) diff --git a/tests/system/action/meeting_user/test_create_delegation.py b/tests/system/action/meeting_user/test_create_delegation.py deleted file mode 100644 index 2e8589ffa..000000000 --- a/tests/system/action/meeting_user/test_create_delegation.py +++ /dev/null @@ -1,213 +0,0 @@ -from typing import Any, Dict - -from tests.system.action.base import BaseActionTestCase -from tests.util import Response - - -class UserCreateDelegationActionTest(BaseActionTestCase): - def setUp(self) -> None: - super().setUp() - self.set_models( - { - "committee/1": {"meeting_ids": [222]}, - "meeting/222": { - "name": "Meeting222", - "is_active_in_organization_id": 1, - "committee_id": 1, - "meeting_user_ids": [11, 12, 13], - }, - "group/1": {"meeting_id": 222, "meeting_user_ids": [11, 12, 13]}, - "user/1": {"meeting_user_ids": [11], "meeting_ids": [222]}, - "user/2": { - "username": "user/2", - "meeting_user_ids": [12], - "meeting_ids": [222], - }, - "user/3": { - "username": "user3", - "meeting_user_ids": [13], - "meeting_ids": [222], - }, - "user/4": { - "username": "user4", - }, - "meeting_user/11": { - "meeting_id": 222, - "user_id": 1, - "group_ids": [1], - }, - "meeting_user/12": { - "meeting_id": 222, - "user_id": 2, - "vote_delegated_to_id": 13, - "group_ids": [1], - }, - "meeting_user/13": { - "meeting_id": 222, - "user_id": 3, - "vote_delegations_from_ids": [12], - "group_ids": [1], - }, - } - ) - - def request_executor(self, meeting_user4_update: Dict[str, Any]) -> Response: - request_data: Dict[str, Any] = { - "user_id": 4, - "meeting_id": 222, - "group_ids": [1], - } - request_data.update(meeting_user4_update) - return self.request("meeting_user.create", request_data) - - def test_delegated_to_standard_user(self) -> None: - response = self.request_executor({"vote_delegated_to_id": 13}) - self.assert_status_code(response, 200) - self.assert_model_exists("meeting_user/14", {"vote_delegated_to_id": 13}) - self.assert_model_exists( - "meeting_user/13", {"vote_delegations_from_ids": [12, 14]} - ) - - def test_delegated_to_success_without_group(self) -> None: - response = self.request_executor({"group_ids": [], "vote_delegated_to_id": 13}) - self.assert_status_code(response, 200) - self.assert_model_exists( - "meeting_user/14", - { - "vote_delegated_to_id": 13, - "group_ids": [], - "meeting_id": 222, - "user_id": 4, - }, - ) - self.assert_model_exists( - "meeting_user/13", {"vote_delegations_from_ids": [12, 14]} - ) - - def test_delegated_to_error_group_do_not_match_meeting(self) -> None: - self.set_models( - { - "meeting/223": { - "name": "Meeting223", - "is_active_in_organization_id": 1, - }, - "group/2": {"meeting_id": 223}, - } - ) - response = self.request_executor({"vote_delegated_to_id": 13, "group_ids": [2]}) - self.assert_status_code(response, 400) - self.assertIn( - "The following models do not belong to meeting 222: ['group/2']", - response.json["message"], - ) - - def test_delegated_to_error_wrong_target_meeting(self) -> None: - self.set_models( - { - "meeting/223": { - "name": "Meeting223", - "is_active_in_organization_id": 1, - }, - "group/2": {"meeting_id": 223, "meeting_user_ids": [10]}, - "user/1": { - "meeting_user_ids": [11, 10], - "meeting_ids": [222, 223], - }, - "meeting_user/10": { - "user_id": 1, - "meeting_id": 223, - "group_ids": [2], - }, - }, - ) - response = self.request_executor({"vote_delegated_to_id": 10}) - 
self.assert_status_code(response, 400) - self.assertIn( - "User 1's delegation id don't belong to meeting 222.", - response.json["message"], - ) - - def test_delegated_to_error_target_user_delegated_himself(self) -> None: - response = self.request_executor({"vote_delegated_to_id": 12}) - self.assert_status_code(response, 400) - self.assertIn( - "User 4 cannot delegate his vote to user 2, because that user has delegated his vote himself.", - response.json["message"], - ) - - def test_delegated_to_error_target_not_exists(self) -> None: - response = self.request_executor({"vote_delegated_to_id": 1000}) - self.assert_status_code(response, 400) - self.assertIn( - "Model 'meeting_user/1000' does not exist.", - response.json["message"], - ) - - def test_delegations_from_ok(self) -> None: - response = self.request_executor({"vote_delegations_from_ids": [12]}) - self.assert_status_code(response, 200) - self.assert_model_exists("meeting_user/14", {"vote_delegations_from_ids": [12]}) - self.assert_model_exists("meeting_user/12", {"vote_delegated_to_id": 14}) - - def test_delegations_from_error_group_do_not_match_meeting(self) -> None: - self.set_models( - { - "meeting/223": { - "name": "Meeting223", - "is_active_in_organization_id": 1, - }, - "group/2": {"meeting_id": 223}, - } - ) - response = self.request_executor( - {"vote_delegations_from_ids": [12], "group_ids": [2]} - ) - self.assert_status_code(response, 400) - self.assertIn( - "The following models do not belong to meeting 222: ['group/2']", - response.json["message"], - ) - - def test_delegations_from_error_target_meeting_dont_match(self) -> None: - self.set_models( - { - "meeting/223": { - "name": "Meeting223", - "is_active_in_organization_id": 1, - }, - "group/2": {"meeting_id": 223, "meeting_user_ids": [11]}, - "user/1": { - "meeting_user_ids": [11], - "meeting_ids": [223], - }, - "meeting_user/11": { - "meeting_id": 223, - "user_id": 1, - "group_ids": [2], - }, - } - ) - response = self.request_executor( - {"vote_delegations_from_ids": [11], "group_ids": [1]} - ) - self.assert_status_code(response, 400) - self.assertIn( - "User(s) [1] delegation ids don't belong to meeting 222.", - response.json["message"], - ) - - def test_delegations_from_error_target_user_receives_delegations(self) -> None: - response = self.request_executor({"vote_delegations_from_ids": [13]}) - self.assert_status_code(response, 400) - self.assertIn( - "User(s) [3] can't delegate their votes because they receive vote delegations.", - response.json["message"], - ) - - def test_delegations_from_target_not_exists(self) -> None: - response = self.request_executor({"vote_delegations_from_ids": [1000]}) - self.assert_status_code(response, 400) - self.assertIn( - "Model 'meeting_user/1000' does not exist.", - response.json["message"], - ) diff --git a/tests/system/action/meeting_user/test_update_delegation.py b/tests/system/action/meeting_user/test_set_data_delegation.py similarity index 99% rename from tests/system/action/meeting_user/test_update_delegation.py rename to tests/system/action/meeting_user/test_set_data_delegation.py index bb5d59f8b..ee92c1ed6 100644 --- a/tests/system/action/meeting_user/test_update_delegation.py +++ b/tests/system/action/meeting_user/test_set_data_delegation.py @@ -77,7 +77,7 @@ def request_executor(self, meeting_user4_update: Dict[str, Any]) -> Response: "id": 14, } request_data.update(meeting_user4_update) - return self.request("meeting_user.update", request_data) + return self.request("meeting_user.set_data", request_data) def 
test_delegated_to_standard_user(self) -> None: response = self.request_executor({"vote_delegated_to_id": 13}) diff --git a/tests/system/action/meeting_user/test_update.py b/tests/system/action/meeting_user/test_update.py index a59c6020c..778a9936c 100644 --- a/tests/system/action/meeting_user/test_update.py +++ b/tests/system/action/meeting_user/test_update.py @@ -13,19 +13,11 @@ def test_update(self) -> None: "is_active_in_organization_id": 1, "meeting_user_ids": [5], "personal_note_ids": [11], - "speaker_ids": [12], "committee_id": 1, "default_group_id": 22, }, "meeting_user/5": {"user_id": 1, "meeting_id": 10}, "personal_note/11": {"star": True, "meeting_id": 10}, - "speaker/12": {"meeting_id": 10}, - "motion/14": {"meeting_id": 10}, - "motion_submitter/15": {"meeting_id": 10}, - "assignment_candidate/16": {"meeting_id": 10}, - "projection/17": {"meeting_id": 10}, - "chat_message/13": {"meeting_id": 10}, - "vote/20": {"meeting_id": 10}, "group/21": {"meeting_id": 10}, "group/22": {"meeting_id": 10, "default_group_for_meeting_id": 10}, } @@ -38,58 +30,8 @@ def test_update(self) -> None: "about_me": "A very long description.", "vote_weight": "1.500000", "personal_note_ids": [11], - "speaker_ids": [12], - "supported_motion_ids": [14], - "motion_submitter_ids": [15], - "assignment_candidate_ids": [16], - "chat_message_ids": [13], "group_ids": [21], } response = self.request("meeting_user.update", test_dict) self.assert_status_code(response, 200) self.assert_model_exists("meeting_user/5", test_dict) - - def test_update_no_permission(self) -> None: - self.set_models( - { - "meeting/10": { - "is_active_in_organization_id": 1, - "meeting_user_ids": [5], - }, - "meeting_user/5": {"user_id": 1, "meeting_id": 10}, - "user/1": {"organization_management_level": None}, - } - ) - response = self.request("meeting_user.update", {"id": 5, "number": "XX"}) - self.assert_status_code(response, 403) - - def test_update_permission_change_own_about_me(self) -> None: - self.set_models( - { - "meeting/10": { - "is_active_in_organization_id": 1, - "meeting_user_ids": [5], - }, - "meeting_user/5": {"user_id": 1, "meeting_id": 10}, - "user/1": {"organization_management_level": None}, - } - ) - response = self.request("meeting_user.update", {"id": 5, "about_me": "test"}) - self.assert_status_code(response, 200) - self.assert_model_exists("meeting_user/5", {"about_me": "test"}) - - def test_update_no_permission_some_fields(self) -> None: - self.set_models( - { - "meeting/10": { - "is_active_in_organization_id": 1, - "meeting_user_ids": [5], - }, - "meeting_user/5": {"user_id": 1, "meeting_id": 10}, - "user/1": {"organization_management_level": None}, - } - ) - response = self.request( - "meeting_user.update", {"id": 5, "about_me": "test", "number": "XX"} - ) - self.assert_status_code(response, 403) diff --git a/tests/system/action/user/test_create.py b/tests/system/action/user/test_create.py index 76b731f4f..9dc12f3a1 100644 --- a/tests/system/action/user/test_create.py +++ b/tests/system/action/user/test_create.py @@ -568,10 +568,10 @@ def test_create_permission_group_A_oml_manage_user(self) -> None: ], }, { - "action": "meeting_user.create", + "action": "user.update", "data": [ { - "user_id": 3, + "id": 3, "meeting_id": 4, "group_ids": [4], } @@ -633,10 +633,10 @@ def test_create_permission_group_A_cml_manage_user(self) -> None: ], }, { - "action": "meeting_user.create", + "action": "user.update", "data": [ { - "user_id": 3, + "id": 3, "meeting_id": 4, "group_ids": [4], } diff --git 
a/tests/system/action/user/test_update.py b/tests/system/action/user/test_update.py index 847483b53..7c7e4251c 100644 --- a/tests/system/action/user/test_update.py +++ b/tests/system/action/user/test_update.py @@ -266,7 +266,9 @@ def test_update_self_vote_delegation(self) -> None: "user.update", {"id": 111, "vote_delegated_to_id": 11, "meeting_id": 1} ) self.assert_status_code(response, 400) - assert "Self vote delegation is not allowed." in response.json["message"] + assert ( + "User 111 can't delegate the vote to himself." in response.json["message"] + ) def test_update_self_vote_delegation_2(self) -> None: self.set_models( @@ -284,7 +286,9 @@ def test_update_self_vote_delegation_2(self) -> None: {"id": 111, "vote_delegations_from_ids": [11], "meeting_id": 1}, ) self.assert_status_code(response, 400) - assert "Self vote delegation is not allowed." in response.json["message"] + assert ( + "User 111 can't delegate the vote to himself." in response.json["message"] + ) def test_committee_manager_without_committee_ids(self) -> None: """Giving committee management level requires committee_ids""" @@ -446,10 +450,11 @@ def test_committee_manager_add_and_remove_both(self) -> None: ], }, { - "action": "meeting_user.update", + "action": "user.update", "data": [ { - "id": 111, + "id": 123, + "meeting_id": 11, "group_ids": [], } ], @@ -900,7 +905,6 @@ def test_perm_group_B_user_can_manage(self) -> None: "vote_weight": "12.002345", "about_me": "about me 1", "comment": "comment for meeting/1", - "vote_delegated_to_id": 1, # meeting_user/1 => user/2 in meeting/1 "vote_delegations_from_ids": [3, 5], # from user/5 and 6 in meeting/1 }, ) @@ -917,7 +921,6 @@ def test_perm_group_B_user_can_manage(self) -> None: { "user_id": 111, "meeting_id": 1, - "vote_delegated_to_id": 1, "vote_delegations_from_ids": [3, 5], "number": "number1", "structure_level": "structure_level 1", @@ -930,10 +933,6 @@ def test_perm_group_B_user_can_manage(self) -> None: "meeting_user/8", {"user_id": 111, "meeting_id": 4, "number": "number1 in 4"}, ) - self.assert_model_exists( - "meeting_user/1", - {"user_id": 2, "meeting_id": 1, "vote_delegations_from_ids": [7]}, - ) self.assert_model_exists( "meeting_user/3", {"user_id": 5, "meeting_id": 1, "vote_delegated_to_id": 7} ) From a314f8c1787a7e2dda786061390285f6e1088e21 Mon Sep 17 00:00:00 2001 From: Ralf Peschke Date: Fri, 29 Sep 2023 11:24:14 +0200 Subject: [PATCH 2/5] I1896 account.import (#1912) --- global/meta/models.yml | 24 + openslides_backend/action/action_worker.py | 6 +- .../action/actions/meeting/import_.py | 1 + .../action/actions/topic/import_.py | 16 +- .../action/actions/topic/json_upload.py | 2 +- .../action/actions/user/__init__.py | 4 +- .../action/actions/user/account_import.py | 174 +++ .../actions/user/account_json_upload.py | 376 +++++++ .../action/actions/user/create.py | 16 +- .../action/actions/user/import_.py | 151 --- .../action/actions/user/json_upload.py | 219 ---- .../action/actions/user/update.py | 5 + .../action/actions/user/user_mixin.py | 48 +- .../action/mixins/import_mixins.py | 280 ++++- openslides_backend/models/models.py | 15 +- .../presenter/check_database_all.py | 2 +- .../services/datastore/adapter.py | 6 +- .../services/datastore/commands.py | 4 +- .../services/datastore/http_engine.py | 2 +- .../services/datastore/interface.py | 2 +- requirements/export_datastore_commit.sh | 2 +- tests/system/action/meeting/test_clone.py | 14 + tests/system/action/meeting/test_import.py | 17 + tests/system/action/test_action_worker.py | 6 +- 
tests/system/action/topic/test_import.py | 24 +- tests/system/action/topic/test_json_upload.py | 15 +- .../system/action/user/test_account_import.py | 762 +++++++++++++ .../action/user/test_account_json_upload.py | 1001 +++++++++++++++++ tests/system/action/user/test_create.py | 40 +- tests/system/action/user/test_import.py | 399 ------- tests/system/action/user/test_json_upload.py | 389 ------- .../presenter/test_check_database_all.py | 5 + tests/system/presenter/test_export_meeting.py | 9 +- 33 files changed, 2759 insertions(+), 1277 deletions(-) create mode 100644 openslides_backend/action/actions/user/account_import.py create mode 100644 openslides_backend/action/actions/user/account_json_upload.py delete mode 100644 openslides_backend/action/actions/user/import_.py delete mode 100644 openslides_backend/action/actions/user/json_upload.py create mode 100644 tests/system/action/user/test_account_import.py create mode 100644 tests/system/action/user/test_account_json_upload.py delete mode 100644 tests/system/action/user/test_import.py delete mode 100644 tests/system/action/user/test_json_upload.py diff --git a/global/meta/models.yml b/global/meta/models.yml index 9fbc516a7..2a665a12d 100644 --- a/global/meta/models.yml +++ b/global/meta/models.yml @@ -3837,3 +3837,27 @@ action_worker: result: type: JSON restriction_mode: A + +import_preview: + id: + type: number + restriction_mode: A + name: + type: string + required: true + restriction_mode: A + state: + type: string + required: true + enum: + - warning + - error + - done + restriction_mode: A + created: + type: timestamp + required: true + restriction_mode: A + result: + type: JSON + restriction_mode: A diff --git a/openslides_backend/action/action_worker.py b/openslides_backend/action/action_worker.py index dcff82f14..2a4696131 100644 --- a/openslides_backend/action/action_worker.py +++ b/openslides_backend/action/action_worker.py @@ -106,7 +106,7 @@ def initial_action_worker_write(self) -> str: self.new_id = self.datastore.reserve_id(collection="action_worker") self.fqid = fqid_from_collection_and_id("action_worker", self.new_id) try: - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( @@ -138,7 +138,7 @@ def initial_action_worker_write(self) -> str: def continue_action_worker_write(self) -> None: current_time = round(time()) with self.datastore.get_database_context(): - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( @@ -190,7 +190,7 @@ def final_action_worker_write(self, action_worker_thread: "ActionWorker") -> Non f"aborted action_worker '{self.fqid}' ({self.action_names}) {current_time}: {message}" ) - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( diff --git a/openslides_backend/action/actions/meeting/import_.py b/openslides_backend/action/actions/meeting/import_.py index 593cb087d..515221443 100644 --- a/openslides_backend/action/actions/meeting/import_.py +++ b/openslides_backend/action/actions/meeting/import_.py @@ -153,6 +153,7 @@ def remove_not_allowed_fields(self, instance: Dict[str, Any]) -> None: user.pop("committee_management_ids", None) self.get_meeting_from_json(json_data).pop("organization_tag_ids", None) json_data.pop("action_worker", None) + json_data.pop("import_preview", None) def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: meeting_json = instance["meeting"] diff --git a/openslides_backend/action/actions/topic/import_.py 
b/openslides_backend/action/actions/topic/import_.py index 4420442fb..3f10f40fe 100644 --- a/openslides_backend/action/actions/topic/import_.py +++ b/openslides_backend/action/actions/topic/import_.py @@ -1,6 +1,6 @@ from typing import Any, Dict -from ....models.models import ActionWorker +from ....models.models import ImportPreview from ....permissions.permissions import Permissions from ....shared.exceptions import ActionException from ....shared.patterns import fqid_from_collection_and_id @@ -15,11 +15,11 @@ @register_action("topic.import") class TopicImport(DuplicateCheckMixin, ImportMixin): """ - Action to import a result from the action_worker. + Action to import a result from the import_preview. """ - model = ActionWorker() - schema = DefaultSchema(ActionWorker()).get_default_schema( + model = ImportPreview() + schema = DefaultSchema(ImportPreview()).get_default_schema( additional_required_fields={ "id": required_id_schema, "import": {"type": "boolean"}, @@ -50,10 +50,10 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: def get_meeting_id(self, instance: Dict[str, Any]) -> int: store_id = instance["id"] worker = self.datastore.get( - fqid_from_collection_and_id("action_worker", store_id), - ["result"], + fqid_from_collection_and_id("import_preview", store_id), + ["name", "result"], lock_result=False, ) - if worker.get("result", {}).get("import") == TopicImport.import_name: - return next(iter(worker["result"]["rows"]))["data"]["meeting_id"] + if worker.get("name") == TopicImport.import_name: + return next(iter(worker.get("result", {})["rows"]))["data"]["meeting_id"] raise ActionException("Import data cannot be found.") diff --git a/openslides_backend/action/actions/topic/json_upload.py b/openslides_backend/action/actions/topic/json_upload.py index f45a0e2e3..3c348074b 100644 --- a/openslides_backend/action/actions/topic/json_upload.py +++ b/openslides_backend/action/actions/topic/json_upload.py @@ -83,7 +83,7 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: self.set_state( state_to_count[ImportState.ERROR], state_to_count[ImportState.WARNING] ) - self.store_rows_in_the_action_worker("topic") + self.store_rows_in_the_import_preview("topic") return {} def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]: diff --git a/openslides_backend/action/actions/user/__init__.py b/openslides_backend/action/actions/user/__init__.py index f5cf031af..87fdee213 100644 --- a/openslides_backend/action/actions/user/__init__.py +++ b/openslides_backend/action/actions/user/__init__.py @@ -1,12 +1,12 @@ from . 
import ( # noqa + account_import, + account_json_upload, assign_meetings, create, delete, forget_password, forget_password_confirm, generate_new_password, - import_, - json_upload, merge_together, reset_password_to_default, save_saml_account, diff --git a/openslides_backend/action/actions/user/account_import.py b/openslides_backend/action/actions/user/account_import.py new file mode 100644 index 000000000..e662ac1b2 --- /dev/null +++ b/openslides_backend/action/actions/user/account_import.py @@ -0,0 +1,174 @@ +from typing import Any, Dict, List, cast + +from ....models.models import ImportPreview +from ....permissions.management_levels import OrganizationManagementLevel +from ....shared.exceptions import ActionException +from ....shared.schema import required_id_schema +from ...mixins.import_mixins import ( + ImportMixin, + ImportRow, + ImportState, + Lookup, + ResultType, +) +from ...util.default_schema import DefaultSchema +from ...util.register import register_action +from .create import UserCreate +from .update import UserUpdate + + +@register_action("account.import") +class AccountImport(ImportMixin): + """ + Action to import a result from the import_preview. + """ + + model = ImportPreview() + schema = DefaultSchema(ImportPreview()).get_default_schema( + additional_required_fields={ + "id": required_id_schema, + "import": {"type": "boolean"}, + } + ) + permission = OrganizationManagementLevel.CAN_MANAGE_USERS + skip_archived_meeting_check = True + import_name = "account" + + def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: + if not instance["import"]: + return {} + + instance = super().update_instance(instance) + self.setup_lookups() + + self.rows = [self.validate_entry(row) for row in self.result["rows"]] + + if self.import_state != ImportState.ERROR: + create_action_payload: List[Dict[str, Any]] = [] + update_action_payload: List[Dict[str, Any]] = [] + self.flatten_object_fields(["username", "saml_id", "default_password"]) + for row in self.rows: + if row["state"] == ImportState.NEW: + create_action_payload.append(row["data"]) + else: + update_action_payload.append(row["data"]) + if create_action_payload: + self.execute_other_action(UserCreate, create_action_payload) + if update_action_payload: + self.execute_other_action(UserUpdate, update_action_payload) + + return {} + + def validate_entry(self, row: ImportRow) -> ImportRow: + entry = row["data"] + username = self.get_value_from_union_str_object(entry.get("username")) + if not username: + raise ActionException( + "Invalid JsonUpload data: The data from json upload must contain a valid username object" + ) + check_result = self.username_lookup.check_duplicate(username) + id_ = cast(int, self.username_lookup.get_field_by_name(username, "id")) + + if check_result == ResultType.FOUND_ID and id_ != 0: + if row["state"] != ImportState.DONE: + row["messages"].append( + f"Error: row state expected to be '{ImportState.DONE}', but it is '{row['state']}'." 
+ ) + row["state"] = ImportState.ERROR + entry["username"]["info"] = ImportState.ERROR + elif "id" not in entry: + raise ActionException( + f"Invalid JsonUpload data: A data row with state '{ImportState.DONE}' must have an 'id'" + ) + elif entry["id"] != id_: + row["state"] = ImportState.ERROR + entry["username"]["info"] = ImportState.ERROR + row["messages"].append( + f"Error: username '{username}' found in different id ({id_} instead of {entry['id']})" + ) + elif check_result == ResultType.FOUND_MORE_IDS: + row["state"] = ImportState.ERROR + entry["username"]["info"] = ImportState.ERROR + row["messages"].append( + f"Error: username '{username}' is duplicated in import." + ) + elif check_result == ResultType.NOT_FOUND_ANYMORE: + row["messages"].append( + f"Error: user {entry['username']['id']} not found anymore for updating user '{username}'." + ) + row["state"] = ImportState.ERROR + elif check_result == ResultType.NOT_FOUND: + pass # cannot create an error ! + + saml_id = self.get_value_from_union_str_object(entry.get("saml_id")) + if saml_id: + check_result = self.saml_id_lookup.check_duplicate(saml_id) + id_from_saml_id = cast( + int, self.saml_id_lookup.get_field_by_name(saml_id, "id") + ) + if check_result == ResultType.FOUND_ID and id_ != 0: + if id_ != id_from_saml_id: + row["state"] = ImportState.ERROR + entry["saml_id"]["info"] = ImportState.ERROR + row["messages"].append( + f"Error: saml_id '{saml_id}' found in different id ({id_from_saml_id} instead of {id_})" + ) + elif check_result == ResultType.FOUND_MORE_IDS: + row["state"] = ImportState.ERROR + entry["saml_id"]["info"] = ImportState.ERROR + row["messages"].append( + f"Error: saml_id '{saml_id}' is duplicated in import." + ) + elif check_result == ResultType.NOT_FOUND_ANYMORE: + row["state"] = ImportState.ERROR + entry["saml_id"]["info"] = ImportState.ERROR + row["messages"].append( + f"Error: saml_id '{saml_id}' not found anymore in user with id '{id_from_saml_id}'" + ) + elif check_result == ResultType.NOT_FOUND: + pass + + if ( + (default_password := entry.get("default_password")) + and type(default_password) == dict + and default_password["info"] == ImportState.WARNING + ): + field = "can_change_own_password" + if self.username_lookup.get_field_by_name(username, field): + entry[field] = False + if row["state"] == ImportState.ERROR and self.import_state == ImportState.DONE: + self.import_state = ImportState.ERROR + return { + "state": row["state"], + "data": row["data"], + "messages": row.get("messages", []), + } + + def setup_lookups(self) -> None: + rows = self.result["rows"] + self.username_lookup = Lookup( + self.datastore, + "user", + [ + (entry["username"]["value"], entry) + for row in rows + if "username" in (entry := row["data"]) + ], + field="username", + mapped_fields=[ + "saml_id", + "default_password", + "password", + "can_change_own_password", + ], + ) + self.saml_id_lookup = Lookup( + self.datastore, + "user", + [ + (entry["saml_id"]["value"], entry) + for row in rows + if "saml_id" in (entry := row["data"]) + ], + field="saml_id", + ) diff --git a/openslides_backend/action/actions/user/account_json_upload.py b/openslides_backend/action/actions/user/account_json_upload.py new file mode 100644 index 000000000..397ae5512 --- /dev/null +++ b/openslides_backend/action/actions/user/account_json_upload.py @@ -0,0 +1,376 @@ +from collections import defaultdict +from typing import Any, Dict, List, Optional, Tuple, cast + +from ....models.models import User +from ....permissions.management_levels import 
OrganizationManagementLevel +from ...mixins.import_mixins import ( + ImportState, + JsonUploadMixin, + Lookup, + ResultType, + SearchFieldType, +) +from ...util.crypto import get_random_password +from ...util.default_schema import DefaultSchema +from ...util.register import register_action +from .user_mixin import UsernameMixin + + +@register_action("account.json_upload") +class AccountJsonUpload(JsonUploadMixin, UsernameMixin): + """ + Action to allow to upload a json. It is used as first step of an import. + """ + + model = User() + schema = DefaultSchema(User()).get_default_schema( + additional_required_fields={ + "data": { + "type": "array", + "items": { + "type": "object", + "properties": { + **model.get_properties( + "title", + "first_name", + "last_name", + "is_active", + "is_physical_person", + "default_password", + "email", + "username", + "gender", + "pronoun", + "default_number", + "default_structure_level", + "default_vote_weight", + "saml_id", + ), + }, + "required": [], + "additionalProperties": False, + }, + "minItems": 1, + "uniqueItems": False, + }, + } + ) + headers = [ + {"property": "title", "type": "string"}, + {"property": "first_name", "type": "string"}, + {"property": "last_name", "type": "string"}, + {"property": "is_active", "type": "boolean"}, + {"property": "is_physical_person", "type": "boolean"}, + {"property": "default_password", "type": "string", "is_object": True}, + {"property": "email", "type": "string"}, + {"property": "username", "type": "string", "is_object": True}, + {"property": "gender", "type": "string"}, + {"property": "pronoun", "type": "string"}, + {"property": "default_number", "type": "string"}, + {"property": "default_structure_level", "type": "string"}, + {"property": "default_vote_weight", "type": "decimal"}, + {"property": "saml_id", "type": "string", "is_object": True}, + ] + permission = OrganizationManagementLevel.CAN_MANAGE_USERS + skip_archived_meeting_check = True + row_state: ImportState + username_lookup: Lookup + saml_id_lookup: Lookup + names_email_lookup: Lookup + all_saml_id_lookup: Lookup + + def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: + data = instance.pop("data") + data = self.add_payload_index_to_action_data(data) + self.setup_lookups(data) + self.create_usernames(data) + + self.rows = [self.validate_entry(entry) for entry in data] + + # generate statistics + itemCount = len(self.rows) + state_to_count = {state: 0 for state in ImportState} + for row in self.rows: + state_to_count[row["state"]] += 1 + state_to_count[ImportState.WARNING] += self.count_warnings_in_payload( + row.get("data", {}).values() + ) + row["data"].pop("payload_index", None) + + self.statistics = [ + {"name": "total", "value": itemCount}, + {"name": "created", "value": state_to_count[ImportState.NEW]}, + {"name": "updated", "value": state_to_count[ImportState.DONE]}, + {"name": "error", "value": state_to_count[ImportState.ERROR]}, + {"name": "warning", "value": state_to_count[ImportState.WARNING]}, + ] + + self.set_state( + state_to_count[ImportState.ERROR], state_to_count[ImportState.WARNING] + ) + self.store_rows_in_the_import_preview("account") + return {} + + def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]: + messages: List[str] = [] + id_: Optional[int] = None + old_saml_id: Optional[str] = None + old_default_password: Optional[str] = None + if (username := entry.get("username")) and type(username) == str: + check_result = self.username_lookup.check_duplicate(username) + id_ = cast(int, 
self.username_lookup.get_field_by_name(username, "id")) + if check_result == ResultType.FOUND_ID and id_ != 0: + old_saml_id = cast( + str, self.username_lookup.get_field_by_name(username, "saml_id") + ) + old_default_password = cast( + str, + self.username_lookup.get_field_by_name( + username, "default_password" + ), + ) + self.row_state = ImportState.DONE + entry["id"] = id_ + entry["username"] = { + "value": username, + "info": ImportState.DONE, + "id": id_, + } + elif check_result == ResultType.NOT_FOUND or id_ == 0: + self.row_state = ImportState.NEW + entry["username"] = { + "value": username, + "info": ImportState.DONE, + } + elif check_result == ResultType.FOUND_MORE_IDS: + self.row_state = ImportState.ERROR + entry["username"] = { + "value": username, + "info": ImportState.ERROR, + } + messages.append("Found more users with the same username") + elif saml_id := entry.get("saml_id"): + check_result = self.saml_id_lookup.check_duplicate(saml_id) + id_ = cast(int, self.saml_id_lookup.get_field_by_name(saml_id, "id")) + if check_result == ResultType.FOUND_ID and id_ != 0: + username = self.saml_id_lookup.get_field_by_name(saml_id, "username") + old_saml_id = cast( + str, self.saml_id_lookup.get_field_by_name(saml_id, "saml_id") + ) + old_default_password = cast( + str, + self.saml_id_lookup.get_field_by_name(saml_id, "default_password"), + ) + + self.row_state = ImportState.DONE + entry["id"] = id_ + entry["username"] = { + "value": username, + "info": ImportState.DONE, + "id": id_, + } + elif check_result == ResultType.NOT_FOUND or id_ == 0: + self.row_state = ImportState.NEW + else: + if not (entry.get("first_name") or entry.get("last_name")): + self.row_state = ImportState.ERROR + messages.append( + "Cannot generate username. Missing one of first_name, last_name." + ) + else: + names_and_email = self._names_and_email(entry) + check_result = self.names_email_lookup.check_duplicate(names_and_email) + id_ = cast( + int, + self.names_email_lookup.get_field_by_name(names_and_email, "id"), + ) + if check_result == ResultType.FOUND_ID and id_ != 0: + username = self.names_email_lookup.get_field_by_name( + names_and_email, "username" + ) + old_saml_id = cast( + str, + self.names_email_lookup.get_field_by_name( + names_and_email, "saml_id" + ), + ) + old_default_password = cast( + str, + self.names_email_lookup.get_field_by_name( + names_and_email, "default_password" + ), + ) + self.row_state = ImportState.DONE + entry["id"] = id_ + entry["username"] = { + "value": username, + "info": ImportState.DONE, + "id": id_, + } + elif check_result == ResultType.NOT_FOUND or id_ == 0: + self.row_state = ImportState.NEW + elif check_result == ResultType.FOUND_MORE_IDS: + self.row_state = ImportState.ERROR + messages.append("Found more users with name and email") + + if id_ and len(self.all_id_mapping.get(id_, [])) > 1: + self.row_state = ImportState.ERROR + messages.append( + f"The account with id {id_} was found multiple times by different search criteria." 
+ ) + + if saml_id := entry.get("saml_id"): + check_result = self.all_saml_id_lookup.check_duplicate(saml_id) + if id_ := entry.get("id"): + if check_result == ResultType.FOUND_ID: + idFound = self.all_saml_id_lookup.get_field_by_name(saml_id, "id") + if check_result == ResultType.NOT_FOUND or ( + check_result == ResultType.FOUND_ID and id_ == idFound + ): + entry["saml_id"] = { + "value": saml_id, + "info": ImportState.DONE + if old_saml_id + else ImportState.NEW, # only if newly set + } + else: + self.row_state = ImportState.ERROR + messages.append(f"saml_id {saml_id} must be unique.") + entry["saml_id"] = { + "value": saml_id, + "info": ImportState.ERROR, + } + else: + if check_result != ResultType.NOT_FOUND: + self.row_state = ImportState.ERROR + entry["saml_id"] = { + "value": saml_id, + "info": ImportState.ERROR, + } + messages.append(f"saml_id {saml_id} must be unique.") + else: + entry["saml_id"] = { + "value": saml_id, + "info": ImportState.NEW, + } + if ( + entry["saml_id"]["info"] == ImportState.NEW + or entry.get("default_password") + or old_default_password + ): + entry["default_password"] = {"value": "", "info": ImportState.WARNING} + messages.append( + "Will remove password and default_password and forbid changing your OpenSlides password." + ) + else: + self.handle_default_password(entry) + return {"state": self.row_state, "messages": messages, "data": entry} + + def handle_default_password(self, entry: Dict[str, Any]) -> None: + if self.row_state == ImportState.NEW: + if "default_password" in entry: + value = entry["default_password"] + info = ImportState.DONE + else: + value = get_random_password() + info = ImportState.GENERATED + entry["default_password"] = {"value": value, "info": info} + elif self.row_state in (ImportState.DONE, ImportState.ERROR): + if "default_password" in entry: + entry["default_password"] = { + "value": entry["default_password"], + "info": ImportState.DONE, + } + + @staticmethod + def _names_and_email(entry: Dict[str, Any]) -> Tuple[str, ...]: + return ( + entry.get("first_name", ""), + entry.get("last_name", ""), + entry.get("email", ""), + ) + + def create_usernames(self, data: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + usernames: List[str] = [] + fix_usernames: List[str] = [] + payload_indices: List[int] = [] + + for entry in data: + if "username" not in entry.keys(): + if saml_id := entry.get("saml_id"): + username = saml_id + else: + username = entry.get("first_name", "") + entry.get("last_name", "") + usernames.append(username) + payload_indices.append(entry["payload_index"]) + else: + fix_usernames.append(entry["username"]) + + usernames = self.generate_usernames(usernames, fix_usernames) + + for index, username in zip(payload_indices, usernames): + data[index]["username"] = { + "value": username, + "info": ImportState.GENERATED, + } + self.username_lookup.add_item(data[index]) + return data + + def setup_lookups(self, data: List[Dict[str, Any]]) -> None: + self.username_lookup = Lookup( + self.datastore, + "user", + [ + (username, entry) + for entry in data + if (username := entry.get("username")) + ], + field="username", + mapped_fields=["username", "saml_id", "default_password"], + ) + self.saml_id_lookup = Lookup( + self.datastore, + "user", + [ + (saml_id, entry) + for entry in data + if not entry.get("username") and (saml_id := entry.get("saml_id")) + ], + field="saml_id", + mapped_fields=["saml_id", "username", "default_password"], + ) + self.names_email_lookup = Lookup( + self.datastore, + "user", + [ + (names_email, entry) + 
for entry in data + if not entry.get("username") + and not entry.get("saml_id") + and ( + names_email := ( + entry.get("first_name", ""), + entry.get("last_name", ""), + entry.get("email", ""), + ) + ) + ], + field=("first_name", "last_name", "email"), + mapped_fields=["username", "saml_id", "default_password"], + ) + self.all_saml_id_lookup = Lookup( + self.datastore, + "user", + [(saml_id, entry) for entry in data if (saml_id := entry.get("saml_id"))], + field="saml_id", + mapped_fields=["username", "saml_id"], + ) + + self.all_id_mapping: Dict[int, List[SearchFieldType]] = defaultdict(list) + for lookup in ( + self.username_lookup, + self.saml_id_lookup, + self.names_email_lookup, + ): + for id, values in lookup.id_to_name.items(): + self.all_id_mapping[id].extend(values) diff --git a/openslides_backend/action/actions/user/create.py b/openslides_backend/action/actions/user/create.py index d26fc1b2e..851fcf2c5 100644 --- a/openslides_backend/action/actions/user/create.py +++ b/openslides_backend/action/actions/user/create.py @@ -64,7 +64,7 @@ class UserCreate( def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: self.meeting_id: Optional[int] = instance.get("meeting_id") - instance = super().update_instance(instance) + if instance.get("is_active"): self.check_limit_of_user(1) saml_id = instance.get("saml_id") @@ -72,12 +72,14 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: if saml_id: instance["username"] = saml_id else: + if not (instance.get("first_name") or instance.get("last_name")): + raise ActionException("Need username or first_name or last_name") instance["username"] = self.generate_username(instance) + instance = super().update_instance(instance) if saml_id: instance["can_change_own_password"] = False - if instance.get("can_change_own_password") or instance.get( - "default_password" - ): + instance["password"] = None + if instance.get("default_password"): raise ActionException( f"user {instance['saml_id']} is a Single Sign On user and may not set the local default_passwort or the right to change it locally." ) @@ -85,12 +87,6 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: if not instance.get("default_password"): instance["default_password"] = get_random_password() instance = self.set_password(instance) - if not ( - instance.get("username") - or instance.get("first_name") - or instance.get("last_name") - ): - raise ActionException("Need username or first_name or last_name") instance["organization_id"] = ONE_ORGANIZATION_ID check_gender_helper(self.datastore, instance) return instance diff --git a/openslides_backend/action/actions/user/import_.py b/openslides_backend/action/actions/user/import_.py deleted file mode 100644 index 3310254af..000000000 --- a/openslides_backend/action/actions/user/import_.py +++ /dev/null @@ -1,151 +0,0 @@ -from typing import Any, Dict, List - -from ....models.models import ActionWorker -from ....permissions.management_levels import OrganizationManagementLevel -from ....shared.schema import required_id_schema -from ...mixins.import_mixins import ImportMixin, ImportState -from ...util.default_schema import DefaultSchema -from ...util.register import register_action -from .create import UserCreate -from .update import UserUpdate -from .user_mixin import DuplicateCheckMixin - - -@register_action("user.import") -class UserImport(DuplicateCheckMixin, ImportMixin): - """ - Action to import a result from the action_worker. 
- """ - - model = ActionWorker() - schema = DefaultSchema(ActionWorker()).get_default_schema( - additional_required_fields={ - "id": required_id_schema, - "import": {"type": "boolean"}, - } - ) - permission = OrganizationManagementLevel.CAN_MANAGE_USERS - skip_archived_meeting_check = True - import_name = "account" - - def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: - instance = super().update_instance(instance) - - # handle abort in on_success - if not instance["import"]: - return {} - - # init duplicate mixin - data = self.result.get("rows", []) - for entry in data: - # Revert username-info and default-password-info - for field in ("username", "default_password", "saml_id"): - if field in entry["data"]: - if field == "username" and "id" in entry["data"][field]: - entry["data"]["id"] = entry["data"][field]["id"] - entry["data"][field] = entry["data"][field]["value"] - - search_data_list = [ - { - field: entry["data"].get(field, "") - for field in ("username", "first_name", "last_name", "email", "saml_id") - } - for entry in data - ] - self.init_duplicate_set(search_data_list) - - # Recheck and update data, update needs "id" - create_action_payload: List[Dict[str, Any]] = [] - update_action_payload: List[Dict[str, Any]] = [] - self.error = False - for payload_index, entry in enumerate(data): - if entry["state"] == ImportState.NEW: - if not entry["data"].get("username") and not entry["data"].get( - "saml_id" - ): - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: Want to create user, but missing username in import data." - ) - elif entry["data"].get( - "username" - ) and self.check_username_for_duplicate( - entry["data"]["username"], payload_index - ): - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: want to create a new user, but username already exists." - ) - elif entry["data"].get("saml_id") and self.check_saml_id_for_duplicate( - entry["data"]["saml_id"], payload_index - ): - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: want to create a new user, but saml_id already exists." - ) - else: - create_action_payload.append(entry["data"]) - elif entry["state"] == ImportState.DONE: - search_data = self.get_search_data(payload_index) - if not entry["data"].get("username") and not entry["data"].get( - "saml_id" - ): - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: Want to update user, but missing username in import data." - ) - elif entry["data"].get( - "username" - ) and not self.check_username_for_duplicate( - entry["data"]["username"], payload_index - ): - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: want to update, but missing user in db." - ) - elif entry["data"].get( - "saml_id" - ) and not self.check_saml_id_for_duplicate( - entry["data"]["saml_id"], payload_index - ): - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: want to update, but missing user in db." - ) - elif search_data is None: - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: want to update, but found search data are wrong." - ) - elif search_data["id"] != entry["data"]["id"]: - self.error = True - entry["state"] = ImportState.ERROR - entry["messages"].append( - "Error: want to update, but found search data doesn't match." 
- ) - else: - for field in ("username", "saml_id"): - if field in entry["data"]: - del entry["data"][field] - - update_action_payload.append(entry["data"]) - else: - self.error = True - entry["messages"].append("Error in import.") - - # execute the actions - if not self.error: - if create_action_payload: - self.execute_other_action(UserCreate, create_action_payload) - if update_action_payload: - self.execute_other_action(UserUpdate, update_action_payload) - else: - self.error_store_ids.append(instance["id"]) - return {} diff --git a/openslides_backend/action/actions/user/json_upload.py b/openslides_backend/action/actions/user/json_upload.py deleted file mode 100644 index 6725b09d8..000000000 --- a/openslides_backend/action/actions/user/json_upload.py +++ /dev/null @@ -1,219 +0,0 @@ -from typing import Any, Dict, Optional, Tuple - -import fastjsonschema - -from ....models.models import User -from ....permissions.management_levels import OrganizationManagementLevel -from ...mixins.import_mixins import ImportState, JsonUploadMixin -from ...util.crypto import get_random_password -from ...util.default_schema import DefaultSchema -from ...util.register import register_action -from .create import UserCreate -from .user_mixin import DuplicateCheckMixin, UsernameMixin - - -@register_action("user.json_upload") -class UserJsonUpload(DuplicateCheckMixin, UsernameMixin, JsonUploadMixin): - """ - Action to allow to upload a json. It is used as first step of an import. - """ - - model = User() - schema = DefaultSchema(User()).get_default_schema( - additional_required_fields={ - "data": { - "type": "array", - "items": { - "type": "object", - "properties": { - **model.get_properties( - "title", - "first_name", - "last_name", - "is_active", - "is_physical_person", - "default_password", - "email", - "username", - "gender", - "pronoun", - "saml_id", - ), - }, - "required": [], - "additionalProperties": False, - }, - "minItems": 1, - "uniqueItems": False, - }, - } - ) - headers = [ - {"property": "title", "type": "string"}, - {"property": "first_name", "type": "string"}, - {"property": "last_name", "type": "string"}, - {"property": "is_active", "type": "boolean"}, - {"property": "is_physical_person", "type": "boolean"}, - {"property": "default_password", "type": "string"}, - {"property": "email", "type": "string"}, - {"property": "username", "type": "string"}, - {"property": "gender", "type": "string"}, - {"property": "pronoun", "type": "string"}, - {"property": "saml_id", "type": "string"}, - ] - permission = OrganizationManagementLevel.CAN_MANAGE_USERS - skip_archived_meeting_check = True - - def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: - data = instance.pop("data") - - # validate and check for duplicates - self.init_duplicate_set( - [ - { - field: entry.get(field, "") - for field in ( - "username", - "saml_id", - "first_name", - "last_name", - "email", - ) - } - for entry in data - ] - ) - self.rows = [ - self.generate_entry(entry, payload_index) - for payload_index, entry in enumerate(data) - ] - - # generate statistics - itemCount = len(self.rows) - state_to_count = {state: 0 for state in ImportState} - for entry in self.rows: - state_to_count[entry["state"]] += 1 - - self.statistics = [ - {"name": "total", "value": itemCount}, - {"name": "created", "value": state_to_count[ImportState.NEW]}, - {"name": "updated", "value": state_to_count[ImportState.DONE]}, - {"name": "error", "value": state_to_count[ImportState.ERROR]}, - {"name": "warning", "value": 
state_to_count[ImportState.WARNING]}, - ] - - self.set_state( - state_to_count[ImportState.ERROR], state_to_count[ImportState.WARNING] - ) - self.store_rows_in_the_action_worker("account") - return {} - - def generate_entry( - self, entry: Dict[str, Any], payload_index: int - ) -> Dict[str, Any]: - state, messages = None, [] - try: - UserCreate.schema_validator(entry) - if entry.get("username"): - if self.check_username_for_duplicate(entry["username"], payload_index): - state, msg = self.handle_done_entry( - "username", entry, payload_index - ) - if msg: - messages.append(msg) - else: - state = self.handle_new_entry("username", entry) - elif entry.get("saml_id"): - if self.check_saml_id_for_duplicate(entry["saml_id"], payload_index): - state, msg = self.handle_done_entry("saml_id", entry, payload_index) - if msg: - messages.append(msg) - else: - state = self.handle_new_entry("saml_id", entry) - else: - if not (entry.get("first_name") or entry.get("last_name")): - state = ImportState.ERROR - messages.append("Cannot generate username.") - elif self.check_name_and_email_for_duplicate( - *UserJsonUpload._names_and_email(entry), payload_index - ): - state = ImportState.DONE - if searchdata := self.get_search_data(payload_index): - entry["username"] = { - "value": searchdata["username"], - "info": ImportState.DONE, - "id": searchdata["id"], - } - else: - state = ImportState.ERROR - if usernames := self.has_multiple_search_data(payload_index): - messages.append( - "Found more than one user: " + ", ".join(usernames) - ) - else: - messages.append( - f"Duplicate in csv list index: {payload_index}" - ) - else: - state = ImportState.NEW - entry["username"] = { - "value": self.generate_username(entry), - "info": ImportState.GENERATED, - } - self.handle_default_password(entry, state) - if entry.get("saml_id", {}).get("value"): - if entry.get("password") or entry.get("default_password"): - messages.append("Remove password or default_password from entry.") - entry.pop("password", None) - entry.pop("default_password", None) - entry["can_change_own_password"] = False - except fastjsonschema.JsonSchemaException as exception: - state = ImportState.ERROR - messages.append(exception.message) - return {"state": state, "messages": messages, "data": entry} - - def handle_default_password(self, entry: Dict[str, Any], state: str) -> None: - if state == ImportState.NEW: - if "default_password" in entry: - value = entry["default_password"] - info = ImportState.DONE - else: - value = get_random_password() - info = ImportState.GENERATED - entry["default_password"] = {"value": value, "info": info} - elif state in (ImportState.DONE, ImportState.ERROR): - if "default_password" in entry: - entry["default_password"] = { - "value": entry["default_password"], - "info": ImportState.DONE, - } - - @staticmethod - def _names_and_email(entry: Dict[str, Any]) -> Tuple[str, str, str]: - return ( - entry.get("first_name", ""), - entry.get("last_name", ""), - entry.get("email", ""), - ) - - def handle_done_entry( - self, fieldname: str, entry: Dict[str, Any], payload_index: int - ) -> Tuple[ImportState, Optional[str]]: - state = ImportState.DONE - message = None - if searchdata := self.get_search_data(payload_index): - entry[fieldname] = { - "value": entry[fieldname], - "info": "done", - "id": searchdata["id"], - } - else: - entry[fieldname] = {"value": entry[fieldname], "info": "done"} - state = ImportState.ERROR - message = f"Duplicate in csv list index: {payload_index}" - return state, message - - def handle_new_entry(self, 
fieldname: str, entry: Dict[str, Any]) -> ImportState: - state = ImportState.NEW - entry[fieldname] = {"value": entry[fieldname], "info": "done"} - return state diff --git a/openslides_backend/action/actions/user/update.py b/openslides_backend/action/actions/user/update.py index 9eb315cf0..89a66df98 100644 --- a/openslides_backend/action/actions/user/update.py +++ b/openslides_backend/action/actions/user/update.py @@ -67,6 +67,7 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: "is_active", "organization_management_level", "saml_id", + "password", ], ) if user.get("saml_id") and ( @@ -75,6 +76,10 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: raise ActionException( f"user {user['saml_id']} is a Single Sign On user and may not set the local default_passwort or the right to change it locally." ) + if instance.get("saml_id") and user.get("password"): + instance["can_change_own_password"] = False + instance["default_password"] = "" + instance["password"] = "" if ( instance["id"] == self.user_id diff --git a/openslides_backend/action/actions/user/user_mixin.py b/openslides_backend/action/actions/user/user_mixin.py index 40419ab3f..938245dfc 100644 --- a/openslides_backend/action/actions/user/user_mixin.py +++ b/openslides_backend/action/actions/user/user_mixin.py @@ -5,7 +5,7 @@ from openslides_backend.shared.typing import HistoryInformation from openslides_backend.shared.util import ONE_ORGANIZATION_FQID -from ....action.action import Action +from ....action.action import Action, original_instances from ....action.mixins.archived_meeting_check_mixin import CheckForArchivedMeetingMixin from ....presenter.search_users import SearchUsers from ....services.datastore.interface import DatastoreService @@ -13,20 +13,25 @@ from ....shared.filters import FilterOperator from ....shared.patterns import FullQualifiedId, fqid_from_collection_and_id from ....shared.schema import decimal_schema, id_list_schema, optional_id_schema +from ...util.typing import ActionData from ..meeting_user.set_data import MeetingUserSetData class UsernameMixin(Action): - def generate_usernames(self, usernames: List[str]) -> List[str]: + def generate_usernames( + self, usernames: List[str], fix_usernames: Optional[List[str]] = None + ) -> List[str]: """ Generate unique usernames in parallel to a given list of usernames """ + if fix_usernames is None: + fix_usernames = [] used_usernames: List[str] = [] for username in usernames: template_username = username count = 0 while True: - if username in used_usernames: + if username in used_usernames or username in fix_usernames: count += 1 username = template_username + str(count) continue @@ -92,22 +97,33 @@ def validate_instance(self, instance: Dict[str, Any]) -> None: "Missing meeting_id in instance, because meeting related fields used" ) + @original_instances + def get_updated_instances(self, action_data: ActionData) -> ActionData: + for instance in action_data: + for field in ("username", "first_name", "last_name", "email", "saml_id"): + self.strip_field(field, instance) + return super().get_updated_instances(action_data) + def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: instance = super().update_instance(instance) - for field in ("username", "first_name", "last_name", "email"): - self.strip_field(field, instance) - if "username" in instance: - if not instance["username"]: - raise ActionException("This username is forbidden.") - result = self.datastore.filter( - "user", - FilterOperator("username", "=", 
instance["username"]), - ["id"], - ) - if result and instance["id"] not in result.keys(): - raise ActionException( - f"A user with the username {instance['username']} already exists." + + def check_existence(what: str) -> None: + if what in instance: + if not instance[what]: + raise ActionException(f"This {what} is forbidden.") + result = self.datastore.filter( + "user", + FilterOperator(what, "=", instance[what]), + ["id"], ) + if result and instance["id"] not in result.keys(): + raise ActionException( + f"A user with the {what} {instance[what]} already exists." + ) + + check_existence("username") + check_existence("saml_id") + self.check_meeting_and_users( instance, fqid_from_collection_and_id("user", instance["id"]) ) diff --git a/openslides_backend/action/mixins/import_mixins.py b/openslides_backend/action/mixins/import_mixins.py index e45e83dcb..a5fe8b332 100644 --- a/openslides_backend/action/mixins/import_mixins.py +++ b/openslides_backend/action/mixins/import_mixins.py @@ -1,51 +1,203 @@ +import csv +from collections import defaultdict +from decimal import Decimal from enum import Enum -from time import time -from typing import Any, Callable, Dict, List, Optional, TypedDict +from time import mktime, strptime, time +from typing import Any, Callable, Dict, List, Optional, Tuple, Union, cast + +from typing_extensions import NotRequired, TypedDict from ...shared.exceptions import ActionException +from ...shared.filters import And, Filter, FilterOperator, Or from ...shared.interfaces.event import Event, EventType +from ...shared.interfaces.services import DatastoreService from ...shared.interfaces.write_request import WriteRequest from ...shared.patterns import fqid_from_collection_and_id -from ..action import Action from ..util.typing import ActionData, ActionResultElement +from .singular_action_mixin import SingularActionMixin + +TRUE_VALUES = ("1", "true", "yes", "y", "t") +FALSE_VALUES = ("0", "false", "no", "n", "f") -TRUE_VALUES = ("1", "true", "yes", "t") -FALSE_VALUES = ("0", "false", "no", "f") +# A searchfield can be a string or a tuple of strings +SearchFieldType = Union[str, Tuple[str, ...]] class ImportState(str, Enum): - ERROR = "error" - NEW = "new" + NONE = "none" WARNING = "warning" + NEW = "new" DONE = "done" GENERATED = "generated" + REMOVE = "remove" + ERROR = "error" + + +class ImportRow(TypedDict): + state: ImportState + data: Dict[str, Any] + messages: List[str] + + +class ResultType(Enum): + """Used by Lookup to differ the possible results in check_duplicate.""" + + FOUND_ID = 1 + FOUND_MORE_IDS = 2 + NOT_FOUND = 3 + NOT_FOUND_ANYMORE = 4 + + +class Lookup: + def __init__( + self, + datastore: DatastoreService, + collection: str, + name_entries: List[Tuple[SearchFieldType, Dict[str, Any]]], + field: SearchFieldType = "name", + mapped_fields: Optional[List[str]] = None, + ) -> None: + if mapped_fields is None: + mapped_fields = [] + self.datastore = datastore + self.collection = collection + self.field = field + self.name_to_ids: Dict[SearchFieldType, List[Dict[str, Any]]] = defaultdict( + list + ) + for name, _ in name_entries: + self.name_to_ids[name] = [] + self.id_to_name: Dict[int, List[SearchFieldType]] = defaultdict(list) + or_filters: List[Filter] = [] + if "id" not in mapped_fields: + mapped_fields.append("id") + if type(field) == str: + if field not in mapped_fields: + mapped_fields.append(field) + if name_entries: + or_filters = [ + FilterOperator(field, "=", name) for name, _ in name_entries + ] + else: + mapped_fields.extend((f for f in field if f not 
in mapped_fields)) + if name_entries: + or_filters = [ + And(*[FilterOperator(field[i], "=", name_tpl[i]) for i in range(3)]) + for name_tpl, _ in name_entries + ] + if or_filters: + for entry in datastore.filter( + collection, + Or(*or_filters), + mapped_fields, + lock_result=False, + ).values(): + self.add_item(entry) + + # Add action data items not found in database to lookup dict + for name, entry in name_entries: + if values := cast(list, self.name_to_ids[name]): + if not values[0].get("id"): + values.append(entry) + else: + if type(self.field) == str: + obj = entry[self.field] + if type(obj) == dict and obj.get("id"): + obj["info"] = ImportState.ERROR + values.append(entry) + + def check_duplicate(self, name: SearchFieldType) -> ResultType: + if len(values := self.name_to_ids.get(name, [])) == 1: + if (entry := values[0]).get("id"): + if ( + type(self.field) == str + and type(obj := entry[self.field]) == dict + and obj["info"] == ImportState.ERROR + ): + return ResultType.NOT_FOUND_ANYMORE + return ResultType.FOUND_ID + else: + return ResultType.NOT_FOUND + elif len(values) > 1: + return ResultType.FOUND_MORE_IDS + raise ActionException("Logical Error in Lookup Class") + + def get_field_by_name( + self, name: SearchFieldType, fieldname: str + ) -> Optional[Union[int, str, bool]]: + """Gets 'fieldname' from value of name_to_ids-dict""" + if len(self.name_to_ids.get(name, [])) == 1: + return self.name_to_ids[name][0].get(fieldname) + return None + + def add_item(self, entry: Dict[str, Any]) -> None: + if type(self.field) == str: + if type(key := entry[self.field]) == dict: + key = key["value"] + self.name_to_ids[key].append(entry) + if entry.get("id"): + self.id_to_name[entry["id"]].append(entry[self.field]) + else: + key = tuple(entry.get(f, "") for f in self.field) + self.name_to_ids[key].append(entry) + if entry.get("id"): + self.id_to_name[entry["id"]].append(key) + + +class BaseImportJsonUpload(SingularActionMixin): + @staticmethod + def count_warnings_in_payload( + data: Union[List[Union[Dict[str, str], List[Any]]], Dict[str, Any]] + ) -> int: + count = 0 + for col in data: + if type(col) == dict: + if col.get("info") == ImportState.WARNING: + count += 1 + elif type(col) == list: + count += BaseImportJsonUpload.count_warnings_in_payload(col) + return count + + @staticmethod + def get_value_from_union_str_object( + field: Optional[Union[str, Dict[str, Any]]] + ) -> Optional[str]: + if type(field) == dict: + return field.get("value", "") + elif type(field) == str: + return field + else: + return None -class ImportMixin(Action): +class ImportMixin(BaseImportJsonUpload): """ Mixin for import actions. It works together with the json_upload. """ import_name: str - - def prepare_action_data(self, action_data: ActionData) -> ActionData: - self.error_store_ids: List[int] = [] - return action_data + rows: List[ImportRow] = [] + result: Dict[str, List] = {} + import_state = ImportState.DONE def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: store_id = instance["id"] - worker = self.datastore.get( - fqid_from_collection_and_id("action_worker", store_id), - ["result", "state"], + import_preview = self.datastore.get( + fqid_from_collection_and_id("import_preview", store_id), + ["result", "state", "name"], lock_result=False, ) - if (worker.get("result") or {}).get("import") != self.import_name: + if import_preview.get("name") != self.import_name: raise ActionException( f"Wrong id doesn't point on {self.import_name} import data." 
) - if worker.get("state") == ImportState.ERROR: - raise ActionException("Error in import.") - self.result = worker["result"] + if import_preview.get("state") not in list(ImportState): + raise ActionException( + "Error in import: Missing valid state in stored import_preview." + ) + if import_preview.get("state") == ImportState.ERROR: + raise ActionException("Error in import. Data will not be imported.") + self.result = import_preview.get("result", {}) return instance def handle_relation_updates(self, instance: Dict[str, Any]) -> Any: @@ -57,23 +209,31 @@ def create_events(self, instance: Dict[str, Any]) -> Any: def create_action_result_element( self, instance: Dict[str, Any] ) -> Optional[ActionResultElement]: - return { - "rows": self.result.get("rows", []), - } + return {"rows": self.result.get("rows", []), "state": self.import_state} + + def flatten_object_fields(self, fields: Optional[List[str]] = None) -> None: + """replace objects from self.rows["data"] with their values. Uses the fields, if given, otherwise all""" + for row in self.rows: + entry = row["data"] + used_list = fields if fields else entry.keys() + for field in used_list: + if field in entry: + if type(dvalue := entry[field]) == dict: + entry[field] = dvalue["value"] def get_on_success(self, action_data: ActionData) -> Callable[[], None]: def on_success() -> None: for instance in action_data: store_id = instance["id"] - if store_id in self.error_store_ids: + if self.import_state == ImportState.ERROR: continue - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( type=EventType.Delete, fqid=fqid_from_collection_and_id( - "action_worker", store_id + "import_preview", store_id ), ) ], @@ -88,6 +248,7 @@ def on_success() -> None: class HeaderEntry(TypedDict): property: str type: str + is_object: NotRequired[bool] class StatisticEntry(TypedDict): @@ -95,25 +256,28 @@ class StatisticEntry(TypedDict): value: int -class JsonUploadMixin(Action): +class JsonUploadMixin(BaseImportJsonUpload): headers: List[HeaderEntry] rows: List[Dict[str, Any]] statistics: List[StatisticEntry] - state: ImportState + import_state: ImportState def set_state(self, number_errors: int, number_warnings: int) -> None: + """ + To remove, but is used in some backend imports + """ if number_errors > 0: - self.state = ImportState.ERROR + self.import_state = ImportState.ERROR elif number_warnings > 0: - self.state = ImportState.WARNING + self.import_state = ImportState.WARNING else: - self.state = ImportState.DONE + self.import_state = ImportState.DONE - def store_rows_in_the_action_worker(self, import_name: str) -> None: - self.new_store_id = self.datastore.reserve_id(collection="action_worker") - fqid = fqid_from_collection_and_id("action_worker", self.new_store_id) + def store_rows_in_the_import_preview(self, import_name: str) -> None: + self.new_store_id = self.datastore.reserve_id(collection="import_preview") + fqid = fqid_from_collection_and_id("import_preview", self.new_store_id) time_created = int(time()) - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( @@ -121,10 +285,10 @@ def store_rows_in_the_action_worker(self, import_name: str) -> None: fqid=fqid, fields={ "id": self.new_store_id, - "result": {"import": import_name, "rows": self.rows}, + "name": import_name, + "result": {"rows": self.rows}, "created": time_created, - "timestamp": time_created, - "state": self.state, + "state": self.import_state, }, ) ], @@ -147,21 +311,43 @@ def 
create_action_result_element( "headers": self.headers, "rows": self.rows, "statistics": self.statistics, - "state": self.state, + "state": self.import_state, } + def add_payload_index_to_action_data(self, action_data: ActionData) -> ActionData: + for payload_index, entry in enumerate(action_data): + entry["payload_index"] = payload_index + return action_data + def validate_instance(self, instance: Dict[str, Any]) -> None: # filter extra, not needed fields before validate and parse some fields property_to_type = { - header["property"]: header["type"] for header in self.headers + header["property"]: ( + header["type"], + header.get("is_object"), + header.get("is_list", False), + ) + for header in self.headers } for entry in list(instance.get("data", [])): for field in dict(entry): if field not in property_to_type: del entry[field] else: - type_ = property_to_type[field] - if type_ == "integer": + type_, is_object, is_list = property_to_type[field] + if type_ == "string" and is_list: + try: + entry[field] = [ + item.strip() + for item in list(csv.reader([entry[field]]))[0] + ] + except Exception: + pass + elif type_ == "string": + continue + elif type_ == "decimal": + entry[field] = str(Decimal("0.000000") + Decimal(entry[field])) + elif type_ == "integer": try: entry[field] = int(entry[field]) except ValueError: @@ -177,5 +363,15 @@ def validate_instance(self, instance: Dict[str, Any]) -> None: raise ActionException( f"Could not parse {entry[field]} expect boolean" ) - + elif type_ == "date": + try: + entry[field] = int( + mktime(strptime(entry[field], "%Y-%m-%d")) + ) + except Exception: + pass + else: + raise ActionException( + f"Unknown type in conversion: type:{type_} is_object:{str(is_object)} is_list:{str(is_list)}" + ) super().validate_instance(instance) diff --git a/openslides_backend/models/models.py b/openslides_backend/models/models.py index 92a6dc1e9..6a20e1bb5 100644 --- a/openslides_backend/models/models.py +++ b/openslides_backend/models/models.py @@ -4,7 +4,7 @@ from .base import Model from .mixins import AgendaItemModelMixin, MeetingModelMixin, PollModelMixin -MODELS_YML_CHECKSUM = "295720b86599b69e9340af0427f4f2bc" +MODELS_YML_CHECKSUM = "566944b1e2fa1b4b216537f3acbb3e5f" class Organization(Model): @@ -2109,3 +2109,16 @@ class ActionWorker(Model): created = fields.TimestampField(required=True) timestamp = fields.TimestampField(required=True) result = fields.JSONField() + + +class ImportPreview(Model): + collection = "import_preview" + verbose_name = "import preview" + + id = fields.IntegerField() + name = fields.CharField(required=True) + state = fields.CharField( + required=True, constraints={"enum": ["warning", "error", "done"]} + ) + created = fields.TimestampField(required=True) + result = fields.JSONField() diff --git a/openslides_backend/presenter/check_database_all.py b/openslides_backend/presenter/check_database_all.py index 0c229d319..3b2ea4942 100644 --- a/openslides_backend/presenter/check_database_all.py +++ b/openslides_backend/presenter/check_database_all.py @@ -37,7 +37,7 @@ def check_everything(datastore: DatastoreService) -> None: for id, model in models.items() } for collection, models in result.items() - if collection != "action_worker" + if collection not in ["action_worker", "import_preview"] } data["_migration_index"] = get_backend_migration_index() Checker( diff --git a/openslides_backend/services/datastore/adapter.py b/openslides_backend/services/datastore/adapter.py index 55ef735a4..36242ce3b 100644 --- 
a/openslides_backend/services/datastore/adapter.py +++ b/openslides_backend/services/datastore/adapter.py @@ -445,10 +445,10 @@ def write(self, write_requests: Union[List[WriteRequest], WriteRequest]) -> None ) self.retrieve(command) - def write_action_worker(self, write_request: WriteRequest) -> None: - command = commands.WriteActionWorker(write_requests=[write_request]) + def write_without_events(self, write_request: WriteRequest) -> None: + command = commands.WriteWithoutEvents(write_requests=[write_request]) self.logger.debug( - f"Start WRITE_ACTION_WORKER request to datastore with the following data: " + f"Start WRITE_WITHOUT_EVENTS request to datastore with the following data: " f"Write request: {write_request}" ) self.retrieve(command) diff --git a/openslides_backend/services/datastore/commands.py b/openslides_backend/services/datastore/commands.py index ebd6afa45..e3bf0ea7e 100644 --- a/openslides_backend/services/datastore/commands.py +++ b/openslides_backend/services/datastore/commands.py @@ -108,9 +108,9 @@ def default(self, o: Any) -> Any: return json.dumps(self.write_requests, cls=WriteRequestJSONEncoder) -class WriteActionWorker(Write): +class WriteWithoutEvents(Write): """ - WriteActionWorker command, same as Write, but on separate route + WriteWithoutEvents command, same as Write, but on separate route """ diff --git a/openslides_backend/services/datastore/http_engine.py b/openslides_backend/services/datastore/http_engine.py index d24581515..95d95cd57 100644 --- a/openslides_backend/services/datastore/http_engine.py +++ b/openslides_backend/services/datastore/http_engine.py @@ -27,7 +27,7 @@ class HTTPEngine: "write", "truncate_db", "delete_history_information", - "write_action_worker", + "write_without_events", ] def __init__( diff --git a/openslides_backend/services/datastore/interface.py b/openslides_backend/services/datastore/interface.py index aff5fe988..9df19bdcb 100644 --- a/openslides_backend/services/datastore/interface.py +++ b/openslides_backend/services/datastore/interface.py @@ -145,7 +145,7 @@ def write(self, write_requests: Union[List[WriteRequest], WriteRequest]) -> None ... @abstractmethod - def write_action_worker(self, write_request: WriteRequest) -> None: + def write_without_events(self, write_request: WriteRequest) -> None: ... 
@abstractmethod diff --git a/requirements/export_datastore_commit.sh b/requirements/export_datastore_commit.sh index 424fa1350..120f40c6c 100755 --- a/requirements/export_datastore_commit.sh +++ b/requirements/export_datastore_commit.sh @@ -1,2 +1,2 @@ #!/bin/bash -export DATASTORE_COMMIT_HASH=069b5b27df09f0b4965539c81c75c276507d08e6 +export DATASTORE_COMMIT_HASH=0720dd02b04b5d77a6269b5cd8611b1eab154e9f diff --git a/tests/system/action/meeting/test_clone.py b/tests/system/action/meeting/test_clone.py index 702cabdf6..3fc6e4794 100644 --- a/tests/system/action/meeting/test_clone.py +++ b/tests/system/action/meeting/test_clone.py @@ -1502,6 +1502,20 @@ def test_with_action_worker(self) -> None: self.assert_model_exists("action_worker/1", {"name": aw_name}) self.assert_model_not_exists("action_worker/2") + def test_with_import_preview(self) -> None: + """import_preview shouldn't be cloned""" + aw_name = "test import_preview" + self.test_models["import_preview/1"] = { + "name": aw_name, + "state": "done", + "created": round(time() - 3), + } + self.set_models(self.test_models) + response = self.request("meeting.clone", {"meeting_id": 1}) + self.assert_status_code(response, 200) + self.assert_model_exists("import_preview/1", {"name": aw_name}) + self.assert_model_not_exists("import_preview/2") + def test_clone_with_2_existing_meetings(self) -> None: self.test_models[ONE_ORGANIZATION_FQID]["active_meeting_ids"] = [1, 2] self.test_models["committee/1"]["meeting_ids"] = [1, 2] diff --git a/tests/system/action/meeting/test_import.py b/tests/system/action/meeting/test_import.py index 68c6fd716..b87df1994 100644 --- a/tests/system/action/meeting/test_import.py +++ b/tests/system/action/meeting/test_import.py @@ -1486,6 +1486,23 @@ def test_dont_import_action_worker(self) -> None: self.assert_status_code(response, 200) self.assert_model_not_exists("action_worker/1") + def test_dont_import_import_preview(self) -> None: + request_data = self.create_request_data( + { + "import_preview": { + "1": { + "id": 1, + "name": "testcase", + "state": "done", + "created": round(time.time() - 3), + } + } + } + ) + response = self.request("meeting.import", request_data) + self.assert_status_code(response, 200) + self.assert_model_not_exists("import_preview/1") + def test_bad_format_invalid_id_key(self) -> None: request_data = self.create_request_data({"tag": {"1": {"id": 2}}}) response = self.request("meeting.import", request_data) diff --git a/tests/system/action/test_action_worker.py b/tests/system/action/test_action_worker.py index 2227f74f0..44613cf44 100644 --- a/tests/system/action/test_action_worker.py +++ b/tests/system/action/test_action_worker.py @@ -227,7 +227,7 @@ def thread_method(self: ActionWorkerTest) -> None: start2 = datetime.now() self.new_id = self.datastore.reserve_id("action_worker") self.fqid = fqid_from_collection_and_id("action_worker", self.new_id) - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( @@ -257,7 +257,7 @@ def test_action_worker_delete_by_ids(self) -> None: self.user_id = 1 new_ids = self.datastore.reserve_ids("action_worker", amount=3) for new_id in new_ids: - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event( @@ -278,7 +278,7 @@ def test_action_worker_delete_by_ids(self) -> None: f"action_worker/{new_id}", {"name": "test", "state": "running"} ) - self.datastore.write_action_worker( + self.datastore.write_without_events( WriteRequest( events=[ Event(type=EventType.Delete, 
fqid=f"action_worker/{new_id}") diff --git a/tests/system/action/topic/test_import.py b/tests/system/action/topic/test_import.py index ef09d5982..d547d8438 100644 --- a/tests/system/action/topic/test_import.py +++ b/tests/system/action/topic/test_import.py @@ -8,22 +8,18 @@ def setUp(self) -> None: self.set_models( { "meeting/22": {"name": "test", "is_active_in_organization_id": 1}, - "action_worker/2": { + "import_preview/2": { + "state": ImportState.DONE, + "name": "topic", "result": { - "import": "topic", "rows": [ { "state": ImportState.NEW, "messages": [], "data": {"title": "test", "meeting_id": 22}, }, - { - "state": ImportState.ERROR, - "messages": ["test"], - "data": {"title": "broken", "meeting_id": 22}, - }, ], - } + }, }, } ) @@ -33,13 +29,13 @@ def test_import_correct(self) -> None: self.assert_status_code(response, 200) self.assert_model_exists("topic/1", {"title": "test", "meeting_id": 22}) self.assert_model_exists("meeting/22", {"topic_ids": [1]}) - self.assert_model_not_exists("action_worker/2") + self.assert_model_not_exists("import_preview/2") def test_import_abort(self) -> None: response = self.request("topic.import", {"id": 2, "import": False}) self.assert_status_code(response, 200) self.assert_model_not_exists("topic/1") - self.assert_model_not_exists("action_worker/2") + self.assert_model_not_exists("import_preview/2") def test_import_duplicate_in_db(self) -> None: self.set_models( @@ -71,7 +67,7 @@ def test_import_duplicate_and_topic_deleted_so_imported(self) -> None: }, ) self.assert_status_code(response, 200) - self.assert_model_exists("action_worker/3") + self.assert_model_exists("import_preview/3") response = self.request("topic.delete", {"id": 1}) self.assert_status_code(response, 200) self.assert_model_deleted("topic/1") @@ -98,7 +94,7 @@ def test_import_duplicate_so_not_imported(self) -> None: }, ) self.assert_status_code(response, 200) - self.assert_model_exists("action_worker/3") + self.assert_model_exists("import_preview/3") response = self.request("topic.import", {"id": 3, "import": True}) self.assert_status_code(response, 200) self.assert_model_not_exists("topic/2") @@ -116,11 +112,11 @@ def test_import_with_upload(self) -> None: }, ) self.assert_status_code(response, 200) - self.assert_model_exists("action_worker/3") + self.assert_model_exists("import_preview/3") response = self.request("topic.import", {"id": 3, "import": True}) self.assert_status_code(response, 200) self.assert_model_exists( "topic/1", {"title": "another title", "meeting_id": 22} ) self.assert_model_exists("meeting/22", {"topic_ids": [1]}) - self.assert_model_not_exists("action_worker/3") + self.assert_model_not_exists("import_preview/3") diff --git a/tests/system/action/topic/test_json_upload.py b/tests/system/action/topic/test_json_upload.py index e9a2b4de8..a98eaecaa 100644 --- a/tests/system/action/topic/test_json_upload.py +++ b/tests/system/action/topic/test_json_upload.py @@ -45,10 +45,9 @@ def test_json_upload_agenda_data(self) -> None: }, } worker = self.assert_model_exists( - "action_worker/1", {"state": ImportState.DONE} + "import_preview/1", {"state": ImportState.DONE} ) assert start_time <= worker.get("created", -1) <= end_time - assert start_time <= worker.get("timestamp", -1) <= end_time def test_json_upload_empty_data(self) -> None: response = self.request( @@ -84,10 +83,10 @@ def test_json_upload_results(self) -> None: ) self.assert_status_code(response, 200) self.assert_model_exists( - "action_worker/1", + "import_preview/1", { + "name": "topic", "result": { - 
"import": "topic", "rows": [ { "state": ImportState.NEW, @@ -95,7 +94,7 @@ def test_json_upload_results(self) -> None: "data": {"title": "test", "meeting_id": 22}, } ], - } + }, }, ) result = response.json["results"][0][0] @@ -159,10 +158,10 @@ def test_json_upload_duplicate_in_data(self) -> None: assert result["rows"][2]["messages"] == ["Duplicate"] assert result["rows"][2]["state"] == ImportState.WARNING self.assert_model_exists( - "action_worker/1", + "import_preview/1", { + "name": "topic", "result": { - "import": "topic", "rows": [ { "state": ImportState.NEW, @@ -180,7 +179,7 @@ def test_json_upload_duplicate_in_data(self) -> None: "data": {"title": "test", "meeting_id": 22}, }, ], - } + }, }, ) diff --git a/tests/system/action/user/test_account_import.py b/tests/system/action/user/test_account_import.py new file mode 100644 index 000000000..c051d007b --- /dev/null +++ b/tests/system/action/user/test_account_import.py @@ -0,0 +1,762 @@ +from typing import Any, Dict + +from openslides_backend.action.mixins.import_mixins import ImportMixin, ImportState +from openslides_backend.permissions.management_levels import OrganizationManagementLevel +from tests.system.action.base import BaseActionTestCase + +from .test_account_json_upload import AccountJsonUploadForUseInImport + + +class AccountJsonImport(BaseActionTestCase): + def setUp(self) -> None: + super().setUp() + self.set_models( + { + "import_preview/2": { + "state": ImportState.DONE, + "name": "account", + "result": { + "rows": [ + { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": { + "value": "jonny", + "info": ImportState.DONE, + }, + "first_name": "Testy", + }, + }, + ], + }, + }, + "import_preview/3": { + "state": ImportState.DONE, + "name": "account", + "result": { + "rows": [ + { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": { + "value": "TestyTester", + "info": ImportState.DONE, + }, + "first_name": "Testy", + "last_name": "Tester", + "email": "email@test.com", + "gender": "male", + }, + }, + ], + }, + }, + "import_preview/4": { + "state": ImportState.ERROR, + "name": "account", + "result": { + "rows": [ + { + "state": ImportState.ERROR, + "messages": ["test"], + "data": {"gender": "male"}, + }, + ], + }, + }, + "import_preview/5": {"result": None}, + "user/2": { + "username": "test", + "default_password": "secret", + "password": "secret", + }, + "import_preview/6": { + "state": ImportState.WARNING, + "name": "account", + "result": { + "rows": [ + { + "state": ImportState.DONE, + "messages": [], + "data": { + "id": 2, + "username": { + "value": "test", + "info": ImportState.DONE, + "id": 2, + }, + "saml_id": { + "value": "12345", + "info": ImportState.DONE, + }, + "default_password": { + "value": "", + "info": ImportState.WARNING, + }, + }, + }, + ], + }, + }, + } + ) + + def test_import_username_and_create(self) -> None: + response = self.request("account.import", {"id": 2, "import": True}) + self.assert_status_code(response, 200) + user = self.assert_model_exists( + "user/3", + {"username": "jonny", "first_name": "Testy"}, + ) + assert user.get("default_password") + assert user.get("password") + self.assert_model_not_exists("import_preview/2") + + def test_import_abort(self) -> None: + response = self.request("account.import", {"id": 2, "import": False}) + self.assert_status_code(response, 200) + self.assert_model_not_exists("import_preview/2") + self.assert_model_not_exists("user/3") + + def test_import_wrong_import_preview(self) -> None: + response = 
self.request("account.import", {"id": 5, "import": True}) + self.assert_status_code(response, 400) + assert ( + "Wrong id doesn't point on account import data." in response.json["message"] + ) + + def test_import_username_and_update(self) -> None: + self.set_models( + { + "user/1": { + "username": "user1", + }, + "import_preview/7": { + "state": ImportState.DONE, + "name": "account", + "result": { + "rows": [ + { + "state": ImportState.DONE, + "messages": [], + "data": { + "id": 1, + "username": { + "value": "user1", + "info": ImportState.DONE, + "id": 1, + }, + "first_name": "Testy", + }, + }, + ], + }, + }, + } + ) + response = self.request("account.import", {"id": 7, "import": True}) + self.assert_status_code(response, 200) + self.assert_model_exists("user/1", {"first_name": "Testy"}) + + def test_import_names_and_email_and_create(self) -> None: + response = self.request("account.import", {"id": 3, "import": True}) + self.assert_status_code(response, 200) + self.assert_model_exists( + "user/3", + { + "username": "TestyTester", + "first_name": "Testy", + "gender": "male", + "last_name": "Tester", + "email": "email@test.com", + }, + ) + + def get_import_preview_data( + self, number: int, row_state: ImportState, data: Dict[str, Any] + ) -> Dict[str, Any]: + def get_import_state() -> ImportState: + """Precondition: There is only 1 row(_state)""" + if row_state == ImportState.ERROR: + return row_state + if ImportMixin.count_warnings_in_payload(data): + return ImportState.WARNING + else: + return ImportState.DONE + + return { + f"import_preview/{number}": { + "state": get_import_state(), + "name": "account", + "result": { + "rows": [ + { + "state": row_state, + "messages": [], + "data": data, + }, + ], + }, + } + } + + def test_import_with_saml_id(self) -> None: + self.set_models( + self.get_import_preview_data( + 7, + ImportState.NEW, + {"saml_id": {"value": "testsaml", "info": ImportState.NEW}}, + ) + ) + response = self.request("account.import", {"id": 7, "import": True}) + self.assert_status_code(response, 400) + self.assertIn( + "Invalid JsonUpload data: The data from json upload must contain a valid username object", + response.json["message"], + ) + + def test_import_saml_id_error_new_and_saml_id_exists(self) -> None: + """Set saml_id 'testsaml' to user 1, add the import user 1 will be + found and the import should result in an error.""" + self.set_models( + { + "user/1": {"saml_id": "testsaml"}, + **self.get_import_preview_data( + 6, + ImportState.NEW, + { + "username": {"value": "testuser", "info": ImportState.NEW}, + "saml_id": {"value": "testsaml", "info": ImportState.NEW}, + }, + ), + } + ) + response = self.request("account.import", {"id": 6, "import": True}) + self.assert_status_code(response, 200) + entry = response.json["results"][0][0]["rows"][0] + assert entry["state"] == ImportState.ERROR + assert entry["messages"] == [ + "Error: saml_id 'testsaml' found in different id (1 instead of None)" + ] + + def test_import_done_error_missing_user(self) -> None: + self.set_models( + { + **self.get_import_preview_data( + 6, + ImportState.DONE, + { + "username": { + "value": "XYZ", + "info": ImportState.DONE, + "id": 78, + }, + "saml_id": {"value": "testsaml", "info": ImportState.NEW}, + "first_name": "Hugo", + "id": 78, + }, + ), + } + ) + response = self.request("account.import", {"id": 6, "import": True}) + self.assert_status_code(response, 200) + entry = response.json["results"][0][0]["rows"][0] + assert entry["state"] == ImportState.ERROR + assert entry["messages"] == [ + "Error: 
user 78 not found anymore for updating user 'XYZ'." + ] + + def test_import_error_state_done_missing_username(self) -> None: + self.set_models( + self.get_import_preview_data( + 6, + ImportState.DONE, + { + "first_name": "Testy", + }, + ) + ) + response = self.request("account.import", {"id": 6, "import": True}) + self.assert_status_code(response, 400) + self.assertIn( + "Invalid JsonUpload data: The data from json upload must contain a valid username object", + response.json["message"], + ) + + def test_import_error_state_done_missing_user_in_db(self) -> None: + self.set_models( + self.get_import_preview_data( + 6, + ImportState.DONE, + { + "first_name": "Testy", + "username": { + "value": "fred", + "info": ImportState.DONE, + "id": 111, + }, + "id": 111, + }, + ) + ) + response = self.request("account.import", {"id": 6, "import": True}) + self.assert_status_code(response, 200) + entry = response.json["results"][0][0]["rows"][0] + assert entry["state"] == ImportState.ERROR + assert entry["messages"] == [ + "Error: user 111 not found anymore for updating user 'fred'." + ] + + def test_import_error_state_done_search_data_error(self) -> None: + self.set_models( + { + "import_preview/7": { + "state": ImportState.DONE, + "name": "account", + "result": { + "rows": [ + { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": { + "value": "durban", + "info": ImportState.DONE, + } + }, + }, + { + "state": ImportState.DONE, + "messages": [], + "data": { + "username": { + "value": "durban", + "info": ImportState.DONE, + } + }, + }, + ], + }, + } + } + ) + response = self.request("account.import", {"id": 7, "import": True}) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.ERROR + for i in range(2): + entry = result["rows"][i] + assert entry["state"] == ImportState.ERROR + assert entry["messages"] == [ + "Error: username 'durban' is duplicated in import." + ] + + def test_import_error_state_done_not_matching_ids(self) -> None: + self.set_models( + { + "user/8": {"username": "user8"}, + **self.get_import_preview_data( + 6, + ImportState.DONE, + { + "id": 5, + "first_name": "Testy", + "username": { + "value": "user8", + "info": ImportState.DONE, + "id": 5, + }, + }, + ), + } + ) + response = self.request("account.import", {"id": 6, "import": True}) + self.assert_status_code(response, 200) + entry = response.json["results"][0][0]["rows"][0] + assert entry["state"] == ImportState.ERROR + assert entry["messages"] == [ + "Error: username 'user8' found in different id (8 instead of 5)" + ] + + def test_import_error_state_import_preview4(self) -> None: + response = self.request("account.import", {"id": 4, "import": True}) + self.assert_status_code(response, 400) + assert response.json["message"] == "Error in import. Data will not be imported." 
+ self.assert_model_exists("import_preview/4") + + def test_import_no_permission(self) -> None: + self.base_permission_test({}, "account.import", {"id": 2, "import": True}) + + def test_import_permission(self) -> None: + self.base_permission_test( + {}, + "account.import", + {"id": 2, "import": True}, + OrganizationManagementLevel.CAN_MANAGE_USERS, + ) + + +class AccountJsonImportWithIncludedJsonUpload(AccountJsonUploadForUseInImport): + def test_upload_import_with_generated_usernames_okay(self) -> None: + self.json_upload_saml_id_new() + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + self.assert_model_exists( + "user/35", + { + "username": "test_saml_id2", + "saml_id": "test_saml_id", + "default_password": "", + "can_change_own_password": False, + "default_vote_weight": "1.000000", + "organization_id": 1, + "is_physical_person": True, + }, + ) + user36 = self.assert_model_exists( + "user/36", + { + "username": "test_saml_id1", + "saml_id": None, + "can_change_own_password": True, + "default_vote_weight": "1.000000", + }, + ) + assert user36["default_password"] + assert user36["password"] + + user37 = self.assert_model_exists( + "user/37", + { + "username": "test_saml_id21", + "saml_id": None, + "can_change_own_password": True, + "default_vote_weight": "1.000000", + }, + ) + assert user37["default_password"] + assert user37["password"] + + self.assert_model_not_exists("import_preview/1") + + def test_upload_import_with_generated_usernames_error_username(self) -> None: + self.json_upload_saml_id_new() + self.set_models({"user/33": {"username": "test_saml_id21"}}) + response = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response, 200) + assert response.json["results"][0][0]["rows"][2]["state"] == ImportState.ERROR + assert response.json["results"][0][0]["rows"][2]["messages"] == [ + "Error: row state expected to be 'done', but it is 'new'." 
+ ] + assert response.json["results"][0][0]["rows"][2]["data"]["username"] == { + "info": ImportState.ERROR, + "value": "test_saml_id21", + } + self.assert_model_not_exists("user/35") + self.assert_model_not_exists("user/36") + self.assert_model_not_exists("user/37") + self.assert_model_exists("import_preview/1") + + def test_json_upload_set_saml_id_in_existing_account(self) -> None: + self.json_upload_set_saml_id_in_existing_account() + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + self.assert_model_exists( + "user/2", + { + "username": "test", + "saml_id": "test_saml_id", + "default_password": "", + "can_change_own_password": False, + "password": "", + "default_vote_weight": "2.300000", + }, + ) + self.assert_model_not_exists("import_preview/1") + + def test_json_upload_update_saml_id_in_existing_account(self) -> None: + self.json_upload_update_saml_id_in_existing_account() + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + self.assert_model_exists( + "user/2", + { + "username": "test", + "saml_id": "new_one", + "default_vote_weight": "2.300000", + }, + ) + self.assert_model_not_exists("import_preview/1") + + def test_json_upload_names_and_email_find_username_error(self) -> None: + self.json_upload_names_and_email_find_username() + self.set_models({"user/34": {"username": "test34"}}) + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + row = response_import.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Error: user 34 not found anymore for updating user 'test'." 
+ ] + assert row["data"] == { + "id": 34, + "email": "test@ntvtn.de", + "username": {"id": 34, "info": "error", "value": "test"}, + "last_name": "Mustermann", + "first_name": "Max", + "default_password": {"info": "done", "value": "new default password"}, + } + + def test_json_upload_names_and_email_generate_username(self) -> None: + self.json_upload_names_and_email_generate_username() + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + row = response_import.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.NEW + assert row["messages"] == [] + self.assert_model_exists( + "user/35", + { + "id": 35, + "username": "MaxMustermann1", + "last_name": "Mustermann", + "first_name": "Max", + "organization_id": 1, + "is_physical_person": True, + "default_vote_weight": "1.000000", + "can_change_own_password": True, + }, + ) + + def test_json_upload_generate_default_password(self) -> None: + self.json_upload_generate_default_password() + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + row = response_import.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.NEW + assert row["messages"] == [] + user2 = self.assert_model_exists( + "user/2", + { + "id": 2, + "username": "test", + "organization_id": 1, + "is_physical_person": True, + "default_vote_weight": "1.000000", + "can_change_own_password": True, + }, + ) + assert user2["default_password"] + assert user2["password"] + + def test_json_upload_username_10_saml_id_11_error(self) -> None: + self.json_upload_username_10_saml_id_11() + self.set_models({"user/11": {"saml_id": "saml_id10"}}) + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + row = response_import.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password.", + "Error: saml_id 'saml_id10' found in different id (11 instead of 10)", + ] + assert row["data"] == { + "id": 10, + "saml_id": {"info": "error", "value": "saml_id10"}, + "username": {"id": 10, "info": "done", "value": "user10"}, + "default_password": {"info": "warning", "value": ""}, + } + + def test_json_upload_username_username_and_saml_id_found_and_deleted_error( + self, + ) -> None: + self.json_upload_username_username_and_saml_id_found() + self.request("user.delete", {"id": 11}) + assert self.assert_model_deleted("user/11") + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + row = response_import.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Error: user 11 not found anymore for updating user 'user11'." 
+ ] + assert row["data"] == { + "id": 11, + "saml_id": {"info": "done", "value": "saml_id11"}, + "username": {"id": 11, "info": ImportState.ERROR, "value": "user11"}, + "default_vote_weight": "11.000000", + } + + def test_json_upload_update_multiple_users_okay(self) -> None: + self.json_upload_multiple_users() + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + self.assert_model_exists( + "user/2", + { + "id": 2, + "saml_id": "test_saml_id2", + "username": "user2", + "default_password": "", + "default_vote_weight": "2.345678", + "password": "", + "can_change_own_password": False, + }, + ) + self.assert_model_exists( + "user/3", + { + "id": 3, + "saml_id": "saml3", + "username": "user3", + "default_password": "", + "default_vote_weight": "3.345678", + "can_change_own_password": False, + }, + ) + self.assert_model_exists( + "user/4", + { + "id": 4, + "username": "user4", + "email": "mlk@america.com", + "first_name": "Martin", + "last_name": "Luther King", + "default_password": "secret", + "default_vote_weight": "4.345678", + "can_change_own_password": True, + }, + ) + self.assert_model_exists( + "user/5", + { + "id": 5, + "saml_id": "saml5", + "username": "new_user5", + "default_password": "", + "default_vote_weight": "5.345678", + "can_change_own_password": False, + }, + ) + self.assert_model_exists( + "user/6", + { + "id": 6, + "saml_id": "new_saml6", + "username": "new_saml6", + "default_password": "", + "default_vote_weight": "6.345678", + "can_change_own_password": False, + }, + ) + self.assert_model_exists( + "user/7", + { + "id": 7, + "username": "JoanBaez7", + "first_name": "Joan", + "last_name": "Baez7", + "default_vote_weight": "7.345678", + "can_change_own_password": True, + }, + ) + + def test_json_upload_update_multiple_users_all_error(self) -> None: + self.json_upload_multiple_users() + self.request("user.delete", {"id": 2}) + self.set_models( + { + "user/10": {"username": "doubler3", "saml_id": "saml3"}, + "user/4": {"username": "user4_married"}, + "user/11": {"username": "new_user_5", "saml_id": "saml5"}, + "user/12": {"username": "doubler6", "saml_id": "new_saml6"}, + "user/13": {"username": "JoanBaez7"}, + }, + ) + response_import = self.request("account.import", {"id": 1, "import": True}) + self.assert_status_code(response_import, 200) + assert (result := response_import.json["results"][0][0])[ + "state" + ] == ImportState.ERROR + row = result["rows"][0] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password.", + "Error: user 2 not found anymore for updating user 'user2'.", + ] + assert row["data"] == { + "id": 2, + "saml_id": {"info": ImportState.NEW, "value": "test_saml_id2"}, + "username": {"id": 2, "info": ImportState.ERROR, "value": "user2"}, + "default_password": {"info": ImportState.WARNING, "value": ""}, + "default_vote_weight": "2.345678", + } + + row = row = result["rows"][1] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password.", + "Error: saml_id 'saml3' is duplicated in import.", + ] + assert row["data"] == { + "id": 3, + "saml_id": {"info": ImportState.ERROR, "value": "saml3"}, + "username": {"id": 3, "info": ImportState.DONE, "value": "user3"}, + "default_password": {"info": ImportState.WARNING, "value": ""}, + "default_vote_weight": "3.345678", + 
"can_change_own_password": False, + } + + row = row = result["rows"][2] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Error: user 4 not found anymore for updating user 'user4'." + ] + assert row["data"] == { + "id": 4, + "email": "mlk@america.com", + "username": {"id": 4, "info": ImportState.ERROR, "value": "user4"}, + "last_name": "Luther King", + "first_name": "Martin", + "default_vote_weight": "4.345678", + } + + row = row = result["rows"][3] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password.", + "Error: saml_id 'saml5' found in different id (11 instead of None)", + ] + assert row["data"] == { + "username": {"info": ImportState.DONE, "value": "new_user5"}, + "saml_id": {"info": ImportState.ERROR, "value": "saml5"}, + "default_password": {"info": ImportState.WARNING, "value": ""}, + "default_vote_weight": "5.345678", + } + + row = row = result["rows"][4] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password.", + "Error: saml_id 'new_saml6' found in different id (12 instead of None)", + ] + assert row["data"] == { + "username": {"info": ImportState.GENERATED, "value": "new_saml6"}, + "saml_id": {"info": ImportState.ERROR, "value": "new_saml6"}, + "default_password": {"info": ImportState.WARNING, "value": ""}, + "default_vote_weight": "6.345678", + } + + row = row = result["rows"][5] + assert row["state"] == ImportState.ERROR + assert row["messages"] == [ + "Error: row state expected to be 'done', but it is 'new'." + ] + assert row["data"]["username"] == { + "info": ImportState.ERROR, + "value": "JoanBaez7", + } + assert row["data"]["default_password"]["info"] == ImportState.GENERATED + assert row["data"]["default_password"]["value"] diff --git a/tests/system/action/user/test_account_json_upload.py b/tests/system/action/user/test_account_json_upload.py new file mode 100644 index 000000000..8ff8faad9 --- /dev/null +++ b/tests/system/action/user/test_account_json_upload.py @@ -0,0 +1,1001 @@ +from time import time + +from openslides_backend.action.mixins.import_mixins import ImportState +from openslides_backend.permissions.management_levels import OrganizationManagementLevel +from openslides_backend.shared.patterns import fqid_from_collection_and_id +from tests.system.action.base import BaseActionTestCase + + +class AccountJsonUpload(BaseActionTestCase): + def test_json_upload_simple(self) -> None: + start_time = int(time()) + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test", + "default_password": "secret", + "is_active": "1", + "is_physical_person": "F", + "default_number": "strange number", + "default_structure_level": "CEO", + "default_vote_weight": "1.12", + "wrong": 15, + } + ], + }, + ) + end_time = int(time()) + self.assert_status_code(response, 200) + assert response.json["results"][0][0]["rows"][0] == { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": {"value": "test", "info": ImportState.DONE}, + "default_password": {"value": "secret", "info": ImportState.DONE}, + "is_active": True, + "is_physical_person": False, + "default_number": "strange number", + "default_structure_level": "CEO", + "default_vote_weight": "1.120000", + }, + } + import_preview_id = response.json["results"][0][0].get("id") + import_preview_fqid = fqid_from_collection_and_id( + "import_preview", 
import_preview_id + ) + import_preview = self.assert_model_exists( + import_preview_fqid, {"name": "account"} + ) + assert start_time <= import_preview["created"] <= end_time + + def test_json_upload_empty_data(self) -> None: + response = self.request( + "account.json_upload", + {"data": []}, + ) + self.assert_status_code(response, 400) + assert "data.data must contain at least 1 items" in response.json["message"] + + def test_json_upload_parse_boolean_error(self) -> None: + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test", + "default_password": "secret", + "is_physical_person": "X50", + } + ], + }, + ) + self.assert_status_code(response, 400) + assert "Could not parse X50 expect boolean" in response.json["message"] + + def test_json_upload_without_names_error(self) -> None: + response = self.request( + "account.json_upload", + { + "data": [ + { + "default_number": "strange number", + } + ], + }, + ) + self.assert_status_code(response, 200) + assert response.json["results"][0][0]["rows"][0] == { + "state": ImportState.ERROR, + "messages": [ + "Cannot generate username. Missing one of first_name, last_name." + ], + "data": { + "username": {"value": "", "info": ImportState.GENERATED}, + "default_number": "strange number", + }, + } + + def test_json_upload_results(self) -> None: + response = self.request( + "account.json_upload", + {"data": [{"username": "test", "default_password": "secret"}]}, + ) + self.assert_status_code(response, 200) + self.assert_model_exists( + "import_preview/1", + { + "name": "account", + "state": ImportState.DONE, + "result": { + "rows": [ + { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": { + "value": "test", + "info": ImportState.DONE, + }, + "default_password": { + "value": "secret", + "info": ImportState.DONE, + }, + }, + } + ], + }, + }, + ) + result = response.json["results"][0][0] + assert result == { + "id": 1, + "headers": [ + {"property": "title", "type": "string"}, + {"property": "first_name", "type": "string"}, + {"property": "last_name", "type": "string"}, + {"property": "is_active", "type": "boolean"}, + {"property": "is_physical_person", "type": "boolean"}, + {"property": "default_password", "type": "string", "is_object": True}, + {"property": "email", "type": "string"}, + {"property": "username", "type": "string", "is_object": True}, + {"property": "gender", "type": "string"}, + {"property": "pronoun", "type": "string"}, + {"property": "default_number", "type": "string"}, + {"property": "default_structure_level", "type": "string"}, + {"property": "default_vote_weight", "type": "decimal"}, + {"property": "saml_id", "type": "string", "is_object": True}, + ], + "rows": [ + { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": {"value": "test", "info": ImportState.DONE}, + "default_password": { + "value": "secret", + "info": ImportState.DONE, + }, + }, + } + ], + "statistics": [ + {"name": "total", "value": 1}, + {"name": "created", "value": 1}, + {"name": "updated", "value": 0}, + {"name": "error", "value": 0}, + {"name": "warning", "value": 0}, + ], + "state": ImportState.DONE, + } + + def test_json_upload_duplicate_in_db(self) -> None: + self.set_models( + { + "user/3": { + "username": "test", + "saml_id": "12345", + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + }, + }, + ) + response = self.request( + "account.json_upload", + { + "data": [ + {"username": "test"}, + {"saml_id": "12345"}, + { + "first_name": "Max", + 
"last_name": "Mustermann", + "email": "max@mustermann.org", + }, + ] + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"] == [ + { + "state": ImportState.ERROR, + "messages": [ + "The account with id 3 was found multiple times by different search criteria." + ], + "data": { + "username": {"value": "test", "info": "done", "id": 3}, + "id": 3, + }, + }, + { + "state": ImportState.ERROR, + "messages": [ + "The account with id 3 was found multiple times by different search criteria.", + ], + "data": { + "saml_id": {"value": "12345", "info": ImportState.DONE}, + "id": 3, + "username": {"value": "test", "info": "done", "id": 3}, + }, + }, + { + "state": ImportState.ERROR, + "messages": [ + "The account with id 3 was found multiple times by different search criteria." + ], + "data": { + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + "id": 3, + "username": {"value": "test", "info": "done", "id": 3}, + }, + }, + ] + + def test_json_upload_multiple_duplicates(self) -> None: + self.set_models( + { + "user/3": { + "username": "test", + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + }, + "user/4": { + "username": "test2", + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + }, + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + } + ] + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"] == [ + { + "state": ImportState.ERROR, + "messages": ["Found more users with name and email"], + "data": { + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + "username": {"value": "MaxMustermann", "info": "generated"}, + }, + } + ] + + def test_json_upload_username_duplicate_in_data(self) -> None: + self.maxDiff = None + response = self.request( + "account.json_upload", + { + "data": [ + {"username": "test", "default_password": "secret"}, + {"username": "bla", "default_password": "secret"}, + {"username": "test", "default_password": "secret"}, + ], + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"][0]["messages"] == [ + "Found more users with the same username" + ] + assert result["rows"][0]["state"] == ImportState.ERROR + assert result["rows"][2]["messages"] == [ + "Found more users with the same username" + ] + assert result["rows"][2]["state"] == ImportState.ERROR + self.assert_model_exists( + "import_preview/1", + { + "id": 1, + "name": "account", + "state": ImportState.ERROR, + "result": { + "rows": [ + { + "state": ImportState.ERROR, + "messages": ["Found more users with the same username"], + "data": { + "username": { + "value": "test", + "info": ImportState.ERROR, + }, + "default_password": { + "value": "secret", + "info": ImportState.DONE, + }, + }, + }, + { + "state": ImportState.NEW, + "messages": [], + "data": { + "username": {"value": "bla", "info": ImportState.DONE}, + "default_password": { + "value": "secret", + "info": ImportState.DONE, + }, + }, + }, + { + "state": ImportState.ERROR, + "messages": ["Found more users with the same username"], + "data": { + "username": { + "value": "test", + "info": ImportState.ERROR, + }, + "default_password": { + "value": "secret", + "info": ImportState.DONE, + }, + }, + }, + ], + }, + }, + ) + + def 
test_json_upload_duplicated_one_saml_id(self) -> None: + self.set_models( + { + "user/3": { + "username": "test", + "saml_id": "12345", + "first_name": "Max", + "last_name": "Mustermann", + "email": "max@mustermann.org", + "default_password": "test1", + "password": "secret", + "can_change_own_password": True, + }, + }, + ) + + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test2", + "saml_id": "12345", + "first_name": "John", + "default_password": "test2", + } + ], + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"] == [ + { + "state": ImportState.ERROR, + "messages": [ + "saml_id 12345 must be unique.", + "Will remove password and default_password and forbid changing your OpenSlides password.", + ], + "data": { + "username": {"value": "test2", "info": ImportState.DONE}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + "first_name": "John", + "default_password": {"value": "", "info": ImportState.WARNING}, + }, + } + ] + assert result["statistics"] == [ + {"name": "total", "value": 1}, + {"name": "created", "value": 0}, + {"name": "updated", "value": 0}, + {"name": "error", "value": 1}, + {"name": "warning", "value": 1}, + ] + assert result["state"] == ImportState.ERROR + + def test_json_upload_duplicated_two_new_saml_ids1(self) -> None: + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test1", + "saml_id": "12345", + }, + { + "username": "test2", + "saml_id": "12345", + "default_password": "def_password", + }, + ], + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"] == [ + { + "state": ImportState.ERROR, + "messages": ["saml_id 12345 must be unique."], + "data": { + "username": {"value": "test1", "info": ImportState.DONE}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + }, + }, + { + "state": ImportState.ERROR, + "messages": [ + "saml_id 12345 must be unique.", + "Will remove password and default_password and forbid changing your OpenSlides password.", + ], + "data": { + "username": {"value": "test2", "info": ImportState.DONE}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + "default_password": {"info": ImportState.WARNING, "value": ""}, + }, + }, + ] + assert result["statistics"] == [ + {"name": "total", "value": 2}, + {"name": "created", "value": 0}, + {"name": "updated", "value": 0}, + {"name": "error", "value": 2}, + {"name": "warning", "value": 1}, + ] + assert result["state"] == ImportState.ERROR + + def test_json_upload_duplicated_two_new_saml_ids2(self) -> None: + response = self.request( + "account.json_upload", + { + "data": [ + { + "saml_id": "12345", + }, + { + "saml_id": "12345", + "default_password": "def_password", + }, + ], + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"] == [ + { + "state": ImportState.ERROR, + "messages": ["saml_id 12345 must be unique."], + "data": { + "username": {"value": "12345", "info": ImportState.GENERATED}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + }, + }, + { + "state": ImportState.ERROR, + "messages": [ + "saml_id 12345 must be unique.", + "Will remove password and default_password and forbid changing your OpenSlides password.", + ], + "data": { + "username": {"value": "123451", "info": ImportState.GENERATED}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + "default_password": {"info": ImportState.WARNING, 
"value": ""}, + }, + }, + ] + assert result["statistics"] == [ + {"name": "total", "value": 2}, + {"name": "created", "value": 0}, + {"name": "updated", "value": 0}, + {"name": "error", "value": 2}, + {"name": "warning", "value": 1}, + ] + assert result["state"] == ImportState.ERROR + + def test_json_upload_duplicated_two_found_saml_ids(self) -> None: + self.set_models( + { + "user/3": { + "username": "test1", + "saml_id": "1", + }, + "user/4": { + "username": "test2", + "saml_id": "2", + }, + }, + ) + + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test1", + "saml_id": "12345", + }, + { + "username": "test2", + "saml_id": "12345", + }, + ], + }, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["rows"] == [ + { + "state": ImportState.ERROR, + "messages": ["saml_id 12345 must be unique."], + "data": { + "username": {"value": "test1", "info": ImportState.DONE, "id": 3}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + "id": 3, + }, + }, + { + "state": ImportState.ERROR, + "messages": ["saml_id 12345 must be unique."], + "data": { + "username": {"value": "test2", "info": ImportState.DONE, "id": 4}, + "saml_id": {"value": "12345", "info": ImportState.ERROR}, + "id": 4, + }, + }, + ] + assert result["statistics"] == [ + {"name": "total", "value": 2}, + {"name": "created", "value": 0}, + {"name": "updated", "value": 0}, + {"name": "error", "value": 2}, + {"name": "warning", "value": 0}, + ] + assert result["state"] == ImportState.ERROR + + def test_json_upload_no_permission(self) -> None: + self.base_permission_test( + {}, "account.json_upload", {"data": [{"username": "test"}]} + ) + + def test_json_upload_permission(self) -> None: + self.base_permission_test( + {}, + "account.json_upload", + {"data": [{"username": "test"}]}, + OrganizationManagementLevel.CAN_MANAGE_USERS, + ) + + +class AccountJsonUploadForUseInImport(BaseActionTestCase): + def json_upload_saml_id_new(self) -> None: + self.set_models( + { + "user/34": { + "first_name": "Max", + "last_name": "Mustermann", + "email": "test@ntvtn.de", + "username": "test_saml_id", + } + } + ) + + response = self.request( + "account.json_upload", + { + "data": [ + { + "saml_id": "test_saml_id", + "default_password": "test2", + }, + { + "username": "test_saml_id1", + }, + { + "first_name": "test_sa", + "last_name": "ml_id2", + }, + ], + }, + ) + self.assert_status_code(response, 200) + import_preview = self.assert_model_exists("import_preview/1") + assert import_preview["name"] == "account" + assert import_preview["result"]["rows"][0]["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password." 
+ ] + assert import_preview["result"]["rows"][0]["state"] == ImportState.NEW + data0 = import_preview["result"]["rows"][0]["data"] + assert data0 == { + "saml_id": {"info": "new", "value": "test_saml_id"}, + "username": {"info": "generated", "value": "test_saml_id2"}, + "default_password": {"info": "warning", "value": ""}, + } + assert import_preview["result"]["rows"][1]["data"]["username"] == { + "info": "done", + "value": "test_saml_id1", + } + assert import_preview["result"]["rows"][2]["data"]["username"] == { + "info": "generated", + "value": "test_saml_id21", + } + assert import_preview["result"]["rows"][2]["data"]["last_name"] == "ml_id2" + assert import_preview["result"]["rows"][2]["data"]["first_name"] == "test_sa" + + def json_upload_set_saml_id_in_existing_account(self) -> None: + self.set_models( + { + "user/2": { + "username": "test", + "password": "secret", + "default_password": "secret", + "can_change_own_password": True, + "default_vote_weight": "2.300000", + } + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test", + "saml_id": "test_saml_id", + "default_password": "secret2", + } + ], + }, + ) + self.assert_status_code(response, 200) + import_preview = self.assert_model_exists("import_preview/1") + assert import_preview["state"] == ImportState.WARNING + assert import_preview["name"] == "account" + assert import_preview["result"]["rows"][0]["state"] == ImportState.DONE + assert import_preview["result"]["rows"][0]["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password." + ] + data = import_preview["result"]["rows"][0]["data"] + assert data == { + "id": 2, + "saml_id": {"info": "new", "value": "test_saml_id"}, + "username": {"info": "done", "value": "test", "id": 2}, + "default_password": {"info": "warning", "value": ""}, + } + + def json_upload_update_saml_id_in_existing_account(self) -> None: + self.set_models( + { + "user/2": { + "username": "test", + "default_vote_weight": "2.300000", + "saml_id": "old_one", + } + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test", + "saml_id": "new_one", + } + ], + }, + ) + self.assert_status_code(response, 200) + import_preview = self.assert_model_exists("import_preview/1") + assert import_preview["state"] == ImportState.DONE + assert import_preview["name"] == "account" + assert import_preview["result"]["rows"][0]["state"] == ImportState.DONE + assert import_preview["result"]["rows"][0]["messages"] == [] + data = import_preview["result"]["rows"][0]["data"] + assert data == { + "id": 2, + "saml_id": {"info": "done", "value": "new_one"}, + "username": {"info": "done", "value": "test", "id": 2}, + } + + def json_upload_names_and_email_find_username(self) -> None: + self.set_models( + { + "user/34": { + "first_name": "Max", + "last_name": "Mustermann", + "email": "test@ntvtn.de", + "username": "test", + } + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "first_name": "Max", + "last_name": "Mustermann", + "email": "test@ntvtn.de", + "default_password": "new default password", + } + ], + }, + ) + self.assert_status_code(response, 200) + row = response.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.DONE + assert row["data"] == { + "id": 34, + "first_name": "Max", + "last_name": "Mustermann", + "email": "test@ntvtn.de", + "default_password": {"value": "new default password", "info": "done"}, + "username": {"value": "test", "info": "done", "id": 
34}, + } + + def json_upload_names_and_email_generate_username(self) -> None: + self.set_models( + { + "user/34": { + "username": "MaxMustermann", + "first_name": "Testy", + "last_name": "Tester", + } + } + ) + + response = self.request( + "account.json_upload", + { + "data": [ + { + "first_name": "Max", + "last_name": "Mustermann", + } + ], + }, + ) + self.assert_status_code(response, 200) + entry = response.json["results"][0][0]["rows"][0] + assert entry["state"] == ImportState.NEW + assert entry["data"]["first_name"] == "Max" + assert entry["data"]["last_name"] == "Mustermann" + assert entry["data"]["username"] == { + "value": "MaxMustermann1", + "info": ImportState.GENERATED, + } + assert entry["data"]["default_password"]["info"] == ImportState.GENERATED + + def json_upload_generate_default_password(self) -> None: + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "test", + } + ], + }, + ) + self.assert_status_code(response, 200) + import_preview = self.assert_model_exists("import_preview/1") + assert import_preview["name"] == "account" + assert import_preview["result"]["rows"][0]["data"].get("default_password") + assert ( + import_preview["result"]["rows"][0]["data"]["default_password"]["info"] + == ImportState.GENERATED + ) + + def json_upload_username_10_saml_id_11(self) -> None: + self.set_models( + { + "user/10": { + "username": "user10", + }, + "user/11": { + "username": "user11", + "saml_id": "saml_id11", + }, + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "user10", + "saml_id": "saml_id10", + } + ], + }, + ) + self.assert_status_code(response, 200) + row = response.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.DONE + assert row["data"] == { + "id": 10, + "username": {"value": "user10", "info": "done", "id": 10}, + "saml_id": {"value": "saml_id10", "info": "new"}, + "default_password": {"value": "", "info": "warning"}, + } + + def json_upload_username_username_and_saml_id_found(self) -> None: + self.set_models( + { + "user/11": { + "username": "user11", + "saml_id": "saml_id11", + } + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "user11", + "saml_id": "saml_id11", + "default_vote_weight": "11.000000", + } + ], + }, + ) + self.assert_status_code(response, 200) + row = response.json["results"][0][0]["rows"][0] + assert row["state"] == ImportState.DONE + assert row["data"] == { + "id": 11, + "username": {"value": "user11", "info": ImportState.DONE, "id": 11}, + "saml_id": {"value": "saml_id11", "info": ImportState.DONE}, + "default_vote_weight": "11.000000", + } + + def json_upload_multiple_users(self) -> None: + self.set_models( + { + "user/2": { + "username": "user2", + "password": "secret", + "default_password": "secret", + "can_change_own_password": True, + "default_vote_weight": "2.300000", + }, + "user/3": { + "username": "user3", + "saml_id": "saml3", + "password": "secret", + "default_password": "secret", + "can_change_own_password": True, + "default_vote_weight": "3.300000", + }, + "user/4": { + "username": "user4", + "first_name": "Martin", + "last_name": "Luther King", + "email": "mlk@america.com", + "password": "secret", + "default_password": "secret", + "can_change_own_password": True, + "default_vote_weight": "4.300000", + }, + } + ) + response = self.request( + "account.json_upload", + { + "data": [ + { + "username": "user2", + "saml_id": "test_saml_id2", + "default_vote_weight": "2.345678", + }, + {"saml_id": 
"saml3", "default_vote_weight": "3.345678"}, + { + "first_name": "Martin", + "last_name": "Luther King", + "email": "mlk@america.com", + "default_vote_weight": "4.345678", + }, + { + "username": "new_user5", + "default_vote_weight": "5.345678", + "saml_id": "saml5", + }, + { + "saml_id": "new_saml6", + "default_vote_weight": "6.345678", + }, + { + "first_name": "Joan", + "last_name": "Baez7", + "default_vote_weight": "7.345678", + }, + ], + }, + ) + self.assert_status_code(response, 200) + import_preview = self.assert_model_exists("import_preview/1") + assert import_preview["state"] == ImportState.WARNING + assert import_preview["name"] == "account" + assert import_preview["result"]["rows"][0]["state"] == ImportState.DONE + assert import_preview["result"]["rows"][0]["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password." + ] + assert import_preview["result"]["rows"][0]["data"] == { + "id": 2, + "saml_id": {"info": "new", "value": "test_saml_id2"}, + "username": {"id": 2, "info": "done", "value": "user2"}, + "default_password": {"info": "warning", "value": ""}, + "default_vote_weight": "2.345678", + } + + assert import_preview["result"]["rows"][1]["state"] == ImportState.DONE + assert import_preview["result"]["rows"][1]["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password." + ] + assert import_preview["result"]["rows"][1]["data"] == { + "id": 3, + "saml_id": {"info": ImportState.DONE, "value": "saml3"}, + "username": {"id": 3, "info": ImportState.DONE, "value": "user3"}, + "default_password": {"info": ImportState.WARNING, "value": ""}, + "default_vote_weight": "3.345678", + } + + assert import_preview["result"]["rows"][2]["state"] == ImportState.DONE + assert import_preview["result"]["rows"][2]["messages"] == [] + assert import_preview["result"]["rows"][2]["data"] == { + "id": 4, + "email": "mlk@america.com", + "username": {"id": 4, "info": "done", "value": "user4"}, + "last_name": "Luther King", + "first_name": "Martin", + "default_vote_weight": "4.345678", + } + + assert import_preview["result"]["rows"][3]["state"] == ImportState.NEW + assert import_preview["result"]["rows"][3]["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password." + ] + assert import_preview["result"]["rows"][3]["data"] == { + "saml_id": {"info": "new", "value": "saml5"}, + "username": {"info": "done", "value": "new_user5"}, + "default_password": {"info": "warning", "value": ""}, + "default_vote_weight": "5.345678", + } + + assert import_preview["result"]["rows"][4]["state"] == ImportState.NEW + assert import_preview["result"]["rows"][4]["messages"] == [ + "Will remove password and default_password and forbid changing your OpenSlides password." 
+ ] + assert import_preview["result"]["rows"][4]["data"] == { + "saml_id": {"info": "new", "value": "new_saml6"}, + "username": {"info": "generated", "value": "new_saml6"}, + "default_password": {"info": "warning", "value": ""}, + "default_vote_weight": "6.345678", + } + + assert import_preview["result"]["rows"][5]["state"] == ImportState.NEW + assert import_preview["result"]["rows"][5]["messages"] == [] + default_password = import_preview["result"]["rows"][5]["data"].pop( + "default_password" + ) + assert default_password["info"] == ImportState.GENERATED + assert default_password["value"] + assert import_preview["result"]["rows"][5]["data"] == { + "username": {"info": "generated", "value": "JoanBaez7"}, + "last_name": "Baez7", + "first_name": "Joan", + "default_vote_weight": "7.345678", + } diff --git a/tests/system/action/user/test_create.py b/tests/system/action/user/test_create.py index 9dc12f3a1..4ebcbcef3 100644 --- a/tests/system/action/user/test_create.py +++ b/tests/system/action/user/test_create.py @@ -448,6 +448,44 @@ def test_create_saml_id_and_empty_values(self) -> None: }, ) + def test_create_saml_id_but_duplicate_error1(self) -> None: + self.set_models({"user/2": {"username": "x", "saml_id": "123saml"}}) + response = self.request( + "user.create", + { + "saml_id": "123saml", + "default_password": "", + "can_change_own_password": False, + }, + ) + self.assert_status_code(response, 400) + self.assertIn( + "A user with the saml_id 123saml already exists.", response.json["message"] + ) + + def test_create_saml_id_but_duplicate_error2(self) -> None: + self.set_models({"user/2": {"username": "123saml"}}) + response = self.request( + "user.create", + { + "saml_id": "123saml", + "default_password": "", + "can_change_own_password": False, + }, + ) + self.assert_status_code(response, 400) + self.assertIn( + "A user with the username 123saml already exists.", response.json["message"] + ) + + def test_create_empty_saml_id_and_empty_values(self) -> None: + response = self.request( + "user.create", + {"saml_id": " ", "username": "x"}, + ) + self.assert_status_code(response, 400) + self.assertIn("This saml_id is forbidden.", response.json["message"]) + def test_create_permission_nothing(self) -> None: self.permission_setup() response = self.request( @@ -1150,7 +1188,7 @@ def test_create_forbidden_username(self) -> None: }, ) self.assert_status_code(response, 400) - assert "This username is forbidden." 
in response.json["message"] + assert "Need username or first_name or last_name" in response.json["message"] def test_create_gender(self) -> None: self.set_models({"organization/1": {"genders": ["male", "female"]}}) diff --git a/tests/system/action/user/test_import.py b/tests/system/action/user/test_import.py deleted file mode 100644 index 24b700de1..000000000 --- a/tests/system/action/user/test_import.py +++ /dev/null @@ -1,399 +0,0 @@ -from typing import Any, Dict - -from openslides_backend.action.mixins.import_mixins import ImportState -from openslides_backend.permissions.management_levels import OrganizationManagementLevel -from tests.system.action.base import BaseActionTestCase - - -class UserJsonImport(BaseActionTestCase): - def setUp(self) -> None: - super().setUp() - self.set_models( - { - "action_worker/2": { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - }, - "first_name": "Testy", - }, - }, - ], - }, - }, - "action_worker/3": { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": { - "value": "TestyTester", - "info": ImportState.DONE, - }, - "first_name": "Testy", - "last_name": "Tester", - "email": "email@test.com", - "gender": "male", - }, - }, - ], - }, - }, - "action_worker/4": { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.ERROR, - "messages": ["test"], - "data": {"gender": "male"}, - }, - ], - }, - }, - "action_worker/5": {"result": None}, - } - ) - - def test_import_username_and_create(self) -> None: - response = self.request("user.import", {"id": 2, "import": True}) - self.assert_status_code(response, 200) - self.assert_model_exists( - "user/2", - {"username": "test", "first_name": "Testy"}, - ) - self.assert_model_not_exists("action_worker/2") - - def test_import_abort(self) -> None: - response = self.request("user.import", {"id": 2, "import": False}) - self.assert_status_code(response, 200) - self.assert_model_not_exists("action_worker/2") - self.assert_model_not_exists("user/2") - - def test_import_wrong_action_worker(self) -> None: - response = self.request("user.import", {"id": 5, "import": True}) - self.assert_status_code(response, 400) - assert ( - "Wrong id doesn't point on account import data." 
in response.json["message"] - ) - - def test_import_username_and_update(self) -> None: - self.set_models( - { - "user/1": { - "username": "test", - }, - "action_worker/6": { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.DONE, - "messages": [], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - "id": 1, - }, - "first_name": "Testy", - }, - }, - ], - }, - }, - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - self.assert_model_not_exists("user/2") - self.assert_model_exists("user/1", {"first_name": "Testy"}) - - def test_import_names_and_email_and_create(self) -> None: - response = self.request("user.import", {"id": 3, "import": True}) - self.assert_status_code(response, 200) - self.assert_model_exists( - "user/2", - { - "username": "TestyTester", - "first_name": "Testy", - "gender": "male", - "last_name": "Tester", - "email": "email@test.com", - }, - ) - - def get_action_worker_data( - self, number: int, state: ImportState, data: Dict[str, Any] - ) -> Dict[str, Any]: - return { - f"action_worker/{number}": { - "result": { - "import": "account", - "rows": [ - { - "state": state, - "messages": [], - "data": data, - }, - ], - }, - } - } - - def test_import_with_saml_id(self) -> None: - self.set_models( - self.get_action_worker_data( - 6, - ImportState.NEW, - {"saml_id": {"value": "testsaml", "info": ImportState.NEW}}, - ) - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - self.assert_model_exists( - "user/2", - { - "username": "testsaml", - "saml_id": "testsaml", - }, - ) - - def test_import_saml_id_error_new_and_saml_id_exists(self) -> None: - """Set saml_id 'testsaml' to user 1, add the import user 1 will be - found and the import should result in an error.""" - self.set_models( - { - "user/1": {"saml_id": "testsaml"}, - **self.get_action_worker_data( - 6, - ImportState.NEW, - {"saml_id": {"value": "testsaml", "info": ImportState.NEW}}, - ), - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == [ - "Error: want to create a new user, but saml_id already exists." 
- ] - - def test_import_done_with_saml_id(self) -> None: - self.set_models( - { - "user/2": {"username": "test", "saml_id": "testsaml"}, - **self.get_action_worker_data( - 6, - ImportState.DONE, - { - "saml_id": {"value": "testsaml", "info": ImportState.DONE}, - "id": 2, - "first_name": "Hugo", - }, - ), - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - self.assert_model_exists( - "user/2", - { - "username": "test", - "saml_id": "testsaml", - "first_name": "Hugo", - }, - ) - - def test_import_done_error_missing_user(self) -> None: - self.set_models( - { - **self.get_action_worker_data( - 6, - ImportState.DONE, - { - "saml_id": {"value": "testsaml", "info": ImportState.NEW}, - "first_name": "Hugo", - "id": 2, - }, - ), - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == ["Error: want to update, but missing user in db."] - - def test_import_error_at_state_new(self) -> None: - self.set_models( - { - "user/1": { - "username": "test", - }, - **self.get_action_worker_data( - 6, - ImportState.NEW, - { - "first_name": "Testy", - }, - ), - **self.get_action_worker_data( - 7, - ImportState.NEW, - { - "first_name": "Testy", - "username": { - "value": "test", - "info": ImportState.DONE, - }, - }, - ), - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == [ - "Error: Want to create user, but missing username in import data." - ] - - response = self.request("user.import", {"id": 7, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == [ - "Error: want to create a new user, but username already exists." - ] - - def test_import_error_state_done_missing_username(self) -> None: - self.set_models( - self.get_action_worker_data( - 6, - ImportState.DONE, - { - "first_name": "Testy", - }, - ) - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == [ - "Error: Want to update user, but missing username in import data." 
- ] - - def test_import_error_state_done_missing_user_in_db(self) -> None: - self.set_models( - self.get_action_worker_data( - 6, - ImportState.DONE, - { - "first_name": "Testy", - "username": {"value": "test", "info": ImportState.DONE}, - }, - ) - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == ["Error: want to update, but missing user in db."] - - def test_import_error_state_done_search_data_error(self) -> None: - self.set_models( - { - "action_worker/6": { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - } - }, - }, - { - "state": ImportState.DONE, - "messages": [], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - } - }, - }, - ], - }, - } - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][1] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == [ - "Error: want to update, but found search data are wrong." - ] - - def test_import_error_state_done_not_matching_ids(self) -> None: - self.set_models( - { - "user/8": {"username": "test"}, - **self.get_action_worker_data( - 6, - ImportState.DONE, - { - "first_name": "Testy", - "username": { - "value": "test", - "info": ImportState.DONE, - "id": 5, - }, - }, - ), - } - ) - response = self.request("user.import", {"id": 6, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == [ - "Error: want to update, but found search data doesn't match." 
- ] - - def test_import_error_state(self) -> None: - response = self.request("user.import", {"id": 4, "import": True}) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["state"] == ImportState.ERROR - assert entry["messages"] == ["test", "Error in import."] - self.assert_model_exists("action_worker/4") - - def test_import_no_permission(self) -> None: - self.base_permission_test({}, "user.import", {"id": 2, "import": True}) - - def test_import_permission(self) -> None: - self.base_permission_test( - {}, - "user.import", - {"id": 2, "import": True}, - OrganizationManagementLevel.CAN_MANAGE_USERS, - ) diff --git a/tests/system/action/user/test_json_upload.py b/tests/system/action/user/test_json_upload.py deleted file mode 100644 index b677c0708..000000000 --- a/tests/system/action/user/test_json_upload.py +++ /dev/null @@ -1,389 +0,0 @@ -from time import time - -from openslides_backend.action.mixins.import_mixins import ImportState -from openslides_backend.permissions.management_levels import OrganizationManagementLevel -from tests.system.action.base import BaseActionTestCase - - -class TopicJsonUpload(BaseActionTestCase): - def test_json_upload(self) -> None: - start_time = int(time()) - response = self.request( - "user.json_upload", - { - "data": [ - { - "username": "test", - "default_password": "secret", - "is_active": "1", - "is_physical_person": "F", - "wrong": 15, - } - ], - }, - ) - end_time = int(time()) - self.assert_status_code(response, 200) - assert response.json["results"][0][0]["rows"][0] == { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": {"value": "test", "info": ImportState.DONE}, - "default_password": {"value": "secret", "info": ImportState.DONE}, - "is_active": True, - "is_physical_person": False, - }, - } - worker = self.assert_model_exists("action_worker/1") - assert worker["result"]["import"] == "account" - assert start_time <= worker["created"] <= end_time - assert start_time <= worker["timestamp"] <= end_time - - def test_json_upload_empty_data(self) -> None: - response = self.request( - "user.json_upload", - {"data": []}, - ) - self.assert_status_code(response, 400) - assert "data.data must contain at least 1 items" in response.json["message"] - - def test_json_upload_parse_boolean_error(self) -> None: - response = self.request( - "user.json_upload", - { - "data": [ - { - "username": "test", - "default_password": "secret", - "is_physical_person": "X50", - } - ], - }, - ) - self.assert_status_code(response, 400) - assert "Could not parse X50 expect boolean" in response.json["message"] - - def test_json_upload_results(self) -> None: - response = self.request( - "user.json_upload", - {"data": [{"username": "test", "default_password": "secret"}]}, - ) - self.assert_status_code(response, 200) - self.assert_model_exists( - "action_worker/1", - { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - }, - "default_password": { - "value": "secret", - "info": ImportState.DONE, - }, - }, - } - ], - } - }, - ) - result = response.json["results"][0][0] - assert result == { - "id": 1, - "headers": [ - {"property": "title", "type": "string"}, - {"property": "first_name", "type": "string"}, - {"property": "last_name", "type": "string"}, - {"property": "is_active", "type": "boolean"}, - {"property": "is_physical_person", "type": "boolean"}, - {"property": "default_password", "type": 
"string"}, - {"property": "email", "type": "string"}, - {"property": "username", "type": "string"}, - {"property": "gender", "type": "string"}, - {"property": "pronoun", "type": "string"}, - {"property": "saml_id", "type": "string"}, - ], - "rows": [ - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": {"value": "test", "info": ImportState.DONE}, - "default_password": { - "value": "secret", - "info": ImportState.DONE, - }, - }, - } - ], - "statistics": [ - {"name": "total", "value": 1}, - {"name": "created", "value": 1}, - {"name": "updated", "value": 0}, - {"name": "error", "value": 0}, - {"name": "warning", "value": 0}, - ], - "state": ImportState.DONE, - } - - def test_json_upload_duplicate_in_db(self) -> None: - self.set_models( - { - "user/3": {"username": "test"}, - } - ) - response = self.request( - "user.json_upload", - {"data": [{"username": "test"}]}, - ) - self.assert_status_code(response, 200) - result = response.json["results"][0][0] - assert result["rows"] == [ - { - "state": ImportState.DONE, - "messages": [], - "data": { - "username": {"value": "test", "info": ImportState.DONE, "id": 3}, - }, - } - ] - - def test_json_upload_multiple_duplicates(self) -> None: - self.set_models( - { - "user/3": { - "username": "test", - "first_name": "Max", - "last_name": "Mustermann", - "email": "max@mustermann.org", - }, - "user/4": { - "username": "test2", - "first_name": "Max", - "last_name": "Mustermann", - "email": "max@mustermann.org", - }, - } - ) - response = self.request( - "user.json_upload", - { - "data": [ - { - "first_name": "Max", - "last_name": "Mustermann", - "email": "max@mustermann.org", - } - ] - }, - ) - self.assert_status_code(response, 200) - result = response.json["results"][0][0] - assert result["rows"] == [ - { - "state": ImportState.ERROR, - "messages": ["Found more than one user: test, test2"], - "data": { - "first_name": "Max", - "last_name": "Mustermann", - "email": "max@mustermann.org", - }, - } - ] - - def test_json_upload_duplicate_in_data(self) -> None: - self.maxDiff = None - response = self.request( - "user.json_upload", - { - "data": [ - {"username": "test", "default_password": "secret"}, - {"username": "bla", "default_password": "secret"}, - {"username": "test", "default_password": "secret"}, - ], - }, - ) - self.assert_status_code(response, 200) - result = response.json["results"][0][0] - assert result["rows"][2]["messages"] == ["Duplicate in csv list index: 2"] - assert result["rows"][2]["state"] == ImportState.ERROR - self.assert_model_exists( - "action_worker/1", - { - "result": { - "import": "account", - "rows": [ - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - }, - "default_password": { - "value": "secret", - "info": ImportState.DONE, - }, - }, - }, - { - "state": ImportState.NEW, - "messages": [], - "data": { - "username": {"value": "bla", "info": ImportState.DONE}, - "default_password": { - "value": "secret", - "info": ImportState.DONE, - }, - }, - }, - { - "state": ImportState.ERROR, - "messages": ["Duplicate in csv list index: 2"], - "data": { - "username": { - "value": "test", - "info": ImportState.DONE, - }, - "default_password": { - "value": "secret", - "info": ImportState.DONE, - }, - }, - }, - ], - } - }, - ) - - def test_json_upload_names_and_email_generate_username(self) -> None: - self.set_models( - { - "user/34": { - "username": "MaxMustermann", - "first_name": "Testy", - "last_name": "Tester", - } - } - ) - - response = self.request( 
- "user.json_upload", - { - "data": [ - { - "first_name": "Max", - "last_name": "Mustermann", - } - ], - }, - ) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["data"]["first_name"] == "Max" - assert entry["data"]["last_name"] == "Mustermann" - assert entry["data"]["username"] == { - "value": "MaxMustermann1", - "info": ImportState.GENERATED, - } - - def test_json_upload_names_and_email_set_username(self) -> None: - self.set_models( - { - "user/34": { - "first_name": "Max", - "last_name": "Mustermann", - "email": "test@ntvtn.de", - "username": "test", - } - } - ) - response = self.request( - "user.json_upload", - { - "data": [ - { - "first_name": "Max", - "last_name": "Mustermann", - "email": "test@ntvtn.de", - } - ], - }, - ) - self.assert_status_code(response, 200) - entry = response.json["results"][0][0]["rows"][0] - assert entry["data"]["first_name"] == "Max" - assert entry["data"]["last_name"] == "Mustermann" - assert entry["data"]["username"] == { - "value": "test", - "info": ImportState.DONE, - "id": 34, - } - - def test_json_upload_generate_default_password(self) -> None: - response = self.request( - "user.json_upload", - { - "data": [ - { - "username": "test", - } - ], - }, - ) - self.assert_status_code(response, 200) - worker = self.assert_model_exists("action_worker/1") - assert worker["result"]["import"] == "account" - assert worker["result"]["rows"][0]["data"].get("default_password") - assert ( - worker["result"]["rows"][0]["data"]["default_password"]["info"] - == ImportState.GENERATED - ) - - def test_json_upload_saml_id(self) -> None: - response = self.request( - "user.json_upload", - { - "data": [ - { - "saml_id": "test", - "password": "test2", - "default_password": "test3", - } - ], - }, - ) - self.assert_status_code(response, 200) - worker = self.assert_model_exists("action_worker/1") - assert worker["result"]["import"] == "account" - assert worker["result"]["rows"][0]["messages"] == [ - "Remove password or default_password from entry." 
- ] - data = worker["result"]["rows"][0]["data"] - assert data.get("saml_id") == { - "value": "test", - "info": "done", - } - assert "password" not in data - assert "default_password" not in data - assert data.get("can_change_own_password") is False - - def test_json_upload_no_permission(self) -> None: - self.base_permission_test( - {}, "user.json_upload", {"data": [{"username": "test"}]} - ) - - def test_json_upload_permission(self) -> None: - self.base_permission_test( - {}, - "user.json_upload", - {"data": [{"username": "test"}]}, - OrganizationManagementLevel.CAN_MANAGE_USERS, - ) diff --git a/tests/system/presenter/test_check_database_all.py b/tests/system/presenter/test_check_database_all.py index 4f5b316f8..15ef87a8a 100644 --- a/tests/system/presenter/test_check_database_all.py +++ b/tests/system/presenter/test_check_database_all.py @@ -259,6 +259,11 @@ def test_correct(self) -> None: "created": round(time() - 3), "timestamp": round(time()), }, + "import_preview/1": { + "name": "testcase", + "state": "done", + "created": round(time() - 3), + }, } ) status_code, data = self.request("check_database_all", {}) diff --git a/tests/system/presenter/test_export_meeting.py b/tests/system/presenter/test_export_meeting.py index 22e17663b..f221f9a90 100644 --- a/tests/system/presenter/test_export_meeting.py +++ b/tests/system/presenter/test_export_meeting.py @@ -73,7 +73,7 @@ def test_organization_tags_exclusion(self) -> None: assert "organization_tag" not in data assert data["meeting"]["1"].get("organization_tag_ids") is None - def test_action_worker_exclusion(self) -> None: + def test_action_worker_import_preview_exclusion(self) -> None: self.set_models( { "meeting/1": {"name": "name_foo"}, @@ -84,11 +84,18 @@ def test_action_worker_exclusion(self) -> None: "created": round(time() - 3), "timestamp": round(time()), }, + "import_preview/1": { + "id": 1, + "name": "testcase", + "state": "done", + "created": round(time()), + }, } ) status_code, data = self.request("export_meeting", {"meeting_id": 1}) assert status_code == 200 assert "action_worker" not in data + assert "import_preview" not in data def test_add_users(self) -> None: self.set_models( From 39bdb82f0997e1a27a59a999a1212e939f6e128b Mon Sep 17 00:00:00 2001 From: luisa-beerboom <101706784+luisa-beerboom@users.noreply.github.com> Date: Fri, 29 Sep 2023 11:36:37 +0200 Subject: [PATCH 3/5] Make user.forget_password two-email test safer (#1918) --- tests/system/action/user/test_forget_password.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/tests/system/action/user/test_forget_password.py b/tests/system/action/user/test_forget_password.py index 611607458..65f6239c3 100644 --- a/tests/system/action/user/test_forget_password.py +++ b/tests/system/action/user/test_forget_password.py @@ -121,6 +121,9 @@ def test_forget_password_two_users_with_email(self) -> None: "For completeness your username: test2" in handler.emails[1]["data"] or "For completeness your username: admin" in handler.emails[1]["data"] ) + assert ("test2" in handler.emails[0]["data"]) != ( + "test2" in handler.emails[1]["data"] + ) assert "https://openslides.example.com" in handler.emails[1]["data"] def test_forget_password_no_user_found(self) -> None: From 732341a1c8f0f52170c4d8f878638a2ab497cb5b Mon Sep 17 00:00:00 2001 From: reiterl Date: Fri, 29 Sep 2023 13:37:09 +0200 Subject: [PATCH 4/5] Topic json upload rework (#1914) Co-authored-by: Ralf Peschke --- .../actions/agenda_item/agenda_creation.py | 31 +-- .../action/actions/topic/import_.py | 116 ++++++++-- 
.../action/actions/topic/json_upload.py | 64 ++++-- .../action/mixins/import_mixins.py | 8 +- tests/system/action/topic/test_import.py | 206 ++++++++++++++---- tests/system/action/topic/test_json_upload.py | 173 +++++++++------ .../system/action/user/test_account_import.py | 10 +- 7 files changed, 441 insertions(+), 167 deletions(-) diff --git a/openslides_backend/action/actions/agenda_item/agenda_creation.py b/openslides_backend/action/actions/agenda_item/agenda_creation.py index cae52ca7b..f88227804 100644 --- a/openslides_backend/action/actions/agenda_item/agenda_creation.py +++ b/openslides_backend/action/actions/agenda_item/agenda_creation.py @@ -82,18 +82,21 @@ def check_dependant_action_execution_agenda_item( def get_dependent_action_data_agenda_item( self, instance: Dict[str, Any], CreateActionClass: Type[Action] ) -> List[Dict[str, Any]]: - agenda_item_action_data = { - "content_object_id": fqid_from_collection_and_id( - self.model.collection, instance["id"] - ), - } - for extra_field in agenda_creation_properties.keys(): - if extra_field == f"{AGENDA_PREFIX}create": - # This field should not be provided to the AgendaItemCreate action. - continue - prefix_len = len(AGENDA_PREFIX) - extra_field_without_prefix = extra_field[prefix_len:] - value = instance.pop(extra_field, None) - if value is not None: - agenda_item_action_data[extra_field_without_prefix] = value + agenda_item_action_data = self.remove_agenda_prefix_from_fieldnames(instance) + agenda_item_action_data["content_object_id"] = fqid_from_collection_and_id( + self.model.collection, instance["id"] + ) return [agenda_item_action_data] + + @staticmethod + def remove_agenda_prefix_from_fieldnames( + instance: Dict[str, Any] + ) -> Dict[str, Any]: + prefix_len = len(AGENDA_PREFIX) + extra_field = f"{AGENDA_PREFIX}create" # This field should not be provided to the AgendaItemCreate action. + agenda_item = { + field[prefix_len:]: value + for field in agenda_creation_properties.keys() + if field != extra_field and (value := instance.pop(field, None)) is not None + } + return agenda_item diff --git a/openslides_backend/action/actions/topic/import_.py b/openslides_backend/action/actions/topic/import_.py index 3f10f40fe..bcec58bfd 100644 --- a/openslides_backend/action/actions/topic/import_.py +++ b/openslides_backend/action/actions/topic/import_.py @@ -1,19 +1,28 @@ -from typing import Any, Dict +from typing import Any, Dict, List, cast from ....models.models import ImportPreview from ....permissions.permissions import Permissions from ....shared.exceptions import ActionException +from ....shared.filters import FilterOperator from ....shared.patterns import fqid_from_collection_and_id from ....shared.schema import required_id_schema -from ...mixins.import_mixins import ImportMixin, ImportState +from ...mixins.import_mixins import ( + ImportMixin, + ImportRow, + ImportState, + Lookup, + ResultType, +) from ...util.default_schema import DefaultSchema from ...util.register import register_action +from ..agenda_item.agenda_creation import CreateActionWithAgendaItemMixin +from ..agenda_item.update import AgendaItemUpdate from .create import TopicCreate -from .mixins import DuplicateCheckMixin +from .update import TopicUpdate @register_action("topic.import") -class TopicImport(DuplicateCheckMixin, ImportMixin): +class TopicImport(ImportMixin): """ Action to import a result from the import_preview. 
""" @@ -27,25 +36,84 @@ class TopicImport(DuplicateCheckMixin, ImportMixin): ) permission = Permissions.AgendaItem.CAN_MANAGE import_name = "topic" + agenda_item_fields = ["agenda_comment", "agenda_duration", "agenda_type"] def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: - instance = super().update_instance(instance) - - # handle abort in on_success if not instance["import"]: return {} + instance = super().update_instance(instance) + meeting_id = self.get_meeting_id(instance) - self.init_duplicate_set(meeting_id) - action_payload = [ - entry["data"] - for entry in self.result.get("rows", []) - if (entry["state"] in (ImportState.NEW, ImportState.WARNING)) - and not self.check_for_duplicate(entry["data"]["title"]) - ] - self.execute_other_action(TopicCreate, action_payload) - self.error = False - return instance + self.setup_lookups(self.result.get("rows", []), meeting_id) + + self.rows = [self.validate_entry(row) for row in self.result["rows"]] + + if self.import_state != ImportState.ERROR: + create_action_payload: List[Dict[str, Any]] = [] + update_action_payload: List[Dict[str, Any]] = [] + update_agenda_item_payload: List[Dict[str, Any]] = [] + self.flatten_object_fields(["title"]) + for row in self.rows: + entry = row["data"] + if row["state"] == ImportState.NEW: + create_action_payload.append(entry) + else: + agenda_item = CreateActionWithAgendaItemMixin.remove_agenda_prefix_from_fieldnames( + entry + ) + if agenda_item: + agenda_item["id"] = self.topic_lookup.get_field_by_name( + entry["title"], "agenda_item_id" + ) + update_agenda_item_payload.append(agenda_item) + entry.pop("meeting_id", None) + update_action_payload.append(entry) + if create_action_payload: + self.execute_other_action(TopicCreate, create_action_payload) + if update_action_payload: + self.execute_other_action(TopicUpdate, update_action_payload) + if update_agenda_item_payload: + self.execute_other_action(AgendaItemUpdate, update_agenda_item_payload) + + return {} + + def validate_entry(self, row: ImportRow) -> ImportRow: + entry = row["data"] + title = cast(str, self.get_value_from_union_str_object(entry.get("title"))) + check_result = self.topic_lookup.check_duplicate(title) + id_ = cast(int, self.topic_lookup.get_field_by_name(title, "id")) + + if check_result == ResultType.FOUND_ID and id_ != 0: + if "id" not in entry: + raise ActionException( + f"Invalid JsonUpload data: A data row with state '{ImportState.DONE}' must have an 'id'" + ) + elif entry["id"] != id_: + row["state"] = ImportState.ERROR + entry["title"]["info"] = ImportState.ERROR + row["messages"].append( + f"Error: topic '{title}' found in different id ({id_} instead of {entry['id']})" + ) + elif check_result == ResultType.FOUND_MORE_IDS: + row["state"] = ImportState.ERROR + entry["title"]["info"] = ImportState.ERROR + row["messages"].append(f"Error: topic '{title}' is duplicated in import.") + elif check_result == ResultType.NOT_FOUND_ANYMORE: + row["messages"].append( + f"Error: topic {entry['title']['id']} not found anymore for updating topic '{title}'." + ) + row["state"] = ImportState.ERROR + elif check_result == ResultType.NOT_FOUND: + pass # cannot create an error ! 
+ + if row["state"] == ImportState.ERROR and self.import_state == ImportState.DONE: + self.import_state = ImportState.ERROR + return { + "state": row["state"], + "data": row["data"], + "messages": row.get("messages", []), + } def get_meeting_id(self, instance: Dict[str, Any]) -> int: store_id = instance["id"] @@ -57,3 +125,17 @@ def get_meeting_id(self, instance: Dict[str, Any]) -> int: if worker.get("name") == TopicImport.import_name: return next(iter(worker.get("result", {})["rows"]))["data"]["meeting_id"] raise ActionException("Import data cannot be found.") + + def setup_lookups(self, data: List[Dict[str, Any]], meeting_id: int) -> None: + self.topic_lookup = Lookup( + self.datastore, + "topic", + [ + (title, entry["data"]) + for entry in data + if (title := entry["data"].get("title", {}).get("value")) + ], + field="title", + mapped_fields=["agenda_item_id"], + global_and_filter=FilterOperator("meeting_id", "=", meeting_id), + ) diff --git a/openslides_backend/action/actions/topic/json_upload.py b/openslides_backend/action/actions/topic/json_upload.py index 3c348074b..9446ab28b 100644 --- a/openslides_backend/action/actions/topic/json_upload.py +++ b/openslides_backend/action/actions/topic/json_upload.py @@ -1,20 +1,17 @@ -from typing import Any, Dict - -import fastjsonschema +from typing import Any, Dict, List from ....models.models import Topic from ....permissions.permissions import Permissions +from ....shared.filters import FilterOperator from ....shared.schema import required_id_schema -from ...mixins.import_mixins import ImportState, JsonUploadMixin +from ...mixins.import_mixins import ImportState, JsonUploadMixin, Lookup, ResultType from ...util.default_schema import DefaultSchema from ...util.register import register_action from ..agenda_item.agenda_creation import agenda_creation_properties -from .create import TopicCreate -from .mixins import DuplicateCheckMixin @register_action("topic.json_upload") -class TopicJsonUpload(DuplicateCheckMixin, JsonUploadMixin): +class TopicJsonUpload(JsonUploadMixin): """ Action to allow to upload a json. It is used as first step of an import. 
""" @@ -46,14 +43,16 @@ class TopicJsonUpload(DuplicateCheckMixin, JsonUploadMixin): "meeting_id": required_id_schema, } ) - permission = Permissions.AgendaItem.CAN_MANAGE headers = [ - {"property": "title", "type": "string"}, + {"property": "title", "type": "string", "is_object": True}, {"property": "text", "type": "string"}, {"property": "agenda_comment", "type": "string"}, {"property": "agenda_type", "type": "string"}, {"property": "agenda_duration", "type": "integer"}, ] + permission = Permissions.AgendaItem.CAN_MANAGE + row_state: ImportState + topic_lookup: Lookup def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: data = instance.pop("data") @@ -62,15 +61,18 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: for entry in data: entry["meeting_id"] = instance["meeting_id"] - # validate and check for duplicates - self.init_duplicate_set(instance["meeting_id"]) + # setup and validate entries + self.setup_lookups(data, instance["meeting_id"]) self.rows = [self.validate_entry(entry) for entry in data] # generate statistics itemCount = len(self.rows) state_to_count = {state: 0 for state in ImportState} - for entry in self.rows: - state_to_count[entry["state"]] += 1 + for row in self.rows: + state_to_count[row["state"]] += 1 + state_to_count[ImportState.WARNING] += self.count_warnings_in_payload( + row.get("data", {}).values() + ) self.statistics = [ {"name": "total", "value": itemCount}, @@ -80,6 +82,7 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: {"name": "warning", "value": state_to_count[ImportState.WARNING]}, ] + # finalize self.set_state( state_to_count[ImportState.ERROR], state_to_count[ImportState.WARNING] ) @@ -88,14 +91,31 @@ def update_instance(self, instance: Dict[str, Any]) -> Dict[str, Any]: def validate_entry(self, entry: Dict[str, Any]) -> Dict[str, Any]: state, messages = None, [] - try: - TopicCreate.schema_validator(entry) - if self.check_for_duplicate(entry["title"]): - state = ImportState.WARNING - messages.append("Duplicate") - else: - state = ImportState.NEW - except fastjsonschema.JsonSchemaException as exception: + check_result = self.topic_lookup.check_duplicate(title := entry["title"]) + id_ = self.topic_lookup.get_field_by_name(title, "id") + if check_result == ResultType.FOUND_ID: + state = ImportState.DONE + messages.append("Existing topic will be updated.") + entry["id"] = id_ + entry["title"] = { + "value": title, + "info": ImportState.WARNING, + "id": id_, + } + elif check_result == ResultType.NOT_FOUND: + state = ImportState.NEW + entry["title"] = {"value": title, "info": ImportState.NEW} + elif check_result == ResultType.FOUND_MORE_IDS: state = ImportState.ERROR - messages.append(exception.message) + messages.append(f"Duplicated topic name '{title}'.") + entry["title"] = {"value": title, "info": ImportState.ERROR} return {"state": state, "messages": messages, "data": entry} + + def setup_lookups(self, data: List[Dict[str, Any]], meeting_id: int) -> None: + self.topic_lookup = Lookup( + self.datastore, + "topic", + [(title, entry) for entry in data if (title := entry.get("title"))], + field="title", + global_and_filter=FilterOperator("meeting_id", "=", meeting_id), + ) diff --git a/openslides_backend/action/mixins/import_mixins.py b/openslides_backend/action/mixins/import_mixins.py index a5fe8b332..9eb75005d 100644 --- a/openslides_backend/action/mixins/import_mixins.py +++ b/openslides_backend/action/mixins/import_mixins.py @@ -56,6 +56,7 @@ def __init__( name_entries: 
List[Tuple[SearchFieldType, Dict[str, Any]]], field: SearchFieldType = "name", mapped_fields: Optional[List[str]] = None, + global_and_filter: Optional[Filter] = None, ) -> None: if mapped_fields is None: mapped_fields = [] @@ -86,9 +87,14 @@ def __init__( for name_tpl, _ in name_entries ] if or_filters: + if global_and_filter: + filter_: Filter = And(global_and_filter, Or(*or_filters)) + else: + filter_ = Or(*or_filters) + for entry in datastore.filter( collection, - Or(*or_filters), + filter_, mapped_fields, lock_result=False, ).values(): diff --git a/tests/system/action/topic/test_import.py b/tests/system/action/topic/test_import.py index d547d8438..83d4692dc 100644 --- a/tests/system/action/topic/test_import.py +++ b/tests/system/action/topic/test_import.py @@ -1,5 +1,6 @@ from openslides_backend.action.mixins.import_mixins import ImportState from tests.system.action.base import BaseActionTestCase +from tests.system.action.topic.test_json_upload import TopicJsonUploadForUseInImport class TopicJsonImport(BaseActionTestCase): @@ -16,7 +17,10 @@ def setUp(self) -> None: { "state": ImportState.NEW, "messages": [], - "data": {"title": "test", "meeting_id": 22}, + "data": { + "title": {"value": "test", "info": ImportState.NEW}, + "meeting_id": 22, + }, }, ], }, @@ -31,51 +35,117 @@ def test_import_correct(self) -> None: self.assert_model_exists("meeting/22", {"topic_ids": [1]}) self.assert_model_not_exists("import_preview/2") - def test_import_abort(self) -> None: + def test_import_abort_with_import_false(self) -> None: response = self.request("topic.import", {"id": 2, "import": False}) self.assert_status_code(response, 200) self.assert_model_not_exists("topic/1") self.assert_model_not_exists("import_preview/2") - def test_import_duplicate_in_db(self) -> None: + def test_import_abort_import_with_error(self) -> None: + self.set_models( + { + "import_preview/2": { + "state": ImportState.ERROR, + }, + } + ) + response = self.request("topic.import", {"id": 2, "import": True}) + self.assert_status_code(response, 400) + self.assertIn( + "Error in import. 
Data will not be imported.", response.json["message"] + ) + self.assert_model_not_exists("topic/1") + self.assert_model_exists("import_preview/2") + + def test_import_found_id_and_text_field(self) -> None: self.set_models( { "topic/1": {"title": "test", "meeting_id": 22}, "meeting/22": {"topic_ids": [1]}, + "import_preview/2": { + "state": ImportState.DONE, + "result": { + "import": "topic", + "rows": [ + { + "state": ImportState.DONE, + "messages": ["Existing topic will be updated."], + "data": { + "title": { + "value": "test", + "info": ImportState.WARNING, + "id": 1, + }, + "id": 1, + "meeting_id": 22, + "text": "this should be updated", + }, + }, + ], + }, + }, } ) response = self.request("topic.import", {"id": 2, "import": True}) self.assert_status_code(response, 200) - self.assert_model_not_exists("topic/2") + self.assert_model_exists( + "topic/1", + {"title": "test", "meeting_id": 22, "text": "this should be updated"}, + ) - def test_import_duplicate_and_topic_deleted_so_imported(self) -> None: + def test_import_found_id_and_agenda_fields(self) -> None: self.set_models( { - "topic/1": {"title": "test", "meeting_id": 22}, + "topic/1": {"title": "test", "meeting_id": 22, "agenda_item_id": 7}, + "agenda_item/7": { + "content_object_id": "topic/1", + "meeting_id": 22, + "duration": 20, + }, "meeting/22": {"topic_ids": [1]}, + "import_preview/2": { + "state": ImportState.DONE, + "result": { + "import": "topic", + "rows": [ + { + "state": ImportState.DONE, + "messages": ["Existing topic will be updated."], + "data": { + "title": { + "value": "test", + "info": ImportState.WARNING, + "id": 1, + }, + "id": 1, + "meeting_id": 22, + "agenda_comment": "test", + "agenda_type": "hidden", + "agenda_duration": 40, + }, + }, + ], + }, + }, } ) - response = self.request( - "topic.json_upload", + response = self.request("topic.import", {"id": 2, "import": True}) + self.assert_status_code(response, 200) + self.assert_model_exists( + "topic/1", + {"title": "test", "meeting_id": 22, "agenda_item_id": 7}, + ) + self.assert_model_exists( + "agenda_item/7", { - "meeting_id": 22, - "data": [ - { - "title": "test", - } - ], + "content_object_id": "topic/1", + "comment": "test", + "type": "hidden", + "duration": 40, }, ) - self.assert_status_code(response, 200) - self.assert_model_exists("import_preview/3") - response = self.request("topic.delete", {"id": 1}) - self.assert_status_code(response, 200) - self.assert_model_deleted("topic/1") - response = self.request("topic.import", {"id": 3, "import": True}) - self.assert_status_code(response, 200) - self.assert_model_exists("topic/2", {"title": "test"}) - def test_import_duplicate_so_not_imported(self) -> None: + def test_import_duplicate_and_topic_deleted(self) -> None: self.set_models( { "topic/1": {"title": "test", "meeting_id": 22}, @@ -95,28 +165,84 @@ def test_import_duplicate_so_not_imported(self) -> None: ) self.assert_status_code(response, 200) self.assert_model_exists("import_preview/3") + response = self.request("topic.delete", {"id": 1}) + self.assert_status_code(response, 200) + self.assert_model_deleted("topic/1") response = self.request("topic.import", {"id": 3, "import": True}) self.assert_status_code(response, 200) - self.assert_model_not_exists("topic/2") - - def test_import_with_upload(self) -> None: - response = self.request( - "topic.json_upload", - { + result = response.json["results"][0][0] + assert result["rows"][0] == { + "state": ImportState.ERROR, + "messages": [ + "Existing topic will be updated.", + "Error: topic 1 not found anymore 
for updating topic 'test'.", + ], + "data": { + "id": 1, + "title": {"id": 1, "info": "error", "value": "test"}, "meeting_id": 22, - "data": [ - { - "title": "another title", - } - ], }, - ) + } + self.assert_model_not_exists("topic/2") + + +class TopicImportWithIncludedJsonUpload(TopicJsonUploadForUseInImport): + def test_import_agenda_data(self) -> None: + self.json_upload_agenda_data() + response = self.request("topic.import", {"id": 1, "import": True}) self.assert_status_code(response, 200) - self.assert_model_exists("import_preview/3") - response = self.request("topic.import", {"id": 3, "import": True}) + self.assert_model_exists("topic/1", {"title": "test"}) + self.assert_model_exists( + "agenda_item/1", + {"comment": "testtesttest", "type": "hidden", "duration": 50}, + ) + + def test_import_duplicate_in_db(self) -> None: + self.json_upload_duplicate_in_db() + response = self.request("topic.import", {"id": 1, "import": True}) self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.DONE self.assert_model_exists( - "topic/1", {"title": "another title", "meeting_id": 22} + "topic/3", {"title": "test", "text": "new one", "meeting_id": 22} ) - self.assert_model_exists("meeting/22", {"topic_ids": [1]}) - self.assert_model_not_exists("import_preview/3") + self.assert_model_not_exists("topic/4") + + def test_import_done_switched_to_new(self) -> None: + self.json_upload_duplicate_in_db() + self.request("topic.delete", {"id": 3}) + self.assert_model_deleted("topic/3") + response = self.request("topic.import", {"id": 1, "import": True}) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.ERROR + assert result["rows"][0]["messages"] == [ + "Existing topic will be updated.", + "Error: topic 3 not found anymore for updating topic 'test'.", + ] + + def test_import_topic_switched_id(self) -> None: + self.json_upload_duplicate_in_db() + self.request("topic.delete", {"id": 3}) + self.assert_model_deleted("topic/3", {"title": "test", "meeting_id": 22}) + self.create_model("topic/4", {"title": "test", "meeting_id": 22}) + response = self.request("topic.import", {"id": 1, "import": True}) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.ERROR + assert result["rows"][0]["messages"] == [ + "Existing topic will be updated.", + "Error: topic 'test' found in different id (4 instead of 3)", + ] + + def test_import_topic_duplicate_id(self) -> None: + self.json_upload_duplicate_in_db() + self.create_model("topic/4", {"title": "test", "meeting_id": 22}) + response = self.request("topic.import", {"id": 1, "import": True}) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.ERROR + assert result["rows"][0]["messages"] == [ + "Existing topic will be updated.", + "Error: topic 'test' is duplicated in import.", + ] diff --git a/tests/system/action/topic/test_json_upload.py b/tests/system/action/topic/test_json_upload.py index a98eaecaa..3fc2917c3 100644 --- a/tests/system/action/topic/test_json_upload.py +++ b/tests/system/action/topic/test_json_upload.py @@ -14,41 +14,6 @@ def setUp(self) -> None: } ) - def test_json_upload_agenda_data(self) -> None: - start_time = int(time()) - response = self.request( - "topic.json_upload", - { - "meeting_id": 22, - "data": [ - { - "title": "test", - "agenda_comment": "testtesttest", - "agenda_type": 
"hidden", - "agenda_duration": "50", - "wrong": 15, - } - ], - }, - ) - end_time = int(time()) - self.assert_status_code(response, 200) - assert response.json["results"][0][0]["rows"][0] == { - "state": ImportState.NEW, - "messages": [], - "data": { - "title": "test", - "meeting_id": 22, - "agenda_comment": "testtesttest", - "agenda_type": "hidden", - "agenda_duration": 50, - }, - } - worker = self.assert_model_exists( - "import_preview/1", {"state": ImportState.DONE} - ) - assert start_time <= worker.get("created", -1) <= end_time - def test_json_upload_empty_data(self) -> None: response = self.request( "topic.json_upload", @@ -86,12 +51,16 @@ def test_json_upload_results(self) -> None: "import_preview/1", { "name": "topic", + "state": ImportState.DONE, "result": { "rows": [ { "state": ImportState.NEW, "messages": [], - "data": {"title": "test", "meeting_id": 22}, + "data": { + "title": {"value": "test", "info": ImportState.NEW}, + "meeting_id": 22, + }, } ], }, @@ -101,7 +70,7 @@ def test_json_upload_results(self) -> None: assert result == { "id": 1, "headers": [ - {"property": "title", "type": "string"}, + {"property": "title", "type": "string", "is_object": True}, {"property": "text", "type": "string"}, {"property": "agenda_comment", "type": "string"}, {"property": "agenda_type", "type": "string"}, @@ -111,7 +80,10 @@ def test_json_upload_results(self) -> None: { "state": ImportState.NEW, "messages": [], - "data": {"title": "test", "meeting_id": 22}, + "data": { + "title": {"value": "test", "info": ImportState.NEW}, + "meeting_id": 22, + }, } ], "statistics": [ @@ -124,27 +96,6 @@ def test_json_upload_results(self) -> None: "state": ImportState.DONE, } - def test_json_upload_duplicate_in_db(self) -> None: - self.set_models( - { - "topic/3": {"title": "test", "meeting_id": 22}, - "meeting/22": {"topic_ids": [3]}, - } - ) - response = self.request( - "topic.json_upload", - {"meeting_id": 22, "data": [{"title": "test"}]}, - ) - self.assert_status_code(response, 200) - result = response.json["results"][0][0] - assert result["rows"] == [ - { - "state": ImportState.WARNING, - "messages": ["Duplicate"], - "data": {"title": "test", "meeting_id": 22}, - } - ] - def test_json_upload_duplicate_in_data(self) -> None: response = self.request( "topic.json_upload", @@ -155,28 +106,40 @@ def test_json_upload_duplicate_in_data(self) -> None: ) self.assert_status_code(response, 200) result = response.json["results"][0][0] - assert result["rows"][2]["messages"] == ["Duplicate"] - assert result["rows"][2]["state"] == ImportState.WARNING + assert result["state"] == ImportState.ERROR + assert result["rows"][0]["state"] == ImportState.ERROR + assert result["rows"][1]["state"] == ImportState.NEW + assert result["rows"][2]["state"] == ImportState.ERROR self.assert_model_exists( "import_preview/1", { "name": "topic", + "state": "error", "result": { "rows": [ { - "state": ImportState.NEW, - "messages": [], - "data": {"title": "test", "meeting_id": 22}, + "state": ImportState.ERROR, + "messages": ["Duplicated topic name 'test'."], + "data": { + "title": {"value": "test", "info": ImportState.ERROR}, + "meeting_id": 22, + }, }, { "state": ImportState.NEW, "messages": [], - "data": {"title": "bla", "meeting_id": 22}, + "data": { + "title": {"value": "bla", "info": ImportState.NEW}, + "meeting_id": 22, + }, }, { - "state": ImportState.WARNING, - "messages": ["Duplicate"], - "data": {"title": "test", "meeting_id": 22}, + "state": ImportState.ERROR, + "messages": ["Duplicated topic name 'test'."], + "data": { + 
"title": {"value": "test", "info": ImportState.ERROR}, + "meeting_id": 22, + }, }, ], }, @@ -195,3 +158,77 @@ def test_json_uplad_permission(self) -> None: {"data": [{"title": "test"}], "meeting_id": 1}, Permissions.AgendaItem.CAN_MANAGE, ) + + +class TopicJsonUploadForUseInImport(BaseActionTestCase): + def setUp(self) -> None: + super().setUp() + self.set_models( + { + "meeting/22": {"name": "test", "is_active_in_organization_id": 1}, + } + ) + + def json_upload_agenda_data(self) -> None: + start_time = int(time()) + response = self.request( + "topic.json_upload", + { + "meeting_id": 22, + "data": [ + { + "title": "test", + "agenda_comment": "testtesttest", + "agenda_type": "hidden", + "agenda_duration": "50", + "wrong": 15, + } + ], + }, + ) + end_time = int(time()) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.DONE + assert result["rows"][0] == { + "state": ImportState.NEW, + "messages": [], + "data": { + "title": {"value": "test", "info": ImportState.NEW}, + "meeting_id": 22, + "agenda_comment": "testtesttest", + "agenda_type": "hidden", + "agenda_duration": 50, + }, + } + worker = self.assert_model_exists( + "import_preview/1", {"state": ImportState.DONE} + ) + assert start_time <= worker.get("created", -1) <= end_time + + def json_upload_duplicate_in_db(self) -> None: + self.set_models( + { + "topic/3": {"title": "test", "text": "old one", "meeting_id": 22}, + "meeting/22": {"topic_ids": [3]}, + } + ) + response = self.request( + "topic.json_upload", + {"meeting_id": 22, "data": [{"title": "test", "text": "new one"}]}, + ) + self.assert_status_code(response, 200) + result = response.json["results"][0][0] + assert result["state"] == ImportState.WARNING + assert result["rows"] == [ + { + "state": ImportState.DONE, + "messages": ["Existing topic will be updated."], + "data": { + "id": 3, + "title": {"value": "test", "info": ImportState.WARNING, "id": 3}, + "text": "new one", + "meeting_id": 22, + }, + } + ] diff --git a/tests/system/action/user/test_account_import.py b/tests/system/action/user/test_account_import.py index c051d007b..58ad0c59f 100644 --- a/tests/system/action/user/test_account_import.py +++ b/tests/system/action/user/test_account_import.py @@ -694,7 +694,7 @@ def test_json_upload_update_multiple_users_all_error(self) -> None: "default_vote_weight": "2.345678", } - row = row = result["rows"][1] + row = result["rows"][1] assert row["state"] == ImportState.ERROR assert row["messages"] == [ "Will remove password and default_password and forbid changing your OpenSlides password.", @@ -709,7 +709,7 @@ def test_json_upload_update_multiple_users_all_error(self) -> None: "can_change_own_password": False, } - row = row = result["rows"][2] + row = result["rows"][2] assert row["state"] == ImportState.ERROR assert row["messages"] == [ "Error: user 4 not found anymore for updating user 'user4'." 
@@ -723,7 +723,7 @@ def test_json_upload_update_multiple_users_all_error(self) -> None: "default_vote_weight": "4.345678", } - row = row = result["rows"][3] + row = result["rows"][3] assert row["state"] == ImportState.ERROR assert row["messages"] == [ "Will remove password and default_password and forbid changing your OpenSlides password.", @@ -736,7 +736,7 @@ def test_json_upload_update_multiple_users_all_error(self) -> None: "default_vote_weight": "5.345678", } - row = row = result["rows"][4] + row = result["rows"][4] assert row["state"] == ImportState.ERROR assert row["messages"] == [ "Will remove password and default_password and forbid changing your OpenSlides password.", @@ -749,7 +749,7 @@ def test_json_upload_update_multiple_users_all_error(self) -> None: "default_vote_weight": "6.345678", } - row = row = result["rows"][5] + row = result["rows"][5] assert row["state"] == ImportState.ERROR assert row["messages"] == [ "Error: row state expected to be 'done', but it is 'new'." From 2d8d4a4f081c7d11401b7dd3f79363fb8bad2cd7 Mon Sep 17 00:00:00 2001 From: Joshua Sangmeister <33004050+jsangmeister@users.noreply.github.com> Date: Tue, 10 Oct 2023 10:20:53 +0200 Subject: [PATCH 5/5] Make `THREAD_WATCH_TIMEOUT` configurable via env var (#1903) --- README.md | 83 +++++++++++++--------- openslides_backend/action/action_worker.py | 6 +- openslides_backend/shared/env.py | 7 +- tests/system/base.py | 5 +- 4 files changed, 57 insertions(+), 44 deletions(-) diff --git a/README.md b/README.md index 128bae443..c83b98daf 100644 --- a/README.md +++ b/README.md @@ -71,77 +71,90 @@ The action component listens to port 9002. The presenter component listens to po ## Environment variables -* OPENSLIDES_BACKEND_COMPONENT +### Functionality + +* `OPENSLIDES_BACKEND_COMPONENT` Use one of the following values to start only one component of this service: `action` or `presenter`. Defaults to all of them using different child processes. If using `all` you can shut down all compontes by sending SIGTERM to Python master process. -* ACTION_PORT +* `ACTION_PORT` - Action component listens on this port. Default: 9002 + Action component listens on this port. Default: `9002` -* PRESENTER_PORT +* `PRESENTER_PORT` - Presenter component listens on this port. Default 9003 + Presenter component listens on this port. Default `9003` -* OPENSLIDES_DEVELOPMENT +* `OPENTELEMETRY_ENABLED` - Set this variable e. g. to 1 to set loglevel to `debug` and activate Gunicorn's reload mechanism. + Set this variable e. g. to `1` to enable span reporting to an OpenTelemetry collector (defined in the main OpenSlides repository). -* OPENTELEMETRY_ENABLED +* `OPENSLIDES_LOGLEVEL` - Set this variable e. g. to 1 to enable span reporting to an OpenTelemetry collector (defined in the main OpenSlides repository). + In production mode you can set the loglevel to `debug`, `info`, `warning`, `error` or `critical`. Default is `info`. -* OPENSLIDES_LOGLEVEL +* `OPENSLIDES_BACKEND_NUM_WORKERS` - In production mode you can set the loglevel to `debug`, `info`, `warning`, `error` or `critical`. Default is `info`. + Number of Gunicorn workers. Default: `1` -* OPENSLIDES_BACKEND_RAISE_4XX +* `OPENSLIDES_BACKEND_WORKER_TIMEOUT` - Set this variable to raise HTTP 400 and 403 as exceptions instead of valid HTTP responses. + Gunicorn worker timeout in seconds. Default: `30` + +* `OPENSLIDES_BACKEND_THREAD_WATCH_TIMEOUT` -* DATASTORE_READER_PROTOCOL + Seconds after which an action is delegated to an action worker. `-1` deactivates action workers all together. 
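  For example, starting the backend with `OPENSLIDES_BACKEND_THREAD_WATCH_TIMEOUT=2.5` in its environment raises this threshold to 2.5 seconds.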
Default: `1.0` - Protocol of datastore reader service. Default: http +### Development + +* `OPENSLIDES_DEVELOPMENT` + + Set this variable e. g. to `1` to set loglevel to `debug` and activate Gunicorn's reload mechanism. + +* `OPENSLIDES_BACKEND_RAISE_4XX` + + Set this variable to raise HTTP 400 and 403 as exceptions instead of valid HTTP responses. -* DATASTORE_READER_HOST +### Connection to other services +* `DATASTORE_READER_PROTOCOL` - Host of datastore reader service. Default: localhost + Protocol of datastore reader service. Default: `http` -* DATASTORE_READER_PORT +* `DATASTORE_READER_HOST` - Port of datastore reader service. Default: 9010 + Host of datastore reader service. Default: `localhost` -* DATASTORE_READER_PATH +* `DATASTORE_READER_PORT` - Path of datastore reader service. Default: /internal/datastore/reader + Port of datastore reader service. Default: `9010` -* DATASTORE_WRITER_PROTOCOL +* `DATASTORE_READER_PATH` - Protocol of datastore writer service. Default: http + Path of datastore reader service. Default: `/internal/datastore/reader` -* DATASTORE_WRITER_HOST +* `DATASTORE_WRITER_PROTOCOL` - Host of datastore writer service. Default: localhost + Protocol of datastore writer service. Default: `http` -* DATASTORE_WRITER_PORT +* `DATASTORE_WRITER_HOST` - Port of datastore writer service. Default: 9011 + Host of datastore writer service. Default: `localhost` -* DATASTORE_WRITER_PATH +* `DATASTORE_WRITER_PORT` - Path of datastore writer service. Default: /internal/datastore/writer + Port of datastore writer service. Default: `9011` -* OPENSLIDES_BACKEND_NUM_WORKERS +* `DATASTORE_WRITER_PATH` - Number of Gunicorn workers. Default: 1 + Path of datastore writer service. Default: `/internal/datastore/writer` -* OPENSLIDES_BACKEND_WORKER_TIMEOUT +* `AUTH_HOST` - Gunicorn worker timeout in seconds. Default: 30 + Host of auth service. Used by the `authlib` package. Default: `localhost` -* AUTH_HOST and AUTH_PORT +* `AUTH_PORT` - Implicitly used by the authlib to get the endpoint for the auth-service + Port of auth service. Used by the `authlib` package. 
Default: `9004` # Some curl examples diff --git a/openslides_backend/action/action_worker.py b/openslides_backend/action/action_worker.py index 2a4696131..bae1496f1 100644 --- a/openslides_backend/action/action_worker.py +++ b/openslides_backend/action/action_worker.py @@ -18,8 +18,6 @@ from .action_handler import ActionHandler from .util.typing import ActionsResponse, Payload -THREAD_WATCH_TIMEOUT = 1.0 - def handle_action_in_worker_thread( payload: Payload, @@ -51,7 +49,9 @@ def handle_action_in_worker_thread( action_worker_thread.start() while not action_worker_thread.started: sleep(0.001) # The action_worker_thread should gain the lock and NOT this one - if lock.acquire(timeout=THREAD_WATCH_TIMEOUT): + + timeout = float(handler.env.OPENSLIDES_BACKEND_THREAD_WATCH_TIMEOUT) + if lock.acquire(timeout=timeout): lock.release() if hasattr(action_worker_thread, "exception"): raise action_worker_thread.exception diff --git a/openslides_backend/shared/env.py b/openslides_backend/shared/env.py index e838902a3..9dbe6ac18 100644 --- a/openslides_backend/shared/env.py +++ b/openslides_backend/shared/env.py @@ -43,6 +43,7 @@ class Environment: "OPENSLIDES_BACKEND_NUM_THREADS": "3", "OPENSLIDES_BACKEND_RAISE_4XX": "false", "OPENSLIDES_BACKEND_WORKER_TIMEOUT": "30", + "OPENSLIDES_BACKEND_THREAD_WATCH_TIMEOUT": "1", "OPENSLIDES_DEVELOPMENT": "false", "OPENSLIDES_LOGLEVEL": Loglevel.NOTSET.name, "OPENTELEMETRY_ENABLED": "false", @@ -54,10 +55,10 @@ class Environment: } def __init__(self, os_env: Any, *args: Any, **kwargs: Any) -> None: - for k, v in list(self.vars.items()): - env = os_env.get(k) + for key in self.vars.keys(): + env = os_env.get(key) if env is not None: - self.vars[k] = env + self.vars[key] = env def __getattr__(self, attr: str) -> str: value = self.vars.get(attr) diff --git a/tests/system/base.py b/tests/system/base.py index 6cbf2c0d2..f53684b00 100644 --- a/tests/system/base.py +++ b/tests/system/base.py @@ -7,7 +7,6 @@ from datastore.shared.util import DeletedModelsBehaviour, is_reserved_field from fastjsonschema.exceptions import JsonSchemaException -from openslides_backend.action import action_worker from openslides_backend.models.base import Model, model_registry from openslides_backend.services.auth.interface import AuthenticationService from openslides_backend.services.datastore.interface import DatastoreService @@ -93,8 +92,8 @@ def setUp(self) -> None: self.vote_service.clear_all() self.anon_client = self.create_client() - def set_thread_watch_timeout(self, thread_watch_timeout: float) -> None: - action_worker.THREAD_WATCH_TIMEOUT = thread_watch_timeout + def set_thread_watch_timeout(self, timeout: float) -> None: + self.app.env.vars["OPENSLIDES_BACKEND_THREAD_WATCH_TIMEOUT"] = str(timeout) def tearDown(self) -> None: if thread := self.__class__.get_thread_by_name("action_worker"):