diff --git a/.circleci/config.yml b/.circleci/config.yml index f08608285b..a0a154163a 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -74,5 +74,4 @@ workflows: filters: branches: ignore: - - /release\/.*/ - static_pages diff --git a/README.md b/README.md index bb04445ecb..e8c8c4877a 100644 --- a/README.md +++ b/README.md @@ -4,6 +4,12 @@ This repository provides the software which drives the DOAJ website and the DOAJ directory. +## CI Status + +**develop**   [![CircleCI](https://dl.circleci.com/status-badge/img/gh/DOAJ/doaj/tree/develop.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/DOAJ/doaj/tree/develop) + +**master**   [![CircleCI](https://dl.circleci.com/status-badge/img/gh/DOAJ/doaj/tree/master.svg?style=svg)](https://dl.circleci.com/status-badge/redirect/gh/DOAJ/doaj/tree/master) + ## Reporting issues Please feel free to use the issue tracker on https://github.com/DOAJ/doaj/issues for any bug diff --git a/cms/data/notifications.yml b/cms/data/notifications.yml index f514d7e4eb..1940db4daa 100644 --- a/cms/data/notifications.yml +++ b/cms/data/notifications.yml @@ -44,6 +44,10 @@ application:publisher:accepted:notify: It is your responsibility to keep the information about your journal in DOAJ up to date. When there are changes or updates needed please [submit an Update Request](https://doaj.org/publisher/journal) from your Publisher dashboard promptly. Please be aware that failure to do this may result in removal of your journal from DOAJ. + [How to submit an Update Request](https://doaj.org/apply/publisher-responsibilities/#keeping-your-journal-records-up-to-date) + + For more information on managing your DOAJ account and journal records, see our Publisher Information page (https://doaj.org/apply/publisher-responsibilities) + To increase the visibility, distribution and usage of your journal content, we encourage you to upload article metadata for this journal to DOAJ as soon as possible. [How to upload article metadata]({faq_url}#uploading-article-metadata) diff --git a/cms/pages/about/index.md b/cms/pages/about/index.md index f677f83f19..49dbeabd84 100644 --- a/cms/pages/about/index.md +++ b/cms/pages/about/index.md @@ -38,17 +38,6 @@ The work done by the [DOAJ Team](/about/team/) is supported by over 100 voluntar DOAJ also has a global network of Ambassadors who promote open access and best practice in their regions. Our Ambassadors are bound by an agreement and must declare all conflicts of interest. -## Zero tolerance policy - -We expect the members of our Team, our volunteers and our Ambassadors to always be treated courteously and with respect for the work they are doing. They should not be subject to abusive behaviour in any form, which includes: - -- rude, disrespectful and offensive behaviour, including derogatory remarks or anything which amounts to verbal or emotional abuse in person or in communications -- sexually inappropriate comments or behaviour -- racist and discriminatory abuse -- threats of physical violence -- aggressive and violent behaviour - - ## Partnerships and collaborations DOAJ partners with many organisations. The nature of the partnerships varies and may include membership, contracts for work, exchanges of information or services, initiative signatories, or access to information resources that assist DOAJ with our application review process. @@ -95,7 +84,72 @@ Established in 2015, Think. Check. Submit. was developed with the support of an Think. Check. Submit. 
provides tools and practical resources to educate researchers, promote integrity, and build trust in credible research and publications. Separate checklists for [journals](https://thinkchecksubmit.org/journals/) and [books](https://thinkchecksubmit.org/books-and-chapters/) are available to guide researchers through the key criteria for selecting where to publish their research and are also invaluable to scholarly communications professionals who are advising researchers about these issues. Both checklists are also available in a growing number of languages. - ## Diversity DOAJ believes in the power and value of diversity in scholarly communications. DOAJ is an [adopting organisation](https://c4disc.org/about/adopting-organizations/) of the [C4DISC Joint Statement of Principles](https://c4disc.org/joint-statement-of-principles/). Adopting the Joint Statement of Principles demonstrates our support for improving diversity and inclusion in our industry. + +## Code of conduct and zero tolerance policy + +At DOAJ, we foster a welcoming and inclusive community for everyone. Our code of conduct and zero tolerance policy help us create an environment where you feel comfortable and valued. + +### What this policy covers + +Our policy applies to all kinds of communication within our community or the spaces we create. It covers interactions among our team, volunteers, ambassadors, and individuals involved in events, activities and projects initiated or organised by us. This includes: + +- Communication via email or social media +- Participation in webinars and events organised by us +- Communication with our team members, volunteers and ambassadors at other events + +### Who this policy includes + +We welcome everyone, no matter your gender identity, sexual orientation, abilities, neurodiversity, appearance, body size, ethnicity, nationality, race, age, religion (or lack of it), social identity, or any other protected characteristic. Our community is wonderfully diverse and filled with passionate individuals who care deeply about their work. While our collaboration can sometimes spark lively discussions, we ask everyone to treat one another with consideration, kindness and respect. + +### Code of conduct + +We encourage members of our community who engage with or for DOAJ to: + +1. **Be kind and patient.** We want all members of our community to engage and be heard. We are a diverse community with many languages and many needs. Remember that people may use translation services or screen readers. They may be communicating in a second language. +2. **Be respectful and open.** Our community is built around open scholarship, open research, open access, and open science. We encourage you to have open discussions. No matter the content, we ask you to communicate in a professional manner, be open to hearing others' points of view, and exchange information, experiences and knowledge openly. +3. **Create a positive environment.** We want you to feel comfortable when engaging with us and our community. Learning and exploring different aspects of open scholarship should be fun! We encourage you to interact positively and share laughs, but avoid making demeaning or exclusionary jokes. +4. **Speak up!** We encourage and support you in addressing unwelcome behaviour as long as you feel comfortable doing so. Otherwise, follow our process below to report issues.
+ +### Zero tolerance + +We have zero tolerance for: + +- Rude, disrespectful and offensive behaviour, including derogatory remarks or anything that amounts to verbal or emotional abuse +- Sexually inappropriate comments or behaviour +- Racist and discriminatory abuse +- Threats of physical violence +- Aggressive and violent behaviour +- Doxxing (publishing private information about any members of the DOAJ team, ambassadors or volunteers) +- Stalking + +We also have zero tolerance for any other inappropriate behaviour that breaches our code of conduct. + +### Reporting and resolving issues + +#### Reporting issues + +If you have concerns about someone’s behaviour, please contact the Operations Manager, Lene (lene@doaj.org). Lene will: + +- refer the incident to our Executive Team (who will decide on further action) +- follow up with all those involved and provide you with an update + +#### Resolving issues + +In cases where our zero tolerance policy has not been breached, we will try to resolve the conflict, for example, through a mediated conversation involving a third party or separate conversations with everyone involved. Our aim is to solve issues respectfully and to improve relationships and collaborations. + +We also encourage you to let us know about incidents that were successfully resolved without our involvement. This helps us ensure we have the correct policies in place. + +#### Breaches of our zero tolerance policy + +We expect you to treat members of our team, volunteers and ambassadors with kindness and respect for the work they are doing. If someone breaches our zero tolerance policy, we may take action such as: + +- Stopping all communication with you, including emails or any appeals and complaints +- Excluding a publisher or journal from the DOAJ +- Banning an individual from future DOAJ events +- Blocking or reporting an individual or a profile on social media +- Removing an individual from an event or preventing them from attending future events +- Reporting incidents involving our team, volunteers or ambassadors at external events or withdrawing from an event +- Making a statement on our blog diff --git a/cms/pages/docs/oai-pmh.md b/cms/pages/docs/oai-pmh.md index 8eac6f9b34..443bc8d9d1 100644 --- a/cms/pages/docs/oai-pmh.md +++ b/cms/pages/docs/oai-pmh.md @@ -11,17 +11,19 @@ featuremap: --- +Journal records are announced in the feed when they are added or removed. + ## Journal feed {:.tabular-list} - `Identify` - Access the [base Identify endpoint](/oai?verb=Identify). - `ListSets` - - DOAJ provides all its subject classifications as OAI-PMH sets, so you can harvest just those you are interested in. Access the [full list of the sets](/oai?verb=ListSets). + - We provide all our subject classifications as OAI-PMH sets, so you can harvest only those you are interested in. Access the [full list of the sets](/oai?verb=ListSets). - `ListMetadataFormats` - - DOAJ currently supports only `oai_dc`; access [the metadata formats](/oai?verb=ListMetadataFormats). + - Currently, we only support `oai_dc`; access [the metadata formats](/oai?verb=ListMetadataFormats).
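Taken together, the verbs above are enough to build a simple harvester against the journal feed. The sketch below is purely illustrative and is not part of this changeset; it assumes the public endpoint at `https://doaj.org/oai` and uses only the standard OAI-PMH `ListRecords` verb with `resumptionToken` paging, built on the Python standard library.

```python
# Minimal OAI-PMH harvesting sketch for the DOAJ journal feed (illustrative only).
from urllib.parse import urlencode
from urllib.request import urlopen
from xml.etree import ElementTree

OAI_NS = {"oai": "http://www.openarchives.org/OAI/2.0/"}
JOURNAL_FEED = "https://doaj.org/oai"


def harvest_journal_records(metadata_prefix="oai_dc"):
    """Yield every <record> element in the journal feed, following resumption tokens."""
    params = {"verb": "ListRecords", "metadataPrefix": metadata_prefix}
    while True:
        with urlopen(JOURNAL_FEED + "?" + urlencode(params)) as response:
            tree = ElementTree.fromstring(response.read())
        for record in tree.findall(".//oai:record", OAI_NS):
            yield record
        token = tree.find(".//oai:resumptionToken", OAI_NS)
        if token is None or not token.text:
            break  # an empty resumptionToken marks the end of the list
        # subsequent requests carry only the verb and the resumption token
        params = {"verb": "ListRecords", "resumptionToken": token.text}
```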
-The metadata held by DOAJ is mapped to Dublin Core in the OAI-PMH feed, with the following interpretations for each Journal field: +The metadata held by us is mapped to Dublin Core in the OAI-PMH feed, with the following interpretations for each Journal field: | Dublin Core | Meaning within DOAJ | |---------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| @@ -91,6 +93,7 @@ The metadata held by DOAJ is mapped to Dublin Core in the OAI-PMH feed, with the | Date changes were made live | Changes | |-----------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| 6 November 2024 | added support for article and journal delete to the base Identify endpoint. (Sponsored by Clarivate.) | | 25 November 2020 | a minor edit to `dc:relation`: 'Links to related resources: the journal home page and the journal author-pays link if relevant' became 'Links to related resources (if present): the journal home page, open access statement, author instructions, aims, and waiver pages'| | 20 April 2015 | `subject` elements which represent a Library of Congress Classification \(LCC\) topic will now be marked with an additional OAI DC\-compliant attribute to denote this: `xsi:type="dcterms:LCSH"`\. LCC subjects will no longer be prefixed by `LCC:`\. | | 13 December 2013 | Initial release | @@ -103,13 +106,13 @@ The metadata held by DOAJ is mapped to Dublin Core in the OAI-PMH feed, with the - `Identify` - Access the [base Identify endpoint](http://www.doaj.org/oai.article?verb=Identify). - `ListSets` - - DOAJ provides all its subject classifications as OAI-PMH sets, so you can harvest just those you are interested in. Access the [full list of the sets](http://www.doaj.org/oai.article?verb=ListSets). + - We provide all our subject classifications as OAI-PMH sets, so you can harvest only those you are interested in. Access the [full list of the sets](http://www.doaj.org/oai.article?verb=ListSets). - `ListMetadataFormats` - - DOAJ currently supports the `oai_dc` and `oai_doaj` formats; access [the metadata formats](http://www.doaj.org/oai.article?verb=ListMetadataFormats). + - Currently, we only support the `oai_dc` and `oai_doaj` formats; access [the metadata formats](http://www.doaj.org/oai.article?verb=ListMetadataFormats). 
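With the 6 November 2024 change recorded in the change logs, the feeds now also announce withdrawn journals and deleted articles as OAI-PMH deleted records. A harvester can spot them through the standard `status="deleted"` attribute on each record header. The sketch below is illustrative only and not part of this changeset; it assumes the public article endpoint at `https://doaj.org/oai.article`.

```python
# Illustrative only: separate live article records from deleted ones in the feed.
from urllib.parse import urlencode
from urllib.request import urlopen
from xml.etree import ElementTree

OAI_NS = {"oai": "http://www.openarchives.org/OAI/2.0/"}
ARTICLE_FEED = "https://doaj.org/oai.article"


def list_article_identifiers(metadata_prefix="oai_dc"):
    """Print each identifier in the first page of the article feed, flagging deletions."""
    url = ARTICLE_FEED + "?" + urlencode(
        {"verb": "ListIdentifiers", "metadataPrefix": metadata_prefix})
    with urlopen(url) as response:
        tree = ElementTree.fromstring(response.read())
    for header in tree.findall(".//oai:header", OAI_NS):
        identifier = header.findtext("oai:identifier", namespaces=OAI_NS)
        if header.get("status") == "deleted":
            print("deleted:", identifier)  # drop this record from any local copy
        else:
            print("live:   ", identifier)
```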
### Dublin Core OAI Article format (`OAI_DC`) -The metadata held by DOAJ is mapped to Dublin Core in the OAI-PMH feed, with the following interpretations for each Article field: +The metadata held by us is mapped to Dublin Core in the OAI-PMH feed, with the following interpretations for each Article field: | Dublin Core | Meaning within DOAJ | |---------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| @@ -190,6 +193,7 @@ The metadata held by DOAJ is mapped to Dublin Core in the OAI-PMH feed, with the | Date changes were made live | Changes | |---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|-----------------| +| 6 November 2024 | added support for article and journal delete to the base Identify endpoint | | 25 November 2020 | The `dc:provenance` element was removed, as we no longer synchronise provenance information to our article records from the journal\. | | 22 August 2016 | The `dc:rights` element was removed, as it was technically inaccurate \- it represented the Journal's overall licence policy, not the specific rights for the article\. This information is now in `dc:provenance`\. | | 20 April 2015 | The `identifier` element will now point to the DOAJ article page rather than the `/search` page\. E\.g\. [`https://doaj.org/article/0000178c89214dc8b82df1a25c0c478e`](https://doaj.org/article/0000178c89214dc8b82df1a25c0c478e)

Up to two new `relation` elements will appear for each article, containing URLs to the Table of Contents page for the article's journal\. The page can be reached via both print ISSN and E\-ISSN, so up to two such links might appear\.

`subject` elements which represent a Library of Congress Classification \(LCC\) topic will now be marked with an additional OAI DC\-compliant attribute to denote this: `xsi:type="dcterms:LCSH"`\. LCC subjects will no longer be prefixed by `LCC:`\. | @@ -288,4 +292,5 @@ The following fields are available (not every article will have all the informat | Date changes were made live | Changes | |-----------------------------|-----------------| +| 6 November 2024 | added support for article and journal delete to the base Identify endpoint | | 20 April 2015 | Initial release | diff --git a/deploy/logrotate/README.md b/deploy/logrotate/README.md index ec163fd12a..2ef592ccad 100644 --- a/deploy/logrotate/README.md +++ b/deploy/logrotate/README.md @@ -30,4 +30,32 @@ Log uploads can be checked on the S3 bucket with the following command: ``` aws --profile doaj-nginx-logs s3 ls s3://doaj-nginx-logs -``` \ No newline at end of file +``` + +You can try a test run of `logrotate`: + +``` +sudo /usr/sbin/logrotate /etc/logrotate.conf +``` + +Or sync the files by directly running: + +``` +HOSTNAME=`hostname` sudo aws --profile doaj-nginx-logs s3 sync /var/log/nginx/ s3://doaj-nginx-logs/$HOSTNAME/ --exclude "*" --include "doaj.*.gz" +``` + + +### Troubleshooting: + +``` +error: nginx:1 duplicate log entry for /var/log/nginx/doaj.access.log +error: found error in file nginx, skipping +``` + +Check whether you've disabled the default nginx configuration + +``` +The config profile (doaj-nginx-logs) could not be found +``` + +Check that root user has access to the AWS credentials and symlink if required. \ No newline at end of file diff --git a/doajtest/fixtures/accounts.py b/doajtest/fixtures/accounts.py index c845bd83e3..b13bfb734b 100644 --- a/doajtest/fixtures/accounts.py +++ b/doajtest/fixtures/accounts.py @@ -85,9 +85,12 @@ def create_publisher_a(): return publisher -def create_maned_a(): +def create_maned_a(save=False): from portality import models maned = models.Account(**AccountFixtureFactory.make_managing_editor_source()) + maned.set_password("password") + if save: + maned.save(blocking=True) return maned diff --git a/doajtest/fixtures/v2/common.py b/doajtest/fixtures/v2/common.py index efa5f7367b..c550437c90 100644 --- a/doajtest/fixtures/v2/common.py +++ b/doajtest/fixtures/v2/common.py @@ -25,7 +25,7 @@ } SEAL_FORM_EXPANDED = { - "doaj_seal": False, + "doaj_seal": [], } JOURNAL_LIKE_BIBJSON = { diff --git a/doajtest/functional/make_notifications.py b/doajtest/functional/make_notifications.py index 3d5aeea0c9..64aa044b53 100644 --- a/doajtest/functional/make_notifications.py +++ b/doajtest/functional/make_notifications.py @@ -6,6 +6,7 @@ from portality import constants from portality import models, app_email from portality.core import app +from portality.bll import DOAJ from portality.events.consumers import application_assed_assigned_notify, \ application_assed_inprogress_notify, \ application_editor_completed_notify, \ @@ -30,34 +31,36 @@ USER = "richard" -NOTIFICATIONS = [ - "application_assed_assigned_notify", - "application_assed_inprogress_notify", - "application_editor_completed_notify", - "application_editor_group_assigned_notify", - "application_editor_inprogress_notify", - "application_maned_ready_notify", - "application_publisher_accepted_notify", - "application_publisher_assigned_notify", - "application_publisher_created_notify", - "application_publisher_inprogress_notify", - "application_publisher_quickreject_notify", - "application_publisher_revision_notify", - 
"bg_job_finished_notify", - "journal_assed_assigned_notify", - "journal_editor_group_assigned_notify", - "update_request_publisher_accepted_notify", - "update_request_publisher_assigned_notify", - "update_request_publisher_rejected_notify", - UpdateRequestPublisherSubmittedNotify.ID, -] +NOTIFICATIONS = [ec.ID for ec in DOAJ.eventsService().EVENT_CONSUMERS] + +# NOTIFICATIONS = [ +# "application_assed_assigned_notify", +# "application_assed_inprogress_notify", +# "application_editor_completed_notify", +# "application_editor_group_assigned_notify", +# "application_editor_inprogress_notify", +# "application_maned_ready_notify", +# "application_publisher_accepted_notify", +# "application_publisher_assigned_notify", +# "application_publisher_created_notify", +# "application_publisher_inprogress_notify", +# "application_publisher_quickreject_notify", +# "application_publisher_revision_notify", +# "bg_job_finished_notify", +# "journal_assed_assigned_notify", +# "journal_editor_group_assigned_notify", +# "update_request_publisher_accepted_notify", +# "update_request_publisher_assigned_notify", +# "update_request_publisher_rejected_notify", +# UpdateRequestPublisherSubmittedNotify.ID, +# ] app.config["ENABLE_EMAIL"] = True app_email.Mail = MockMail ############################################## ## ApplicationAssedAssignedNotify -if "application_assed_assigned_notify" in NOTIFICATIONS: +if "application:assed:assigned:notify" in NOTIFICATIONS: aaan_application = ApplicationFixtureFactory.make_application_source() aaan_application["admin"]["editor"] = USER aaan_application["bibjson"]["title"] = "Application Assed Assigned Notify" @@ -71,7 +74,7 @@ ############################################## ## ApplicationAssedAssignedNotify -if "application_assed_inprogress_notify" in NOTIFICATIONS: +if "application:assed:inprogress:notify" in NOTIFICATIONS: aain_application = ApplicationFixtureFactory.make_application_source() aain_application["admin"]["editor"] = USER aain_application["bibjson"]["title"] = "Application Assed In Progress Notify" @@ -85,7 +88,7 @@ ############################################## ## ApplicationEditorCompletedNotify -if "application_editor_completed_notify" in NOTIFICATIONS: +if "application:editor:completed:notify" in NOTIFICATIONS: def editor_group_mock_pull(editor_group_id): return EditorGroup(**{ "editor": USER @@ -109,7 +112,7 @@ def editor_group_mock_pull(editor_group_id): ############################################## ## ApplicationEditorGroupAssignedNotify -if "application_editor_group_assigned_notify" in NOTIFICATIONS: +if "application:editor_group:assigned:notify" in NOTIFICATIONS: def editor_group_mock_pull(key, value): return EditorGroup(**{ "editor": USER @@ -133,7 +136,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationEditorInprogressNotify -if "application_editor_inprogress_notify" in NOTIFICATIONS: +if "application:editor:inprogress:notify" in NOTIFICATIONS: def editor_group_mock_pull(editor_group_id): return EditorGroup(**{ "editor": USER @@ -157,7 +160,7 @@ def editor_group_mock_pull(editor_group_id): ############################################## ## ApplicationManedReadyNotify -if "application_maned_ready_notify" in NOTIFICATIONS: +if "application:maned:ready:notify" in NOTIFICATIONS: def editor_group_mock_pull(key, value): return EditorGroup(**{ "maned": USER @@ -181,7 +184,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationPublisherAcceptedNotify -if 
"application_publisher_accepted_notify" in NOTIFICATIONS: +if "application:publisher:accepted:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Application Publisher Accepted Notify" @@ -195,7 +198,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationPublisherAssignedNotify -if "application_publisher_assigned_notify" in NOTIFICATIONS: +if "application:publisher:assigned:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Application Publisher Assigned Notify" @@ -209,7 +212,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationPublisherCreatedNotify -if "application_publisher_created_notify" in NOTIFICATIONS: +if "application:publisher:created:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Application Publisher Created Notify" @@ -223,7 +226,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationPublisherInprogressNotify -if "application_publisher_inprogress_notify" in NOTIFICATIONS: +if "application:publisher:inprogress:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Application Publisher In Progress Notify" @@ -237,7 +240,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationPublisherQuickRejectNotify -if "application_publisher_quickreject_notify" in NOTIFICATIONS: +if "application:publisher:quickreject:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Application Publisher Quick Reject Notify" @@ -251,7 +254,7 @@ def editor_group_mock_pull(key, value): ############################################## ## ApplicationPublisherQuickRejectNotify -if "application_publisher_revision_notify" in NOTIFICATIONS: +if "application:publisher:revision:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Application Publisher Revision Notify" @@ -267,7 +270,7 @@ def editor_group_mock_pull(key, value): ## BGJobFinishedNotify if "bg_job_finished_notify" in NOTIFICATIONS: job = models.BackgroundJob(**{ - "id": "bg_job_finished_notify", + "id": "bg:job_finished:notify", "user": USER, "action": "bg_job_finished_notify", "status": "complete" @@ -281,7 +284,7 @@ def editor_group_mock_pull(key, value): ############################################## ## JournalAssedAssignedNotify -if "journal_assed_assigned_notify" in NOTIFICATIONS: +if "journal:assed:assigned:notify" in NOTIFICATIONS: journal = JournalFixtureFactory.make_journal_source(in_doaj=True) journal["admin"]["editor"] = USER journal["bibjson"]["title"] = "Journal Assed Assigned Notify" @@ -295,7 +298,7 @@ def editor_group_mock_pull(key, value): ############################################## ## JournalEditorGroupAssignedNotify -if "journal_editor_group_assigned_notify" in NOTIFICATIONS: +if "journal:editor_group:assigned:notify" in NOTIFICATIONS: def editor_group_mock_pull(key, 
value): return EditorGroup(**{ "editor": USER @@ -320,7 +323,7 @@ def editor_group_mock_pull(key, value): ############################################## ## UpdateRequestPublisherAcceptedNotify -if "update_request_publisher_accepted_notify" in NOTIFICATIONS: +if "update_request:publisher:accepted:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Update Request Publisher Accepted Notify" @@ -334,7 +337,7 @@ def editor_group_mock_pull(key, value): ############################################## ## UpdateRequestPublisherAssignedNotify -if "update_request_publisher_assigned_notify" in NOTIFICATIONS: +if "update_request:publisher:assigned:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Update Request Publisher Assigned Notify" @@ -348,7 +351,7 @@ def editor_group_mock_pull(key, value): ############################################## ## UpdateRequestPublisherRejectedNotify -if "update_request_publisher_rejected_notify" in NOTIFICATIONS: +if "update_request:publisher:rejected:notify" in NOTIFICATIONS: application = ApplicationFixtureFactory.make_application_source() application["admin"]["owner"] = USER application["bibjson"]["title"] = "Update Request Publisher Rejected Notify" diff --git a/doajtest/helpers.py b/doajtest/helpers.py index 1fcf47eba3..2200d69544 100644 --- a/doajtest/helpers.py +++ b/doajtest/helpers.py @@ -67,6 +67,8 @@ def setUp(self): for im in self.warm_mappings: if im == "article": self.warmArticle() + if im == "article_tombstone": + self.warmArticleTombstone() # add more types if they are necessary def tearDown(self): @@ -82,6 +84,16 @@ def warmArticle(self): article.delete() Article.blockdeleted(article.id) + def warmArticleTombstone(self): + # push an article to initialise the mappings + from doajtest.fixtures import ArticleFixtureFactory + from portality.models import ArticleTombstone + source = ArticleFixtureFactory.make_article_source() + article = ArticleTombstone(**source) + article.save(blocking=True) + article.delete() + ArticleTombstone.blockdeleted(article.id) + CREATED_INDICES = [] @@ -91,10 +103,17 @@ def initialise_index(): def create_index(index_type): - if index_type in CREATED_INDICES: - return - core.initialise_index(app, core.es_connection, only_mappings=[index_type]) - CREATED_INDICES.append(index_type) + if "," in index_type: + # this covers a DAO that has multiple index types for searching purposes + # expressed as a comma separated list + index_types = index_type.split(",") + else: + index_types = [index_type] + for it in index_types: + if it in CREATED_INDICES: + return + core.initialise_index(app, core.es_connection, only_mappings=[it]) + CREATED_INDICES.append(it) def dao_proxy(dao_method, type="class"): @@ -130,6 +149,7 @@ class DoajTestCase(TestCase): @classmethod def create_app_patch(cls): return { + 'AUTOCHECK_INCOMING': False, # old test cases design and depend on work flow of autocheck disabled "STORE_IMPL": "portality.store.StoreLocal", "STORE_LOCAL_DIR": paths.rel2abs(__file__, "..", "tmp", "store", "main", cls.__name__.lower()), "STORE_TMP_DIR": paths.rel2abs(__file__, "..", "tmp", "store", "tmp", cls.__name__.lower()), @@ -417,9 +437,9 @@ def assert_expected_dict(test_case: TestCase, target, expected: dict): test_case.assertDictEqual(actual, expected) -def login(app_client, username, password, follow_redirects=True): 
+def login(app_client, email, password, follow_redirects=True): return app_client.post(url_for('account.login'), - data=dict(user=username, password=password), + data=dict(user=email, password=password), follow_redirects=follow_redirects) diff --git a/doajtest/matrices/bll_todo_maned/top_todo_maned.matrix.csv b/doajtest/matrices/bll_todo_maned/top_todo_maned.matrix.csv index 9965c80495..0e0a97573f 100644 --- a/doajtest/matrices/bll_todo_maned/top_todo_maned.matrix.csv +++ b/doajtest/matrices/bll_todo_maned/top_todo_maned.matrix.csv @@ -1,6 +1,6 @@ -test_id,account,raises,todo_maned_stalled,todo_maned_follow_up_old,todo_maned_ready,todo_maned_completed,todo_maned_assign_pending,todo_maned_new_update_request,todo_maned_new_update_request_order,todo_maned_ready_order,todo_maned_follow_up_old_order,todo_maned_stalled_order,todo_maned_assign_pending_order,todo_maned_completed_order -1,none,ArgumentException,0,0,0,0,0,0,,,,,, -2,no_role,,0,0,0,0,0,0,,,,,, -3,admin,,1,1,1,1,1,1,1,2,3,4,5,6 -4,editor,,0,0,0,0,0,0,,,,,, -5,assed,,0,0,0,0,0,0,,,,,, +test_id,account,raises,todo_maned_stalled,todo_maned_follow_up_old,todo_maned_ready,todo_maned_completed,todo_maned_assign_pending,todo_maned_new_update_request,todo_maned_on_hold,todo_maned_new_update_request_order,todo_maned_ready_order,todo_maned_follow_up_old_order,todo_maned_stalled_order,todo_maned_assign_pending_order,todo_maned_completed_order,todo_maned_on_hold_order +1,none,ArgumentException,0,0,0,0,0,0,0,,,,,,, +2,no_role,,0,0,0,0,0,0,0,,,,,,, +3,admin,,1,1,1,1,1,1,1,1,2,3,4,5,6,7 +4,editor,,0,0,0,0,0,0,0,,,,,,, +5,assed,,0,0,0,0,0,0,0,,,,,,, diff --git a/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.csv b/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.csv index a8148032f9..3d0f77656f 100644 --- a/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.csv +++ b/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.csv @@ -1,36 +1,40 @@ -field,test_id,account,raises,todo_maned_stalled,todo_maned_follow_up_old,todo_maned_ready,todo_maned_completed,todo_maned_assign_pending,todo_maned_new_update_request,todo_maned_new_update_request_order,todo_maned_ready_order,todo_maned_follow_up_old_order,todo_maned_stalled_order,todo_maned_assign_pending_order,todo_maned_completed_order -type,index,generated,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional -default,,,,,,,,,,,,,,, -,,,,,,,,,,,,,,, -values,,none,ArgumentException,,,,,,,,,,,, -values,,no_role,,,,,,,,,,,,, -values,,admin,,,,,,,,,,,,, -values,,editor,,,,,,,,,,,,, -values,,assed,,,,,,,,,,,,, -,,,,,,,,,,,,,,, -conditional raises,,none,ArgumentException,,,,,,,,,,,, -,,,,,,,,,,,,,,, -conditional todo_maned_stalled,,admin,,1,,,,,,,,,,, -conditional todo_maned_stalled,,!admin,,0,,,,,,,,,,, -,,,,,,,,,,,,,,, -conditional todo_maned_follow_up_old,,admin,,,1,,,,,,,,,, -conditional todo_maned_follow_up_old,,!admin,,,0,,,,,,,,,, -,,,,,,,,,,,,,,, -conditional todo_maned_ready,,admin,,,,1,,,,,,,,, -conditional todo_maned_ready,,!admin,,,,0,,,,,,,,, -,,,,,,,,,,,,,,, -conditional todo_maned_completed,,admin,,,,,1,,,,,,,, -conditional todo_maned_completed,,!admin,,,,,0,,,,,,,, -,,,,,,,,,,,,,,, -conditional todo_maned_assign_pending,,admin,,,,,,1,,,,,,, -conditional todo_maned_assign_pending,,!admin,,,,,,0,,,,,,, -,,,,,,,,,,,,,,, -conditional todo_maned_new_update_request,,admin,,,,,,,1,,,,,, -conditional todo_maned_new_update_request,,!admin,,,,,,,0,,,,,, -,,,,,,,,,,,,,,, 
-conditional todo_maned_new_update_request_order,,admin,,,,,,,,1,,,,, -conditional todo_maned_ready_order,,admin,,,,,,,,,2,,,, -conditional todo_maned_follow_up_old_order,,admin,,,,,,,,,,3,,, -conditional todo_maned_stalled_order,,admin,,,,,,,,,,,4,, -conditional todo_maned_assign_pending_order,,admin,,,,,,,,,,,,5, -conditional todo_maned_completed_order,,admin,,,,,,,,,,,,,6 \ No newline at end of file +field,test_id,account,raises,todo_maned_stalled,todo_maned_follow_up_old,todo_maned_ready,todo_maned_completed,todo_maned_assign_pending,todo_maned_new_update_request,todo_maned_on_hold,todo_maned_new_update_request_order,todo_maned_ready_order,todo_maned_follow_up_old_order,todo_maned_stalled_order,todo_maned_assign_pending_order,todo_maned_completed_order,todo_maned_on_hold_order +type,index,generated,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional,conditional +default,,,,,,,,,,,,,,,,, +,,,,,,,,,,,,,,,,, +values,,none,ArgumentException,,,,,,,,,,,,,, +values,,no_role,,,,,,,,,,,,,,, +values,,admin,,,,,,,,,,,,,,, +values,,editor,,,,,,,,,,,,,,, +values,,assed,,,,,,,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional raises,,none,ArgumentException,,,,,,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_stalled,,admin,,1,,,,,,,,,,,,, +conditional todo_maned_stalled,,!admin,,0,,,,,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_follow_up_old,,admin,,,1,,,,,,,,,,,, +conditional todo_maned_follow_up_old,,!admin,,,0,,,,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_ready,,admin,,,,1,,,,,,,,,,, +conditional todo_maned_ready,,!admin,,,,0,,,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_completed,,admin,,,,,1,,,,,,,,,, +conditional todo_maned_completed,,!admin,,,,,0,,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_assign_pending,,admin,,,,,,1,,,,,,,,, +conditional todo_maned_assign_pending,,!admin,,,,,,0,,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_new_update_request,,admin,,,,,,,1,,,,,,,, +conditional todo_maned_new_update_request,,!admin,,,,,,,0,,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_on_hold,,admin,,,,,,,,1,,,,,,, +conditional todo_maned_on_hold,,!admin,,,,,,,,0,,,,,,, +,,,,,,,,,,,,,,,,, +conditional todo_maned_new_update_request_order,,admin,,,,,,,,,1,,,,,, +conditional todo_maned_ready_order,,admin,,,,,,,,,,2,,,,, +conditional todo_maned_follow_up_old_order,,admin,,,,,,,,,,,3,,,, +conditional todo_maned_stalled_order,,admin,,,,,,,,,,,,4,,, +conditional todo_maned_assign_pending_order,,admin,,,,,,,,,,,,,5,, +conditional todo_maned_completed_order,,admin,,,,,,,,,,,,,,6, +conditional todo_maned_on_hold_order,,admin,,,,,,,,,,,,,,,7 \ No newline at end of file diff --git a/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.json b/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.json index 6625f298f2..a7ef53dc1b 100644 --- a/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.json +++ b/doajtest/matrices/bll_todo_maned/top_todo_maned.settings.json @@ -207,6 +207,35 @@ } } }, + { + "name": "todo_maned_on_hold", + "type": "conditional", + "default": "", + "values": { + "1": { + "conditions": [ + { + "account": { + "or": [ + "admin" + ] + } + } + ] + }, + "0": { + "conditions": [ + { + "account": { + "nor": [ + "admin" + ] + } + } + ] + } + } + }, { "name": "todo_maned_new_update_request_order", "type": "conditional", @@ -314,6 +343,24 @@ ] } } + }, + { + "name": "todo_maned_on_hold_order", + "type": "conditional", + "default": "", + "values": 
{ + "7": { + "conditions": [ + { + "account": { + "or": [ + "admin" + ] + } + } + ] + } + } } ] } \ No newline at end of file diff --git a/doajtest/testbook/dashboards/maned_todo.yml b/doajtest/testbook/dashboards/maned_todo.yml index 96c7b171c4..06ce8921c9 100644 --- a/doajtest/testbook/dashboards/maned_todo.yml +++ b/doajtest/testbook/dashboards/maned_todo.yml @@ -27,7 +27,7 @@ tests: - step: Go to the maned dashboard page path: /dashboard results: - - You can see 16 applications in your priority list + - You can see 18 applications in your priority list - Your priority list contains a mixture of managing editor items (actions related to teams you are the managing editor for), editor items (actions related to teams you are the editor for) and associate items (actions related to applications which are assigned specifically to you for review). @@ -37,30 +37,31 @@ tests: - At least one of your priority items is for an application in the state ready (it should indicate that it is for your maned group) - At least one of your priority items is for an application in the completed state which has not been updated for more than 2 weeks (it should indicate that it is for your maned group) - At least one of your priority items is for an application in the pending state which has not been updated for more than 2 weeks (it should indicate that it is for your maned group) + - At least one of your priority items is for an application in the "on hold" state - Your lowest priority item (last in the list) is for an update request which was submitted this month - step: click on the managing editor's ready application - step: Change the application status to "Accepted" and save - step: close the tab, return to the dashboard and reload the page results: - - You can see 15 applications in your priority list + - You can see 17 applications in your priority list - The application you have just edited has disappeared from your priority list - step: click on the [in progress] stalled managing editor's application - step: make any minor adjustment to the metadata and save - step: close the tab, return to the dashboard and reload the page results: - - You can see 14 applications in your priority list + - You can see 16 applications in your priority list - The application you just edited has disappeared from your priority list - step: click on the "completed" maned application - step: Change the application to "ready" status - step: close the tab, return to the dashboard and reload the page results: - - You can still see 14 applications in your priority list + - You can still see 15 applications in your priority list - The completed application you just moved to ready is now in your priority list as a ready application - step: click on the pending managing editor's application - step: Assign the item to an editor in the selected group (there should be a test editor available to you to select) - step: close the tab, return to the dashboard and reload the page results: - - You have 13 applications left in your todo list + - You have 14 applications left in your todo list - The pending application you just edited is no longer visible - title: Filtering the todo list @@ -74,22 +75,28 @@ tests: - step: Go to the maned dashboard page path: /dashboard results: - - You can see 16 applications in your priority list + - You can see 18 applications in your priority list - Your highest priority item (first in the list) is for an update request which was submitted last month - Your lowest priority item (last in the list) is for 
an update request which was submitted this month - - On the top right of the todo list are a set of filter buttons "Show all", "New Applications" and "Update Requests" + - On the top right of the todo list are a set of filter buttons "Show all", "New Applications", "Update Requests" and "On Hold" - The "Show all" button is highlighted - step: click on the "New Applications" filter button results: - - You can see 14 applications in your priority list - - The update requests which were on the previous screen are no longer visible + - You can see 16 applications in your priority list + - The update requests and "on hold" items which were on the previous screen are no longer visible - The "New Applications" filter button is now highlighted - step: click on the "Update Request" filter button results: - - You can see 12application in your priority list + - You can see 2 applications in your priority list - Your highest priority item (first in the list) is for an update request which was submitted last month - Your lowest priority item (last in the list) is for an update request which was submitted this month - The "Update Request" filter button is now highlighted + - step: click on the "On Hold" filter button + results: + - You can see 2 application in your priority list + - The "On Hold" filter button is now highlighted + - One of the "On Hold" items is for an application which is not assigned to you, but belongs to a group you are the managing editor for + - The other "On Hold" item is for an application which is assigned to you, in a group for which you are not the managing editor - step: click the "Show all" filter button results: - You are back to the original display, containing both applications and update requests \ No newline at end of file diff --git a/doajtest/testbook/public_site/public_search.yml b/doajtest/testbook/public_site/public_search.yml index 6ff91b38db..166b02c255 100644 --- a/doajtest/testbook/public_site/public_search.yml +++ b/doajtest/testbook/public_site/public_search.yml @@ -186,4 +186,13 @@ tests: - step: click spacebar to check the filter results: - filter is applied - +- title: Export article in RIS format + context: + role: anonymous + steps: + - step: Go to the DOAJ search page at /search/articles + results: + - Only articles are shown in the results + - step: Click on 'Export RIS' of any article + results: + - A RIS file is downloaded diff --git a/doajtest/testdrive/todo_maned_editor_associate.py b/doajtest/testdrive/todo_maned_editor_associate.py index 1fa8ff936e..f6a4d62875 100644 --- a/doajtest/testdrive/todo_maned_editor_associate.py +++ b/doajtest/testdrive/todo_maned_editor_associate.py @@ -51,7 +51,7 @@ def setup(self) -> dict: aapps = build_associate_applications(un) eapps = build_editor_applications(un, eg2) - mapps = build_maned_applications(un, eg1, owner.id, eg3) + mapps = build_maned_applications(un, eg1, owner.id, eg3, eg2) return { @@ -96,7 +96,7 @@ def teardown(self, params) -> dict: return {"status": "success"} -def build_maned_applications(un, eg, owner, eponymous_group): +def build_maned_applications(un, eg, owner, eponymous_group, other_group): w = 7 * 24 * 60 * 60 apps = {} @@ -142,6 +142,22 @@ def build_maned_applications(un, eg, owner, eponymous_group): "title": un + " Maned Pending Application" }] + app = build_application(un + " Maned (Group) On Hold Application", 2 * w, 2 * w, constants.APPLICATION_STATUS_ON_HOLD, + editor_group=eg.name, owner=owner) + app.save() + apps["on_hold"] = [{ + "id": app.id, + "title": un + " Maned (Group) On 
Hold Application" + }] + + app = build_application(un + " Maned (Editor) On Hold Application", 2 * w, 2 * w, constants.APPLICATION_STATUS_ON_HOLD, + editor_group=other_group.name, editor=un, owner=owner) + app.save() + apps["on_hold"].append({ + "id": app.id, + "title": un + " Maned (Editor) On Hold Application" + }) + app = build_application(un + " Maned Low Priority Pending Application", 1 * w, 1 * w, constants.APPLICATION_STATUS_PENDING, editor_group=eponymous_group.name, owner=owner) @@ -154,11 +170,11 @@ def build_maned_applications(un, eg, owner, eponymous_group): lmur = build_application(un + " Last Month Maned Update Request", 5 * w, 5 * w, constants.APPLICATION_STATUS_UPDATE_REQUEST, editor_group=eponymous_group.name, owner=owner, update_request=True) - lmur.save() + # lmur.save() tmur = build_application(un + " This Month Maned Update Request", 0, 0, constants.APPLICATION_STATUS_UPDATE_REQUEST, editor_group=eponymous_group.name, owner=owner, update_request=True) - tmur.save() + # tmur.save() apps["update_request"] = [ { @@ -183,6 +199,7 @@ def build_application(title, lmu_diff, cd_diff, status, editor=None, editor_grou if update_request: ap.application_type = constants.APPLICATION_TYPE_UPDATE_REQUEST + ap.set_current_journal(ap.makeid()) else: ap.remove_current_journal() ap.remove_related_journal() diff --git a/doajtest/unit/api_tests/test_api_crud_returnvalues.py b/doajtest/unit/api_tests/test_api_crud_returnvalues.py index 1d708b4224..b398d0d236 100644 --- a/doajtest/unit/api_tests/test_api_crud_returnvalues.py +++ b/doajtest/unit/api_tests/test_api_crud_returnvalues.py @@ -1,3 +1,4 @@ +from doajtest import helpers from doajtest.helpers import DoajTestCase, with_es from portality import models from doajtest.fixtures import ApplicationFixtureFactory, ArticleFixtureFactory, JournalFixtureFactory @@ -205,9 +206,7 @@ def test_04_article_structure_exceptions(self): @staticmethod def login(app, username, password): - return app.post('/account/login', - data=dict(username=username, password=password), - follow_redirects=True) + return helpers.login(app, username, password) @staticmethod def logout(app): diff --git a/doajtest/unit/application_processors/test_application_processor_emails.py b/doajtest/unit/application_processors/test_application_processor_emails.py index cc106942e6..228d09405d 100644 --- a/doajtest/unit/application_processors/test_application_processor_emails.py +++ b/doajtest/unit/application_processors/test_application_processor_emails.py @@ -354,16 +354,7 @@ def test_01_maned_review_emails(self): re.DOTALL) assert bool(assEd_email_matched), info_stream_contents.strip('\x00') - publisher_template = re.escape(templates.EMAIL_NOTIFICATION) - publisher_to = re.escape(owner.email) - publisher_subject = re.escape('Directory of Open Access Journals - Your application ({}) has been assigned to an editor for review'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - publisher_email_matched = re.search(email_log_regex % (publisher_template, publisher_to, publisher_subject), - info_stream_contents, - re.DOTALL) - - assert bool(publisher_email_matched) - assert len(re.findall(email_count_string, info_stream_contents)) == 2 + assert len(re.findall(email_count_string, info_stream_contents)) == 1 # Clear the stream for the next part self.info_stream.truncate(0) @@ -553,16 +544,7 @@ def test_02_ed_review_emails(self): info_stream_contents, re.DOTALL) assert bool(assEd_email_matched), info_stream_contents.strip('\x00') - - publisher_template = 
templates.EMAIL_NOTIFICATION - publisher_to = re.escape(owner.email) - publisher_subject = re.escape('Your update request ({}) has been assigned to an editor for review'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - publisher_email_matched = re.search(email_log_regex % (publisher_template, publisher_to, publisher_subject), - info_stream_contents, - re.DOTALL) - assert bool(publisher_email_matched) - assert len(re.findall(email_count_string, info_stream_contents)) == 2 + assert len(re.findall(email_count_string, info_stream_contents)) == 1 # Clear the stream for the next part self.info_stream.truncate(0) @@ -665,17 +647,8 @@ def test_03_assoc_ed_review_emails(self): processor.finalise() info_stream_contents = self.info_stream.getvalue() - # We expect one email to be sent here: - # * to the publisher, notifying that an editor is viewing their application - publisher_template = re.escape(templates.EMAIL_NOTIFICATION) - publisher_to = re.escape(owner.email) - publisher_subject = re.escape('Directory of Open Access Journals - Your submission ({}) is under review'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - publisher_email_matched = re.search(email_log_regex % (publisher_template, publisher_to, publisher_subject), - info_stream_contents, - re.DOTALL) - assert bool(publisher_email_matched) - assert len(re.findall(email_count_string, info_stream_contents)) == 1 + # We expect no emails + assert len(re.findall(email_count_string, info_stream_contents)) == 0 # Clear the stream for the next part self.info_stream.truncate(0) @@ -936,17 +909,7 @@ def test_01_maned_review_emails(self): info_stream_contents, re.DOTALL) assert bool(assEd_email_matched), info_stream_contents.strip('\x00') - - publisher_template = templates.EMAIL_NOTIFICATION - publisher_to = re.escape(owner.email) - publisher_subject = re.escape('Your update request ({}) has been assigned to an editor for review'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - publisher_email_matched = re.search(email_log_regex % (publisher_template, publisher_to, publisher_subject), - info_stream_contents, - re.DOTALL) - - assert bool(publisher_email_matched) - assert len(re.findall(email_count_string, info_stream_contents)) == 2 + assert len(re.findall(email_count_string, info_stream_contents)) == 1 # Clear the stream for the next part self.info_stream.truncate(0) @@ -1125,16 +1088,7 @@ def test_02_ed_review_emails(self): info_stream_contents, re.DOTALL) assert bool(assEd_email_matched), info_stream_contents.strip('\x00') - - publisher_template = templates.EMAIL_NOTIFICATION - publisher_to = re.escape(owner.email) - publisher_subject = re.escape('Your update request ({}) has been assigned to an editor for review'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - publisher_email_matched = re.search(email_log_regex % (publisher_template, publisher_to, publisher_subject), - info_stream_contents, - re.DOTALL) - assert bool(publisher_email_matched) - assert len(re.findall(email_count_string, info_stream_contents)) == 2 + assert len(re.findall(email_count_string, info_stream_contents)) == 1 # Clear the stream for the next part self.info_stream.truncate(0) @@ -1240,17 +1194,8 @@ def test_03_assoc_ed_review_emails(self): processor.finalise() info_stream_contents = self.info_stream.getvalue() - # We expect one email to be sent here: - # * to the publisher, notifying that an editor is viewing their application - publisher_template = 
re.escape(templates.EMAIL_NOTIFICATION) - publisher_to = re.escape(owner.email) - publisher_subject = re.escape('Your submission ({}) is under review'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - publisher_email_matched = re.search(email_log_regex % (publisher_template, publisher_to, publisher_subject), - info_stream_contents, - re.DOTALL) - assert bool(publisher_email_matched) - assert len(re.findall(email_count_string, info_stream_contents)) == 1 + # We expect no email to be sent + assert len(re.findall(email_count_string, info_stream_contents)) == 0 # Clear the stream for the next part self.info_stream.truncate(0) @@ -1328,27 +1273,8 @@ def test_01_maned_review_emails(self): # check the associate was changed assert processor.target.editor == "associate_3" - # We expect 2 emails to be sent: - # * to the editor of the assigned group, - # * to the AssEd who's been assigned, - editor_template = re.escape(templates.EMAIL_NOTIFICATION) - editor_to = re.escape('eddie@example.com') - editor_subject = re.escape('Directory of Open Access Journals - New journal ({}) assigned to your group'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - editor_email_matched = re.search(email_log_regex % (editor_template, editor_to, editor_subject), - info_stream_contents, - re.DOTALL) - assert bool(editor_email_matched) - - assEd_template = re.escape(templates.EMAIL_NOTIFICATION) - assEd_to = re.escape(models.Account.pull('associate_3').email) - assEd_subject = re.escape('Directory of Open Access Journals - New journal ({}) assigned to you'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - assEd_email_matched = re.search(email_log_regex % (assEd_template, assEd_to, assEd_subject), - info_stream_contents, - re.DOTALL) - assert bool(assEd_email_matched), info_stream_contents.strip('\x00') - assert len(re.findall(email_count_string, info_stream_contents)) == 2 + # We expect no emails to be sent + assert len(re.findall(email_count_string, info_stream_contents)) == 0 ctx.pop() def test_02_ed_review_emails(self): @@ -1369,16 +1295,7 @@ def test_02_ed_review_emails(self): # check the associate was changed assert processor.target.editor == "associate_2" - # We expect 1 email to be sent: - # * to the AssEd who's been assigned - assEd_template = re.escape(templates.EMAIL_NOTIFICATION) - assEd_to = re.escape(models.Account.pull('associate_2').email) - assEd_subject = re.escape('Directory of Open Access Journals - New journal ({}) assigned to you'.format(', '.join(issn for issn in processor.source.bibjson().issns()))) - - assEd_email_matched = re.search(email_log_regex % (assEd_template, assEd_to, assEd_subject), - info_stream_contents, - re.DOTALL) - assert bool(assEd_email_matched), info_stream_contents.strip('\x00') - assert len(re.findall(email_count_string, info_stream_contents)) == 1 + # We no email to be sent + assert len(re.findall(email_count_string, info_stream_contents)) == 0 ctx.pop() diff --git a/doajtest/unit/application_processors/test_maned_journal_review.py b/doajtest/unit/application_processors/test_maned_journal_review.py index 07badcbabe..b62fbb146d 100644 --- a/doajtest/unit/application_processors/test_maned_journal_review.py +++ b/doajtest/unit/application_processors/test_maned_journal_review.py @@ -148,7 +148,7 @@ def test_04_maned_review_doaj_seal(self): ) # set the seal to False using the form - fc.form.doaj_seal.data = False + fc.form.doaj_seal.data = [] # run the crosswalk, don't test it at all in this test 
fc.form2target() @@ -162,7 +162,7 @@ def test_04_maned_review_doaj_seal(self): fc.source.set_seal(True) fc.source2form() - assert fc.form.doaj_seal.data is True + assert 'y' in fc.form.doaj_seal.data def test_05_maned_review_continuations(self): # construct it from form data (with a known source) diff --git a/doajtest/unit/test_bll_todo_top_todo_maned.py b/doajtest/unit/test_bll_todo_top_todo_maned.py index 5322a8c9ae..1c5ae4457d 100644 --- a/doajtest/unit/test_bll_todo_top_todo_maned.py +++ b/doajtest/unit/test_bll_todo_top_todo_maned.py @@ -41,7 +41,8 @@ def test_top_todo(self, name, kwargs): "todo_maned_ready", "todo_maned_completed", "todo_maned_assign_pending", - "todo_maned_new_update_request" + "todo_maned_new_update_request", + "todo_maned_on_hold" ] category_args = { @@ -100,6 +101,9 @@ def assign_pending(ap): self.build_application("maned_update_request", 5 * w, 5 * w, constants.APPLICATION_STATUS_UPDATE_REQUEST, apps, update_request=True) + # an application that was modifed recently into the ready status (todo_maned_completed) + self.build_application("maned_on_hold", 2 * w, 2 * w, constants.APPLICATION_STATUS_ON_HOLD, apps) + # Applications that should never be reported ############################################ diff --git a/doajtest/unit/test_crosswalks_article_ris.py b/doajtest/unit/test_crosswalks_article_ris.py new file mode 100644 index 0000000000..760d819e1c --- /dev/null +++ b/doajtest/unit/test_crosswalks_article_ris.py @@ -0,0 +1,43 @@ +import unittest + +from doajtest.fixtures import ArticleFixtureFactory +from portality import models +from portality.crosswalks.article_ris import ArticleRisXWalk + + +class TestArticleRisXWalk(unittest.TestCase): + def test_article2ris(self): + article = ArticleFixtureFactory.make_article_source() + article = models.Article(**article) + article.bibjson().abstract = "abstract" + ris = ArticleRisXWalk.article2ris(article) + assert ris.type == 'JOUR' + assert ris['T1'] == [article.data['bibjson']['title']] + assert ris.to_text().split() == """ +TY - JOUR +T1 - Article Title +AU - The Author +PY - 1991 +JF - The Title +PB - The Publisher +VL - 1 +IS - 99 +SP - 3 +EP - 21 +UR - http://www.example.com/article +AB - abstract +KW - word +KW - key +DO - 10.0000/SOME.IDENTIFIER +LA - EN +LA - FR +ER - + """.split() + + def test_article2ris__only_title(self): + ris = ArticleRisXWalk.article2ris({"bibjson": {"title": "Article Title"}}) + assert ris.to_text().split() == """ +TY - JOUR +T1 - Article Title +ER - + """.split() diff --git a/doajtest/unit/test_models.py b/doajtest/unit/test_models.py index af07a92859..d1bae191c1 100644 --- a/doajtest/unit/test_models.py +++ b/doajtest/unit/test_models.py @@ -1722,6 +1722,61 @@ def test_40_autocheck_retrieves(self): ap2 = models.Autocheck.for_journal("9876") assert ap2.journal == "9876" + def test_41_article_tombstone(self): + t = models.ArticleTombstone() + t.set_id("1234") + t.bibjson().add_subject("LCC", "Medicine", "KM22") + t.set_in_doaj(True) # should have no effect + + t.save(blocking=True) + + t2 = models.ArticleTombstone.pull("1234") + assert t2.id == "1234" + assert t2.is_in_doaj() is False + assert t2.last_updated is not None + assert t2.bibjson().subjects()[0].get("scheme") == "LCC" + assert t2.bibjson().subjects()[0].get("term") == "Medicine" + assert t2.bibjson().subjects()[0].get("code") == "KM22" + + def test_42_make_article_tombstone(self): + a = models.Article(**ArticleFixtureFactory.make_article_source(in_doaj=True)) + a.set_id(a.makeid()) + + t = a._tombstone() + assert t.id == a.id 
+ assert t.bibjson().subjects() == a.bibjson().subjects() + assert t.is_in_doaj() is False + + a = models.Article(**ArticleFixtureFactory.make_article_source(in_doaj=True)) + a.set_id(a.makeid()) + a.delete() + time.sleep(1) + + stone = models.ArticleTombstone.pull(a.id) + assert stone is not None + + a = models.Article(**ArticleFixtureFactory.make_article_source(in_doaj=True)) + a.set_id(a.makeid()) + a.save(blocking=True) + + query = { + "query": { + "bool": { + "must": [ + {"term": {"id.exact": a.id}} + ] + } + } + } + models.Article.delete_selected(query) + time.sleep(1) + + stone = models.ArticleTombstone.pull(a.id) + assert stone is not None + + + + class TestAccount(DoajTestCase): def test_get_name_safe(self): diff --git a/doajtest/unit/test_oaipmh.py b/doajtest/unit/test_oaipmh.py index 0a6540eece..f74569ad6c 100644 --- a/doajtest/unit/test_oaipmh.py +++ b/doajtest/unit/test_oaipmh.py @@ -16,11 +16,17 @@ from portality.lib.dates import FMT_DATE_STD from portality.view.oaipmh import ResumptionTokenException, decode_resumption_token +from doajtest.helpers import with_es + class TestClient(DoajTestCase): @classmethod def setUpClass(cls): app.testing = True + + # Preserve default value of OAI record page size + cls.DEFAULT_OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE = app.config.get("OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE", 25) + super(TestClient, cls).setUpClass() def setUp(self): @@ -30,7 +36,11 @@ def setUp(self): self.oai_ns = {'oai': 'http://www.openarchives.org/OAI/2.0/', 'oai_dc': 'http://www.openarchives.org/OAI/2.0/oai_dc/', 'dc': 'http://purl.org/dc/elements/1.1/', - 'xsi' : 'http://www.w3.org/2001/XMLSchema-instance'} + 'xsi': 'http://www.w3.org/2001/XMLSchema-instance'} + + def tearDown(self): + app.config['OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE'] = self.DEFAULT_OAIPMH_LIST_IDENTIFIERS_PAGE_SIZE + super(TestClient, self).tearDown() def test_01_oai_ListMetadataFormats(self): """ Check we get the correct response from the OAI endpoint ListMetdataFormats request""" @@ -40,10 +50,11 @@ def test_01_oai_ListMetadataFormats(self): assert resp.status_code == 200 t = etree.fromstring(resp.data) - assert t.xpath('/oai:OAI-PMH/oai:ListMetadataFormats/oai:metadataFormat/oai:metadataPrefix', namespaces=self.oai_ns)[0].text == 'oai_dc' + assert t.xpath('/oai:OAI-PMH/oai:ListMetadataFormats/oai:metadataFormat/oai:metadataPrefix', + namespaces=self.oai_ns)[0].text == 'oai_dc' def test_02_oai_journals(self): - """test if the OAI-PMH journal feed returns records and only displays journals accepted in DOAJ""" + """test if the OAI-PMH journal feed returns records and only displays journals accepted in DOAJ, marking withdrawn ones as deleted""" journal_sources = JournalFixtureFactory.make_many_journal_sources(2, in_doaj=True) j_public = models.Journal(**journal_sources[0]) j_public.save(blocking=True) @@ -61,13 +72,28 @@ def test_02_oai_journals(self): t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns) - # Check we only have one journal returned - assert len(records[0].xpath('//oai:record', namespaces=self.oai_ns)) == 1 + # Check we only have two journals returned + assert len(records[0].xpath('//oai:record', namespaces=self.oai_ns)) == 2 + + seen_deleted = False + seen_public = False + records = records[0].getchildren() + for r in records: + header = r.xpath('oai:header', namespaces=self.oai_ns)[0] + status = header.get("status") + if status == "deleted": + seen_deleted = True + else: + # Check we have the correct journal + seen_public = True + assert 
r.xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title + + assert seen_deleted + assert seen_public - # Check we have the correct journal - assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title - - resp = t_client.get(url_for('oaipmh.oaipmh', verb='GetRecord', metadataPrefix='oai_dc') + '&identifier={0}'.format(public_id)) + resp = t_client.get( + url_for('oaipmh.oaipmh', verb='GetRecord', metadataPrefix='oai_dc') + '&identifier={0}'.format( + public_id)) assert resp.status_code == 200 t = etree.fromstring(resp.data) @@ -109,7 +135,7 @@ def test_03_oai_resumption_token(self): with self.app_test.test_client() as t_client: resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', metadataPrefix='oai_dc')) t = etree.fromstring(resp.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '5' assert rt.get('cursor') == '2' @@ -117,7 +143,7 @@ def test_03_oai_resumption_token(self): # Get the next result resp2 = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', resumptionToken=rt.text)) t = etree.fromstring(resp2.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt2 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt2.get('completeListSize') == '5' assert rt2.get('cursor') == '4' @@ -125,17 +151,18 @@ def test_03_oai_resumption_token(self): # And the final result - check we get an empty resumptionToken resp3 = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers', resumptionToken=rt2.text)) t = etree.fromstring(resp3.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt3 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt3.get('completeListSize') == '5' assert rt3.get('cursor') == '5' assert rt3.text is None # We should get an error if we request again with an empty resumptionToken - resp4 = t_client.get(url_for('oaipmh.oaipmh', verb='ListIdentifiers') + '&resumptionToken={0}'.format(rt3.text)) - assert resp4.status_code == 200 # fixme: should this be a real error code? + resp4 = t_client.get( + url_for('oaipmh.oaipmh', verb='ListIdentifiers') + '&resumptionToken={0}'.format(rt3.text)) + assert resp4.status_code == 200 # fixme: should this be a real error code? 
t = etree.fromstring(resp4.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) err = t.xpath('//oai:error', namespaces=self.oai_ns)[0] assert 'the resumptionToken argument is invalid or expired' in err.text @@ -157,9 +184,11 @@ def test_04_oai_changing_index(self): yesterday = (dates.now() - timedelta(days=1)).strftime(FMT_DATE_STD) with self.app_test.test_request_context(): with self.app_test.test_client() as t_client: - resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}'.format(yesterday)) + resp = t_client.get( + url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}'.format( + yesterday)) t = etree.fromstring(resp.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '3' assert rt.get('cursor') == '2' @@ -173,15 +202,17 @@ def test_04_oai_changing_index(self): resp2 = t_client.get('/oai?verb=ListRecords&resumptionToken={0}'.format(rt.text)) resp2 = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', resumptionToken=rt.text)) t = etree.fromstring(resp2.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt2 = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt2.get('completeListSize') == '3' assert rt2.get('cursor') == '3' # Start a new request - we should see the new journal - resp3 = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}'.format(yesterday)) + resp3 = t_client.get( + url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}'.format( + yesterday)) t = etree.fromstring(resp3.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '4' @@ -213,9 +244,11 @@ def test_05_date_ranges(self): with self.app_test.test_request_context(): with self.app_test.test_client() as t_client: # Request OAI journals since yesterday (looking for today's results only) - resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}'.format(yesterday.strftime(FMT_DATE_STD))) + resp = t_client.get( + url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}'.format( + yesterday.strftime(FMT_DATE_STD))) t = etree.fromstring(resp.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '2' assert rt.get('cursor') == '1' @@ -224,10 +257,11 @@ def test_05_date_ranges(self): assert title.text in [journals[2]['bibjson']['title'], journals[3]['bibjson']['title']] # Request OAI journals from 3 days ago to yesterday (expecting the 2 days ago results) - resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc') + '&from={0}&until={1}'.format( + resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', + metadataPrefix='oai_dc') + '&from={0}&until={1}'.format( two_days_before_yesterday.strftime(FMT_DATE_STD), yesterday.strftime(FMT_DATE_STD))) t = etree.fromstring(resp.data) - #print etree.tostring(t, pretty_print=True) + # print etree.tostring(t, pretty_print=True) rt = t.xpath('//oai:resumptionToken', 
namespaces=self.oai_ns)[0] assert rt.get('completeListSize') == '2' assert rt.get('cursor') == '1' @@ -248,7 +282,8 @@ def test_06_identify(self): t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH/oai:Identify', namespaces=self.oai_ns) assert len(records) == 1 - assert records[0].xpath('//oai:repositoryName', namespaces=self.oai_ns)[0].text == 'Directory of Open Access Journals' + assert records[0].xpath('//oai:repositoryName', namespaces=self.oai_ns)[ + 0].text == 'Directory of Open Access Journals' assert records[0].xpath('//oai:adminEmail', namespaces=self.oai_ns)[0].text == 'helpdesk+oai@doaj.org' assert records[0].xpath('//oai:granularity', namespaces=self.oai_ns)[0].text == 'YYYY-MM-DDThh:mm:ssZ' @@ -264,15 +299,17 @@ def test_07_bad_verb(self): assert resp.status_code == 200 t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH', namespaces=self.oai_ns) - assert records[0].xpath('//oai:error', namespaces=self.oai_ns)[0].text == 'Value of the verb argument is not a legal OAI-PMH verb, the verb argument is missing, or the verb argument is repeated.' + assert records[0].xpath('//oai:error', namespaces=self.oai_ns)[ + 0].text == 'Value of the verb argument is not a legal OAI-PMH verb, the verb argument is missing, or the verb argument is repeated.' assert records[0].xpath('//oai:error', namespaces=self.oai_ns)[0].get("code") == 'badVerb' - #invalid verb + # invalid verb resp = t_client.get(url_for('oaipmh.oaipmh', verb='InvalidVerb', metadataPrefix='oai_dc')) assert resp.status_code == 200 t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH', namespaces=self.oai_ns) - assert records[0].xpath('//oai:error', namespaces=self.oai_ns)[0].text == 'Value of the verb argument is not a legal OAI-PMH verb, the verb argument is missing, or the verb argument is repeated.' + assert records[0].xpath('//oai:error', namespaces=self.oai_ns)[ + 0].text == 'Value of the verb argument is not a legal OAI-PMH verb, the verb argument is missing, or the verb argument is repeated.' 
assert records[0].xpath('//oai:error', namespaces=self.oai_ns)[0].get("code") == 'badVerb' def test_08_list_sets(self): @@ -297,57 +334,82 @@ def test_08_list_sets(self): # check that we can retrieve a record with one of those sets with self.app_test.test_client() as t_client: - resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', set=set0[0].text)) + resp = t_client.get( + url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', set=set0[0].text)) assert resp.status_code == 200 t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns) results = records[0].getchildren() assert len(results) == 1 - def test_09_article(self): - """test if the OAI-PMH journal feed returns records and only displays journals accepted in DOAJ""" + """test if the OAI-PMH article feed returns records and only displays articles accepted in DOAJ, showing the others as deleted""" article_source = ArticleFixtureFactory.make_article_source(eissn='1234-1234', pissn='5678-5678,', in_doaj=False) - """test if the OAI-PMH article feed returns records and only displays articles accepted in DOAJ""" a_private = models.Article(**article_source) + a_private.set_id(a_private.makeid()) ba = a_private.bibjson() ba.title = "Private Article" a_private.save(blocking=True) article_source = ArticleFixtureFactory.make_article_source(eissn='4321-4321', pissn='8765-8765,', in_doaj=True) a_public = models.Article(**article_source) + a_public.set_id(a_public.makeid()) ba = a_public.bibjson() ba.title = "Public Article" a_public.save(blocking=True) public_id = a_public.id - time.sleep(1) + stone = models.ArticleTombstone() + stone.set_id(stone.makeid()) + stone.bibjson().add_subject("LCC", "Economic theory. 
Demography", "AB22") + stone.save(blocking=True) + + models.Article.blockall([(a_private.id, a_private.last_updated), (a_public.id, a_public.last_updated)]) + models.ArticleTombstone.blockall([(stone.id, stone.last_updated)]) with self.app_test.test_request_context(): with self.app_test.test_client() as t_client: - resp = t_client.get(url_for('oaipmh.oaipmh', specified='article', verb='ListRecords', metadataPrefix='oai_dc')) + resp = t_client.get( + url_for('oaipmh.oaipmh', specified='article', verb='ListRecords', metadataPrefix='oai_dc')) assert resp.status_code == 200 t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH/oai:ListRecords', namespaces=self.oai_ns) - # Check we only have one journal returned + # Check we only have three articles returned r = records[0].xpath('//oai:record', namespaces=self.oai_ns) - assert len(r) == 1 - - # Check we have the correct journal - title = r[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text - # check orcid_id xwalk - assert str(records[0].xpath('//dc:creator/@id', namespaces=self.oai_ns)[0]) == a_public.bibjson().author[0].get("orcid_id") - assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == a_public.bibjson().title - - resp = t_client.get(url_for('oaipmh.oaipmh', specified='article', verb='GetRecord', metadataPrefix='oai_dc') + '&identifier=abcdefghijk_article') + assert len(r) == 3 + + seen_deleted = 0 + seen_public = False + records = records[0].getchildren() + for r in records: + header = r.xpath('oai:header', namespaces=self.oai_ns)[0] + status = header.get("status") + if status == "deleted": + seen_deleted += 1 + else: + seen_public = True + # Check we have the correct article + title = r[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text + + # check orcid_id xwalk + assert str(records[0].xpath('//dc:creator/@id', namespaces=self.oai_ns)[0]) == \ + a_public.bibjson().author[0].get("orcid_id") + assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[ + 0].text == a_public.bibjson().title + + assert seen_deleted == 2 + assert seen_public + + resp = t_client.get(url_for('oaipmh.oaipmh', specified='article', verb='GetRecord', + metadataPrefix='oai_dc') + '&identifier=' + public_id) assert resp.status_code == 200 t = etree.fromstring(resp.data) records = t.xpath('/oai:OAI-PMH/oai:GetRecord', namespaces=self.oai_ns) - # Check we only have one journal returnedt + # Check we only have one article returned kids = records[0].getchildren() r = records[0].xpath('//oai:record', namespaces=self.oai_ns) assert len(r) == 1 @@ -365,7 +427,8 @@ def test_10_subjects(self): with self.app_test.test_request_context(): # Check whether the journal is found for its specific set: Veterinary Medicine (TENDOlZldGVyaW5hcnkgbWVkaWNpbmU) with self.app_test.test_client() as t_client: - resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', set='TENDOlZldGVyaW5hcnkgbWVkaWNpbmU~')) + resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', + set='TENDOlZldGVyaW5hcnkgbWVkaWNpbmU~')) assert resp.status_code == 200 t = etree.fromstring(resp.data) @@ -377,7 +440,7 @@ def test_10_subjects(self): # Check we have the correct journal assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title - #check we have expected subjects (Veterinary Medicine but not Agriculture) + # check we have expected subjects (Veterinary Medicine but not Agriculture) subjects = records[0].xpath('//dc:subject', namespaces=self.oai_ns) assert len(subjects) 
!= 0 @@ -389,7 +452,8 @@ def test_10_subjects(self): with self.app_test.test_request_context(): # Check whether the journal is found for more general set: Agriculture (TENDOkFncmljdWx0dXJl) with self.app_test.test_client() as t_client: - resp = t_client.get(url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', set='TENDOkFncmljdWx0dXJl~')) + resp = t_client.get( + url_for('oaipmh.oaipmh', verb='ListRecords', metadataPrefix='oai_dc', set='TENDOkFncmljdWx0dXJl~')) assert resp.status_code == 200 t = etree.fromstring(resp.data) @@ -406,7 +470,8 @@ def test_10_subjects(self): # Check we have the correct journal assert records[0].xpath('//dc:title', namespaces=self.oai_ns)[0].text == j_public.bibjson().title - + @with_es(indices=[models.Article.__type__, models.ArticleTombstone.__type__], + warm_mappings=[models.Article.__type__, models.ArticleTombstone.__type__]) def test_11_oai_dc_attr(self): """test if the OAI-PMH article feed returns record with correct attributes in oai_dc element""" article_source = ArticleFixtureFactory.make_article_source(eissn='1234-1234', pissn='5678-5678,', in_doaj=True) @@ -419,7 +484,8 @@ def test_11_oai_dc_attr(self): with self.app_test.test_request_context(): with self.app_test.test_client() as t_client: - resp = t_client.get(url_for('oaipmh.oaipmh', specified='article', verb='ListRecords', metadataPrefix='oai_dc')) + resp = t_client.get( + url_for('oaipmh.oaipmh', specified='article', verb='ListRecords', metadataPrefix='oai_dc')) assert resp.status_code == 200 t = etree.fromstring(resp.data) @@ -445,7 +511,7 @@ def test_11_oai_dc_attr(self): t = etree.fromstring(resp.data) # find metadata element of our record elem = t.xpath('/oai:OAI-PMH/oai:ListRecords/oai:record/oai:metadata', namespaces=self.oai_ns) - #metadata element should have only one child, "dc" with correct nsmap + # metadata element should have only one child, "dc" with correct nsmap oai_dc = elem[0].getchildren() assert len(oai_dc) == 1 assert oai_dc[0].tag == "{%s}" % self.oai_ns["oai_dc"] + "dc" @@ -461,4 +527,4 @@ def test_decode_resumption_token__fail(self): def test_decode_resumption_token(self): params = decode_resumption_token(base64.urlsafe_b64encode(b'{"m":1}').decode('utf-8')) - assert params == {"metadata_prefix": 1} \ No newline at end of file + assert params == {"metadata_prefix": 1} diff --git a/doajtest/unit/test_ris.py b/doajtest/unit/test_ris.py new file mode 100644 index 0000000000..346181c794 --- /dev/null +++ b/doajtest/unit/test_ris.py @@ -0,0 +1,66 @@ +from unittest import TestCase + +from portality.lib.ris import RisEntry + + +class TestRisEntry(TestCase): + + def test_get_set_item(self): + test_value = 'value_a' + entry = RisEntry() + entry['A1'] = test_value + assert entry['A1'] == [test_value] + + def test_append(self): + entry = RisEntry() + entry.append('A1', '1') + entry['A1'].append('2') + assert entry['A1'] == ['1', '2'] + + entry['A1'] = '9' + assert entry['A1'] == ['9'] + + def test_getitem__valid_undefined(self): + entry = RisEntry() + assert entry['A1'] == [] + + def test_setitem__raise_field_not_found(self): + entry = RisEntry() + with self.assertRaises(ValueError): + entry['qoidjqowijdkncoiqw'] = 'value_a' + + def test_getitem__raise_field_not_found(self): + entry = RisEntry() + with self.assertRaises(ValueError): + print(entry['qoidjqowijdkncoiqw']) + + def test_to_text(self): + entry = RisEntry() + entry['A1'] = 'value_a' + entry['A2'] = 'value_b' + entry['TY'] = 'JOUR' + + expected = """ +TY - JOUR +A1 - value_a +A2 - value_b +ER - + """.strip() 
+ ' \n' + + assert entry.to_text() == expected + + def test_from_text(self): + expected = """ + TY - JOUR + A1 - value_a + A2 - value_b + ER - + """.strip() + ' \n' + + entry = RisEntry.from_text(expected) + assert entry.type == 'JOUR' + assert dict(entry.data) == { + 'TY': ['JOUR'], + 'A1': ['value_a'], + 'A2': ['value_b'], + } diff --git a/doajtest/unit/test_view_admin.py b/doajtest/unit/test_view_admin.py new file mode 100644 index 0000000000..84eead5558 --- /dev/null +++ b/doajtest/unit/test_view_admin.py @@ -0,0 +1,37 @@ +import json + +from doajtest import helpers +from doajtest.fixtures import JournalFixtureFactory +from doajtest.fixtures.accounts import create_maned_a +from doajtest.helpers import DoajTestCase +from portality import models +from portality.util import url_for + + +class TestViewAdmin(DoajTestCase): + + def setUp(self): + super().setUp() + self.acc = create_maned_a(save=True) + + def test_journal_article_info(self): + journal = models.Journal( + **JournalFixtureFactory.make_journal_source() + ) + journal.save(blocking=True) + models.Journal.refresh() + + with self.app_test.test_client() as client: + resp = helpers.login(client, self.acc.email, 'password') + assert resp.status_code == 200 + + resp = client.get(url_for("admin.journal_article_info", journal_id=journal.id)) + assert resp.status_code == 200 + assert json.loads(resp.data) == {'n_articles': 0} + + def test_journal_article_info__not_found(self): + with self.app_test.test_client() as client: + helpers.login(client, self.acc.email, 'password') + + resp = client.get(url_for("admin.journal_article_info", journal_id='aksjdlaksjdlkajsdlkajsdlk')) + assert resp.status_code == 404 diff --git a/doajtest/unit/test_view_doajservices.py b/doajtest/unit/test_view_doajservices.py new file mode 100644 index 0000000000..30e41ba17c --- /dev/null +++ b/doajtest/unit/test_view_doajservices.py @@ -0,0 +1,28 @@ +from doajtest.fixtures import ArticleFixtureFactory +from doajtest.helpers import DoajTestCase +from portality.crosswalks.article_ris import ArticleRisXWalk +from portality.models import Article +from portality.util import url_for + + +class TestDoajservices(DoajTestCase): + + def test_export_article_ris(self): + article = Article(**ArticleFixtureFactory.make_article_source()) + article.save(blocking=True) + Article.refresh() + + ris = ArticleRisXWalk.article2ris(article).to_text() + + with self.app_test.test_client() as t_client: + url = url_for('doajservices.export_article_ris', article_id=article.id, fmt='ris') + response = t_client.get(url) + assert response.status_code == 200 + assert response.get_data(as_text=True) == ris + + def test_export_article_ris__not_found(self): + with self.app_test.test_client() as t_client: + url = url_for('doajservices.export_article_ris', + article_id='article_id_that_does_not_exist', fmt='ris') + response = t_client.get(url) + assert response.status_code == 404 diff --git a/doajtest/unit/test_view_publisher.py b/doajtest/unit/test_view_publisher.py new file mode 100644 index 0000000000..fbdf770549 --- /dev/null +++ b/doajtest/unit/test_view_publisher.py @@ -0,0 +1,21 @@ +from doajtest import helpers +from doajtest.helpers import DoajTestCase +from portality import models, constants +from portality.util import url_for + + +class TestViewPublisher(DoajTestCase): + + def test_delete_application__no_such_object(self): + pwd = 'password' + un = 'publisher_a' + acc = models.Account.make_account(un + "@example.com", un, "Publisher " + un, [constants.ROLE_PUBLISHER]) + acc.set_password(pwd) + 
acc.save(blocking=True) + + with self.app_test.test_client() as t_client: + resp = helpers.login(t_client, acc.email, pwd) + assert resp.status_code == 200 + + resp = t_client.get(url_for("publisher.delete_application", application_id='no_such_id')) + assert resp.status_code == 404 diff --git a/docs/dictionary.md b/docs/dictionary.md index c77bedf643..fcbee273d9 100644 --- a/docs/dictionary.md +++ b/docs/dictionary.md @@ -1,11 +1,11 @@ -| Short | Description | -|---------|------------------------------| -| bgjob | background job | -| noti | notification | -| noqa | NO-QA (NO Quality Assurance) | -| inst | instance | -| fmt | format | -| exparam | extra parameter | -| maned | Managing Editor | -| gsheet | Google Sheet | -| svc | service | \ No newline at end of file +| Short | Description | +|----------|------------------------------| +| bgjob | background job | +| noti | notification | +| noqa | NO-QA (NO Quality Assurance) | +| inst | instance | +| fmt | format | +| exparam | extra parameter | +| maned | Managing Editor | +| gsheet | Google Sheet | +| svc,serv | service | \ No newline at end of file diff --git a/portality/bll/services/events.py b/portality/bll/services/events.py index aa7a937f1c..6ca7677e82 100644 --- a/portality/bll/services/events.py +++ b/portality/bll/services/events.py @@ -26,32 +26,32 @@ class EventsService(object): - # disabled events - to enable move the event to EVENT_CONSUMENRS array + # disabled events - to enable move the event to EVENT_CONSUMERS array DISABLED_EVENTS = [ - ApplicationPublisherRevisionNotify + ApplicationPublisherAssignedNotify, # https://github.com/DOAJ/doajPM/issues/3974 + ApplicationPublisherInprogressNotify, # https://github.com/DOAJ/doajPM/issues/3974 + ApplicationPublisherRevisionNotify, + JournalEditorGroupAssignedNotify, # https://github.com/DOAJ/doajPM/issues/3974 + JournalAssedAssignedNotify, # https://github.com/DOAJ/doajPM/issues/3974 + UpdateRequestPublisherAssignedNotify, # https://github.com/DOAJ/doajPM/issues/3974 ] EVENT_CONSUMERS = [ - ApplicationPublisherQuickRejectNotify, AccountCreatedEmail, AccountPasswordResetEmail, - ApplicationAssedInprogressNotify, ApplicationAssedAssignedNotify, + ApplicationAssedInprogressNotify, ApplicationEditorCompletedNotify, - ApplicationEditorInProgressNotify, ApplicationEditorGroupAssignedNotify, + ApplicationEditorInProgressNotify, ApplicationManedReadyNotify, - ApplicationPublisherCreatedNotify, - ApplicationPublisherInprogressNotify, ApplicationPublisherAcceptedNotify, - ApplicationPublisherAssignedNotify, + ApplicationPublisherCreatedNotify, + ApplicationPublisherQuickRejectNotify, BGJobFinishedNotify, - JournalAssedAssignedNotify, - JournalEditorGroupAssignedNotify, + JournalDiscontinuingSoonNotify, UpdateRequestPublisherAcceptedNotify, - UpdateRequestPublisherAssignedNotify, UpdateRequestPublisherRejectedNotify, - UpdateRequestPublisherSubmittedNotify, - JournalDiscontinuingSoonNotify, + UpdateRequestPublisherSubmittedNotify ] def __init__(self): diff --git a/portality/bll/services/todo.py b/portality/bll/services/todo.py index fc57f66da7..25a62a2c71 100644 --- a/portality/bll/services/todo.py +++ b/portality/bll/services/todo.py @@ -5,6 +5,7 @@ from portality.lib import dates from datetime import datetime + class TodoService(object): """ ~~Todo:Service->DOAJ:Service~~ @@ -63,8 +64,7 @@ def group_stats(self, group_id): return stats - - def top_todo(self, account, size=25, new_applications=True, update_requests=True): + def top_todo(self, account, size=25, new_applications=True, 
update_requests=True, on_hold=True): """ Returns the top number of todo items for a given user @@ -89,6 +89,8 @@ def top_todo(self, account, size=25, new_applications=True, update_requests=True if update_requests: queries.append(TodoRules.maned_last_month_update_requests(size, maned_of)) queries.append(TodoRules.maned_new_update_requests(size, maned_of)) + if on_hold: + queries.append(TodoRules.maned_on_hold(size, account.id, maned_of)) if new_applications: # editor and associate editor roles only deal with new applications if account.has_role("editor"): @@ -174,7 +176,11 @@ def maned_stalled(cls, size, maned_of): TodoQuery.is_new_application() ], must_nots=[ - TodoQuery.status([constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED]) + TodoQuery.status([ + constants.APPLICATION_STATUS_ACCEPTED, + constants.APPLICATION_STATUS_REJECTED, + constants.APPLICATION_STATUS_ON_HOLD + ]) ], sort=sort_date, size=size @@ -191,7 +197,11 @@ def maned_follow_up_old(cls, size, maned_of): TodoQuery.is_new_application() ], must_nots=[ - TodoQuery.status([constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED]) + TodoQuery.status([ + constants.APPLICATION_STATUS_ACCEPTED, + constants.APPLICATION_STATUS_REJECTED, + constants.APPLICATION_STATUS_ON_HOLD + ]) ], sort=sort_date, size=size @@ -262,7 +272,10 @@ def maned_last_month_update_requests(cls, size, maned_of): TodoQuery.is_update_request() ], must_nots=[ - TodoQuery.status([constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED]) + TodoQuery.status([ + constants.APPLICATION_STATUS_ACCEPTED, + constants.APPLICATION_STATUS_REJECTED + ]) # TodoQuery.exists("admin.editor") ], sort=sort_date, @@ -282,7 +295,10 @@ def maned_new_update_requests(cls, size, maned_of): TodoQuery.is_update_request() ], must_nots=[ - TodoQuery.status([constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED]) + TodoQuery.status([ + constants.APPLICATION_STATUS_ACCEPTED, + constants.APPLICATION_STATUS_REJECTED + ]) # TodoQuery.exists("admin.editor") ], sort=sort_date, @@ -290,6 +306,23 @@ def maned_new_update_requests(cls, size, maned_of): ) return constants.TODO_MANED_NEW_UPDATE_REQUEST, assign_pending, sort_date, -2 + @classmethod + def maned_on_hold(cls, size, account, maned_of): + sort_date = "created_date" + on_holds = TodoQuery( + musts=[ + TodoQuery.is_new_application(), + TodoQuery.status([constants.APPLICATION_STATUS_ON_HOLD]) + ], + ors=[ + TodoQuery.editor_group(maned_of), + TodoQuery.editor(account) + ], + sort=sort_date, + size=size + ) + return constants.TODO_MANED_ON_HOLD, on_holds, sort_date, 0 + @classmethod def editor_stalled(cls, groups, size): sort_date = "created_date" @@ -303,7 +336,8 @@ def editor_stalled(cls, groups, size): TodoQuery.status([ constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED, - constants.APPLICATION_STATUS_READY + constants.APPLICATION_STATUS_READY, + constants.APPLICATION_STATUS_ON_HOLD ]) ], sort=sort_date, @@ -324,7 +358,8 @@ def editor_follow_up_old(cls, groups, size): TodoQuery.status([ constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED, - constants.APPLICATION_STATUS_READY + constants.APPLICATION_STATUS_READY, + constants.APPLICATION_STATUS_ON_HOLD ]) ], sort=sort_date, @@ -377,7 +412,8 @@ def associate_stalled(cls, acc_id, size): constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED, constants.APPLICATION_STATUS_READY, - constants.APPLICATION_STATUS_COMPLETED + 
constants.APPLICATION_STATUS_COMPLETED, + constants.APPLICATION_STATUS_ON_HOLD ]) ], sort=sort_field, @@ -399,7 +435,8 @@ def associate_follow_up_old(cls, acc_id, size): constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED, constants.APPLICATION_STATUS_READY, - constants.APPLICATION_STATUS_COMPLETED + constants.APPLICATION_STATUS_COMPLETED, + constants.APPLICATION_STATUS_ON_HOLD ]) ], sort=sort_field, @@ -434,7 +471,8 @@ def associate_all_applications(cls, acc_id, size): constants.APPLICATION_STATUS_ACCEPTED, constants.APPLICATION_STATUS_REJECTED, constants.APPLICATION_STATUS_READY, - constants.APPLICATION_STATUS_COMPLETED + constants.APPLICATION_STATUS_COMPLETED, + constants.APPLICATION_STATUS_ON_HOLD ]) ], sort=sort_field, @@ -454,9 +492,10 @@ class TodoQuery(object): # therefore, we take a created_date sort to mean a date_applied sort cd_sort = {"admin.date_applied": {"order": "asc"}} - def __init__(self, musts=None, must_nots=None, sort="last_manual_update", size=10): + def __init__(self, musts=None, must_nots=None, ors=None, sort="last_manual_update", size=10): self._musts = [] if musts is None else musts self._must_nots = [] if must_nots is None else must_nots + self._ors = [] if ors is None else ors self._sort = sort self._size = size @@ -464,16 +503,22 @@ def query(self): sort = self.lmu_sort if self._sort == "last_manual_update" else self.cd_sort q = { "query" : { - "bool" : { - "must": self._musts, - "must_not": self._must_nots - } + "bool" : {} }, "sort" : [ sort ], "size" : self._size } + + if len(self._musts) > 0: + q["query"]["bool"]["must"] = self._musts + if len(self._must_nots) > 0: + q["query"]["bool"]["must_not"] = self._must_nots + if len(self._ors) > 0: + q["query"]["bool"]["should"] = self._ors + q["query"]["bool"]["minimum_should_match"] = 1 + return q @classmethod diff --git a/portality/constants.py b/portality/constants.py index 4372aa503c..974ab3f49c 100644 --- a/portality/constants.py +++ b/portality/constants.py @@ -50,6 +50,7 @@ TODO_MANED_ASSIGN_PENDING = "todo_maned_assign_pending" TODO_MANED_LAST_MONTH_UPDATE_REQUEST = "todo_maned_last_month_update_request" TODO_MANED_NEW_UPDATE_REQUEST = "todo_maned_new_update_request" +TODO_MANED_ON_HOLD = "todo_maned_on_hold" TODO_EDITOR_STALLED = "todo_editor_stalled" TODO_EDITOR_FOLLOW_UP_OLD = "todo_editor_follow_up_old" TODO_EDITOR_COMPLETED = "todo_editor_completed" diff --git a/portality/crosswalks/article_ris.py b/portality/crosswalks/article_ris.py new file mode 100644 index 0000000000..82d4b04d26 --- /dev/null +++ b/portality/crosswalks/article_ris.py @@ -0,0 +1,50 @@ +from typing import Union + +from portality import models +from portality.lib import jsonpath_utils +from portality.lib.ris import RisEntry + + +def extra_author_names(article) -> list: + query = '$.bibjson.author[*].name' + values = jsonpath_utils.find_values(query, article) + return sorted(set(values)) + + +RIS_ARTICLE_MAPPING = { + 'T1': '$.bibjson.title', + 'AU': extra_author_names, + 'PY': '$.bibjson.year', + 'JF': '$.bibjson.journal.title', + 'PB': '$.bibjson.journal.publisher', + 'VL': '$.bibjson.journal.volume', + 'IS': '$.bibjson.journal.number', + 'SP': '$.bibjson.start_page', + 'EP': '$.bibjson.end_page', + 'UR': '$.bibjson.link[*].url', + 'AB': '$.bibjson.abstract', + 'KW': '$.bibjson.keywords[*]', + 'DO': '$.bibjson.identifier[?(@.type == "doi")].id', + 'SN': '$.bibjson.journal.issns[*]', + 'LA': '$.bibjson.journal.language[*]', +} + + +class ArticleRisXWalk: + + @classmethod + def article2ris(cls, article: 
Union[models.Article, dict]) -> RisEntry: + if isinstance(article, models.Article): + article = article.data + + entry = RisEntry(type_of_reference='JOUR') + for tag, query in RIS_ARTICLE_MAPPING.items(): + if callable(query): + values = query(article) + else: + values = jsonpath_utils.find_values(query, article) + + for v in values: + entry[tag].append(v) + + return entry diff --git a/portality/crosswalks/journal_form.py b/portality/crosswalks/journal_form.py index ecdc20c61a..91ad74a138 100644 --- a/portality/crosswalks/journal_form.py +++ b/portality/crosswalks/journal_form.py @@ -289,7 +289,7 @@ def form2admin(cls, form, obj): obj.set_editor(editor) if getattr(form, "doaj_seal", None): - obj.set_seal(form.doaj_seal.data) + obj.set_seal('y' in form.doaj_seal.data) @classmethod def bibjson2form(cls, bibjson, forminfo): @@ -457,7 +457,7 @@ def admin2form(cls, obj, forminfo): if obj.editor is not None: forminfo['editor'] = obj.editor - forminfo['doaj_seal'] = obj.has_seal() + forminfo['doaj_seal'] = ['y'] if obj.has_seal() else [] class JournalFormXWalk(JournalGenericXWalk): diff --git a/portality/crosswalks/oaipmh.py b/portality/crosswalks/oaipmh.py index 26bab8229c..dfc4efff9d 100644 --- a/portality/crosswalks/oaipmh.py +++ b/portality/crosswalks/oaipmh.py @@ -99,6 +99,9 @@ class OAI_DC_Article(OAI_DC): ~~->OAIDC:Crosswalk~~ """ def crosswalk(self, record): + if not record.is_in_doaj(): + return None + bibjson = record.bibjson() metadata = etree.Element(self.PMH + "metadata") @@ -171,6 +174,9 @@ def header(self, record): bibjson = record.bibjson() head = etree.Element(self.PMH + "header", nsmap=self.NSMAP) + if not record.is_in_doaj(): + head.set("status", "deleted") + identifier = etree.SubElement(head, self.PMH + "identifier") set_text(identifier, make_oai_identifier(record.id, "article")) @@ -231,6 +237,9 @@ class OAI_DC_Journal(OAI_DC): ~~->OAIDC:Crosswalk~~ """ def crosswalk(self, record): + if not record.is_in_doaj(): + return None + bibjson = record.bibjson() metadata = etree.Element(self.PMH + "metadata") @@ -293,6 +302,9 @@ def header(self, record): bibjson = record.bibjson() head = etree.Element(self.PMH + "header", nsmap=self.NSMAP) + if not record.is_in_doaj(): + head.set("status", "deleted") + identifier = etree.SubElement(head, self.PMH + "identifier") set_text(identifier, make_oai_identifier(record.id, "journal")) @@ -315,6 +327,9 @@ class OAI_DOAJ_Article(OAI_Crosswalk): NSMAP.update({"oai_doaj": OAI_DOAJ_NAMESPACE}) def crosswalk(self, record): + if not record.is_in_doaj(): + return None + bibjson = record.bibjson() metadata = etree.Element(self.PMH + "metadata") @@ -459,6 +474,9 @@ def header(self, record): bibjson = record.bibjson() head = etree.Element(self.PMH + "header", nsmap=self.NSMAP) + if not record.is_in_doaj(): + head.set("status", "deleted") + identifier = etree.SubElement(head, self.PMH + "identifier") set_text(identifier, make_oai_identifier(record.id, "article")) @@ -472,10 +490,12 @@ def header(self, record): CROSSWALKS = { "oai_dc": { "article": OAI_DC_Article, - "journal": OAI_DC_Journal + "journal": OAI_DC_Journal, + "article,article_tombstone": OAI_DC_Article }, 'oai_doaj': { - "article": OAI_DOAJ_Article + "article": OAI_DOAJ_Article, + "article,article_tombstone": OAI_DOAJ_Article } } diff --git a/portality/forms/application_forms.py b/portality/forms/application_forms.py index 8d4adcd85d..7f923539a0 100644 --- a/portality/forms/application_forms.py +++ b/portality/forms/application_forms.py @@ -1693,8 +1693,13 @@ class FieldDefinitions: # ~~->$ 
DOAJSeal:FormField~~ DOAJ_SEAL = { "name": "doaj_seal", - "label": "The journal may have fulfilled all the criteria for the Seal. Award the Seal?", + "label": "The journal may have fulfilled all the criteria for the Seal.", + "multiple": True, "input": "checkbox", + "options": [ + {"display": "Award the Seal?", "value": 'y'}, + ], + "validate": [ { "only_if": { @@ -1716,7 +1721,10 @@ class FieldDefinitions: "the journal must use a persistent identifier" } } - ] + ], + "widgets": [ + "article_info", + ], } # FIXME: this probably shouldn't be in the admin form fieldsets, rather its own separate form @@ -1977,7 +1985,7 @@ class FieldDefinitions: "entry_template": templates.AF_ENTRY_GOUP, "widgets": [ {"infinite_repeat": {"enable_on_repeat": ["textarea"]}}, - "note_modal" + "note_modal", ], "merge_disabled": "merge_disabled_notes", } @@ -3174,7 +3182,8 @@ def wtforms(field, settings): "trim_whitespace": "formulaic.widgets.newTrimWhitespace", # ~~-> TrimWhitespace:FormWidget~~ "note_modal": "formulaic.widgets.newNoteModal", # ~~-> NoteModal:FormWidget~~, "autocheck": "formulaic.widgets.newAutocheck", # ~~-> Autocheck:FormWidget~~ - "issn_link": "formulaic.widgets.newIssnLink" # ~~-> IssnLink:FormWidget~~, + "issn_link" : "formulaic.widgets.newIssnLink", # ~~-> IssnLink:FormWidget~~, + "article_info": "formulaic.widgets.newArticleInfo", # ~~-> ArticleInfo:FormWidget~~ } @@ -3382,10 +3391,8 @@ def wtform(formulaic_context, field, wtfargs): HiddenFieldBuilder ] -ApplicationFormFactory = Formulaic(APPLICATION_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, - javascript_functions=JAVASCRIPT_FUNCTIONS) -JournalFormFactory = Formulaic(JOURNAL_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, - javascript_functions=JAVASCRIPT_FUNCTIONS) +ApplicationFormFactory = Formulaic(APPLICATION_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, javascript_functions=JAVASCRIPT_FUNCTIONS) +JournalFormFactory = Formulaic(JOURNAL_FORMS, WTFORMS_BUILDERS, function_map=PYTHON_FUNCTIONS, javascript_functions=JAVASCRIPT_FUNCTIONS) if __name__ == "__main__": """ diff --git a/portality/forms/application_processors.py b/portality/forms/application_processors.py index a7f0271448..6afe654914 100644 --- a/portality/forms/application_processors.py +++ b/portality/forms/application_processors.py @@ -456,11 +456,6 @@ def finalise(self, account, save_target=True, email_alert=True): # self.add_alert("Problem sending email to associate editor - probably address is invalid") # app.logger.exception("Email to associate failed.") - # If this is the first time this application has been assigned to an editor, notify the publisher. - old_ed = self.source.editor - if (old_ed is None or old_ed == '') and self.target.editor is not None: - self.add_alert(Messages.SENT_PUBLISHER_ASSIGNED_EMAIL) - # Inform editor and associate editor if this application was 'ready' or 'completed', but has been changed to 'in progress' if (self.source.application_status == constants.APPLICATION_STATUS_READY or self.source.application_status == constants.APPLICATION_STATUS_COMPLETED) and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS: # First, the editor @@ -586,11 +581,6 @@ def finalise(self): # self.add_alert("Problem sending email to associate editor - probably address is invalid") # app.logger.exception('Error sending associate assigned email') - # If this is the first time this application has been assigned to an editor, notify the publisher. 
- old_ed = self.source.editor - if (old_ed is None or old_ed == '') and self.target.editor is not None: - self.add_alert(Messages.SENT_PUBLISHER_ASSIGNED_EMAIL) - # Email the assigned associate if the application was reverted from 'completed' to 'in progress' (failed review) if self.source.application_status == constants.APPLICATION_STATUS_COMPLETED and self.target.application_status == constants.APPLICATION_STATUS_IN_PROGRESS: if self.target.editor: diff --git a/portality/lib/jsonpath_utils.py b/portality/lib/jsonpath_utils.py new file mode 100644 index 0000000000..7201c9bfe8 --- /dev/null +++ b/portality/lib/jsonpath_utils.py @@ -0,0 +1,7 @@ +from typing import Iterable + +import jsonpath_ng.ext + + +def find_values(query: str, data: dict) -> Iterable: + return (m.value for m in jsonpath_ng.ext.parse(query).find(data)) diff --git a/portality/lib/ris.py b/portality/lib/ris.py new file mode 100644 index 0000000000..36bb83ab99 --- /dev/null +++ b/portality/lib/ris.py @@ -0,0 +1,225 @@ +""" +very simple library for RIS format + +file format references: https://en.wikipedia.org/wiki/RIS_(file_format) +""" +import collections +import logging +from collections import OrderedDict +from typing import Dict, Optional + +log = logging.getLogger(__name__) + +RTAG_TYPE = 'TY' +RTAG_END = 'ER' +RIS_TAGS = [ + 'A1', # primary_author + 'A2', # secondary_author + 'A3', # tertiary_author + 'A4', # quaternary_author + 'A5', # quinary_author_compiler + 'A6', # website_editor + 'AB', # abstract_synopsis + 'AD', # author_editor_address + 'AN', # accession_number + 'AU', # author_editor_translator + 'AV', # availability_location + 'BT', # primary_secondary_title + 'C1', # custom1 + 'C2', # custom2 + 'C3', # custom3 + 'C4', # custom4 + 'C5', # custom5 + 'C6', # custom6 + 'C7', # custom7 + 'C8', # custom8 + 'CA', # caption + 'CL', # classification + 'CN', # call_number + 'CP', # city_place_publication + 'CR', # cited_references + 'CT', # caption_primary_title + 'CY', # place_published + 'DA', # date + 'DB', # name_of_database + 'DI', # digital_object_identifier + 'DO', # digital_object_identifier2 + 'DOI', # digital_object_identifier3 + 'DP', # database_provider + 'DS', # data_source + 'ED', # secondary_author + 'EP', # end_page + 'ET', # edition + 'FD', # free_form_publication_data + 'H1', # location_library + 'H2', # location_call_number + 'ID', # reference_identifier + 'IP', # identifying_phrase + 'IS', # number_volumes + 'J1', # journal_abbreviation_1 + 'J2', # alternate_title + 'JA', # journal_standard_abbreviation + 'JF', # journal_full_name + 'JO', # journal_abbreviation + 'K1', # keyword1 + 'KW', # keyword_phrase + 'L1', # file_attachments + 'L2', # url_link + 'L3', # doi_link + 'L4', # figure_image_link + 'LA', # language + 'LB', # label + 'LK', # links + 'LL', # sponsoring_library_location + 'M1', # miscellaneous1 + 'M2', # miscellaneous2 + 'M3', # type_of_work + 'N1', # notes1 + 'N2', # abstract_notes + 'NO', # notes + 'NV', # number_of_volumes + 'OL', # output_language + 'OP', # original_publication + 'PA', # personal_notes + 'PB', # publisher + 'PMCID', # pmcid + 'PMID', # pmid + 'PP', # place_of_publication + 'PY', # publication_year + 'RD', # retrieved_date + 'RI', # reviewed_item + 'RN', # research_notes + 'RP', # reprint_status + 'RT', # reference_type + 'SE', # section + 'SF', # subfile_database + 'SL', # sponsoring_library + 'SN', # issn_isbn + 'SP', # start_pages + 'SR', # source_type + 'ST', # short_title + 'SV', # series_volume + 'T1', # primary_title + 'T2', # secondary_title + 'T3', # 
tertiary_title + 'TA', # translated_author + 'TI', # title + 'TT', # translated_title + RTAG_TYPE, # 'type_of_reference' + 'U1', # user_definable1 + 'U2', # user_definable2 + 'U3', # user_definable3 + 'U4', # user_definable4 + 'U5', # user_definable5 + 'U6', # user_definable6 + 'U7', # user_definable7 + 'U8', # user_definable8 + 'U9', # user_definable9 + 'U10', # user_definable10 + 'U11', # user_definable11 + 'U12', # user_definable12 + 'U13', # user_definable13 + 'U14', # user_definable14 + 'U15', # user_definable15 + 'UR', # web_url + 'VL', # volume + 'VO', # volume_published_standard + 'WP', # date_of_electronic_publication + 'WT', # website_title + 'WV', # website_version + 'Y1', # year_date + 'Y2', # access_date_secondary_date + 'YR', # publication_year_ref +] + + +def find_tag(field_name) -> Optional[str]: + field_name = field_name.upper() + if field_name in RIS_TAGS: + return field_name + raise ValueError(f'Field not found: {field_name}') + + +class RisEntry: + + def __init__(self, type_of_reference: str = None): + self.data: collections.defaultdict[str, list] = collections.defaultdict(list) + if type_of_reference: + self.type = type_of_reference + + def __setitem__(self, field_name, value): + tag = find_tag(field_name) + self.data[tag] = [value] + + def append(self, tag, value) -> list: + tag = find_tag(tag) + self[tag].append(value) + return self[tag] + + def __getitem__(self, field_name) -> list: + tag = find_tag(field_name) + return self.data[tag] + + @property + def type(self): + return self[RTAG_TYPE] and self[RTAG_TYPE][0] + + @type.setter + def type(self, value): + self[RTAG_TYPE] = value + + @classmethod + def from_dict(cls, d: dict): + instance = cls() + for k, v in d.items(): + if isinstance(v, list): + for vv in v: + instance[k].append(vv) + else: + instance[k].append(v) + + return instance + + @classmethod + def from_text(cls, text: str): + def _to_tag_value(line: str): + tag, value = line.split('-', 1) + tag = tag.strip() + value = value.lstrip() + value = value.replace('\\n', '\n') + return tag, value + + text = text.strip() + lines = text.splitlines() + entry = RisEntry() + for line in lines: + tag, val = _to_tag_value(line) + if tag == RTAG_END: + break + entry[tag].append(val) + return entry + + def to_text(self) -> str: + tags = list(self.data.keys()) + if RTAG_TYPE in tags: + tags.remove(RTAG_TYPE) + tags.insert(0, RTAG_TYPE) + + if RTAG_END in tags: + tags.remove(RTAG_END) + + def _to_line(tag, value): + if '\n' in value: + value = value.replace('\n', '\\n') + if value is None: + value = '' + return f'{tag} - {value}\n' + + text = '' + for tag in tags: + values = self.data[tag] + for v in values: + text += _to_line(tag, v) + + text += _to_line(RTAG_END, '') + return text diff --git a/portality/models/__init__.py b/portality/models/__init__.py index 2570929105..21f1f460a1 100644 --- a/portality/models/__init__.py +++ b/portality/models/__init__.py @@ -14,7 +14,7 @@ from portality.models.uploads import FileUpload, ExistsFileQuery, OwnerFileQuery, ValidFileQuery, BulkArticles from portality.models.lock import Lock from portality.models.history import ArticleHistory, JournalHistory -from portality.models.article import Article, ArticleBibJSON, ArticleQuery, ArticleVolumesQuery, DuplicateArticleQuery, NoJournalException +from portality.models.article import Article, ArticleBibJSON, ArticleQuery, ArticleVolumesQuery, DuplicateArticleQuery, NoJournalException, ArticleTombstone from portality.models.oaipmh import OAIPMHRecord, OAIPMHJournal, OAIPMHArticle from 
portality.models.atom import AtomRecord from portality.models.search import JournalArticle, JournalStatsQuery, ArticleStatsQuery diff --git a/portality/models/article.py b/portality/models/article.py index d431bb5ae7..3cb2210e56 100644 --- a/portality/models/article.py +++ b/portality/models/article.py @@ -85,8 +85,8 @@ def find_by_issns(cls, issns): return articles @classmethod - def count_by_issns(cls, issns): - q = ArticleQuery(issns=issns) + def count_by_issns(cls, issns, in_doaj=None): + q = ArticleQuery(issns=issns, in_doaj=in_doaj) return cls.hit_count(q.query()) @classmethod @@ -95,19 +95,26 @@ def delete_by_issns(cls, issns, snapshot=True): cls.delete_selected(query=q.query(), snapshot=snapshot) @classmethod - def delete_selected(cls, query=None, owner=None, snapshot=True): + def delete_selected(cls, query=None, owner=None, snapshot=True, tombstone=True): if owner is not None: from portality.models import Journal issns = Journal.issns_by_owner(owner) q = ArticleQuery(issns=issns) query = q.query() - if snapshot: + if snapshot or tombstone: articles = cls.iterate(query, page_size=1000) for article in articles: - article.snapshot() + if snapshot: + article.snapshot() + if tombstone: + article._tombstone() return cls.delete_by_query(query) + def delete(self): + self._tombstone() + super(Article, self).delete() + def bibjson(self, **kwargs): if "bibjson" not in self.data: self.data["bibjson"] = {} @@ -142,6 +149,18 @@ def snapshot(self): hist.save() return hist.id + def _tombstone(self): + stone = ArticleTombstone() + stone.set_id(self.id) + sbj = stone.bibjson() + + subs = self.bibjson().subjects() + for s in subs: + sbj.add_subject(s.get("scheme"), s.get("term"), s.get("code")) + + stone.save() + return stone + def add_history(self, bibjson, date=None): """Deprecated""" bibjson = bibjson.bibjson if isinstance(bibjson, ArticleBibJSON) else bibjson @@ -565,6 +584,23 @@ def get_owner(self): return owners[0] + +class ArticleTombstone(Article): + __type__ = "article_tombstone" + + def snapshot(self): + return None + + def is_in_doaj(self): + return False + + def prep(self): + self.data['last_updated'] = dates.now_str() + + def save(self, *args, **kwargs): + return super(ArticleTombstone, self).save(*args, **kwargs) + + class ArticleBibJSON(GenericBibJSON): def __init__(self, bibjson=None, **kwargs): @@ -866,9 +902,10 @@ class ArticleQuery(object): _issn_terms = { "terms" : {"index.issn.exact" : [""]} } _volume_term = { "term" : {"bibjson.journal.volume.exact" : ""} } - def __init__(self, issns=None, volume=None): + def __init__(self, issns=None, volume=None, in_doaj=None): self.issns = issns self.volume = volume + self.in_doaj = in_doaj def query(self): q = deepcopy(self.base_query) @@ -883,6 +920,9 @@ def query(self): vq["term"]["bibjson.journal.volume.exact"] = self.volume q["query"]["bool"]["must"].append(vq) + if self.in_doaj is not None: + q["query"]["bool"]["must"].append({"term": {"admin.in_doaj": self.in_doaj}}) + return q class ArticleVolumesQuery(object): diff --git a/portality/models/oaipmh.py b/portality/models/oaipmh.py index 4350cb66e7..e7c050fe68 100644 --- a/portality/models/oaipmh.py +++ b/portality/models/oaipmh.py @@ -1,18 +1,20 @@ from copy import deepcopy -from portality.models import Journal, Article + +from portality.models import Journal, Article, ArticleTombstone from portality import constants + class OAIPMHRecord(object): earliest = { "query": { "bool": { "must": [ - { "term": { "admin.in_doaj": True } } + {"term": {"admin.in_doaj": True}} ] } }, "size": 1, - 
"sort" : [ + "sort": [ {"last_updated": {"order": "asc"}} ] } @@ -21,7 +23,7 @@ class OAIPMHRecord(object): "query": { "bool": { "must": [ - { "term": { "admin.in_doaj": True } } + {"term": {"admin.in_doaj": True}} ] } }, @@ -30,7 +32,7 @@ class OAIPMHRecord(object): "sets": { "terms": { "field": "index.schema_subject.exact", - "order": {"_key" : "asc"}, + "order": {"_key": "asc"}, "size": 100000 } } @@ -41,18 +43,16 @@ class OAIPMHRecord(object): "track_total_hits": True, "query": { "bool": { - "must": [ - { "term": { "admin.in_doaj": True } } - ] + "must": [] } }, "from": 0, "size": 25 } - set_limit = {"term" : { "index.classification.exact" : "" }} - range_limit = { "range" : { "last_updated" : {"gte" : "", "lte" : ""} } } - created_sort = [{"last_updated" : {"order" : "desc"}}, {"id.exact" : "desc"}] + set_limit = {"term": {"index.classification.exact": ""}} + range_limit = {"range": {"last_updated": {"gte": "", "lte": ""}}} + created_sort = [{"last_updated": {"order": "desc"}}, {"id.exact": "desc"}] def earliest_datestamp(self): result = self.query(q=self.earliest) @@ -114,27 +114,23 @@ def list_records(self, from_date=None, until_date=None, oai_set=None, list_size= class OAIPMHArticle(OAIPMHRecord, Article): + __type__ = "article,article_tombstone" + def list_records(self, from_date=None, until_date=None, oai_set=None, list_size=None, start_after=None): total, results = super(OAIPMHArticle, self).list_records(from_date=from_date, until_date=until_date, oai_set=oai_set, list_size=list_size, start_after=start_after) - return total, [Article(**r) for r in results] + return total, [Article(**r) if r.get("es_type") == "article" else ArticleTombstone(**r) for r in results] def pull(self, identifier): - # override the default pull, as we care about whether the item is in_doaj - record = super(OAIPMHArticle, self).pull(identifier) - if record is not None and record.is_in_doaj(): - return record - return None + # override the default pull, as we must check the tombstone record too + article = Article.pull(identifier) + if article is None: + article = ArticleTombstone.pull(identifier) + return article + class OAIPMHJournal(OAIPMHRecord, Journal): def list_records(self, from_date=None, until_date=None, oai_set=None, list_size=None, start_after=None): total, results = super(OAIPMHJournal, self).list_records(from_date=from_date, until_date=until_date, oai_set=oai_set, list_size=list_size, start_after=start_after) return total, [Journal(**r) for r in results] - - def pull(self, identifier): - # override the default pull, as we care about whether the item is in_doaj - record = super(OAIPMHJournal, self).pull(identifier) - if record is not None and record.is_in_doaj(): - return record - return None diff --git a/portality/settings.py b/portality/settings.py index 9d6e313e6b..e6bd2ae57e 100644 --- a/portality/settings.py +++ b/portality/settings.py @@ -9,7 +9,7 @@ # Application Version information # ~~->API:Feature~~ -DOAJ_VERSION = "7.0.0" +DOAJ_VERSION = "7.0.3" API_VERSION = "4.0.0" ###################################### @@ -698,6 +698,7 @@ MAPPINGS['provenance'] = MAPPINGS["account"] #~~->Provenance:Model~~ MAPPINGS['preserve'] = MAPPINGS["account"] #~~->Preservation:Model~~ MAPPINGS['notification'] = MAPPINGS["account"] #~~->Notification:Model~~ +MAPPINGS['article_tombstone'] = MAPPINGS["account"] #~~->ArticleTombstone:Model~~ ######################################### # Query Routes diff --git a/portality/static/doaj/images/feather-icons/download.svg 
b/portality/static/doaj/images/feather-icons/download.svg new file mode 100644 index 0000000000..76767a9246 --- /dev/null +++ b/portality/static/doaj/images/feather-icons/download.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/portality/static/js/doaj.fieldrender.edges.js b/portality/static/js/doaj.fieldrender.edges.js index dc3138e4f8..7eaa93ebff 100644 --- a/portality/static/js/doaj.fieldrender.edges.js +++ b/portality/static/js/doaj.fieldrender.edges.js @@ -2872,6 +2872,8 @@ $.extend(true, doaj, { published = 'Published ' + name; } + const export_url = this.doaj_url + '/service/export/article/' + resultobj.id + '/ris'; + var frag = '
  • \
  • \ +
  • \ + \ + Export Citation (RIS) ' + if (this.widget){ + frag += 'external-link icon' + } else { + frag += '' + } + frag += '\ +
  • \
  • \ About the journal\
          • \
diff --git a/portality/static/js/formulaic.js b/portality/static/js/formulaic.js
index 0c5932c3ce..494dd747fb 100644
--- a/portality/static/js/formulaic.js
+++ b/portality/static/js/formulaic.js
@@ -1155,14 +1155,14 @@ var formulaic = {
         this._renderAutocheck = function(autocheck) {
             let frag = "• ";
-
+
             if (autocheck.checked_by && doaj.autocheckers && doaj.autocheckers.registry.hasOwnProperty(autocheck.checked_by)) {
                 frag += (new doaj.autocheckers.registry[autocheck.checked_by]()).draw(autocheck)
             } else {
                 frag += this._defaultRender(autocheck);
             }
-
+
             frag += `• `;
             return frag;
         }
@@ -2252,5 +2252,36 @@ var formulaic = {
         this.init();
     },
+
+    newArticleInfo : (params) => edges.instantiate(formulaic.widgets.ArticleInfo, params),
+    ArticleInfo: function ({formulaic, fieldDef, args}) {
+        const $sealEle = $('label[for=doaj_seal-0]');
+
+        if (!$sealEle.length) {
+            console.log('skip ArticleInfo, seal section not found')
+            return;
+        }
+
+        const idResult = window.location.pathname.match('/journal/([a-f0-9]+)')
+        if (!idResult) {
+            console.log('skip ArticleInfo, journal id not found')
+            return
+        }
+        const journalId = idResult[1]
+        fetch(`/admin/journal/${journalId}/article-info`)
+            .then(response => response.json())
+            .then(data => {
+                let articleText = `(This journal has ${data.n_articles} articles in DOAJ)`
+                if (data.n_articles > 0) {
+                    const articlesUrl = `/admin/journal/${journalId}/article-info/admin-site-search`
+                    articleText = `${articleText}`
+                }
+                $sealEle.html($sealEle.text() + ` ${articleText}`)
+            })
+    },
+
+
+
+
 }
 };
diff --git a/portality/tasks/journal_bulk_edit.py b/portality/tasks/journal_bulk_edit.py
index 3e39d4388e..0b4242adbc 100644
--- a/portality/tasks/journal_bulk_edit.py
+++ b/portality/tasks/journal_bulk_edit.py
@@ -123,8 +123,8 @@ def run(self):
                     job.add_audit_message("Setting {f} to {x} for journal {y}".format(f=k, x=v, y=journal_id))
                     fc.form[k].data = v
                 else:
-                    if v:
-                        fc.form.doaj_seal.data = v
+                    if v or (isinstance(v, str) and v.lower() == 'y'):
+                        fc.form.doaj_seal.data = ['y']
                     updated = True

             if note:
diff --git a/portality/templates-v2/_application-form/includes/_fieldset_oa_compliance.html b/portality/templates-v2/_application-form/includes/_fieldset_oa_compliance.html
index 932e2b3953..b0170c9bab 100644
--- a/portality/templates-v2/_application-form/includes/_fieldset_oa_compliance.html
+++ b/portality/templates-v2/_application-form/includes/_fieldset_oa_compliance.html
@@ -8,7 +8,7 @@
        1. The application form takes approximately 30 minutes to complete.
        2. Your progress is automatically saved.
-       3. You can return to this application at any time by clicking My account → Publisher at the top.
+       3. You can return to this application at any time by clicking Dashboard → Publisher dashboard at the top.
        4. You can print or download a PDF list of the questions.
        5. You must apply online.
diff --git a/portality/templates-v2/management/_application-form/includes/_editorial_form_fields.html b/portality/templates-v2/management/_application-form/includes/_editorial_form_fields.html
index ba8f74cab6..e567690934 100644
--- a/portality/templates-v2/management/_application-form/includes/_editorial_form_fields.html
+++ b/portality/templates-v2/management/_application-form/includes/_editorial_form_fields.html
@@ -56,7 +56,6 @@

    {{ fs.label }}

    {% set fs = formulaic_context.fieldset("seal") %} {% if fs %}
    {{ fs.label }}
-   The journal may have fulfilled all the criteria for the Seal.
    {% for f in fs.fields() %}
    {% set field_template = f.template %}
    {% include field_template %}
diff --git a/portality/templates-v2/management/admin/dashboard.html b/portality/templates-v2/management/admin/dashboard.html
index 440d2bda05..b70e51e874 100644
--- a/portality/templates-v2/management/admin/dashboard.html
+++ b/portality/templates-v2/management/admin/dashboard.html
@@ -28,6 +28,12 @@
     {% else %}
         Update Requests
     {% endif %}
+
+    {% if request.values.get("filter") == "oh" %}
+        On Hold
+    {% else %}
+        On Hold
+    {% endif %}
     {% include "management/includes/_todo.html" %}
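The new tab above only adds ?filter=oh to the dashboard URL; the change to portality/view/dashboard.py further down in this diff turns that parameter into the flags passed to the todo service. A minimal standalone sketch of that mapping (the helper name is illustrative and not part of the codebase):

    # Illustrative helper only: mirrors the branching added to portality/view/dashboard.py.
    def todo_flags(filter_value):
        """Map the dashboard 'filter' query parameter to the flags passed to TodoService.top_todo()."""
        new_applications, update_requests, on_hold = True, True, True
        if filter_value == "na":          # New Applications tab
            update_requests = on_hold = False
        elif filter_value == "ur":        # Update Requests tab
            new_applications = on_hold = False
        elif filter_value == "oh":        # On Hold tab (added in this change)
            new_applications = update_requests = False
        return new_applications, update_requests, on_hold

    assert todo_flags("oh") == (False, False, True)   # only on-hold items are requested
    assert todo_flags(None) == (True, True, True)     # no filter: request everything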
    diff --git a/portality/templates-v2/management/includes/_todo.html b/portality/templates-v2/management/includes/_todo.html index 14d29431b2..7e552bfa23 100644 --- a/portality/templates-v2/management/includes/_todo.html +++ b/portality/templates-v2/management/includes/_todo.html @@ -41,6 +41,11 @@ "feather": "edit", "show_status": true }, + constants.TODO_MANED_ON_HOLD: { + "text" : "On Hold Application Review status", + "colour" : "var(--sanguine)", + "feather": "x-circle" + }, constants.TODO_EDITOR_STALLED: { "text" : "Stalled Chase Associate Editor", "show_status": true, diff --git a/portality/ui/messages.py b/portality/ui/messages.py index 5702b45aa4..1487a2ca7e 100644 --- a/portality/ui/messages.py +++ b/portality/ui/messages.py @@ -29,8 +29,6 @@ class Messages(object): SENT_JOURNAL_CONTACT_ACCEPTED_UPDATE_REQUEST_EMAIL = """Sent email to journal contact '{email}' to tell that an update to their journal was accepted.""" SENT_JOURNAL_CONTACT_IN_PROGRESS_EMAIL = """An email has been sent to the Journal Contact alerting them that you are working on their application.""" SENT_JOURNAL_CONTACT_ASSIGNED_EMAIL = """An email has been sent to the Journal Contact alerting them that an editor has been assigned to their application.""" - SENT_PUBLISHER_IN_PROGRESS_EMAIL = """An email has been sent to the Owner alerting them that you are working on their application.""" - SENT_PUBLISHER_ASSIGNED_EMAIL = """A notification has been sent to the Owner alerting them that an editor has been assigned to their application.""" NOT_SENT_ACCEPTED_APPLICATION_EMAIL = """Did not send notification to '{user}' to tell them that their journal was accepted. Email may be disabled, or there is a problem with the email address.""" NOT_SENT_REJECTED_APPLICATION_EMAILS = """Did not send email to user '{user}' or application suggester to tell them that their journal was rejected Email may be disabled, or there is a problem with the email address.""" @@ -40,8 +38,6 @@ class Messages(object): NOT_SENT_JOURNAL_CONTACT_ACCEPTED_APPLICATION_EMAIL = """Did not send email to '{email}' to tell them that their application/update request was accepted. Email may be disabled, or there is a problem with the email address""" NOT_SENT_JOURNAL_CONTACT_IN_PROGRESS_EMAIL = """An email could not be sent to the Journal Contact alerting them that you are working on their application. Email may be disabled, or there is a problem with the email address""" NOT_SENT_JOURNAL_CONTACT_ASSIGNED_EMAIL = """An email could not be sent to the Journal Contact alerting them that an editor has been assigned to their application. Email may be disabled, or there is a problem with the email address""" - NOT_SENT_PUBLISHER_IN_PROGRESS_EMAIL = """An email could not be sent to the Owner alerting them that you are working on their application. Email may be disabled, or there is a problem with the email address. """ - NOT_SENT_PUBLISHER_ASSIGNED_EMAIL = """An email could not be sent to the Owner alerting them that an editor has been assigned to their application. 
Email may be disabled, or there is a problem with the email address""" IN_PROGRESS_NOT_SENT_EMAIL_DISABLED = """Did not send email to Owner or Journal Contact about the status change, as publisher emails are disabled.""" diff --git a/portality/view/admin.py b/portality/view/admin.py index 088c4513fc..cdddbb69a9 100644 --- a/portality/view/admin.py +++ b/portality/view/admin.py @@ -7,13 +7,14 @@ from flask_login import current_user, login_required from werkzeug.datastructures import MultiDict -from portality import dao import portality.models as models from portality import constants +from portality import dao from portality import lock from portality.background import BackgroundSummary from portality.bll import DOAJ, exceptions from portality.bll.exceptions import ArticleMergeConflict, DuplicateArticleException +from portality.bll.services.query import Query from portality.core import app from portality.crosswalks.application_form import ApplicationFormXWalk from portality.decorators import ssl_required, restrict_to_role, write_required @@ -29,8 +30,6 @@ from portality.ui import templates from portality.util import flash_with_url, jsonp, make_json_resp, get_web_json_payload, validate_json from portality.view.forms import EditorGroupForm, MakeContinuation - -from portality.bll.services.query import Query from portality.view.view_helper import exparam_editing_user # ~~Admin:Blueprint~~ @@ -335,6 +334,31 @@ def journals_bulk_reinstate(): # ##################################################################### +@blueprint.route("/journal//article-info/", methods=["GET"]) +@login_required +def journal_article_info(journal_id): + j = models.Journal.pull(journal_id) + if j is None: + abort(404) + + return {'n_articles': models.Article.count_by_issns(j.bibjson().issns(), in_doaj=True)} + + +@blueprint.route("/journal//article-info/admin-site-search", methods=["GET"]) +@login_required +def journal_article_info_admin_site_search(journal_id): + j = models.Journal.pull(journal_id) + if j is None: + abort(404) + + issns = j.bibjson().issns() + if not issns: + abort(404) + + target_url = '/admin/admin_site_search?source={"query":{"bool":{"must":[{"term":{"admin.in_doaj":true}},{"term":{"es_type.exact":"article"}},{"query_string":{"query":"%s","default_operator":"AND","default_field":"index.issn.exact"}}]}},"track_total_hits":true}' + return redirect(target_url % issns[0].replace('-', r'\\-')) + + @blueprint.route("/journal//continue", methods=["GET", "POST"]) @login_required @ssl_required @@ -445,7 +469,8 @@ def application(application_id): flash(str(e)) return redirect(url_for("admin.application", application_id=ap.id, _anchor='cannot_edit')) else: - return fc.render_template(obj=ap, lock=lockinfo, form_diff=form_diff, current_journal=current_journal, lcc_tree=lcc_jstree, autochecks=autochecks) + return fc.render_template(obj=ap, lock=lockinfo, form_diff=form_diff, current_journal=current_journal, + lcc_tree=lcc_jstree, autochecks=autochecks) @blueprint.route("/application_quick_reject/", methods=["POST"]) diff --git a/portality/view/dashboard.py b/portality/view/dashboard.py index e9c39564e1..437351e51c 100644 --- a/portality/view/dashboard.py +++ b/portality/view/dashboard.py @@ -20,10 +20,15 @@ @ssl_required def top_todo(): filter = request.values.get("filter") - new_applications, update_requests = True, True + new_applications, update_requests, on_hold = True, True, True if filter == "na": + on_hold = False update_requests = False elif filter == "ur": + on_hold = False + new_applications = False + 
elif filter == "oh": + update_requests = False new_applications = False # ~~-> Todo:Service~~ @@ -31,7 +36,8 @@ def top_todo(): todos = svc.top_todo(current_user._get_current_object(), size=app.config.get("TODO_LIST_SIZE"), new_applications=new_applications, - update_requests=update_requests) + update_requests=update_requests, + on_hold=on_hold) # ~~-> Dashboard:Page~~ return render_template(templates.DASHBOARD, todos=todos) diff --git a/portality/view/doajservices.py b/portality/view/doajservices.py index f83a727b6b..4ee10eff11 100644 --- a/portality/view/doajservices.py +++ b/portality/view/doajservices.py @@ -1,13 +1,14 @@ -import json, urllib.request, urllib.parse, urllib.error, requests +import json +from io import BytesIO -from flask import Blueprint, make_response, request, abort, render_template +from flask import Blueprint, make_response, abort, render_template, send_file from flask_login import current_user, login_required -from portality.core import app -from portality.decorators import ssl_required, write_required, restrict_to_role -from portality.util import jsonp from portality import lock, models from portality.bll import DOAJ +from portality.crosswalks.article_ris import ArticleRisXWalk +from portality.decorators import ssl_required, write_required +from portality.util import jsonp from portality.ui import templates blueprint = Blueprint('doajservices', __name__) @@ -41,7 +42,7 @@ def unlock(object_type, object_id): abort(400) # otherwise, return success - resp = make_response(json.dumps({"result" : "success"})) + resp = make_response(json.dumps({"result": "success"})) resp.mimetype = "application/json" return resp @@ -111,7 +112,8 @@ def group_status(group_id): :param group_id: :return: """ - if (not (current_user.has_role("editor") and models.EditorGroup.pull(group_id).editor == current_user.id)) and (not current_user.has_role("admin")): + if (not (current_user.has_role("editor") and models.EditorGroup.pull(group_id).editor == current_user.id)) and ( + not current_user.has_role("admin")): abort(404) svc = DOAJ.todoService() stats = svc.group_stats(group_id) @@ -130,6 +132,7 @@ def dismiss_autocheck(autocheck_set_id, autocheck_id): abort(404) return make_response(json.dumps({"status": "success"})) + @blueprint.route("/autocheck/undismiss//", methods=["GET", "POST"]) @jsonp @login_required @@ -142,3 +145,23 @@ def undismiss_autocheck(autocheck_set_id, autocheck_id): abort(404) return make_response(json.dumps({"status": "success"})) + +@blueprint.route('/export/article//') +def export_article_ris(article_id, fmt): + article = models.Article.pull(article_id) + if not article: + abort(404) + + if fmt != 'ris': + # only support ris for now + abort(404) + + byte_stream = BytesIO() + ris = ArticleRisXWalk.article2ris(article) + byte_stream.write(ris.to_text().encode('utf-8', errors='ignore')) + byte_stream.seek(0) + + filename = f'article-{article_id[:10]}.ris' + + resp = make_response(send_file(byte_stream, as_attachment=True, attachment_filename=filename)) + return resp diff --git a/portality/view/oaipmh.py b/portality/view/oaipmh.py index 947d5d5f28..b1c5ec7ac5 100644 --- a/portality/view/oaipmh.py +++ b/portality/view/oaipmh.py @@ -288,13 +288,16 @@ def get_record(dao, base_url, specified_oai_endpoint, identifier=None, metadata_ return IdDoesNotExist(base_url) # do the crosswalk xwalk = get_crosswalk(f.get("metadataPrefix"), dao.__type__) - metadata = xwalk.crosswalk(record) + header = xwalk.header(record) - # make the response oai_id = make_oai_identifier(identifier, 
dao.__type__) gr = GetRecord(base_url, oai_id, metadata_prefix) - gr.metadata = metadata gr.header = header + + if record.is_in_doaj(): + metadata = xwalk.crosswalk(record) + gr.metadata = metadata + return gr # if we have not returned already, this means we can't disseminate this format @@ -556,7 +559,8 @@ def get_element(self): record = etree.SubElement(gr, self.PMH + "record") record.append(self.header) - record.append(self.metadata) + if self.metadata is not None: + record.append(self.metadata) return gr @@ -735,7 +739,8 @@ def get_element(self): for metadata, header in self.records: r = etree.SubElement(lr, self.PMH + "record") r.append(header) - r.append(metadata) + if metadata is not None: + r.append(metadata) if self.resumption is not None: rt = etree.SubElement(lr, self.PMH + "resumptionToken") diff --git a/portality/view/publisher.py b/portality/view/publisher.py index 31263ce3c7..105bb54873 100644 --- a/portality/view/publisher.py +++ b/portality/view/publisher.py @@ -4,7 +4,8 @@ from portality.app_email import EmailException from portality import models, constants -from portality.bll.exceptions import AuthoriseException, ArticleMergeConflict, DuplicateArticleException, ArticleNotAcceptable +from portality.bll.exceptions import AuthoriseException, ArticleMergeConflict, DuplicateArticleException, \ + ArticleNotAcceptable, NoSuchObjectException from portality.decorators import ssl_required, restrict_to_role, write_required from portality.dao import ESMappingMissingError from portality.forms.application_forms import ApplicationFormFactory @@ -55,7 +56,10 @@ def delete_application(application_id): # otherwise delegate to the application service to sort this out appService = DOAJ.applicationService() - appService.delete_application(application_id, current_user._get_current_object()) + try: + appService.delete_application(application_id, current_user._get_current_object()) + except NoSuchObjectException: + abort(404) return redirect(url_for("publisher.deleted_thanks")) diff --git a/setup.py b/setup.py index 7682c79b23..953c5fe420 100644 --- a/setup.py +++ b/setup.py @@ -5,7 +5,7 @@ setup( name='doaj', - version='7.0.0', + version='7.0.3', packages=find_packages(), install_requires=[ "awscli==1.20.50", @@ -19,6 +19,7 @@ "feedparser==6.0.8", "itsdangerous==2.0.1", # fixme: unpinned dependency of flask, 2.1.0 is causing an import error 'json' "jinja2<3.1.0", # fixme: unpinned dependency of flask, import error on 'escape' + "jsonpath-ng~=1.6", "Flask~=2.1.2", "Flask-Cors==3.0.8", "Flask-DebugToolbar==0.13.1", @@ -63,6 +64,7 @@ 'pandas~=2.0.1', # pandas lets us generate URLs for linkcheck 'gspread-dataframe~=3.3.1', 'gspread-formatting~=1.1.2', + ] + (["setproctitle==1.1.10"] if "linux" in sys.platform else []), extras_require={ # prevent backtracking through all versions
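As a usage note for the export route added in portality/view/doajservices.py above: the result card links to /service/export/article/<id>/ris, so the endpoint can be exercised with a plain HTTP client. A minimal sketch, assuming the service is reachable at doaj.org and using a placeholder article id:

    # Minimal sketch of downloading a citation via the new RIS export endpoint.
    # The host and article id below are placeholders, not values taken from this changeset.
    import requests

    article_id = "0000000000000000000000000000000a"  # placeholder DOAJ article id
    url = f"https://doaj.org/service/export/article/{article_id}/ris"

    resp = requests.get(url, timeout=30)
    resp.raise_for_status()   # the view responds 404 for unknown ids or for formats other than 'ris'

    # The view sends the crosswalked record as an attachment named article-<first 10 chars of id>.ris
    with open(f"article-{article_id[:10]}.ris", "wb") as out:
        out.write(resp.content)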