Skip to content
10 changes: 10 additions & 0 deletions cms/data/notifications.yml
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,16 @@ bg:job:admin_reports:export_available:
short:
Report "{name}" ready for download

bg:job:article_deletion_notifications:publisher:
long: |
The following articles have been deleted from DOAJ in the last week

{list_of_articles}

Please upload replacement records if appropriate.
short:
Deleted articles in your journal(s) this week

journal:assed:assigned:notify:
long: |
The journal **{journal_name}** has been assigned to you by the Editor of your group **{group_name}**. Please start work on this within 10 days.
Expand Down
3 changes: 2 additions & 1 deletion dev.template.cfg
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,8 @@ HUEY_SCHEDULE = {
"datalog_journal_added_update": CRON_NEVER,
"auto_assign_editor_group_data": CRON_NEVER,
"ris_export": CRON_NEVER,
"site_statistics": CRON_NEVER
"site_statistics": CRON_NEVER,
"article_deletion_notifications": CRON_NEVER,
}

###########################################
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
suite: Article Deletion Notifications
testset: Article Deletion Notifications

tests:
- title: Article Deletion Notification Generation
context:
role: admin
steps:
- step: Run the testdrive for deleted_articles to set up the fixtures for this test
path: /testdrive/deleted_articles
- step: Log in to the system as an administrator
- step: Go to the admin notifications page
path: /admin/notifications
results:
- There are no notifications listed for either user account supplied in the testdrive
- step: Return to the testdrive page opened earlier (do not reload it), and click the button marked "Run background task".
results:
- The text beneath the button confirms that the script executed successfully
- step: Return to the admin notifications page
results:
- A new notification is present with the title "Deleted articles in your journal(s) this week".
- The notification content lists the titles and details of the deleted articles.
- There is only one notification for the first publisher listed on the testdrive. The second publisher
does not have a notification
4 changes: 2 additions & 2 deletions doajtest/testbook/journal_form/maned_form.yml
Original file line number Diff line number Diff line change
Expand Up @@ -180,7 +180,7 @@ tests:
role: admin
steps:
- step: Go to admin journal search page at /admin/journals
- step: "Locate a record with the metadata value 'Last full review: Never'" and click "Edit this journal"
- step: "Locate a record with the metadata value 'Last full review: Never' and click 'Edit this journal'"
- step: Locate the "Last Full Review" section of the form
- step: Enter a date in the future in "Last Full Review" field
results:
Expand All @@ -202,7 +202,7 @@ tests:
role: admin
steps:
- step: Go to admin journal search page at /admin/journals
- step: "Locate a record with the metadata value 'Last Owner Transfer: Never'" and click "Edit this journal"
- step: "Locate a record with the metadata value 'Last Owner Transfer: Never' and click 'Edit this journal'"
- step: Locate the "Re-assign publisher account" section of the form
- step: Change the user to a different user account
- step: Save the journal record
Expand Down
36 changes: 36 additions & 0 deletions doajtest/testdrive/deleted_articles.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
from doajtest.testdrive.factory import TestDrive


class DeletedArticles(TestDrive):
    """Testdrive fixture set for the article deletion notifications task.

    Builds two publishers: the first owns a journal with tombstoned (deleted)
    articles and should receive a notification; the second owns a journal with
    no deletions and should not.
    """

    def setup(self) -> dict:
        # Two publisher accounts: only the first will have tombstoned articles.
        pub_with_deletes, pw_with = self.publisher_account()
        pub_without, pw_without = self.publisher_account()

        # One in-DOAJ journal per publisher.
        journals_with = self.journals_in_doaj(pub_with_deletes, n=1, block=True)
        journals_without = self.journals_in_doaj(pub_without, n=1, block=True)

        # Ten live articles in each journal.
        arts_with = self.articles(journals_with[0], n=10)
        arts_without = self.articles(journals_without[0], n=10)

        # Tombstone ten articles in the first publisher's journal only.
        stones = self.article_tombstones(journals_with[0], n=10)

        report = {}
        self.report_accounts([(pub_with_deletes, pw_with), (pub_without, pw_without)], report)
        self.report_journal_ids(journals_with + journals_without, report)
        self.report_article_ids(arts_with + arts_without, report)
        self.report_article_tombstone_ids(stones, report)
        self.report_script("article_deletion_notifications", "Run background task", report)

        return report

    def teardown(self, params) -> dict:
        """Remove every record the setup created, as listed in the report."""
        self.teardown_accounts(params)
        self.teardown_journals(params)
        self.teardown_articles(params)
        self.teardown_article_tombstones(params)
        return self.SUCCESS
119 changes: 118 additions & 1 deletion doajtest/testdrive/factory.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,18 @@
from portality.lib import plugin
import random
import string
from portality import models
from portality import models, constants
from doajtest.fixtures.v2.journals import JournalFixtureFactory
from doajtest.fixtures.article import ArticleFixtureFactory


class TestDrive():

# Standard payload returned by teardown implementations on success.
SUCCESS = {"status": "success"}

def __init__(self):
# Per-run random seed string, used to make generated record names unique.
self.run_seed = self.create_random_str()

def create_random_str(self, n_char=10):
    """Return a random alphanumeric (a-z, A-Z, 0-9) string of length ``n_char``."""
    pool = string.ascii_letters + string.digits
    return "".join(random.choice(pool) for _ in range(n_char))
Expand All @@ -22,6 +30,115 @@ def setup(self) -> dict:
def teardown(self, setup_params) -> dict:
return {"status": "not implemented"}

### Reporting functions for consistent delivery of similar data models

def report_accounts(self, accounts, report):
    """Record credentials for each ``(account, password)`` pair under ``report["accounts"]``."""
    report["accounts"] = [
        {"username": acc.id, "password": pw, "api_key": acc.api_key}
        for acc, pw in accounts
    ]

def report_journal_ids(self, journals, report):
    """Record the id of every journal under ``report["journals"]``."""
    report["journals"] = [journal.id for journal in journals]

def report_article_ids(self, articles, report):
    """Record the id of every article under ``report["articles"]``."""
    report["articles"] = [article.id for article in articles]

def report_article_tombstone_ids(self, tombstones, report):
    """Record the id of every tombstone under ``report["article_tombstones"]``."""
    report["article_tombstones"] = [stone.id for stone in tombstones]

def report_script(self, script, name, report):
    """Record the background script to run and its display title under ``report["script"]``."""
    entry = {"script_name": script, "title": name}
    report["script"] = entry

### Teardown methods for consistent cleanup of similar data models

def teardown_accounts(self, report):
    """Delete every account recorded in the report (no-op if none recorded)."""
    for entry in report.get("accounts", []):
        models.Account.remove_by_id(entry["username"])

def teardown_journals(self, report):
    """Delete every journal recorded in the report (no-op if none recorded)."""
    for journal_id in report.get("journals", []):
        models.Journal.remove_by_id(journal_id)

def teardown_articles(self, report):
    """Delete every article recorded in the report (no-op if none recorded)."""
    for article_id in report.get("articles", []):
        models.Article.remove_by_id(article_id)

def teardown_article_tombstones(self, report):
    """Delete every article tombstone recorded in the report (no-op if none recorded)."""
    for stone_id in report.get("article_tombstones", []):
        models.ArticleTombstone.remove_by_id(stone_id)

### Useful factory methods

def publisher_account(self, save=True, block=False):
    """Create a publisher account with API access.

    Returns a ``(account, password)`` tuple; the plaintext password is not
    recoverable from the account, so it is handed back for reporting.
    """
    username = self.create_random_str()
    password = self.create_random_str()
    account = models.Account.make_account(
        username + "@example.com",
        username,
        "Publisher " + username,
        [constants.ROLE_PUBLISHER, constants.ROLE_API],
    )
    account.set_password(password)
    account.generate_api_key()
    if save:
        account.save(blocking=block)
    return account, password

def journals_in_doaj(self, owner, n=1, save=True, block=False):
    """Create ``n`` in-DOAJ journals owned by ``owner`` and return them as a list.

    Each journal gets a fresh id, unique ISSNs, a seeded unique title, and no
    current application attached.
    """
    made = []
    for idx in range(n):
        overlay = {
            "bibjson": {
                "title": f"Journal {owner} {idx} {self.run_seed}",
            }
        }
        source = JournalFixtureFactory.make_journal_source(in_doaj=True, overlay=overlay)
        journal = models.Journal(**source)
        journal.remove_current_application()
        journal.set_id(journal.makeid())
        journal.set_owner(owner.id)
        journal.bibjson().eissn = self.generate_unique_issn()
        journal.bibjson().pissn = self.generate_unique_issn()
        if save:
            journal.save(blocking=block)
        made.append(journal)

    return made

def articles(self, journal, n=1, save=True):
    """Create ``n`` articles attached to the given journal's ISSNs.

    Titles are seeded with the run seed so records from this run are
    identifiable. Articles are saved without blocking when ``save`` is True.
    Returns the list of article model objects.
    """
    # The journal's ISSNs and in-DOAJ status do not change between iterations,
    # so look them up once instead of on every loop pass.
    eissn = journal.bibjson().eissn
    pissn = journal.bibjson().pissn
    in_doaj = journal.is_in_doaj()

    articles = []
    for i in range(n):
        source = ArticleFixtureFactory.make_article_source(eissn=eissn, pissn=pissn, with_id=True, in_doaj=in_doaj)
        a = models.Article(**source)
        a.bibjson().title = f"Article {journal.owner} {i} {self.run_seed}"
        a.set_id(a.makeid())
        if save:
            a.save()
        articles.append(a)
    return articles

def article_tombstones(self, journal, n=1, save=True):
    """Create ``n`` tombstones for the given journal.

    Builds ``n`` unsaved articles first, then converts each into a tombstone,
    saving the tombstones (not the articles) when ``save`` is True.
    """
    unsaved_articles = self.articles(journal, n=n, save=False)

    stones = []
    for article in unsaved_articles:
        stone = article.make_tombstone()
        if save:
            stone.save()
        stones.append(stone)

    return stones


class TestFactory():
@classmethod
Expand Down
91 changes: 91 additions & 0 deletions doajtest/unit/test_tasks_article_deletion_notifications.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
from doajtest.helpers import DoajTestCase
from portality.tasks.article_deletion_notifications import ArticleDeletionNotificationsBackgroundTask
from portality import models
from doajtest.fixtures.article import ArticleFixtureFactory
from doajtest.fixtures.accounts import AccountFixtureFactory
from portality.lib import dates
import time

class TestArticleDeletionNotifications(DoajTestCase):
    """Unit tests for the weekly article-deletion notification background task."""

    def _tombstone(self, tid, owner, days_ago):
        # Build and save a tombstone owned by `owner`, created `days_ago` days in the past.
        stone = models.ArticleTombstone(**ArticleFixtureFactory.make_article_source())
        stone.set_id(tid)
        stone.set_created(dates.format(dates.before_now(days_ago * 24 * 60 * 60)))
        stone.data['admin'] = {'owner': owner}
        stone.save(blocking=True)
        return stone

    def test_article_deletion_notifications(self):
        # 1. Setup data: two publisher accounts
        for account_id in ("pub1", "pub2"):
            acc = models.Account(**AccountFixtureFactory.make_publisher_source())
            acc.set_id(account_id)
            acc.save(blocking=True)

        # Tombstones inside the one-week window: two for pub1, one for pub2
        self._tombstone("at1", "pub1", 1)
        self._tombstone("at2", "pub1", 2)
        self._tombstone("at3", "pub2", 3)
        # Tombstone outside the window - the task should ignore this one
        self._tombstone("at4", "pub1", 10)

        # 2. Run the task
        job = ArticleDeletionNotificationsBackgroundTask.prepare("system")
        ArticleDeletionNotificationsBackgroundTask(job).run()

        # allow the index to catch up before querying notifications
        time.sleep(2)

        # 3. Verify notifications: two in total, one per publisher
        notifications = models.Notification.all()
        self.assertEqual(len(notifications), 2)

        pub1_note = None
        for note in notifications:
            if note.who == "pub1":
                pub1_note = note
                break

        self.assertIsNotNone(pub1_note)
        self.assertIn("Deleted articles in your journal(s) this week", pub1_note.short)
        # The long text should cover at1 and at2 but not at4; the titles
        # produced by make_article_source are not pinned here.

        pub2_note = None
        for note in notifications:
            if note.who == "pub2":
                pub2_note = note
                break
        self.assertIsNotNone(pub2_note)

    def test_no_articles(self):
        # Run the task with no tombstones present at all
        job = ArticleDeletionNotificationsBackgroundTask.prepare("system")
        ArticleDeletionNotificationsBackgroundTask(job).run()

        # No notifications should be sent, and the job should say why
        self.assertEqual(len(models.Notification.all()), 0)
        self.assertIn("No deleted articles in the last week", job.audit[0]['message'])
2 changes: 1 addition & 1 deletion portality/models/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
from portality.models.uploads import FileUpload, ExistsFileQuery, OwnerFileQuery, ValidFileQuery, BulkArticles
from portality.models.lock import Lock
from portality.models.history import ArticleHistory, JournalHistory
from portality.models.article import Article, ArticleBibJSON, ArticleQuery, ArticleVolumesQuery, DuplicateArticleQuery, NoJournalException, ArticleTombstone
from portality.models.article import Article, ArticleBibJSON, ArticleQuery, ArticleVolumesQuery, DuplicateArticleQuery, NoJournalException, ArticleTombstone, ArticleTombstoneRecentlyDeletedQuery
from portality.models.oaipmh import OAIPMHRecord, OAIPMHJournal, OAIPMHArticle
from portality.models.atom import AtomRecord
from portality.models.search import JournalArticle, JournalStatsQuery, ArticleStatsQuery
Expand Down
Loading