From 034ddffd6b71b6a4369e3b06b52d0b48685dbcd8 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 21 Sep 2022 12:41:57 -0700 Subject: [PATCH 001/555] Add intellij run configuration for the devserver, allowing in IDE debugging --- .run/devserver.run.xml | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 .run/devserver.run.xml diff --git a/.run/devserver.run.xml b/.run/devserver.run.xml new file mode 100644 index 0000000000..1c94ee6402 --- /dev/null +++ b/.run/devserver.run.xml @@ -0,0 +1,24 @@ + + + + + From a904f47cbfc43e0c4e58b13932eaa22f37054ebb Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Thu, 22 Sep 2022 07:23:41 -0700 Subject: [PATCH 002/555] Add run configuration to .gitignore --- .gitignore | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.gitignore b/.gitignore index b5e0261f09..5c77ad049d 100644 --- a/.gitignore +++ b/.gitignore @@ -29,6 +29,8 @@ var/ # IntelliJ IDE, except project config .idea/* !.idea/studio.iml +# ignore future updates to run configuration +.run/devserver.run.xml # PyInstaller # Usually these files are written by a python script from a template From aa92a57ecb6bec5956380b920bf60d4f667cba48 Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Mon, 10 Oct 2022 19:22:51 +0530 Subject: [PATCH 003/555] Soft delete users --- contentcuration/contentcuration/forms.py | 3 +- .../migrations/0141_user_deleted.py | 18 ++++++++ contentcuration/contentcuration/models.py | 42 +++++++++---------- .../contentcuration/tests/test_models.py | 27 ++++++++++++ .../contentcuration/tests/test_settings.py | 29 ++++++++----- .../contentcuration/tests/views/test_users.py | 14 +++++++ .../contentcuration/views/settings.py | 2 + 7 files changed, 101 insertions(+), 34 deletions(-) create mode 100644 contentcuration/contentcuration/migrations/0141_user_deleted.py diff --git a/contentcuration/contentcuration/forms.py b/contentcuration/contentcuration/forms.py index 973916431e..d9dc781f61 100644 --- 
a/contentcuration/contentcuration/forms.py +++ b/contentcuration/contentcuration/forms.py @@ -7,6 +7,7 @@ from django.contrib.auth.forms import UserChangeForm from django.contrib.auth.forms import UserCreationForm from django.core import signing +from django.db.models import Q from django.template.loader import render_to_string from contentcuration.models import User @@ -45,7 +46,7 @@ class RegistrationForm(UserCreationForm, ExtraFormMixin): def clean_email(self): email = self.cleaned_data['email'].strip().lower() - if User.objects.filter(email__iexact=email, is_active=True).exists(): + if User.objects.filter(Q(is_active=True) | Q(deleted=True), email__iexact=email).exists(): raise UserWarning return email diff --git a/contentcuration/contentcuration/migrations/0141_user_deleted.py b/contentcuration/contentcuration/migrations/0141_user_deleted.py new file mode 100644 index 0000000000..25444e6577 --- /dev/null +++ b/contentcuration/contentcuration/migrations/0141_user_deleted.py @@ -0,0 +1,18 @@ +# Generated by Django 3.2.14 on 2022-10-06 11:18 +from django.db import migrations +from django.db import models + + +class Migration(migrations.Migration): + + dependencies = [ + ('contentcuration', '0140_delete_task'), + ] + + operations = [ + migrations.AddField( + model_name='user', + name='deleted', + field=models.BooleanField(db_index=True, default=False), + ), + ] diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 0e26e36c86..c2a23d1554 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -200,6 +200,7 @@ class User(AbstractBaseUser, PermissionsMixin): content_defaults = JSONField(default=dict) policies = JSONField(default=dict, null=True) feature_flags = JSONField(default=dict, null=True) + deleted = models.BooleanField(default=False, db_index=True) _field_updates = FieldTracker(fields=[ # Field to watch for changes @@ -214,27 +215,21 @@ def __unicode__(self): 
return self.email def delete(self): - from contentcuration.viewsets.common import SQCount - # Remove any invitations associated to this account - self.sent_to.all().delete() - - # Delete channels associated with this user (if user is the only editor) - user_query = ( - User.objects.filter(editable_channels__id=OuterRef('id')) - .values_list('id', flat=True) - .distinct() - ) - self.editable_channels.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1).delete() - - # Delete channel collections associated with this user (if user is the only editor) - user_query = ( - User.objects.filter(channel_sets__id=OuterRef('id')) - .values_list('id', flat=True) - .distinct() - ) - self.channel_sets.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1).delete() + """ + Soft deletes the user. + """ + self.deleted = True + # Deactivate the user to disallow authentication and also + # to let the user verify the email again after recovery. + self.is_active = False + self.save() - super(User, self).delete() + def recover(self): + """ + Use this method when we want to recover a user. + """ + self.deleted = False + self.save() def can_edit(self, channel_id): return Channel.filter_edit_queryset(Channel.objects.all(), self).filter(pk=channel_id).exists() @@ -406,17 +401,20 @@ def filter_edit_queryset(cls, queryset, user): return queryset.filter(pk=user.pk) @classmethod - def get_for_email(cls, email, **filters): + def get_for_email(cls, email, deleted=False, **filters): """ Returns the appropriate User record given an email, ordered by: - those with is_active=True first, which there should only ever be one - otherwise by ID DESC so most recent inactive shoud be returned + Filters out deleted User records by default. Can be overridden with + deleted argument. 
+ :param email: A string of the user's email :param filters: Additional filters to filter the User queryset :return: User or None """ - return User.objects.filter(email__iexact=email.strip(), **filters)\ + return User.objects.filter(email__iexact=email.strip(), deleted=deleted, **filters)\ .order_by("-is_active", "-id").first() diff --git a/contentcuration/contentcuration/tests/test_models.py b/contentcuration/contentcuration/tests/test_models.py index 3c0caa9967..66720a0985 100644 --- a/contentcuration/contentcuration/tests/test_models.py +++ b/contentcuration/contentcuration/tests/test_models.py @@ -787,6 +787,7 @@ def test_get_for_email(self): user1 = self._create_user("tester@tester.com", is_active=False) user2 = self._create_user("tester@Tester.com", is_active=False) user3 = self._create_user("Tester@Tester.com", is_active=True) + user4 = self._create_user("testing@test.com", is_active=True) # active should be returned first self.assertEqual(user3, User.get_for_email("tester@tester.com")) @@ -801,6 +802,32 @@ def test_get_for_email(self): # ensure nothing found doesn't error self.assertIsNone(User.get_for_email("tester@tester.com")) + # ensure we don't return soft-deleted users + user4.delete() + self.assertIsNone(User.get_for_email("testing@test.com")) + + def test_delete__sets_deleted_true(self): + user = self._create_user("tester@tester.com") + user.delete() + self.assertEqual(user.deleted, True) + + def test_delete__sets_is_active_false(self): + user = self._create_user("tester@tester.com") + user.delete() + self.assertEqual(user.is_active, False) + + def test_recover__sets_deleted_false(self): + user = self._create_user("tester@tester.com") + user.delete() + user.recover() + self.assertEqual(user.deleted, False) + + def test_recover__keeps_is_active_false(self): + user = self._create_user("tester@tester.com") + user.delete() + user.recover() + self.assertEqual(user.is_active, False) + class ChannelHistoryTestCase(StudioTestCase): def setUp(self): diff 
--git a/contentcuration/contentcuration/tests/test_settings.py b/contentcuration/contentcuration/tests/test_settings.py index c216f088a2..25bbc71a54 100644 --- a/contentcuration/contentcuration/tests/test_settings.py +++ b/contentcuration/contentcuration/tests/test_settings.py @@ -1,5 +1,6 @@ import json +import mock from django.urls import reverse_lazy from .base import BaseAPITestCase @@ -10,7 +11,7 @@ class SettingsTestCase(BaseAPITestCase): def test_username_change(self): - data = json.dumps({"first_name": "New firstname", "last_name": "New lastname",}) + data = json.dumps({"first_name": "New firstname", "last_name": "New lastname", }) request = self.create_post_request( reverse_lazy("update_user_full_name"), data=data, @@ -40,13 +41,19 @@ def test_delete_account_invalid(self): self.assertTrue(User.objects.filter(email=self.user.email).exists()) def test_delete_account(self): - # TODO: send_email causes connection errors - data = json.dumps({"email": self.user.email}) - self.create_post_request( - reverse_lazy("delete_user_account"), - data=data, - content_type="application/json", - ) - # response = DeleteAccountView.as_view()(request) - # self.assertEqual(response.status_code, 200) - # self.assertFalse(User.objects.filter(email=self.user.email).exists()) + with mock.patch("contentcuration.views.users.djangologout") as djangologout: + self.user.delete = mock.Mock() + data = json.dumps({"email": self.user.email}) + request = self.create_post_request( + reverse_lazy("delete_user_account"), + data=data, + content_type="application/json", + ) + response = DeleteAccountView.as_view()(request) + + # Ensure successful response. + self.assertEqual(response.status_code, 200) + # Ensure user's delete method is called. + self.user.delete.assert_called_once() + # Ensure we logout the user. 
+ djangologout.assert_called_once_with(request) diff --git a/contentcuration/contentcuration/tests/views/test_users.py b/contentcuration/contentcuration/tests/views/test_users.py index 0c03d5f626..e79e0cdbf9 100644 --- a/contentcuration/contentcuration/tests/views/test_users.py +++ b/contentcuration/contentcuration/tests/views/test_users.py @@ -98,6 +98,14 @@ def test_login__whitespace(self): self.assertIsInstance(redirect, HttpResponseRedirectBase) self.assertIn("channels", redirect['Location']) + def test_after_delete__no_login(self): + with mock.patch("contentcuration.views.users.djangologin") as djangologin: + self.user.delete() + response = login(self.request) + + self.assertIsInstance(response, HttpResponseForbidden) + djangologin.assert_not_called() + class UserRegistrationViewTestCase(StudioAPITestCase): def setUp(self): @@ -121,6 +129,12 @@ def test_post__no_duplicate_registration(self): response = self.view.post(self.request) self.assertIsInstance(response, HttpResponseForbidden) + def test_after_delete__no_registration(self): + user = testdata.user(email="tester@tester.com") + user.delete() + response = self.view.post(self.request) + self.assertIsInstance(response, HttpResponseForbidden) + class UserActivationViewTestCase(StudioAPITestCase): def setUp(self): diff --git a/contentcuration/contentcuration/views/settings.py b/contentcuration/contentcuration/views/settings.py index f41b6ac926..a186665dec 100644 --- a/contentcuration/contentcuration/views/settings.py +++ b/contentcuration/contentcuration/views/settings.py @@ -34,6 +34,7 @@ from contentcuration.utils.csv_writer import generate_user_csv_filename from contentcuration.utils.messages import get_messages from contentcuration.views.base import current_user_for_context +from contentcuration.views.users import logout from contentcuration.viewsets.channel import SettingsChannelSerializer ISSUE_UPDATE_DATE = datetime(2018, 10, 29) @@ -165,6 +166,7 @@ def form_valid(self, form): os.unlink(csv_path) 
self.request.user.delete() + logout(self.request) class StorageSettingsView(PostFormMixin, FormView): From ef2799114b0610b6b2400dd59259641ad612d4b6 Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Mon, 10 Oct 2022 20:27:29 +0530 Subject: [PATCH 004/555] fixes a regression where its expected that user-only-editor channels are hard deleted --- contentcuration/contentcuration/models.py | 26 ++++++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index c2a23d1554..1b1963e9b4 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -216,12 +216,36 @@ def __unicode__(self): def delete(self): """ - Soft deletes the user. + Hard deletes invitation associated to this account, hard deletes channel and channet sets + only if user is the only editor. Then soft deletes the user account. """ + from contentcuration.viewsets.common import SQCount + + # Hard delete invitations associated to this account. + self.sent_to.all().delete() + + # Hard delete channels associated with this user (if user is the only editor). + user_query = ( + User.objects.filter(editable_channels__id=OuterRef('id')) + .values_list('id', flat=True) + .distinct() + ) + self.editable_channels.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1).delete() + + # Hard delete channel collections associated with this user (if user is the only editor). + user_query = ( + User.objects.filter(channel_sets__id=OuterRef('id')) + .values_list('id', flat=True) + .distinct() + ) + self.channel_sets.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1).delete() + + # Soft delete user. self.deleted = True # Deactivate the user to disallow authentication and also # to let the user verify the email again after recovery. 
self.is_active = False + self.save() def recover(self): From 2a42f1895cbc1ec4cf3c7ffaeaf0b1d093cb7ffc Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Wed, 19 Oct 2022 16:29:21 +0530 Subject: [PATCH 005/555] User soft delete via garbage collection --- .../contentcuration/constants/user_history.py | 11 +++ contentcuration/contentcuration/forms.py | 2 +- .../management/commands/garbage_collect.py | 13 ++- .../migrations/0141_user_deleted.py | 18 ---- .../migrations/0141_user_soft_delete.py | 31 +++++++ contentcuration/contentcuration/models.py | 83 +++++++++++++------ contentcuration/contentcuration/settings.py | 6 +- .../contentcuration/tests/test_models.py | 28 ++++--- .../contentcuration/utils/garbage_collect.py | 73 +++++++++------- 9 files changed, 177 insertions(+), 88 deletions(-) create mode 100644 contentcuration/contentcuration/constants/user_history.py delete mode 100644 contentcuration/contentcuration/migrations/0141_user_deleted.py create mode 100644 contentcuration/contentcuration/migrations/0141_user_soft_delete.py diff --git a/contentcuration/contentcuration/constants/user_history.py b/contentcuration/contentcuration/constants/user_history.py new file mode 100644 index 0000000000..1eecf79c17 --- /dev/null +++ b/contentcuration/contentcuration/constants/user_history.py @@ -0,0 +1,11 @@ +from django.utils.translation import ugettext_lazy as _ + +DELETION = "deletion" +RECOVERY = "recovery" +RELATED_DATA_HARD_DELETION = "related-data-hard-deletion" + +choices = ( + (DELETION, _("User soft deletion")), + (RECOVERY, _("User soft deletion recovery")), + (RELATED_DATA_HARD_DELETION, _("User related data hard deletion")), +) diff --git a/contentcuration/contentcuration/forms.py b/contentcuration/contentcuration/forms.py index d9dc781f61..d0ae49893f 100644 --- a/contentcuration/contentcuration/forms.py +++ b/contentcuration/contentcuration/forms.py @@ -46,7 +46,7 @@ class RegistrationForm(UserCreationForm, ExtraFormMixin): def clean_email(self): email = 
self.cleaned_data['email'].strip().lower() - if User.objects.filter(Q(is_active=True) | Q(deleted=True), email__iexact=email).exists(): + if User.objects.filter(Q(is_active=True) | Q(deleted_at__isnull=False), email__iexact=email).exists(): raise UserWarning return email diff --git a/contentcuration/contentcuration/management/commands/garbage_collect.py b/contentcuration/contentcuration/management/commands/garbage_collect.py index f31db7ad5c..f22f70dd4b 100644 --- a/contentcuration/contentcuration/management/commands/garbage_collect.py +++ b/contentcuration/contentcuration/management/commands/garbage_collect.py @@ -11,6 +11,7 @@ from contentcuration.utils.garbage_collect import clean_up_contentnodes from contentcuration.utils.garbage_collect import clean_up_deleted_chefs from contentcuration.utils.garbage_collect import clean_up_feature_flags +from contentcuration.utils.garbage_collect import clean_up_soft_deleted_users from contentcuration.utils.garbage_collect import clean_up_stale_files from contentcuration.utils.garbage_collect import clean_up_tasks @@ -26,15 +27,23 @@ def handle(self, *args, **options): Actual logic for garbage collection. """ - # clean up contentnodes, files and file objects on storage that are associated - # with the orphan tree + # Clean up users that are soft deleted and are older than ACCOUNT_DELETION_BUFFER (90 days). + # Also clean contentnodes, files and file objects on storage that are associated + # with the orphan tree. 
+ logging.info("Cleaning up soft deleted users older than ACCOUNT_DELETION_BUFFER (90 days)") + clean_up_soft_deleted_users() + logging.info("Cleaning up contentnodes from the orphan tree") clean_up_contentnodes() + logging.info("Cleaning up deleted chef nodes") clean_up_deleted_chefs() + logging.info("Cleaning up feature flags") clean_up_feature_flags() + logging.info("Cleaning up stale file objects") clean_up_stale_files() + logging.info("Cleaning up tasks") clean_up_tasks() diff --git a/contentcuration/contentcuration/migrations/0141_user_deleted.py b/contentcuration/contentcuration/migrations/0141_user_deleted.py deleted file mode 100644 index 25444e6577..0000000000 --- a/contentcuration/contentcuration/migrations/0141_user_deleted.py +++ /dev/null @@ -1,18 +0,0 @@ -# Generated by Django 3.2.14 on 2022-10-06 11:18 -from django.db import migrations -from django.db import models - - -class Migration(migrations.Migration): - - dependencies = [ - ('contentcuration', '0140_delete_task'), - ] - - operations = [ - migrations.AddField( - model_name='user', - name='deleted', - field=models.BooleanField(db_index=True, default=False), - ), - ] diff --git a/contentcuration/contentcuration/migrations/0141_user_soft_delete.py b/contentcuration/contentcuration/migrations/0141_user_soft_delete.py new file mode 100644 index 0000000000..1f58e4076a --- /dev/null +++ b/contentcuration/contentcuration/migrations/0141_user_soft_delete.py @@ -0,0 +1,31 @@ +# Generated by Django 3.2.14 on 2022-10-19 09:41 +import django.db.models.deletion +import django.utils.timezone +from django.conf import settings +from django.db import migrations +from django.db import models + + +class Migration(migrations.Migration): + + dependencies = [ + ('contentcuration', '0140_delete_task'), + ] + + operations = [ + migrations.AddField( + model_name='user', + name='deleted_at', + field=models.DateTimeField(blank=True, null=True), + ), + migrations.CreateModel( + name='UserHistory', + fields=[ + ('id', 
models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), + ('action', models.CharField(choices=[('deletion', 'User soft deletion'), ('recovery', 'User soft deletion recovery'), + ('related-data-hard-deletion', 'User related data hard deletion')], max_length=32)), + ('performed_at', models.DateTimeField(default=django.utils.timezone.now)), + ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to=settings.AUTH_USER_MODEL)), + ], + ), + ] diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 079833c278..537f867978 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -66,6 +66,7 @@ from contentcuration.constants import channel_history from contentcuration.constants import completion_criteria +from contentcuration.constants import user_history from contentcuration.constants.contentnode import kind_activity_map from contentcuration.db.models.expressions import Array from contentcuration.db.models.functions import ArrayRemove @@ -199,7 +200,7 @@ class User(AbstractBaseUser, PermissionsMixin): content_defaults = JSONField(default=dict) policies = JSONField(default=dict, null=True) feature_flags = JSONField(default=dict, null=True) - deleted = models.BooleanField(default=False, db_index=True) + deleted_at = models.DateTimeField(null=True, blank=True) _field_updates = FieldTracker(fields=[ # Field to watch for changes @@ -215,44 +216,66 @@ def __unicode__(self): def delete(self): """ - Hard deletes invitation associated to this account, hard deletes channel and channet sets - only if user is the only editor. Then soft deletes the user account. + Soft deletes the user account. + """ + self.deleted_at = timezone.now() + # Deactivate the user to disallow authentication and also + # to let the user verify the email again after recovery. 
+ self.is_active = False + self.save() + self.history.create(user_id=self.pk, action=user_history.DELETION) + + def recover(self): + """ + Use this method when we want to recover a user. + """ + self.deleted_at = None + self.save() + self.history.create(user_id=self.pk, action=user_history.RECOVERY) + + def hard_delete_user_related_data(self): + """ + Hard delete all user related data. But keeps the user record itself intact. + + User related data that gets hard deleted are: + - sole editor non-public channels. + - sole editor non-public channelsets. + - sole editor non-public channels' content nodes and its underlying files that are not + used by any other channel. + - all user invitations. """ from contentcuration.viewsets.common import SQCount # Hard delete invitations associated to this account. self.sent_to.all().delete() + self.sent_by.all().delete() - # Hard delete channels associated with this user (if user is the only editor). - user_query = ( + editable_channels_user_query = ( User.objects.filter(editable_channels__id=OuterRef('id')) .values_list('id', flat=True) .distinct() ) - self.editable_channels.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1).delete() + non_public_channels_sole_editor = self.editable_channels.annotate(num_editors=SQCount( + editable_channels_user_query, field="id")).filter(num_editors=1, public=False) + + # Point sole editor non-public channels' contentnodes to orphan tree to let + # our garbage collection delete the nodes and underlying files. + ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list( + non_public_channels_sole_editor)).update(parent_id=settings.ORPHANAGE_ROOT_ID) - # Hard delete channel collections associated with this user (if user is the only editor). + # Hard delete non-public channels associated with this user (if user is the only editor). 
+ non_public_channels_sole_editor.delete() + + # Hard delete non-public channel collections associated with this user (if user is the only editor). user_query = ( User.objects.filter(channel_sets__id=OuterRef('id')) .values_list('id', flat=True) .distinct() ) - self.channel_sets.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1).delete() + self.channel_sets.annotate(num_editors=SQCount(user_query, field="id")).filter(num_editors=1, public=False).delete() - # Soft delete user. - self.deleted = True - # Deactivate the user to disallow authentication and also - # to let the user verify the email again after recovery. - self.is_active = False - - self.save() - - def recover(self): - """ - Use this method when we want to recover a user. - """ - self.deleted = False - self.save() + # Create history! + self.history.create(user_id=self.pk, action=user_history.RELATED_DATA_HARD_DELETION) def can_edit(self, channel_id): return Channel.filter_edit_queryset(Channel.objects.all(), self).filter(pk=channel_id).exists() @@ -424,20 +447,20 @@ def filter_edit_queryset(cls, queryset, user): return queryset.filter(pk=user.pk) @classmethod - def get_for_email(cls, email, deleted=False, **filters): + def get_for_email(cls, email, deleted_at__isnull=True, **filters): """ Returns the appropriate User record given an email, ordered by: - those with is_active=True first, which there should only ever be one - otherwise by ID DESC so most recent inactive shoud be returned Filters out deleted User records by default. Can be overridden with - deleted argument. + deleted_at__isnull argument. 
:param email: A string of the user's email :param filters: Additional filters to filter the User queryset :return: User or None """ - return User.objects.filter(email__iexact=email.strip(), deleted=deleted, **filters)\ + return User.objects.filter(email__iexact=email.strip(), deleted_at__isnull=deleted_at__isnull, **filters)\ .order_by("-is_active", "-id").first() @@ -1060,6 +1083,16 @@ class Meta: ] +class UserHistory(models.Model): + """ + Model that stores the user's action history. + """ + user = models.ForeignKey(settings.AUTH_USER_MODEL, null=False, blank=False, related_name="history", on_delete=models.CASCADE) + action = models.CharField(max_length=32, choices=user_history.choices) + + performed_at = models.DateTimeField(default=timezone.now) + + class ChannelSet(models.Model): # NOTE: this is referred to as "channel collections" on the front-end, but we need to call it # something else as there is already a ChannelCollection model on the front-end diff --git a/contentcuration/contentcuration/settings.py b/contentcuration/contentcuration/settings.py index b9c3a73ca1..de05fb484a 100644 --- a/contentcuration/contentcuration/settings.py +++ b/contentcuration/contentcuration/settings.py @@ -329,8 +329,10 @@ def gettext(s): HELP_EMAIL = 'content@learningequality.org' DEFAULT_FROM_EMAIL = 'Kolibri Studio ' POLICY_EMAIL = 'legal@learningequality.org' -ACCOUNT_DELETION_BUFFER = 5 # Used to determine how many days a user -# has to undo accidentally deleting account + +# Used to determine how many days a user +# has to undo accidentally deleting account. 
+ACCOUNT_DELETION_BUFFER = 90 DEFAULT_LICENSE = 1 diff --git a/contentcuration/contentcuration/tests/test_models.py b/contentcuration/contentcuration/tests/test_models.py index 66720a0985..e6fbe38a35 100644 --- a/contentcuration/contentcuration/tests/test_models.py +++ b/contentcuration/contentcuration/tests/test_models.py @@ -1,3 +1,4 @@ +import datetime import uuid import mock @@ -11,6 +12,7 @@ from le_utils.constants import format_presets from contentcuration.constants import channel_history +from contentcuration.constants import user_history from contentcuration.models import AssessmentItem from contentcuration.models import Channel from contentcuration.models import ChannelHistory @@ -22,6 +24,7 @@ from contentcuration.models import Invitation from contentcuration.models import object_storage_name from contentcuration.models import User +from contentcuration.models import UserHistory from contentcuration.tests import testdata from contentcuration.tests.base import StudioTestCase @@ -806,27 +809,30 @@ def test_get_for_email(self): user4.delete() self.assertIsNone(User.get_for_email("testing@test.com")) - def test_delete__sets_deleted_true(self): + def test_delete(self): user = self._create_user("tester@tester.com") user.delete() - self.assertEqual(user.deleted, True) - def test_delete__sets_is_active_false(self): - user = self._create_user("tester@tester.com") - user.delete() + # Sets deleted_at? + self.assertIsInstance(user.deleted_at, datetime.datetime) + # Sets is_active to False? self.assertEqual(user.is_active, False) + # Creates user history? 
+ user_delete_history = UserHistory.objects.filter(user_id=user.id, action=user_history.DELETION).first() + self.assertIsNotNone(user_delete_history) - def test_recover__sets_deleted_false(self): + def test_recover(self): user = self._create_user("tester@tester.com") user.delete() user.recover() - self.assertEqual(user.deleted, False) - def test_recover__keeps_is_active_false(self): - user = self._create_user("tester@tester.com") - user.delete() - user.recover() + # Sets deleted_at to None? + self.assertEqual(user.deleted_at, None) + # Keeps is_active to False? self.assertEqual(user.is_active, False) + # Creates user history? + user_recover_history = UserHistory.objects.filter(user_id=user.id, action=user_history.RECOVERY).first() + self.assertIsNotNone(user_recover_history) class ChannelHistoryTestCase(StudioTestCase): diff --git a/contentcuration/contentcuration/utils/garbage_collect.py b/contentcuration/contentcuration/utils/garbage_collect.py index 3343013b7c..de5a29921f 100755 --- a/contentcuration/contentcuration/utils/garbage_collect.py +++ b/contentcuration/contentcuration/utils/garbage_collect.py @@ -7,7 +7,9 @@ from celery import states from django.conf import settings +from django.core.files.storage import default_storage from django.db.models.expressions import CombinedExpression +from django.db.models.expressions import Exists from django.db.models.expressions import F from django.db.models.expressions import Value from django.db.models.signals import post_delete @@ -37,18 +39,57 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.receivers = None -def get_deleted_chefs_root(): +def _get_deleted_chefs_root(): deleted_chefs_node, _new = ContentNode.objects.get_or_create(pk=settings.DELETED_CHEFS_ROOT_ID, kind_id=content_kinds.TOPIC) return deleted_chefs_node +def _clean_up_files(contentnode_ids): + """ + Clean up the files (both in the DB and in object storage) + associated with the `contentnode_ids` iterable that are + not pointed by any other 
contentnode. + """ + files = File.objects.filter(contentnode__in=contentnode_ids) + files_on_storage = files.values_list("file_on_disk", flat=True) + + for disk_file_path in files_on_storage: + is_other_node_pointing = Exists(File.objects.filter(file_on_disk=disk_file_path).exclude(contentnode__in=contentnode_ids)) + if not is_other_node_pointing: + default_storage.delete(disk_file_path) + + # use _raw_delete for much fast file deletions + files._raw_delete(files.db) + + +def clean_up_soft_deleted_users(): + """ + Hard deletes user related data for soft deleted users that are older than ACCOUNT_DELETION_BUFFER. + + Note: User record itself is not hard deleted. + + User related data that gets hard deleted are: + - sole editor non-public channels. + - sole editor non-public channelsets. + - sole editor non-public channels' content nodes and its underlying files that are not + used by any other channel. + - all user invitations. + """ + account_deletion_buffer_delta = now() - datetime.timedelta(days=settings.ACCOUNT_DELETION_BUFFER) + users_to_delete = User.objects.filter(deleted_at__lt=account_deletion_buffer_delta) + + for user in users_to_delete: + user.hard_delete_user_related_data() + logging.info("Hard deleted user related data for user {}".format(user.email)) + + def clean_up_deleted_chefs(): """ Clean up all deleted chefs attached to the deleted chefs tree, including all child nodes in that tree. """ - deleted_chefs_node = get_deleted_chefs_root() + deleted_chefs_node = _get_deleted_chefs_root() # we cannot use MPTT methods like get_descendants() or use tree_id because for performance reasons # we are avoiding MPTT entirely. 
nodes_to_clean_up = ContentNode.objects.filter(parent=deleted_chefs_node) @@ -81,7 +122,7 @@ def clean_up_contentnodes(delete_older_than=settings.ORPHAN_DATE_CLEAN_UP_THRESH # delete all files first with DisablePostDeleteSignal(): - clean_up_files(nodes_to_clean_up) + _clean_up_files(nodes_to_clean_up) # Use _raw_delete for fast bulk deletions try: @@ -92,32 +133,6 @@ def clean_up_contentnodes(delete_older_than=settings.ORPHAN_DATE_CLEAN_UP_THRESH pass -def clean_up_files(contentnode_ids): - """ - Clean up the files (both in the DB and in object storage) - associated with the contentnode_ids given in the `contentnode_ids` - iterable. - """ - - # get all file objects associated with these contentnodes - files = File.objects.filter(contentnode__in=contentnode_ids) - # get all their associated real files in object storage - files_on_storage = files.values_list("file_on_disk") - for f in files_on_storage: - # values_list returns each set of items in a tuple, even - # if there's only one item in there. 
Extract the file_on_disk - # string value from inside that singleton tuple - f[0] - # NOTE (aron):call the storage's delete method on each file, one by one - # disabled for now until we implement logic to not delete files - # that are referenced by non-orphan nodes - # storage.delete(file_path) - - # finally, remove the entries from object storage - # use _raw_delete for much fast file deletions - files._raw_delete(files.db) - - def clean_up_feature_flags(): """ Removes lingering feature flag settings in User records that aren't currently present in the From 4be9df4359a9638a21c11582e613f3d75dc38786 Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Fri, 21 Oct 2022 13:49:59 +0530 Subject: [PATCH 006/555] fix deleted chefs root references --- contentcuration/contentcuration/tests/test_user.py | 6 ++++-- contentcuration/contentcuration/utils/garbage_collect.py | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/contentcuration/contentcuration/tests/test_user.py b/contentcuration/contentcuration/tests/test_user.py index f9995fb48c..36ae42f1c7 100644 --- a/contentcuration/contentcuration/tests/test_user.py +++ b/contentcuration/contentcuration/tests/test_user.py @@ -161,10 +161,12 @@ def test_user_csv_export(self): self.assertIn(videos[index - 1].original_filename, row) self.assertIn(_format_size(videos[index - 1].file_size), row) self.assertEqual(index, len(videos)) + """ + Write and refactor for related data hard delete test cases below. 
+ """ def test_account_deletion(self): - self.user.delete() - self.assertFalse(Channel.objects.filter(pk=self.channel.pk).exists()) + pass def test_account_deletion_shared_channels_preserved(self): # Deleting a user account shouldn't delete shared channels diff --git a/contentcuration/contentcuration/utils/garbage_collect.py b/contentcuration/contentcuration/utils/garbage_collect.py index de5a29921f..206ad74865 100755 --- a/contentcuration/contentcuration/utils/garbage_collect.py +++ b/contentcuration/contentcuration/utils/garbage_collect.py @@ -39,7 +39,7 @@ def __exit__(self, exc_type, exc_val, exc_tb): self.receivers = None -def _get_deleted_chefs_root(): +def get_deleted_chefs_root(): deleted_chefs_node, _new = ContentNode.objects.get_or_create(pk=settings.DELETED_CHEFS_ROOT_ID, kind_id=content_kinds.TOPIC) return deleted_chefs_node @@ -89,7 +89,7 @@ def clean_up_deleted_chefs(): child nodes in that tree. """ - deleted_chefs_node = _get_deleted_chefs_root() + deleted_chefs_node = get_deleted_chefs_root() # we cannot use MPTT methods like get_descendants() or use tree_id because for performance reasons # we are avoiding MPTT entirely. nodes_to_clean_up = ContentNode.objects.filter(parent=deleted_chefs_node) From d3a4263ee3d0669f2c1c03c50373051dca12821b Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Sun, 23 Oct 2022 01:20:36 +0530 Subject: [PATCH 007/555] User UserHistory as single source of truth for timestamps! 
--- .../contentcuration/constants/user_history.py | 4 +- contentcuration/contentcuration/forms.py | 2 +- ...oft_delete.py => 0141_soft_delete_user.py} | 10 +-- contentcuration/contentcuration/models.py | 20 +++-- .../contentcuration/tests/test_models.py | 84 +++++++++++++++++-- .../contentcuration/tests/test_user.py | 15 ---- .../tests/utils/test_garbage_collect.py | 34 ++++++++ .../contentcuration/utils/garbage_collect.py | 12 ++- 8 files changed, 140 insertions(+), 41 deletions(-) rename contentcuration/contentcuration/migrations/{0141_user_soft_delete.py => 0141_soft_delete_user.py} (63%) diff --git a/contentcuration/contentcuration/constants/user_history.py b/contentcuration/contentcuration/constants/user_history.py index 1eecf79c17..76655993ef 100644 --- a/contentcuration/contentcuration/constants/user_history.py +++ b/contentcuration/contentcuration/constants/user_history.py @@ -1,7 +1,7 @@ from django.utils.translation import ugettext_lazy as _ -DELETION = "deletion" -RECOVERY = "recovery" +DELETION = "soft-deletion" +RECOVERY = "soft-recovery" RELATED_DATA_HARD_DELETION = "related-data-hard-deletion" choices = ( diff --git a/contentcuration/contentcuration/forms.py b/contentcuration/contentcuration/forms.py index d0ae49893f..d9dc781f61 100644 --- a/contentcuration/contentcuration/forms.py +++ b/contentcuration/contentcuration/forms.py @@ -46,7 +46,7 @@ class RegistrationForm(UserCreationForm, ExtraFormMixin): def clean_email(self): email = self.cleaned_data['email'].strip().lower() - if User.objects.filter(Q(is_active=True) | Q(deleted_at__isnull=False), email__iexact=email).exists(): + if User.objects.filter(Q(is_active=True) | Q(deleted=True), email__iexact=email).exists(): raise UserWarning return email diff --git a/contentcuration/contentcuration/migrations/0141_user_soft_delete.py b/contentcuration/contentcuration/migrations/0141_soft_delete_user.py similarity index 63% rename from contentcuration/contentcuration/migrations/0141_user_soft_delete.py 
rename to contentcuration/contentcuration/migrations/0141_soft_delete_user.py index 1f58e4076a..df66bafcc0 100644 --- a/contentcuration/contentcuration/migrations/0141_user_soft_delete.py +++ b/contentcuration/contentcuration/migrations/0141_soft_delete_user.py @@ -1,4 +1,4 @@ -# Generated by Django 3.2.14 on 2022-10-19 09:41 +# Generated by Django 3.2.14 on 2022-10-22 18:30 import django.db.models.deletion import django.utils.timezone from django.conf import settings @@ -15,15 +15,15 @@ class Migration(migrations.Migration): operations = [ migrations.AddField( model_name='user', - name='deleted_at', - field=models.DateTimeField(blank=True, null=True), + name='deleted', + field=models.BooleanField(db_index=True, default=False), ), migrations.CreateModel( name='UserHistory', fields=[ ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), - ('action', models.CharField(choices=[('deletion', 'User soft deletion'), ('recovery', 'User soft deletion recovery'), - ('related-data-hard-deletion', 'User related data hard deletion')], max_length=32)), + ('action', models.CharField(choices=[('soft-deletion', 'User soft deletion'), ('soft-recovery', + 'User soft deletion recovery'), ('related-data-hard-deletion', 'User related data hard deletion')], max_length=32)), ('performed_at', models.DateTimeField(default=django.utils.timezone.now)), ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='history', to=settings.AUTH_USER_MODEL)), ], diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 537f867978..07aa39f2b1 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -200,7 +200,7 @@ class User(AbstractBaseUser, PermissionsMixin): content_defaults = JSONField(default=dict) policies = JSONField(default=dict, null=True) feature_flags = JSONField(default=dict, null=True) - deleted_at = 
models.DateTimeField(null=True, blank=True) + deleted = models.BooleanField(default=False, db_index=True) _field_updates = FieldTracker(fields=[ # Field to watch for changes @@ -218,7 +218,7 @@ def delete(self): """ Soft deletes the user account. """ - self.deleted_at = timezone.now() + self.deleted = True # Deactivate the user to disallow authentication and also # to let the user verify the email again after recovery. self.is_active = False @@ -229,7 +229,7 @@ def recover(self): """ Use this method when we want to recover a user. """ - self.deleted_at = None + self.deleted = False self.save() self.history.create(user_id=self.pk, action=user_history.RECOVERY) @@ -261,7 +261,7 @@ def hard_delete_user_related_data(self): # Point sole editor non-public channels' contentnodes to orphan tree to let # our garbage collection delete the nodes and underlying files. ContentNode._annotate_channel_id(ContentNode.objects).filter(channel_id__in=list( - non_public_channels_sole_editor)).update(parent_id=settings.ORPHANAGE_ROOT_ID) + non_public_channels_sole_editor.values_list("id", flat=True))).update(parent_id=settings.ORPHANAGE_ROOT_ID) # Hard delete non-public channels associated with this user (if user is the only editor). non_public_channels_sole_editor.delete() @@ -447,21 +447,23 @@ def filter_edit_queryset(cls, queryset, user): return queryset.filter(pk=user.pk) @classmethod - def get_for_email(cls, email, deleted_at__isnull=True, **filters): + def get_for_email(cls, email, deleted=False, **filters): """ Returns the appropriate User record given an email, ordered by: - those with is_active=True first, which there should only ever be one - otherwise by ID DESC so most recent inactive shoud be returned - Filters out deleted User records by default. Can be overridden with - deleted_at__isnull argument. + Filters out deleted User records by default. To include both deleted and + undeleted user records pass None to the deleted argument. 
:param email: A string of the user's email :param filters: Additional filters to filter the User queryset :return: User or None """ - return User.objects.filter(email__iexact=email.strip(), deleted_at__isnull=deleted_at__isnull, **filters)\ - .order_by("-is_active", "-id").first() + user_qs = User.objects.filter(email__iexact=email.strip()) + if deleted is not None: + user_qs = user_qs.filter(deleted=deleted) + return user_qs.filter(**filters).order_by("-is_active", "-id").first() class UUIDField(models.CharField): diff --git a/contentcuration/contentcuration/tests/test_models.py b/contentcuration/contentcuration/tests/test_models.py index e6fbe38a35..9734ef1309 100644 --- a/contentcuration/contentcuration/tests/test_models.py +++ b/contentcuration/contentcuration/tests/test_models.py @@ -1,4 +1,3 @@ -import datetime import uuid import mock @@ -6,6 +5,7 @@ from django.conf import settings from django.core.cache import cache from django.core.exceptions import ValidationError +from django.db.models import Q from django.db.utils import IntegrityError from django.utils import timezone from le_utils.constants import content_kinds @@ -16,6 +16,7 @@ from contentcuration.models import AssessmentItem from contentcuration.models import Channel from contentcuration.models import ChannelHistory +from contentcuration.models import ChannelSet from contentcuration.models import ContentNode from contentcuration.models import CONTENTNODE_TREE_ID_CACHE_KEY from contentcuration.models import File @@ -781,6 +782,51 @@ def _create_user(self, email, password='password', is_active=True): user.save() return user + def _setup_user_related_data(self): + user_a = self._create_user("a@tester.com") + user_b = self._create_user("b@tester.com") + + # Create a sole editor non-public channel. + sole_editor_channel = Channel.objects.create(name="sole-editor") + sole_editor_channel.editors.add(user_a) + + # Create sole-editor channel nodes. 
+ for i in range(0, 3): + testdata.node({ + "title": "sole-editor-channel-node", + "kind_id": "video", + }, parent=sole_editor_channel.main_tree) + + # Create a sole editor public channel. + public_channel = testdata.channel("public") + public_channel.editors.add(user_a) + public_channel.public = True + public_channel.save() + + # Create a shared channel. + shared_channel = testdata.channel("shared-channel") + shared_channel.editors.add(user_a) + shared_channel.editors.add(user_b) + + # Invitations. + Invitation.objects.create(sender_id=user_a.id, invited_id=user_b.id) + Invitation.objects.create(sender_id=user_b.id, invited_id=user_a.id) + + # Channel sets. + channel_set = ChannelSet.objects.create(name="sole-editor") + channel_set.editors.add(user_a) + + channel_set = ChannelSet.objects.create(name="public") + channel_set.editors.add(user_a) + channel_set.public = True + channel_set.save() + + channel_set = ChannelSet.objects.create(name="shared-channelset") + channel_set.editors.add(user_a) + channel_set.editors.add(user_b) + + return user_a + def test_unique_lower_email(self): self._create_user("tester@tester.com") with self.assertRaises(IntegrityError): @@ -813,8 +859,8 @@ def test_delete(self): user = self._create_user("tester@tester.com") user.delete() - # Sets deleted_at? - self.assertIsInstance(user.deleted_at, datetime.datetime) + # Sets deleted? + self.assertEqual(user.deleted, True) # Sets is_active to False? self.assertEqual(user.is_active, False) # Creates user history? @@ -826,14 +872,42 @@ def test_recover(self): user.delete() user.recover() - # Sets deleted_at to None? - self.assertEqual(user.deleted_at, None) + # Sets deleted to False? + self.assertEqual(user.deleted, False) # Keeps is_active to False? self.assertEqual(user.is_active, False) # Creates user history? 
 user_recover_history = UserHistory.objects.filter(user_id=user.id, action=user_history.RECOVERY).first() self.assertIsNotNone(user_recover_history) + def test_hard_delete_user_related_data(self): + user = self._setup_user_related_data() + user.hard_delete_user_related_data() + + # Deletes sole-editor channels. + self.assertFalse(Channel.objects.filter(name="sole-editor").exists()) + # Preserves shared channels. + self.assertTrue(Channel.objects.filter(name="shared-channel").exists()) + # Preserves public channels. + self.assertTrue(Channel.objects.filter(name="public").exists()) + + # Deletes all user related invitations. + self.assertFalse(Invitation.objects.filter(Q(sender_id=user.id) | Q(invited_id=user.id)).exists()) + + # Deletes sole-editor channelsets. + self.assertFalse(ChannelSet.objects.filter(name="sole-editor").exists()) + # Preserves shared channelsets. + self.assertTrue(ChannelSet.objects.filter(name="shared-channelset").exists()) + # Preserves public channelsets. + self.assertTrue(ChannelSet.objects.filter(name="public").exists()) + + # All contentnodes of sole-editor channel point to ORPHANAGE ROOT NODE? + self.assertFalse(ContentNode.objects.filter(~Q(parent_id=settings.ORPHANAGE_ROOT_ID) + & Q(title="sole-editor-channel-node")).exists()) + # Creates user history? 
+ user_hard_delete_history = UserHistory.objects.filter(user_id=user.id, action=user_history.RELATED_DATA_HARD_DELETION).first() + self.assertIsNotNone(user_hard_delete_history) + class ChannelHistoryTestCase(StudioTestCase): def setUp(self): diff --git a/contentcuration/contentcuration/tests/test_user.py b/contentcuration/contentcuration/tests/test_user.py index 36ae42f1c7..9fda1ceefe 100644 --- a/contentcuration/contentcuration/tests/test_user.py +++ b/contentcuration/contentcuration/tests/test_user.py @@ -15,7 +15,6 @@ from .base import BaseAPITestCase from .testdata import fileobj_video -from contentcuration.models import Channel from contentcuration.models import DEFAULT_CONTENT_DEFAULTS from contentcuration.models import Invitation from contentcuration.models import User @@ -161,17 +160,3 @@ def test_user_csv_export(self): self.assertIn(videos[index - 1].original_filename, row) self.assertIn(_format_size(videos[index - 1].file_size), row) self.assertEqual(index, len(videos)) - """ - Write and refactor for related data hard delete test cases below. 
- """ - - def test_account_deletion(self): - pass - - def test_account_deletion_shared_channels_preserved(self): - # Deleting a user account shouldn't delete shared channels - newuser = self.create_user() - self.channel.editors.add(newuser) - self.channel.save() - self.user.delete() - self.assertTrue(Channel.objects.filter(pk=self.channel.pk).exists()) diff --git a/contentcuration/contentcuration/tests/utils/test_garbage_collect.py b/contentcuration/contentcuration/tests/utils/test_garbage_collect.py index 6746fa43a6..178bc25656 100644 --- a/contentcuration/contentcuration/tests/utils/test_garbage_collect.py +++ b/contentcuration/contentcuration/tests/utils/test_garbage_collect.py @@ -17,15 +17,19 @@ from contentcuration import models as cc from contentcuration.api import activate_channel +from contentcuration.constants import user_history from contentcuration.models import ContentNode from contentcuration.models import File from contentcuration.models import TaskResult +from contentcuration.models import UserHistory from contentcuration.tests.base import BaseAPITestCase from contentcuration.tests.base import StudioTestCase from contentcuration.tests.testdata import tree +from contentcuration.utils.db_tools import create_user from contentcuration.utils.garbage_collect import clean_up_contentnodes from contentcuration.utils.garbage_collect import clean_up_deleted_chefs from contentcuration.utils.garbage_collect import clean_up_feature_flags +from contentcuration.utils.garbage_collect import clean_up_soft_deleted_users from contentcuration.utils.garbage_collect import clean_up_stale_files from contentcuration.utils.garbage_collect import clean_up_tasks from contentcuration.utils.garbage_collect import get_deleted_chefs_root @@ -192,6 +196,36 @@ def _create_expired_contentnode(creation_date=THREE_MONTHS_AGO): return c +def _create_expired_deleted_user(email="test@test.com", deletion_date=THREE_MONTHS_AGO): + user = create_user(email, "password", "test", "test") + 
user.delete() + + user_latest_delete_history = UserHistory.objects.filter(user_id=user.id, action=user_history.DELETION).order_by("-performed_at").first() + user_latest_delete_history.performed_at = deletion_date + user_latest_delete_history.save() + return user + + +class CleanUpSoftDeletedExpiredUsersTestCase(StudioTestCase): + def test_cleanup__all_expired_soft_deleted_users(self): + expired_users = [] + for i in range(0, 5): + expired_users.append(_create_expired_deleted_user(email=f"test-{i}@test.com")) + + clean_up_soft_deleted_users() + + for user in expired_users: + assert UserHistory.objects.filter(user_id=user.id, action=user_history.RELATED_DATA_HARD_DELETION).exists() is True + + def test_no_cleanup__unexpired_soft_deleted_users(self): + # TO DO + pass + + def test_no_cleanup__undeleted_users(self): + # TO DO + pass + + class CleanUpContentNodesTestCase(StudioTestCase): def test_delete_all_contentnodes_in_orphanage_tree(self): diff --git a/contentcuration/contentcuration/utils/garbage_collect.py b/contentcuration/contentcuration/utils/garbage_collect.py index 206ad74865..f7444f5f2e 100755 --- a/contentcuration/contentcuration/utils/garbage_collect.py +++ b/contentcuration/contentcuration/utils/garbage_collect.py @@ -17,11 +17,13 @@ from le_utils.constants import content_kinds from contentcuration.constants import feature_flags +from contentcuration.constants import user_history from contentcuration.db.models.functions import JSONObjectKeys from contentcuration.models import ContentNode from contentcuration.models import File from contentcuration.models import TaskResult from contentcuration.models import User +from contentcuration.models import UserHistory class DisablePostDeleteSignal(object): @@ -76,11 +78,13 @@ def clean_up_soft_deleted_users(): - all user invitations. 
""" account_deletion_buffer_delta = now() - datetime.timedelta(days=settings.ACCOUNT_DELETION_BUFFER) - users_to_delete = User.objects.filter(deleted_at__lt=account_deletion_buffer_delta) + deleted_users = User.objects.filter(deleted=True) - for user in users_to_delete: - user.hard_delete_user_related_data() - logging.info("Hard deleted user related data for user {}".format(user.email)) + for user in deleted_users: + latest_deletion_time = UserHistory.objects.filter(user_id=user.id, action=user_history.DELETION).order_by("-performed_at").first() + if latest_deletion_time and latest_deletion_time.performed_at < account_deletion_buffer_delta: + user.hard_delete_user_related_data() + logging.info("Hard deleted user related data for user {}".format(user.email)) def clean_up_deleted_chefs(): From 9ab534b2e87ca2590d9fca0847eaa03dce797d0e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 27 Oct 2022 16:58:24 +0000 Subject: [PATCH 008/555] Bump django-mptt from 0.13.4 to 0.14.0 Bumps [django-mptt](https://github.com/django-mptt/django-mptt) from 0.13.4 to 0.14.0. - [Release notes](https://github.com/django-mptt/django-mptt/releases) - [Changelog](https://github.com/django-mptt/django-mptt/blob/main/CHANGELOG.rst) - [Commits](https://github.com/django-mptt/django-mptt/compare/0.13.4...0.14) --- updated-dependencies: - dependency-name: django-mptt dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- requirements.in | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.in b/requirements.in index 8d4b5eb708..233e2f08ae 100644 --- a/requirements.in +++ b/requirements.in @@ -1,6 +1,6 @@ attrs==19.3.0 django-cte==1.2.1 -django-mptt==0.13.4 +django-mptt==0.14.0 django-filter==22.1 djangorestframework==3.12.4 psycopg2-binary==2.9.5 diff --git a/requirements.txt b/requirements.txt index 8b478194a8..15a1d19bfd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -87,7 +87,7 @@ django-mathfilters==1.0.0 # via -r requirements.in django-model-utils==4.2.0 # via -r requirements.in -django-mptt==0.13.4 +django-mptt==0.14.0 # via -r requirements.in django-postmark==0.1.6 # via -r requirements.in From 17a7ac1ec63ab197697eb1a89155cb5c588ee837 Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Mon, 31 Oct 2022 14:11:05 +0530 Subject: [PATCH 009/555] User cleanup garbage collection tests --- contentcuration/contentcuration/models.py | 1 + .../tests/utils/test_garbage_collect.py | 18 +++++++++++------- .../contentcuration/utils/garbage_collect.py | 17 ++++++++++------- 3 files changed, 22 insertions(+), 14 deletions(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 07aa39f2b1..f6eb908a61 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -200,6 +200,7 @@ class User(AbstractBaseUser, PermissionsMixin): content_defaults = JSONField(default=dict) policies = JSONField(default=dict, null=True) feature_flags = JSONField(default=dict, null=True) + deleted = models.BooleanField(default=False, db_index=True) _field_updates = FieldTracker(fields=[ diff --git a/contentcuration/contentcuration/tests/utils/test_garbage_collect.py b/contentcuration/contentcuration/tests/utils/test_garbage_collect.py index 178bc25656..e4c9941df4 100644 --- 
a/contentcuration/contentcuration/tests/utils/test_garbage_collect.py +++ b/contentcuration/contentcuration/tests/utils/test_garbage_collect.py @@ -196,12 +196,12 @@ def _create_expired_contentnode(creation_date=THREE_MONTHS_AGO): return c -def _create_expired_deleted_user(email="test@test.com", deletion_date=THREE_MONTHS_AGO): +def _create_deleted_user_in_past(deletion_datetime, email="test@test.com"): user = create_user(email, "password", "test", "test") user.delete() user_latest_delete_history = UserHistory.objects.filter(user_id=user.id, action=user_history.DELETION).order_by("-performed_at").first() - user_latest_delete_history.performed_at = deletion_date + user_latest_delete_history.performed_at = deletion_datetime user_latest_delete_history.save() return user @@ -210,7 +210,7 @@ class CleanUpSoftDeletedExpiredUsersTestCase(StudioTestCase): def test_cleanup__all_expired_soft_deleted_users(self): expired_users = [] for i in range(0, 5): - expired_users.append(_create_expired_deleted_user(email=f"test-{i}@test.com")) + expired_users.append(_create_deleted_user_in_past(deletion_datetime=THREE_MONTHS_AGO, email=f"test-{i}@test.com")) clean_up_soft_deleted_users() @@ -218,12 +218,16 @@ def test_cleanup__all_expired_soft_deleted_users(self): assert UserHistory.objects.filter(user_id=user.id, action=user_history.RELATED_DATA_HARD_DELETION).exists() is True def test_no_cleanup__unexpired_soft_deleted_users(self): - # TO DO - pass + two_months_ago = datetime.now() - timedelta(days=63) + user = _create_deleted_user_in_past(deletion_datetime=two_months_ago) + clean_up_soft_deleted_users() + assert UserHistory.objects.filter(user_id=user.id, action=user_history.RELATED_DATA_HARD_DELETION).exists() is False def test_no_cleanup__undeleted_users(self): - # TO DO - pass + user = create_user("test@test.com", "password", "test", "test") + clean_up_soft_deleted_users() + assert user.deleted is False + assert UserHistory.objects.filter(user_id=user.id, 
action=user_history.RELATED_DATA_HARD_DELETION).exists() is False class CleanUpContentNodesTestCase(StudioTestCase): diff --git a/contentcuration/contentcuration/utils/garbage_collect.py b/contentcuration/contentcuration/utils/garbage_collect.py index f7444f5f2e..44e09e4dab 100755 --- a/contentcuration/contentcuration/utils/garbage_collect.py +++ b/contentcuration/contentcuration/utils/garbage_collect.py @@ -8,9 +8,11 @@ from celery import states from django.conf import settings from django.core.files.storage import default_storage +from django.db.models import Subquery from django.db.models.expressions import CombinedExpression from django.db.models.expressions import Exists from django.db.models.expressions import F +from django.db.models.expressions import OuterRef from django.db.models.expressions import Value from django.db.models.signals import post_delete from django.utils.timezone import now @@ -78,13 +80,14 @@ def clean_up_soft_deleted_users(): - all user invitations. """ account_deletion_buffer_delta = now() - datetime.timedelta(days=settings.ACCOUNT_DELETION_BUFFER) - deleted_users = User.objects.filter(deleted=True) - - for user in deleted_users: - latest_deletion_time = UserHistory.objects.filter(user_id=user.id, action=user_history.DELETION).order_by("-performed_at").first() - if latest_deletion_time and latest_deletion_time.performed_at < account_deletion_buffer_delta: - user.hard_delete_user_related_data() - logging.info("Hard deleted user related data for user {}".format(user.email)) + user_latest_deletion_time_subquery = Subquery(UserHistory.objects.filter(user_id=OuterRef( + "id"), action=user_history.DELETION).values("performed_at").order_by("-performed_at")[:1]) + users_to_delete = User.objects.annotate(latest_deletion_time=user_latest_deletion_time_subquery).filter( + deleted=True, latest_deletion_time__lt=account_deletion_buffer_delta) + + for user in users_to_delete: + user.hard_delete_user_related_data() + logging.info("Hard deleted user 
related data for user {}.".format(user.email)) def clean_up_deleted_chefs(): From a974c514cdde4d2a427aba8033825091985a3fcb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Oct 2022 16:07:15 +0000 Subject: [PATCH 010/555] Bump pillow from 9.2.0 to 9.3.0 Bumps [pillow](https://github.com/python-pillow/Pillow) from 9.2.0 to 9.3.0. - [Release notes](https://github.com/python-pillow/Pillow/releases) - [Changelog](https://github.com/python-pillow/Pillow/blob/main/CHANGES.rst) - [Commits](https://github.com/python-pillow/Pillow/compare/9.2.0...9.3.0) --- updated-dependencies: - dependency-name: pillow dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.in | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.in b/requirements.in index 8d4b5eb708..15d1491831 100644 --- a/requirements.in +++ b/requirements.in @@ -36,7 +36,7 @@ future sentry-sdk django-bulk-update html5lib==1.1 -pillow==9.2.0 +pillow==9.3.0 python-dateutil>=2.8.1 jsonschema>=3.2.0 importlib-metadata==1.7.0 diff --git a/requirements.txt b/requirements.txt index 8b478194a8..484592d630 100644 --- a/requirements.txt +++ b/requirements.txt @@ -179,7 +179,7 @@ packaging==20.9 # redis pathlib==1.0.1 # via -r requirements.in -pillow==9.2.0 +pillow==9.3.0 # via -r requirements.in progressbar2==3.55.0 # via -r requirements.in From a13cd878af3349b74e45150e822a1d3414db7a83 Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Tue, 8 Nov 2022 14:28:15 +0530 Subject: [PATCH 011/555] Fixes content_id bug whenever underlying content is changed :tada: --- contentcuration/contentcuration/models.py | 39 +++++++++++++++++++ .../contentcuration/viewsets/file.py | 13 +++++-- 2 files changed, 48 insertions(+), 4 deletions(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 
2b9aea4224..c8190e5601 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -1777,6 +1777,14 @@ def mark_complete(self): # noqa C901 self.complete = not errors return errors + def make_content_id_unique(self): + """ + If there exists a contentnode with the same content_id as the self, + then this updates content_id to a random uuid4. + """ + if ContentNode.objects.exclude(pk=self.pk).filter(content_id=self.content_id).exists(): + ContentNode.objects.filter(pk=self.pk).update(content_id=uuid.uuid4().hex) + def on_create(self): self.changed = True self.recalculate_editors_storage() @@ -2051,6 +2059,28 @@ def filter_view_queryset(cls, queryset, user): return queryset.filter(Q(view=True) | Q(edit=True) | Q(public=True)) + def on_create(self): + """ + When an exercise is added to a contentnode, update its content_id + if it's a copied contentnode. + """ + self.contentnode.make_content_id_unique() + + def on_update(self): + """ + When an exercise is updated of a contentnode, update its content_id + if it's a copied contentnode. + """ + self.contentnode.make_content_id_unique() + + def delete(self, *args, **kwargs): + """ + When an exercise is deleted from a contentnode, update its content_id + if it's a copied contentnode. + """ + self.contentnode.make_content_id_unique() + return super(AssessmentItem, self).delete(*args, **kwargs) + class SlideshowSlide(models.Model): contentnode = models.ForeignKey('ContentNode', related_name="slideshow_slides", blank=True, null=True, @@ -2168,10 +2198,19 @@ def filename(self): return os.path.basename(self.file_on_disk.name) + def update_contentnode_content_id(self): + """ + If the file is attached to a contentnode then update that contentnode's content_id + if it's a copied contentnode. 
+ """ + if self.contentnode: + self.contentnode.make_content_id_unique() + def on_update(self): # since modified was added later as a nullable field to File, we don't use a default but # instead we'll just make sure it's always updated through our serializers self.modified = timezone.now() + self.update_contentnode_content_id() def save(self, set_by_file_on_disk=True, *args, **kwargs): """ diff --git a/contentcuration/contentcuration/viewsets/file.py b/contentcuration/contentcuration/viewsets/file.py index 860645a146..89bab016a9 100644 --- a/contentcuration/contentcuration/viewsets/file.py +++ b/contentcuration/contentcuration/viewsets/file.py @@ -114,18 +114,23 @@ class FileViewSet(BulkDeleteMixin, BulkUpdateMixin, ReadOnlyValuesViewset): def delete_from_changes(self, changes): try: - # reset channel resource size cache + # Reset channel resource size cache. keys = [change["key"] for change in changes] - queryset = self.filter_queryset_from_keys( + files_qs = self.filter_queryset_from_keys( self.get_edit_queryset(), keys ).order_by() - # find all root nodes for files, and reset the cache modified date + # Find all root nodes for files, and reset the cache modified date. root_nodes = ContentNode.objects.filter( parent__isnull=True, - tree_id__in=queryset.values_list('contentnode__tree_id', flat=True).distinct(), + tree_id__in=files_qs.values_list('contentnode__tree_id', flat=True).distinct(), ) for root_node in root_nodes: ResourceSizeCache(root_node).reset_modified(None) + + # Update file's contentnode content_id. 
+ for file in files_qs: + file.update_contentnode_content_id() + except Exception as e: report_exception(e) From eb2c8aad36197146d476e9de51b968105a5ac7e0 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Fri, 11 Nov 2022 07:38:12 -0800 Subject: [PATCH 012/555] Remove logging config moved to global settings --- contentcuration/contentcuration/not_production_settings.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/contentcuration/contentcuration/not_production_settings.py b/contentcuration/contentcuration/not_production_settings.py index 44c5da8bc5..e98410433d 100644 --- a/contentcuration/contentcuration/not_production_settings.py +++ b/contentcuration/contentcuration/not_production_settings.py @@ -1,5 +1,3 @@ -import logging - from .settings import * # noqa ALLOWED_HOSTS = ["studio.local", "192.168.31.9", "127.0.0.1", "*"] @@ -10,7 +8,6 @@ POSTMARK_TEST_MODE = True SITE_ID = 2 -logging.basicConfig(level="INFO") # Allow the debug() context processor to add variables to template context. # Include here the IPs from which a local dev server might be accessed. 
See From 46695996fd6daeb3be0eff1c8790b4aabf9bca9a Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Fri, 11 Nov 2022 07:52:06 -0800 Subject: [PATCH 013/555] Cleanup default details and enhance list format validation --- contentcuration/contentcuration/models.py | 14 +++++++------- .../contentcuration/tests/test_contentnodes.py | 13 +++++++++---- 2 files changed, 16 insertions(+), 11 deletions(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 8c772e46af..7fe911f14f 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -1490,14 +1490,14 @@ def get_details(self, channel_id=None): "resource_size": 0, "includes": {"coach_content": 0, "exercises": 0}, "kind_count": [], - "languages": "", - "accessible_languages": "", - "licenses": "", + "languages": [], + "accessible_languages": [], + "licenses": [], "tags": [], - "copyright_holders": "", - "authors": "", - "aggregators": "", - "providers": "", + "copyright_holders": [], + "authors": [], + "aggregators": [], + "providers": [], "sample_pathway": [], "original_channels": [], "sample_nodes": [], diff --git a/contentcuration/contentcuration/tests/test_contentnodes.py b/contentcuration/contentcuration/tests/test_contentnodes.py index 25569f8988..dd12dcba45 100644 --- a/contentcuration/contentcuration/tests/test_contentnodes.py +++ b/contentcuration/contentcuration/tests/test_contentnodes.py @@ -184,10 +184,15 @@ def test_get_node_details(self): assert details["resource_count"] > 0 assert details["resource_size"] > 0 assert len(details["kind_count"]) > 0 - assert len(details["authors"]) == len([author for author in details["authors"] if author]) - assert len(details["aggregators"]) == len([aggregator for aggregator in details["aggregators"] if aggregator]) - assert len(details["providers"]) == len([provider for provider in details["providers"] if provider]) - assert len(details["copyright_holders"]) == len([holder for 
holder in details["copyright_holders"] if holder]) + + # assert format of list fields, including that they do not contain invalid data + list_fields = [ + "kind_count", "languages", "accessible_languages", "licenses", "tags", "original_channels", + "authors", "aggregators", "providers", "copyright_holders" + ] + for field in list_fields: + self.assertIsInstance(details.get(field), list, f"Field '{field}' isn't a list") + self.assertEqual(len(details[field]), len([value for value in details[field] if value]), f"List field '{field}' has falsy values") class NodeOperationsTestCase(StudioTestCase): From 3d52560c506c3a2bc3d26721cee89cc3b883587d Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Wed, 16 Nov 2022 01:21:09 +0530 Subject: [PATCH 014/555] New content_id update mechanism - update content_id only of copied nodes - update content_id of nodes on channel syncing - fix sync of assessment items bug wherein assessment items were getting synced based on tags attribute --- contentcuration/contentcuration/models.py | 14 ++++++++------ contentcuration/contentcuration/utils/sync.py | 11 ++++++++++- .../contentcuration/viewsets/channel.py | 2 +- 3 files changed, 19 insertions(+), 8 deletions(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 7944c0513f..67fc6730b4 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -1780,10 +1780,12 @@ def mark_complete(self): # noqa C901 def make_content_id_unique(self): """ - If there exists a contentnode with the same content_id as the self, - then this updates content_id to a random uuid4. + If self is NOT an original contentnode (in other words, a copied contentnode) + and a contentnode with same content_id exists then we update self's content_id. 
""" - if ContentNode.objects.exclude(pk=self.pk).filter(content_id=self.content_id).exists(): + is_node_original = self.original_source_node_id is None or self.original_source_node_id == self.node_id + does_same_content_exists = ContentNode.objects.exclude(pk=self.pk).filter(content_id=self.content_id).exists() + if (not is_node_original) and does_same_content_exists: ContentNode.objects.filter(pk=self.pk).update(content_id=uuid.uuid4().hex) def on_create(self): @@ -2201,10 +2203,10 @@ def filename(self): def update_contentnode_content_id(self): """ - If the file is attached to a contentnode then update that contentnode's content_id - if it's a copied contentnode. + If the file is attached to a contentnode and is not a thumbnail + then update that contentnode's content_id if it's a copied contentnode. """ - if self.contentnode: + if self.contentnode and self.preset.thumbnail is False: self.contentnode.make_content_id_unique() def on_update(self): diff --git a/contentcuration/contentcuration/utils/sync.py b/contentcuration/contentcuration/utils/sync.py index c42be1d99d..47c334a9dc 100644 --- a/contentcuration/contentcuration/utils/sync.py +++ b/contentcuration/contentcuration/utils/sync.py @@ -106,7 +106,7 @@ def sync_node_tags(node, original): node.changed = True -def sync_node_files(node, original): +def sync_node_files(node, original): # noqa C901 """ Sync all files in ``node`` from the files in ``original`` node. """ @@ -118,6 +118,11 @@ def sync_node_files(node, original): else: file_key = file.preset_id node_files[file_key] = file + # If node has any non-thumbnail file then it means the node + # is an uploaded file. So, we equalize the content_id with the original. 
+ if file.preset.thumbnail is False: + node.content_id = original.content_id + node.changed = True source_files = {} @@ -218,3 +223,7 @@ def sync_node_assessment_items(node, original): # noqa C901 if files_to_create: File.objects.bulk_create(files_to_create) node.changed = True + # Now, node and its original have same content so + # let us equalize its content_id. + node.content_id = original.content_id + node.changed = True diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py index c028405130..8a11dea614 100644 --- a/contentcuration/contentcuration/viewsets/channel.py +++ b/contentcuration/contentcuration/viewsets/channel.py @@ -545,7 +545,7 @@ def sync(self, pk, attributes=False, tags=False, files=False, assessment_items=F attributes, tags, files, - tags, + assessment_items, progress_tracker=progress_tracker, ) From f57811736f63ac5030a52b834d91402f57c2528d Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Wed, 16 Nov 2022 15:04:30 +0530 Subject: [PATCH 015/555] Fixes content_id behaviour on syncing --- contentcuration/contentcuration/utils/sync.py | 20 +++++++++++++------ 1 file changed, 14 insertions(+), 6 deletions(-) diff --git a/contentcuration/contentcuration/utils/sync.py b/contentcuration/contentcuration/utils/sync.py index 47c334a9dc..02f44ad3d1 100644 --- a/contentcuration/contentcuration/utils/sync.py +++ b/contentcuration/contentcuration/utils/sync.py @@ -106,11 +106,12 @@ def sync_node_tags(node, original): node.changed = True -def sync_node_files(node, original): # noqa C901 +def sync_node_files(node, original): # noqa C901 """ Sync all files in ``node`` from the files in ``original`` node. 
""" node_files = {} + is_node_uploaded_file = False for file in node.files.all(): if file.preset_id == format_presets.VIDEO_SUBTITLE: @@ -119,10 +120,9 @@ def sync_node_files(node, original): # noqa C901 file_key = file.preset_id node_files[file_key] = file # If node has any non-thumbnail file then it means the node - # is an uploaded file. So, we equalize the content_id with the original. + # is an uploaded file. if file.preset.thumbnail is False: - node.content_id = original.content_id - node.changed = True + is_node_uploaded_file = True source_files = {} @@ -132,6 +132,10 @@ def sync_node_files(node, original): # noqa C901 else: file_key = file.preset_id source_files[file_key] = file + # If node has any non-thumbnail file then it means the node + # is an uploaded file. + if file.preset.thumbnail is False: + is_node_uploaded_file = True files_to_delete = [] files_to_create = [] @@ -153,6 +157,9 @@ def sync_node_files(node, original): # noqa C901 if files_to_create: File.objects.bulk_create(files_to_create) + if node.changed and is_node_uploaded_file: + node.content_id = original.content_id + assessment_item_fields = ( "type", @@ -223,7 +230,8 @@ def sync_node_assessment_items(node, original): # noqa C901 if files_to_create: File.objects.bulk_create(files_to_create) node.changed = True + # Now, node and its original have same content so # let us equalize its content_id. - node.content_id = original.content_id - node.changed = True + if node.changed: + node.content_id = original.content_id From 4c9bcd423c533c96e5599f135aa98638c2671b1a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Nov 2022 12:56:14 +0000 Subject: [PATCH 016/555] Bump loader-utils from 1.4.1 to 1.4.2 Bumps [loader-utils](https://github.com/webpack/loader-utils) from 1.4.1 to 1.4.2. 
- [Release notes](https://github.com/webpack/loader-utils/releases) - [Changelog](https://github.com/webpack/loader-utils/blob/v1.4.2/CHANGELOG.md) - [Commits](https://github.com/webpack/loader-utils/compare/v1.4.1...v1.4.2) --- updated-dependencies: - dependency-name: loader-utils dependency-type: indirect ... Signed-off-by: dependabot[bot] --- yarn.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/yarn.lock b/yarn.lock index f42a49c523..0c074104d7 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8201,9 +8201,9 @@ loader-runner@^4.2.0: integrity sha512-3R/1M+yS3j5ou80Me59j7F9IMs4PXs3VqRrm0TU3AbKPxlmpoY1TNscJV/oGJXo8qCatFGTfDbY6W6ipGOYXfg== loader-utils@^1.0.2, loader-utils@^1.1.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.1.tgz#278ad7006660bccc4d2c0c1578e17c5c78d5c0e0" - integrity sha512-1Qo97Y2oKaU+Ro2xnDMR26g1BwMT29jNbem1EvcujW2jqt+j5COXyscjM7bLQkM9HaxI7pkWeW7gnI072yMI9Q== + version "1.4.2" + resolved "https://registry.yarnpkg.com/loader-utils/-/loader-utils-1.4.2.tgz#29a957f3a63973883eb684f10ffd3d151fec01a3" + integrity sha512-I5d00Pd/jwMD2QCduo657+YM/6L3KZu++pmX9VFncxaxvHcru9jx1lBaFft+r4Mt2jK0Yhp41XlRAihzPxHNCg== dependencies: big.js "^5.2.2" emojis-list "^3.0.0" From 3cb566dc07771ebebe7f0299677b3da75b215054 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Thu, 17 Nov 2022 08:50:48 -0800 Subject: [PATCH 017/555] Fix missing information on pending task results --- .../contentcuration/tests/test_asynctask.py | 24 +++++++++---------- .../contentcuration/utils/celery/tasks.py | 7 +++++- 2 files changed, 18 insertions(+), 13 deletions(-) diff --git a/contentcuration/contentcuration/tests/test_asynctask.py b/contentcuration/contentcuration/tests/test_asynctask.py index e09219f98d..58017559ee 100644 --- a/contentcuration/contentcuration/tests/test_asynctask.py +++ b/contentcuration/contentcuration/tests/test_asynctask.py @@ -1,8 +1,6 @@ from __future__ import absolute_import -import json import 
threading -import uuid from celery import states from celery.result import allow_join_result @@ -191,17 +189,19 @@ def test_only_create_async_task_creates_task_entry(self): self.assertEquals(result, 42) self.assertEquals(TaskResult.objects.filter(task_id=async_result.task_id).count(), 0) - def test_fetch_or_enqueue_task(self): - expected_task = TaskResult.objects.create( - task_id=uuid.uuid4().hex, - task_name=test_task.name, - status=states.PENDING, - user=self.user, - task_kwargs=json.dumps({ - "is_test": True - }), - ) + def test_enqueue_task_adds_result_with_necessary_info(self): + async_result = test_task.enqueue(self.user, is_test=True) + try: + task_result = TaskResult.objects.get(task_id=async_result.task_id) + except TaskResult.DoesNotExist: + self.fail('Missing task result') + self.assertEqual(task_result.task_name, test_task.name) + _, _, encoded_kwargs = test_task.backend.encode_content(dict(is_test=True)) + self.assertEqual(task_result.task_kwargs, encoded_kwargs) + + def test_fetch_or_enqueue_task(self): + expected_task = test_task.enqueue(self.user, is_test=True) async_result = test_task.fetch_or_enqueue(self.user, is_test=True) self.assertEqual(expected_task.task_id, async_result.task_id) diff --git a/contentcuration/contentcuration/utils/celery/tasks.py b/contentcuration/contentcuration/utils/celery/tasks.py index 3eb83d4180..a62d5b10ed 100644 --- a/contentcuration/contentcuration/utils/celery/tasks.py +++ b/contentcuration/contentcuration/utils/celery/tasks.py @@ -3,6 +3,7 @@ import uuid from celery import states +from celery.app.task import Context from celery.app.task import Task from celery.result import AsyncResult @@ -185,8 +186,12 @@ def enqueue(self, user, **kwargs): task_id=task_id, kwargs=kwargs, ) + # ensure the result is saved to the backend (database) - self.backend.add_pending_result(async_result) + request_context = Context(id=task_id, kwargs=kwargs) + request_context.task = self.name + self.backend.store_result(task_id, None, 
states.PENDING, request=request_context) + # after calling apply, we should have task result model, so get it and set our custom fields task_result = get_task_model(self, task_id) task_result.user = user From 79963abd9d1075287ca3de8c0ff2dbd0fee41985 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Thu, 17 Nov 2022 09:30:17 -0800 Subject: [PATCH 018/555] Revert back to add_pending_result, and manually set necessary info --- contentcuration/contentcuration/utils/celery/tasks.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/contentcuration/contentcuration/utils/celery/tasks.py b/contentcuration/contentcuration/utils/celery/tasks.py index a62d5b10ed..c006a79c98 100644 --- a/contentcuration/contentcuration/utils/celery/tasks.py +++ b/contentcuration/contentcuration/utils/celery/tasks.py @@ -3,7 +3,6 @@ import uuid from celery import states -from celery.app.task import Context from celery.app.task import Task from celery.result import AsyncResult @@ -188,12 +187,12 @@ def enqueue(self, user, **kwargs): ) # ensure the result is saved to the backend (database) - request_context = Context(id=task_id, kwargs=kwargs) - request_context.task = self.name - self.backend.store_result(task_id, None, states.PENDING, request=request_context) + self.backend.add_pending_result(async_result) # after calling apply, we should have task result model, so get it and set our custom fields task_result = get_task_model(self, task_id) + task_result.task_name = self.name + task_result.task_kwargs = self.backend.encode_content(kwargs)[2] task_result.user = user task_result.channel_id = channel_id task_result.save() From 1fb3278fcc79bb9c71ca7ccad19ee7681acbf7d9 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Thu, 17 Nov 2022 13:25:58 -0800 Subject: [PATCH 019/555] Fix issue comparing kwargs when it contains objects that will be serialized --- contentcuration/contentcuration/tests/test_asynctask.py | 7 +++++++ contentcuration/contentcuration/utils/celery/tasks.py | 5 
+++-- 2 files changed, 10 insertions(+), 2 deletions(-) diff --git a/contentcuration/contentcuration/tests/test_asynctask.py b/contentcuration/contentcuration/tests/test_asynctask.py index 58017559ee..681833913e 100644 --- a/contentcuration/contentcuration/tests/test_asynctask.py +++ b/contentcuration/contentcuration/tests/test_asynctask.py @@ -1,6 +1,7 @@ from __future__ import absolute_import import threading +import uuid from celery import states from celery.result import allow_join_result @@ -205,6 +206,12 @@ def test_fetch_or_enqueue_task(self): async_result = test_task.fetch_or_enqueue(self.user, is_test=True) self.assertEqual(expected_task.task_id, async_result.task_id) + def test_fetch_or_enqueue_task__channel_id(self): + channel_id = uuid.uuid4() + expected_task = test_task.enqueue(self.user, channel_id=channel_id) + async_result = test_task.fetch_or_enqueue(self.user, channel_id=channel_id) + self.assertEqual(expected_task.task_id, async_result.task_id) + def test_requeue_task(self): existing_task_ids = requeue_test_task.find_ids() self.assertEqual(len(existing_task_ids), 0) diff --git a/contentcuration/contentcuration/utils/celery/tasks.py b/contentcuration/contentcuration/utils/celery/tasks.py index c006a79c98..390b8a7b4f 100644 --- a/contentcuration/contentcuration/utils/celery/tasks.py +++ b/contentcuration/contentcuration/utils/celery/tasks.py @@ -156,7 +156,8 @@ def fetch_match(self, task_id, **kwargs): """ async_result = self.fetch(task_id) # the task kwargs are serialized in the DB so just ensure that args actually match - if async_result.kwargs == kwargs: + transcoded_kwargs = self.backend.decode(self.backend.encode(kwargs)) + if async_result.kwargs == transcoded_kwargs: return async_result return None @@ -192,7 +193,7 @@ def enqueue(self, user, **kwargs): # after calling apply, we should have task result model, so get it and set our custom fields task_result = get_task_model(self, task_id) task_result.task_name = self.name - 
task_result.task_kwargs = self.backend.encode_content(kwargs)[2] + task_result.task_kwargs = self.backend.encode(kwargs) task_result.user = user task_result.channel_id = channel_id task_result.save() From 3dbc390f9e988af750a62de2f1dd1c92559674bd Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Thu, 17 Nov 2022 14:10:44 -0800 Subject: [PATCH 020/555] Fix other edge cases-- uuid be damned --- .../contentcuration/tests/test_asynctask.py | 18 ++++++++++++ .../contentcuration/utils/celery/tasks.py | 29 ++++++++++++------- 2 files changed, 37 insertions(+), 10 deletions(-) diff --git a/contentcuration/contentcuration/tests/test_asynctask.py b/contentcuration/contentcuration/tests/test_asynctask.py index 681833913e..85b40b6237 100644 --- a/contentcuration/contentcuration/tests/test_asynctask.py +++ b/contentcuration/contentcuration/tests/test_asynctask.py @@ -212,6 +212,24 @@ def test_fetch_or_enqueue_task__channel_id(self): async_result = test_task.fetch_or_enqueue(self.user, channel_id=channel_id) self.assertEqual(expected_task.task_id, async_result.task_id) + def test_fetch_or_enqueue_task__channel_id__hex(self): + channel_id = uuid.uuid4() + expected_task = test_task.enqueue(self.user, channel_id=channel_id.hex) + async_result = test_task.fetch_or_enqueue(self.user, channel_id=channel_id.hex) + self.assertEqual(expected_task.task_id, async_result.task_id) + + def test_fetch_or_enqueue_task__channel_id__hex_then_uuid(self): + channel_id = uuid.uuid4() + expected_task = test_task.enqueue(self.user, channel_id=channel_id.hex) + async_result = test_task.fetch_or_enqueue(self.user, channel_id=channel_id) + self.assertEqual(expected_task.task_id, async_result.task_id) + + def test_fetch_or_enqueue_task__channel_id__uuid_then_hex(self): + channel_id = uuid.uuid4() + expected_task = test_task.enqueue(self.user, channel_id=channel_id) + async_result = test_task.fetch_or_enqueue(self.user, channel_id=channel_id.hex) + self.assertEqual(expected_task.task_id, 
async_result.task_id) + def test_requeue_task(self): existing_task_ids = requeue_test_task.find_ids() self.assertEqual(len(existing_task_ids), 0) diff --git a/contentcuration/contentcuration/utils/celery/tasks.py b/contentcuration/contentcuration/utils/celery/tasks.py index 390b8a7b4f..0d066d3329 100644 --- a/contentcuration/contentcuration/utils/celery/tasks.py +++ b/contentcuration/contentcuration/utils/celery/tasks.py @@ -146,9 +146,10 @@ def fetch(self, task_id): """ return self.AsyncResult(task_id) - def fetch_match(self, task_id, **kwargs): + def _fetch_match(self, task_id, **kwargs): """ - Gets the result object for a task, assuming it was called async, and ensures it was called with kwargs + Gets the result object for a task, assuming it was called async, and ensures it was called with kwargs and + assumes that kwargs is has been decoded from an prepared form :param task_id: The hex task ID :param kwargs: The kwargs the task was called with, which must match when fetching :return: A CeleryAsyncResult @@ -156,11 +157,16 @@ def fetch_match(self, task_id, **kwargs): """ async_result = self.fetch(task_id) # the task kwargs are serialized in the DB so just ensure that args actually match - transcoded_kwargs = self.backend.decode(self.backend.encode(kwargs)) - if async_result.kwargs == transcoded_kwargs: + if async_result.kwargs == kwargs: return async_result return None + def _prepare_kwargs(self, kwargs): + return self.backend.encode({ + key: value.hex if isinstance(value, uuid.UUID) else value + for key, value in kwargs.items() + }) + def enqueue(self, user, **kwargs): """ Enqueues the task called with `kwargs`, and requires the user who wants to enqueue it. 
If `channel_id` is @@ -177,14 +183,16 @@ def enqueue(self, user, **kwargs): raise TypeError("All tasks must be assigned to a user.") task_id = uuid.uuid4().hex - channel_id = kwargs.get("channel_id") + prepared_kwargs = self._prepare_kwargs(kwargs) + transcoded_kwargs = self.backend.decode(prepared_kwargs) + channel_id = transcoded_kwargs.get("channel_id") - logging.info(f"Enqueuing task:id {self.name}:{task_id} for user:channel {user.pk}:{channel_id} | {kwargs}") + logging.info(f"Enqueuing task:id {self.name}:{task_id} for user:channel {user.pk}:{channel_id} | {prepared_kwargs}") # returns a CeleryAsyncResult async_result = self.apply_async( task_id=task_id, - kwargs=kwargs, + kwargs=transcoded_kwargs, ) # ensure the result is saved to the backend (database) @@ -193,7 +201,7 @@ def enqueue(self, user, **kwargs): # after calling apply, we should have task result model, so get it and set our custom fields task_result = get_task_model(self, task_id) task_result.task_name = self.name - task_result.task_kwargs = self.backend.encode(kwargs) + task_result.task_kwargs = prepared_kwargs task_result.user = user task_result.channel_id = channel_id task_result.save() @@ -212,9 +220,10 @@ def fetch_or_enqueue(self, user, **kwargs): # if we're eagerly executing the task (synchronously), then we shouldn't check for an existing task because # implementations probably aren't prepared to rely on an existing asynchronous task if not self.app.conf.task_always_eager: - task_ids = self.find_incomplete_ids(**kwargs).order_by("date_created")[:1] + transcoded_kwargs = self.backend.decode(self._prepare_kwargs(kwargs)) + task_ids = self.find_incomplete_ids(**transcoded_kwargs).order_by("date_created")[:1] if task_ids: - async_result = self.fetch_match(task_ids[0], **kwargs) + async_result = self._fetch_match(task_ids[0], **transcoded_kwargs) if async_result: logging.info(f"Fetched matching task {self.name} for user {user.pk} with id {async_result.id} | {kwargs}") return async_result From 
c18c35377652250e3ddc57a59d67eaca8c06cb49 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Thu, 17 Nov 2022 15:22:02 -0800 Subject: [PATCH 021/555] Allow exercises with raw_data set to be complete. --- contentcuration/contentcuration/models.py | 15 +++++++++------ .../contentcuration/tests/test_contentnodes.py | 9 +++++++++ 2 files changed, 18 insertions(+), 6 deletions(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 121bea10dc..af0578bf37 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -1756,12 +1756,15 @@ def mark_complete(self): # noqa C901 if self.kind_id == content_kinds.EXERCISE: # Check to see if the exercise has at least one assessment item that has: if not self.assessment_items.filter( - # A non-blank question - ~Q(question='') - # Non-blank answers - & ~Q(answers='[]') - # With either an input question or one answer marked as correct - & (Q(type=exercises.INPUT_QUESTION) | Q(answers__iregex=r'"correct":\s*true')) + # Item with non-blank raw data + ~Q(raw_data="") | ( + # A non-blank question + ~Q(question='') + # Non-blank answers + & ~Q(answers='[]') + # With either an input question or one answer marked as correct + & (Q(type=exercises.INPUT_QUESTION) | Q(answers__iregex=r'"correct":\s*true')) + ) ).exists(): errors.append("No questions with question text and complete answers") # Check that it has a mastery model set diff --git a/contentcuration/contentcuration/tests/test_contentnodes.py b/contentcuration/contentcuration/tests/test_contentnodes.py index 25569f8988..06864ecd3f 100644 --- a/contentcuration/contentcuration/tests/test_contentnodes.py +++ b/contentcuration/contentcuration/tests/test_contentnodes.py @@ -1061,6 +1061,15 @@ def test_create_exercise_valid_assessment_items(self): new_obj.mark_complete() self.assertTrue(new_obj.complete) + def test_create_exercise_valid_assessment_items_raw_data(self): + licenses = 
list(License.objects.filter(copyright_holder_required=False, is_custom=False).values_list("pk", flat=True)) + channel = testdata.channel() + new_obj = ContentNode(title="yes", kind_id=content_kinds.EXERCISE, parent=channel.main_tree, license_id=licenses[0], extra_fields=self.new_extra_fields) + new_obj.save() + AssessmentItem.objects.create(contentnode=new_obj, raw_data="{\"question\": {}}") + new_obj.mark_complete() + self.assertTrue(new_obj.complete) + def test_create_exercise_no_extra_fields(self): licenses = list(License.objects.filter(copyright_holder_required=False, is_custom=False).values_list("pk", flat=True)) channel = testdata.channel() From 211ec2a413e6ae1cbc5b734f7d069405f2b1948f Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Sat, 19 Nov 2022 17:38:19 +0530 Subject: [PATCH 022/555] Content ID testcases for exercises and files --- .../tests/viewsets/test_assessmentitem.py | 158 +++++++++++++++++- .../tests/viewsets/test_file.py | 142 ++++++++++++++++ 2 files changed, 297 insertions(+), 3 deletions(-) diff --git a/contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py b/contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py index e6370e7a49..e1ffa0f9d3 100644 --- a/contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py +++ b/contentcuration/contentcuration/tests/viewsets/test_assessmentitem.py @@ -262,12 +262,12 @@ def test_attempt_update_missing_assessmentitem(self): response = self.sync_changes( [ generate_update_event([ - self.channel.main_tree.get_descendants() + self.channel.main_tree.get_descendants() .filter(kind_id=content_kinds.EXERCISE) .first() .id, - uuid.uuid4().hex - ], + uuid.uuid4().hex + ], ASSESSMENTITEM, {"question": "but why is it missing in the first place?"}, channel_id=self.channel.id, @@ -736,3 +736,155 @@ def test_delete_assessmentitem(self): self.fail("AssessmentItem was not deleted") except models.AssessmentItem.DoesNotExist: pass + + +class ContentIDTestCase(SyncTestMixin, 
StudioAPITestCase): + def setUp(self): + super(ContentIDTestCase, self).setUp() + self.channel = testdata.channel() + self.user = testdata.user() + self.channel.editors.add(self.user) + self.client.force_authenticate(user=self.user) + + def _get_assessmentitem_metadata(self, assessment_id=None, contentnode_id=None): + return { + "assessment_id": assessment_id or uuid.uuid4().hex, + "contentnode_id": contentnode_id or self.channel.main_tree.get_descendants() + .filter(kind_id=content_kinds.EXERCISE) + .first() + .id, + } + + def _create_assessmentitem(self, assessmentitem): + self.sync_changes( + [ + generate_create_event( + [assessmentitem["contentnode_id"], assessmentitem["assessment_id"]], + ASSESSMENTITEM, + assessmentitem, + channel_id=self.channel.id, + ) + ], + ) + + def _update_assessmentitem(self, assessmentitem, update_dict): + self.sync_changes( + [ + generate_update_event( + [assessmentitem["contentnode_id"], assessmentitem["assessment_id"]], + ASSESSMENTITEM, + update_dict, + channel_id=self.channel.id, + ) + ], + ) + + def _delete_assessmentitem(self, assessmentitem): + self.sync_changes( + [ + generate_delete_event( + [assessmentitem["contentnode_id"], assessmentitem["assessment_id"]], + ASSESSMENTITEM, + channel_id=self.channel.id, + ) + ], + ) + + def test_content_id__same_on_copy(self): + # Make a copy of an existing assessmentitem contentnode. + assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node_copy = assessmentitem_node.copy_to(target=self.channel.main_tree) + + # Assert after copying content_id is same. + assessmentitem_node.refresh_from_db() + assessmentitem_node_copy.refresh_from_db() + self.assertEqual(assessmentitem_node.content_id, assessmentitem_node_copy.content_id) + + def test_content_id__changes_on_new_assessmentitem(self): + # Make a copy of an existing assessmentitem contentnode. 
+ assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node_copy = assessmentitem_node.copy_to(target=self.channel.main_tree) + + # Create a new assessmentitem. + self._create_assessmentitem(self._get_assessmentitem_metadata(contentnode_id=assessmentitem_node_copy.id)) + + # Assert after creating a new assessmentitem on copied node, it's content_id should change. + assessmentitem_node.refresh_from_db() + assessmentitem_node_copy.refresh_from_db() + self.assertNotEqual(assessmentitem_node.content_id, assessmentitem_node_copy.content_id) + + def test_content_id__changes_on_deleting_assessmentitem(self): + # Make a copy of an existing assessmentitem contentnode. + assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node_copy = assessmentitem_node.copy_to(target=self.channel.main_tree) + + # Delete an already present assessmentitem from copied contentnode. + assessmentitem_from_db = models.AssessmentItem.objects.filter(contentnode=assessmentitem_node_copy.id).first() + self._delete_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node_copy.id)) + + # Assert after deleting assessmentitem on copied node, it's content_id should change. + assessmentitem_node.refresh_from_db() + assessmentitem_node_copy.refresh_from_db() + self.assertNotEqual(assessmentitem_node.content_id, assessmentitem_node_copy.content_id) + + def test_content_id__changes_on_updating_assessmentitem(self): + # Make a copy of an existing assessmentitem contentnode. + assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node_copy = assessmentitem_node.copy_to(target=self.channel.main_tree) + + # Update an already present assessmentitem from copied contentnode. 
+ assessmentitem_from_db = models.AssessmentItem.objects.filter(contentnode=assessmentitem_node_copy.id).first() + self._update_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node_copy.id), + {"question": "New Question!"}) + + # Assert after updating assessmentitem on copied node, it's content_id should change. + assessmentitem_node.refresh_from_db() + assessmentitem_node_copy.refresh_from_db() + self.assertNotEqual(assessmentitem_node.content_id, assessmentitem_node_copy.content_id) + + def test_content_id__doesnot_changes_of_original_node(self): + # Make a copy of an existing assessmentitem contentnode. + assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node.copy_to(target=self.channel.main_tree) + + content_id_before_updates = assessmentitem_node.content_id + + # Create, update and delete assessmentitems from original contentnode. + assessmentitem_from_db = models.AssessmentItem.objects.filter(contentnode=assessmentitem_node.id).first() + self._update_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node.id), + {"question": "New Question!"}) + self._delete_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node.id)) + self._create_assessmentitem(self._get_assessmentitem_metadata(contentnode_id=assessmentitem_node.id)) + + # Assert content_id before and after updates remain same. + assessmentitem_node.refresh_from_db() + content_id_after_updates = assessmentitem_node.content_id + self.assertEqual(content_id_before_updates, content_id_after_updates) + + def test_content_id__doesnot_changes_if_already_unique(self): + # Make a copy of an existing assessmentitem contentnode. 
+ assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node_copy = assessmentitem_node.copy_to(target=self.channel.main_tree) + + # Create, update and delete assessmentitems of copied contentnode. + assessmentitem_from_db = models.AssessmentItem.objects.filter(contentnode=assessmentitem_node_copy.id).first() + self._update_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node_copy.id), + {"question": "New Question!"}) + self._delete_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node_copy.id)) + self._create_assessmentitem(self._get_assessmentitem_metadata(contentnode_id=assessmentitem_node_copy.id)) + + assessmentitem_node_copy.refresh_from_db() + content_id_after_first_update = assessmentitem_node_copy.content_id + + # Once again, let us create, update and delete assessmentitems of copied contentnode. + assessmentitem_from_db = models.AssessmentItem.objects.filter(contentnode=assessmentitem_node_copy.id).first() + self._update_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node_copy.id), + {"question": "New Question!"}) + self._delete_assessmentitem(self._get_assessmentitem_metadata(assessmentitem_from_db.assessment_id, assessmentitem_node_copy.id)) + self._create_assessmentitem(self._get_assessmentitem_metadata(contentnode_id=assessmentitem_node_copy.id)) + + assessmentitem_node_copy.refresh_from_db() + content_id_after_second_update = assessmentitem_node_copy.content_id + + # Assert after first and second updates of assessmentitem content_id remains same. 
+ self.assertEqual(content_id_after_first_update, content_id_after_second_update) diff --git a/contentcuration/contentcuration/tests/viewsets/test_file.py b/contentcuration/contentcuration/tests/viewsets/test_file.py index 0fa5f146a6..23aa17adc2 100644 --- a/contentcuration/contentcuration/tests/viewsets/test_file.py +++ b/contentcuration/contentcuration/tests/viewsets/test_file.py @@ -443,3 +443,145 @@ def test_upload_url(self): self.assertEqual(response.status_code, 200) file = models.File.objects.get(checksum=self.file["checksum"]) self.assertEqual(10, file.duration) + + def test_upload_url_doesnot_sets_contentnode(self): + self.client.force_authenticate(user=self.user) + response = self.client.post(reverse("file-upload-url"), self.file, format="json",) + file = models.File.objects.get(checksum=self.file["checksum"]) + self.assertEqual(response.status_code, 200) + self.assertEqual(file.contentnode, None) + + +class ContentIDTestCase(SyncTestMixin, StudioAPITestCase): + def setUp(self): + super(ContentIDTestCase, self).setUp() + self.channel = testdata.channel() + self.user = testdata.user() + self.channel.editors.add(self.user) + self.client.force_authenticate(user=self.user) + + def _get_file_metadata(self): + return { + "size": 2500, + "checksum": uuid.uuid4().hex, + "name": "le_studio_file", + "file_format": file_formats.MP3, + "preset": format_presets.AUDIO, + } + + def _upload_file_to_contentnode(self, file_metadata=None, contentnode_id=None): + """ + This method mimics the frontend file upload process which is a two-step + process for the backend. + First, file's upload URL is fetched and then that file's ORM object is updated + to point to the contentnode. 
+ """ + file = file_metadata or self._get_file_metadata() + self.client.post(reverse("file-upload-url"), file, format="json",) + file_from_db = models.File.objects.get(checksum=file["checksum"]) + self.sync_changes( + [generate_update_event( + file_from_db.id, + FILE, + { + "contentnode": contentnode_id or self.channel.main_tree.get_descendants().first().id + }, + channel_id=self.channel.id)],) + file_from_db.refresh_from_db() + return file_from_db + + def _delete_file_from_contentnode(self, file_from_db): + self.sync_changes( + [ + generate_delete_event(file_from_db.id, FILE, channel_id=self.channel.id), + ], + ) + + def test_content_id__same_on_copy_file_node(self): + file = self._upload_file_to_contentnode() + file_contentnode_copy = file.contentnode.copy_to(target=self.channel.main_tree) + + # Assert content_id same after copying. + file.contentnode.refresh_from_db() + file_contentnode_copy.refresh_from_db() + self.assertEqual(file.contentnode.content_id, file_contentnode_copy.content_id) + + def test_content_id__changes_on_upload_file_to_node(self): + file = self._upload_file_to_contentnode() + file_contentnode_copy = file.contentnode.copy_to(target=self.channel.main_tree) + + # Upload a new file to the copied contentnode. + self._upload_file_to_contentnode(contentnode_id=file_contentnode_copy.id) + + # Assert after new file upload, content_id changes. + file.contentnode.refresh_from_db() + file_contentnode_copy.refresh_from_db() + self.assertNotEqual(file.contentnode.content_id, file_contentnode_copy.content_id) + + def test_content_id__changes_on_delete_file_from_node(self): + file = self._upload_file_to_contentnode() + file_contentnode_copy = file.contentnode.copy_to(target=self.channel.main_tree) + + # Delete file from the copied contentnode. + self._delete_file_from_contentnode(file_from_db=file_contentnode_copy.files.first()) + + # Assert after deleting file, content_id changes. 
+ file.contentnode.refresh_from_db() + file_contentnode_copy.refresh_from_db() + self.assertNotEqual(file.contentnode.content_id, file_contentnode_copy.content_id) + + def test_content_id__doesnot_changes_on_update_original_file_node(self): + file = self._upload_file_to_contentnode() + file.contentnode.copy_to(target=self.channel.main_tree) + + # Upload and delete file from the original contentnode. + content_id_before_updates = file.contentnode.content_id + self._upload_file_to_contentnode(contentnode_id=file.contentnode.id) + self._delete_file_from_contentnode(file_from_db=file) + + # Assert after changes to original contentnode, content_id remains same. + file.contentnode.refresh_from_db() + content_id_after_updates = file.contentnode.content_id + self.assertEqual(content_id_before_updates, content_id_after_updates) + + def test_content_id__doesnot_update_if_unique(self): + file = self._upload_file_to_contentnode() + file_contentnode_copy = file.contentnode.copy_to(target=self.channel.main_tree) + + # Upload a new file to the copied contentnode. + self._upload_file_to_contentnode(contentnode_id=file_contentnode_copy.id) + file_contentnode_copy.refresh_from_db() + content_id_after_first_update = file_contentnode_copy.content_id + + # Upload another new file to the copied contentnode. At this point, + # the content_id of copied node is already unique so it should not be updated. 
+ self._upload_file_to_contentnode(contentnode_id=file_contentnode_copy.id) + file_contentnode_copy.refresh_from_db() + content_id_after_second_update = file_contentnode_copy.content_id + + self.assertEqual(content_id_after_first_update, content_id_after_second_update) + + def test_content_id__thumbnails_dont_update_content_id(self): + file = self._upload_file_to_contentnode() + file_contentnode_copy = file.contentnode.copy_to(target=self.channel.main_tree) + + thumbnail_file_meta_1 = self._get_file_metadata() + thumbnail_file_meta_2 = self._get_file_metadata() + thumbnail_file_meta_1.update({"preset": format_presets.AUDIO_THUMBNAIL, "file_format": file_formats.JPEG, }) + thumbnail_file_meta_2.update({"preset": format_presets.AUDIO_THUMBNAIL, "file_format": file_formats.JPEG, }) + + # Upload thumbnail to original contentnode and copied contentnode. + # content_id should remain same for both these nodes. + original_node_content_id_before_upload = file.contentnode.content_id + copied_node_content_id_before_upload = file_contentnode_copy.content_id + self._upload_file_to_contentnode(file_metadata=thumbnail_file_meta_1, contentnode_id=file.contentnode.id) + self._upload_file_to_contentnode(file_metadata=thumbnail_file_meta_2, contentnode_id=file_contentnode_copy.id) + + # Assert content_id is same after uploading thumbnails to nodes. + file.contentnode.refresh_from_db() + file_contentnode_copy.refresh_from_db() + original_node_content_id_after_upload = file.contentnode.content_id + copied_node_content_id_after_upload = file_contentnode_copy.content_id + + self.assertEqual(original_node_content_id_before_upload, original_node_content_id_after_upload) + self.assertEqual(copied_node_content_id_before_upload, copied_node_content_id_after_upload) From 783b9c8e2803bf45f42b0e929d4eeb4d5171807b Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Mon, 21 Nov 2022 17:57:52 +0530 Subject: [PATCH 023/555] Sync channel and contentnode model testcases. Ready to merge with confidence! 
--- .../contentcuration/tests/test_models.py | 25 ++++ .../contentcuration/tests/test_sync.py | 127 ++++++++++++++++++ .../contentcuration/tests/viewsets/base.py | 13 ++ .../tests/viewsets/test_channel.py | 26 ++++ 4 files changed, 191 insertions(+) diff --git a/contentcuration/contentcuration/tests/test_models.py b/contentcuration/contentcuration/tests/test_models.py index 3c0caa9967..69322abc92 100644 --- a/contentcuration/contentcuration/tests/test_models.py +++ b/contentcuration/contentcuration/tests/test_models.py @@ -439,6 +439,31 @@ def test_filter_by_pk__tree_id_updated_on_move(self): self.assertEqual(after_move_sourcenode.tree_id, testchannel.trash_tree.tree_id) self.assertEqual(tree_id_from_cache, testchannel.trash_tree.tree_id) + def test_make_content_id_unique(self): + channel_original = testdata.channel() + channel_importer = testdata.channel() + + # Import a node from a channel. + original_node = channel_original.main_tree.get_descendants().first() + copied_node = original_node.copy_to(target=channel_importer.main_tree) + + original_node.refresh_from_db() + copied_node.refresh_from_db() + + original_node_old_content_id = original_node.content_id + copied_node_old_content_id = copied_node.content_id + + original_node.make_content_id_unique() + copied_node.make_content_id_unique() + + original_node.refresh_from_db() + copied_node.refresh_from_db() + + # Assert that original node's content_id doesn't change. + self.assertEqual(original_node_old_content_id, original_node.content_id) + # Assert copied node's content_id changes. 
+ self.assertNotEqual(copied_node_old_content_id, copied_node.content_id) + class AssessmentItemTestCase(PermissionQuerysetTestCase): @property diff --git a/contentcuration/contentcuration/tests/test_sync.py b/contentcuration/contentcuration/tests/test_sync.py index 5948ea0d06..0f2459126b 100644 --- a/contentcuration/contentcuration/tests/test_sync.py +++ b/contentcuration/contentcuration/tests/test_sync.py @@ -1,14 +1,27 @@ from __future__ import absolute_import +import uuid + +from django.urls import reverse from le_utils.constants import content_kinds +from le_utils.constants import file_formats +from le_utils.constants import format_presets from .base import StudioTestCase from .testdata import create_temp_file from contentcuration.models import AssessmentItem from contentcuration.models import Channel from contentcuration.models import ContentTag +from contentcuration.models import File +from contentcuration.tests import testdata +from contentcuration.tests.base import StudioAPITestCase +from contentcuration.tests.viewsets.base import generate_create_event +from contentcuration.tests.viewsets.base import generate_update_event +from contentcuration.tests.viewsets.base import SyncTestMixin from contentcuration.utils.publish import mark_all_nodes_as_published from contentcuration.utils.sync import sync_channel +from contentcuration.viewsets.sync.constants import ASSESSMENTITEM +from contentcuration.viewsets.sync.constants import FILE class SyncTestCase(StudioTestCase): @@ -256,3 +269,117 @@ def test_sync_tags_add_multiple_tags(self): ) self.assertTrue(self.derivative_channel.has_changes()) + + +class ContentIDTestCase(SyncTestMixin, StudioAPITestCase): + def setUp(self): + super(ContentIDTestCase, self).setUp() + self.channel = testdata.channel() + self.user = testdata.user() + self.channel.editors.add(self.user) + self.client.force_authenticate(user=self.user) + + def _get_assessmentitem_metadata(self, assessment_id=None, contentnode_id=None): + return { + 
"assessment_id": assessment_id or uuid.uuid4().hex, + "contentnode_id": contentnode_id or self.channel.main_tree.get_descendants() + .filter(kind_id=content_kinds.EXERCISE) + .first() + .id, + } + + def _get_file_metadata(self): + return { + "size": 2500, + "checksum": uuid.uuid4().hex, + "name": "le_studio_file", + "file_format": file_formats.MP3, + "preset": format_presets.AUDIO, + } + + def _upload_file_to_contentnode(self, file_metadata=None, contentnode_id=None): + """ + This method mimics the frontend file upload process which is a two-step + process for the backend. + First, file's upload URL is fetched and then that file's ORM object is updated + to point to the contentnode. + """ + file = file_metadata or self._get_file_metadata() + self.client.post(reverse("file-upload-url"), file, format="json",) + file_from_db = File.objects.get(checksum=file["checksum"]) + self.sync_changes( + [generate_update_event( + file_from_db.id, + FILE, + { + "contentnode": contentnode_id or self.channel.main_tree.get_descendants().first().id + }, + channel_id=self.channel.id)],) + file_from_db.refresh_from_db() + return file_from_db + + def _create_assessmentitem(self, assessmentitem, channel_id): + self.sync_changes( + [ + generate_create_event( + [assessmentitem["contentnode_id"], assessmentitem["assessment_id"]], + ASSESSMENTITEM, + assessmentitem, + channel_id=channel_id, + ) + ], + ) + + def test_content_id__becomes_equal_on_channel_sync_assessment_item(self): + # Make a copy of an existing assessmentitem contentnode. + assessmentitem_node = self.channel.main_tree.get_descendants().filter(kind_id=content_kinds.EXERCISE).first() + assessmentitem_node_copy = assessmentitem_node.copy_to(target=self.channel.main_tree) + + # Create a new assessmentitem. 
+ self._create_assessmentitem( + assessmentitem=self._get_assessmentitem_metadata(contentnode_id=assessmentitem_node_copy.id), + channel_id=self.channel.id + ) + + # Assert after creating a new assessmentitem on copied node, it's content_id is changed. + assessmentitem_node.refresh_from_db() + assessmentitem_node_copy.refresh_from_db() + self.assertNotEqual(assessmentitem_node.content_id, assessmentitem_node_copy.content_id) + + # Syncs channel. + self.channel.main_tree.refresh_from_db() + self.channel.save() + sync_channel( + self.channel, + sync_assessment_items=True, + ) + + # Now after syncing the original and copied node should have same content_id. + assessmentitem_node.refresh_from_db() + assessmentitem_node_copy.refresh_from_db() + self.assertEqual(assessmentitem_node.content_id, assessmentitem_node_copy.content_id) + + def test_content_id__becomes_equal_on_channel_sync_file(self): + file = self._upload_file_to_contentnode() + file_contentnode_copy = file.contentnode.copy_to(target=self.channel.main_tree) + + # Upload a new file to the copied contentnode. + self._upload_file_to_contentnode(contentnode_id=file_contentnode_copy.id) + + # Assert after new file upload, content_id changes. + file.contentnode.refresh_from_db() + file_contentnode_copy.refresh_from_db() + self.assertNotEqual(file.contentnode.content_id, file_contentnode_copy.content_id) + + # Syncs channel. + self.channel.main_tree.refresh_from_db() + self.channel.save() + sync_channel( + self.channel, + sync_files=True, + ) + + # Assert that after channel syncing, content_id becomes equal. 
+ file.contentnode.refresh_from_db() + file_contentnode_copy.refresh_from_db() + self.assertEqual(file.contentnode.content_id, file_contentnode_copy.content_id) diff --git a/contentcuration/contentcuration/tests/viewsets/base.py b/contentcuration/contentcuration/tests/viewsets/base.py index 5eca0415fe..48ddd1c995 100644 --- a/contentcuration/contentcuration/tests/viewsets/base.py +++ b/contentcuration/contentcuration/tests/viewsets/base.py @@ -5,6 +5,9 @@ from contentcuration.celery import app from contentcuration.models import Change from contentcuration.tests.helpers import clear_tasks +from contentcuration.viewsets.sync.constants import CHANNEL +from contentcuration.viewsets.sync.constants import SYNCED +from contentcuration.viewsets.sync.utils import _generate_event as base_generate_event from contentcuration.viewsets.sync.utils import generate_copy_event as base_generate_copy_event from contentcuration.viewsets.sync.utils import generate_create_event as base_generate_create_event from contentcuration.viewsets.sync.utils import generate_delete_event as base_generate_delete_event @@ -35,6 +38,16 @@ def generate_update_event(*args, **kwargs): return event +def generate_sync_channel_event(channel_id, attributes, tags, files, assessment_items): + event = base_generate_event(key=channel_id, table=CHANNEL, event_type=SYNCED, channel_id=channel_id, user_id=None) + event["rev"] = random.randint(1, 10000000) + event["attributes"] = attributes + event["tags"] = tags + event["files"] = files + event["assessment_items"] = assessment_items + return event + + class SyncTestMixin(object): celery_task_always_eager = None diff --git a/contentcuration/contentcuration/tests/viewsets/test_channel.py b/contentcuration/contentcuration/tests/viewsets/test_channel.py index 63b0940ae4..b88f54ad98 100644 --- a/contentcuration/contentcuration/tests/viewsets/test_channel.py +++ b/contentcuration/contentcuration/tests/viewsets/test_channel.py @@ -2,6 +2,7 @@ import uuid +import mock from 
django.urls import reverse from contentcuration import models @@ -9,6 +10,7 @@ from contentcuration.tests.base import StudioAPITestCase from contentcuration.tests.viewsets.base import generate_create_event from contentcuration.tests.viewsets.base import generate_delete_event +from contentcuration.tests.viewsets.base import generate_sync_channel_event from contentcuration.tests.viewsets.base import generate_update_event from contentcuration.tests.viewsets.base import SyncTestMixin from contentcuration.viewsets.sync.constants import CHANNEL @@ -273,6 +275,30 @@ def test_cannot_delete_some_channels(self): self.assertTrue(models.Channel.objects.get(id=channel1.id).deleted) self.assertFalse(models.Channel.objects.get(id=channel2.id).deleted) + @mock.patch("contentcuration.viewsets.channel.sync_channel") + def test_sync_channel_called_correctly(self, sync_channel_mock): + user = testdata.user() + channel = testdata.channel() + channel.editors.add(user) + channel_node = channel.main_tree.get_descendants().first() + channel_node.copy_to(target=channel.main_tree) + + self.client.force_authenticate(user=user) + for i in range(1, 5): + sync_channel_mock.reset_mock() + args = [channel.id, False, False, False, False] + args[i] = True + + response = self.sync_changes( + [ + generate_sync_channel_event(*args) + ] + ) + + self.assertEqual(response.status_code, 200) + self.assertEqual(sync_channel_mock.call_args.args[i], True) + sync_channel_mock.assert_called_once() + class CRUDTestCase(StudioAPITestCase): @property From 347901cedf10c7aca107d3c3a6ad9eb6e1e094e4 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Fri, 16 Sep 2022 15:57:20 -0700 Subject: [PATCH 024/555] Initialize postgres docker container with developer defined SQL --- .docker/README.md | 17 +++++++++++++++++ .gitignore | 7 +++++-- Makefile | 25 +++++++++++++++++++++---- docker-compose.yml | 3 ++- 4 files changed, 45 insertions(+), 7 deletions(-) create mode 100644 .docker/README.md diff --git a/.docker/README.md 
b/.docker/README.md new file mode 100644 index 0000000000..66b76b0f2d --- /dev/null +++ b/.docker/README.md @@ -0,0 +1,17 @@ +## What is this directory? +This directory is a space for mounting directories to docker containers, allowing the mounts to be specified in committed code, but the contents of the mounts to remain ignored by git. + +### postgres +The `postgres` directory is mounted to `/docker-entrypoint-initdb.d`. Any `.sh` or `.sql` files will be executed when the container is first started with a new data volume. You may read more regarding this functionality on the [Docker Hub page](https://hub.docker.com/_/postgres), under _Initialization scripts_. + +When running docker services through the Makefile commands, it specifies a docker-compose project name that depends on the name of the current git branch. This causes the volumes to change when the branch changes, which is helpful when switching between many branches that might have incompatible database schema changes. The downside is that whenever you start a new branch, you'll have to re-initialize the database again, like with `yarn run devsetup`. Creating a SQL dump from an existing, initialized database and placing it in this directory will allow you to skip this step. + +To create a SQL dump of your preferred database data useful for local testing, run `make .docker/postgres/init.sql` while the docker postgres container is running. + +> Note: you will likely need to run `make migrate` to ensure your database schema is up-to-date when using this technique. + +#### pgpass +Stores the postgres authentication for the docker service for scripting access without manually providing a password, created by `make .docker/pgpass` + +### minio +The `minio` directory is mounted to `/data`, since it isn't necessarily useful to have this data isolated based off the current git branch. 
diff --git a/.gitignore b/.gitignore index b5e0261f09..695051b1ba 100644 --- a/.gitignore +++ b/.gitignore @@ -95,8 +95,11 @@ contentcuration/csvs/ # Ignore the TAGS file generated by some editors TAGS -# Ignore Vagrant-created files -/.vagrant/ +# Services +.vagrant/ +.docker/minio/* +.docker/postgres/* +.docker/pgpass # Ignore test files /contentcuration/contentcuration/proxy_settings.py diff --git a/Makefile b/Makefile index 99b55d3762..5a017618cb 100644 --- a/Makefile +++ b/Makefile @@ -138,15 +138,29 @@ devceleryworkers: run-services: $(MAKE) -j 2 dcservicesup devceleryworkers +.docker/minio: + mkdir -p $@ + +.docker/postgres: + mkdir -p $@ + +.docker/pgpass: + echo "localhost:5432:kolibri-studio:learningequality:kolibri" > .docker/pgpass + chmod 600 .docker/pgpass + +.docker/postgres/init.sql: .docker/pgpass + # assumes postgres is running in a docker container + PGPASSFILE=.docker/pgpass pg_dump --host localhost --port 5432 --username learningequality --dbname "kolibri-studio" --file $@ + dcbuild: # build all studio docker image and all dependent services using docker-compose docker-compose build -dcup: +dcup: .docker/minio .docker/postgres # run all services except for cloudprober docker-compose up studio-app celery-worker -dcup-cloudprober: +dcup-cloudprober: .docker/minio .docker/postgres # run all services including cloudprober docker-compose up @@ -163,11 +177,14 @@ dcshell: # bash shell inside the (running!) 
studio-app container docker-compose exec studio-app /usr/bin/fish -dctest: +dcpsql: .docker/pgpass + PGPASSFILE=.docker/pgpass psql --host localhost --port 5432 --username learningequality --dbname "kolibri-studio" + +dctest: .docker/minio .docker/postgres # run backend tests inside docker, in new instances docker-compose run studio-app make test -dcservicesup: +dcservicesup: .docker/minio .docker/postgres # launch all studio's dependent services using docker-compose docker-compose -f docker-compose.yml -f docker-compose.alt.yml up minio postgres redis diff --git a/docker-compose.yml b/docker-compose.yml index 7f21cc2844..f0b2cb7b86 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -50,7 +50,7 @@ services: MINIO_SECRET_KEY: development MINIO_API_CORS_ALLOW_ORIGIN: 'http://localhost:8080,http://127.0.0.1:8080' volumes: - - minio_data:/data + - .docker/minio:/data postgres: image: postgres:12 @@ -61,6 +61,7 @@ services: POSTGRES_DB: kolibri-studio volumes: - pgdata:/var/lib/postgresql/data/pgdata + - .docker/postgres:/docker-entrypoint-initdb.d redis: image: redis:6.0.9 From 2a0aac1c4a0e357a58ab1dbfe836d5f0cba20127 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Fri, 16 Sep 2022 16:13:40 -0700 Subject: [PATCH 025/555] Replace target name to avoid duplicating --- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index 5a017618cb..398f68e878 100644 --- a/Makefile +++ b/Makefile @@ -145,8 +145,8 @@ run-services: mkdir -p $@ .docker/pgpass: - echo "localhost:5432:kolibri-studio:learningequality:kolibri" > .docker/pgpass - chmod 600 .docker/pgpass + echo "localhost:5432:kolibri-studio:learningequality:kolibri" > $@ + chmod 600 $@ .docker/postgres/init.sql: .docker/pgpass # assumes postgres is running in a docker container From 9d1b82f73aea30b72624e4f5a6d61ccaaf3a9e35 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Mon, 19 Sep 2022 08:54:04 -0700 Subject: [PATCH 026/555] Update to use pgpass file since 
password env var doesn't work anymore --- Makefile | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/Makefile b/Makefile index 398f68e878..0cadc47528 100644 --- a/Makefile +++ b/Makefile @@ -126,9 +126,9 @@ hascaptions: export COMPOSE_PROJECT_NAME=studio_$(shell git rev-parse --abbrev-ref HEAD) -purge-postgres: - -PGPASSWORD=kolibri dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost - PGPASSWORD=kolibri createdb -U learningequality "kolibri-studio" --port 5432 -h localhost +purge-postgres: .docker/pgpass + -PGPASSFILE=.docker/pgpass dropdb -U learningequality "kolibri-studio" --port 5432 -h localhost + PGPASSFILE=.docker/pgpass createdb -U learningequality "kolibri-studio" --port 5432 -h localhost destroy-and-recreate-database: purge-postgres setup From 23021fb87d2f8ba31ae040a2c046664204160b24 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Mon, 21 Nov 2022 15:23:57 -0800 Subject: [PATCH 027/555] Revert server side ordering to fix performance issues. --- .../views/ImportFromChannels/ChannelList.vue | 1 - .../contentcuration/viewsets/channel.py | 17 ++++++----------- 2 files changed, 6 insertions(+), 12 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue b/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue index 58bb5390af..dfa961a3b6 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue @@ -124,7 +124,6 @@ [this.channelFilter]: true, page: this.$route.query.page || 1, exclude: this.currentChannelId, - ordering: this.channelFilter === 'public' ? 
'name' : '-modified', }).then(page => { this.pageCount = page.total_pages; this.channels = page.results; diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py index 4ab57c4595..07d27b7281 100644 --- a/contentcuration/contentcuration/viewsets/channel.py +++ b/contentcuration/contentcuration/viewsets/channel.py @@ -400,8 +400,6 @@ class ChannelViewSet(ValuesViewset): serializer_class = ChannelSerializer pagination_class = ChannelListPagination filterset_class = ChannelFilter - ordering_fields = ["modified", "name"] - ordering = "-modified" field_map = channel_field_map values = base_channel_values + ("edit", "view", "unpublished_changes") @@ -414,15 +412,6 @@ def get_queryset(self): queryset = super(ChannelViewSet, self).get_queryset() user_id = not self.request.user.is_anonymous and self.request.user.id user_queryset = User.objects.filter(id=user_id) - # Add the last modified node modified value as the channel last modified - channel_main_tree_nodes = ContentNode.objects.filter( - tree_id=OuterRef("main_tree__tree_id") - ) - queryset = queryset.annotate( - modified=Subquery( - channel_main_tree_nodes.values("modified").order_by("-modified")[:1] - ) - ) return queryset.annotate( edit=Exists(user_queryset.filter(editable_channels=OuterRef("id"))), @@ -445,6 +434,12 @@ def annotate_queryset(self, queryset): queryset = queryset.annotate( count=SQCount(non_topic_content_ids, field="content_id"), ) + # Add the last modified node modified value as the channel last modified + queryset = queryset.annotate( + modified=Subquery( + channel_main_tree_nodes.values("modified").order_by("-modified")[:1] + ) + ) queryset = queryset.annotate(unpublished_changes=Exists(_unpublished_changes_query(OuterRef("id")))) From 912583294feca5038d17e4e8504c2809a82be743 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Mon, 21 Nov 2022 16:06:52 -0800 Subject: [PATCH 028/555] Simplify sentry request error collection. 
--- .../contentcuration/frontend/shared/client.js | 49 ++++--------------- 1 file changed, 10 insertions(+), 39 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/client.js b/contentcuration/contentcuration/frontend/shared/client.js index a37c9cc7fa..6588c77c3e 100644 --- a/contentcuration/contentcuration/frontend/shared/client.js +++ b/contentcuration/contentcuration/frontend/shared/client.js @@ -32,12 +32,11 @@ const client = axios.create({ client.interceptors.response.use( response => response, error => { - let message; - let url; - let config; + const url = error.config.url; + let message = error.message; + let status = 0; if (error.response) { - config = error.response.config; - url = config.url; + status = error.response.status; message = error.response.statusText; // Don't send a Sentry report for permissions errors // Many 404s are in fact also unauthorized requests so @@ -45,48 +44,20 @@ client.interceptors.response.use( // to catch legitimate request issues in the backend. // // Allow 412 too as that's specific to out of storage checks - if ( - error.response.status === 403 || - error.response.status === 404 || - error.response.status === 405 || - error.response.status === 412 - ) { + if (status === 403 || status === 404 || status === 405 || status === 412) { return Promise.reject(error); } - - if (error.response.status === 0) { - message = 'Network Error: ' + url; - } - - // Put the URL in the main message for timeouts - // so we can see which timeouts are most frequent. - if (error.response.status === 504) { - message = 'Request Timed Out: ' + url; - } - } else if (error.request && error.request.config) { - // Request was sent but no response received - config = error.request.config; - url = config.url; - message = 'Network Error: ' + url; - } else { - message = error.message; } - const extraData = { - url, - type: config ? config.responseType : null, - data: config ? config.data : null, - status: error.response ? 
error.response.status : null, - error: message, - response: error.response ? error.response.data : null, - }; + message = message ? `${message}: ${url}` : `Network Error: ${url}`; + if (process.env.NODE_ENV !== 'production') { // In dev build log warnings to console for developer use console.warn('AJAX Request Error: ' + message); // eslint-disable-line no-console - console.warn('Error data: ' + JSON.stringify(extraData)); // eslint-disable-line no-console + console.warn('Error data: ' + JSON.stringify(error)); // eslint-disable-line no-console } else { - Sentry.captureMessage(message, { - extra: extraData, + Sentry.captureException(new Error(message), { + contexts: { error }, }); } return Promise.reject(error); From 9f0da6985e3a9e01ded23f9eb4736081ffc0e1b8 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 22 Nov 2022 10:27:20 -0800 Subject: [PATCH 029/555] Upload error as attachment. --- .../contentcuration/frontend/shared/client.js | 19 ++++++++++++++++--- 1 file changed, 16 insertions(+), 3 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/client.js b/contentcuration/contentcuration/frontend/shared/client.js index 6588c77c3e..056f120827 100644 --- a/contentcuration/contentcuration/frontend/shared/client.js +++ b/contentcuration/contentcuration/frontend/shared/client.js @@ -54,10 +54,23 @@ client.interceptors.response.use( if (process.env.NODE_ENV !== 'production') { // In dev build log warnings to console for developer use console.warn('AJAX Request Error: ' + message); // eslint-disable-line no-console - console.warn('Error data: ' + JSON.stringify(error)); // eslint-disable-line no-console + console.warn('Error data: ', error); // eslint-disable-line no-console } else { - Sentry.captureException(new Error(message), { - contexts: { error }, + Sentry.withScope(function(scope) { + scope.addAttachment({ + filename: 'error.json', + data: JSON.stringify(error), + contentType: 'application/json', + }); + Sentry.captureException(new 
Error(message), { + extra: { + Request: { + headers: error.config.headers, + method: error.config.method, + url, + }, + }, + }); }); } return Promise.reject(error); From 60080d6460c394b7d63ffcf5e540bc45f35346bc Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 22 Nov 2022 14:27:21 -0800 Subject: [PATCH 030/555] Remove image from innerHTML on remove. --- .../MarkdownEditor/plugins/image-upload/MarkdownImageField.vue | 1 + 1 file changed, 1 insertion(+) diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue index d5308e83dc..56bbcf70d0 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue @@ -136,6 +136,7 @@ ); }, handleRemove() { + this.editorField.innerHTML = ''; this.$destroy(); this.$el.parentNode.removeChild(this.$el); }, From 01b4503bcda26bdf7a5265de49f20123d08dea5f Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 22 Nov 2022 17:14:46 -0800 Subject: [PATCH 031/555] Create remove event mechanism for clearing custom span element. 
--- .../plugins/image-upload/MarkdownImageField.vue | 9 ++++++--- .../plugins/registerCustomMarkdownField.js | 8 ++++++-- 2 files changed, 12 insertions(+), 5 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue index 56bbcf70d0..a58aad5705 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/image-upload/MarkdownImageField.vue @@ -136,9 +136,12 @@ ); }, handleRemove() { - this.editorField.innerHTML = ''; - this.$destroy(); - this.$el.parentNode.removeChild(this.$el); + this.editorField.dispatchEvent( + new CustomEvent('remove', { + bubbles: true, + cancelable: true, + }) + ); }, handleResize() { this.resizing = true; diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js index 250ae5b30f..9cfccb5ec6 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js @@ -15,10 +15,10 @@ export default VueComponent => { vueInstanceCreatedCallback() { // by default, `contenteditable` will be false this.setAttribute('contenteditable', Boolean(VueComponent.contentEditable)); - + const id = `markdown-field-${uuidv4()}`; // a hack to prevent squire from merging custom element spans // see here: https://github.com/nhn/tui.editor/blob/master/libs/squire/source/Node.js#L92-L101 - this.classList.add(`markdown-field-${uuidv4()}`); + this.classList.add(id); // pass innerHTML of host element 
as the `markdown` property this.observer = new MutationObserver(mutations => { @@ -41,6 +41,10 @@ export default VueComponent => { }); this.observer.observe(this, { characterData: true, childList: true }); + this.addEventListener('remove', () => { + this.parentNode.removeChild(this); + }); + // initialize the `markdown` property this.getVueInstance().$root.markdown = this.innerHTML; }, From c09f109af86da7a4c074fc16c2a941aa77fa155c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Nov 2022 16:01:36 +0000 Subject: [PATCH 032/555] Bump jsonschema from 4.16.0 to 4.17.1 Bumps [jsonschema](https://github.com/python-jsonschema/jsonschema) from 4.16.0 to 4.17.1. - [Release notes](https://github.com/python-jsonschema/jsonschema/releases) - [Changelog](https://github.com/python-jsonschema/jsonschema/blob/main/CHANGELOG.rst) - [Commits](https://github.com/python-jsonschema/jsonschema/compare/v4.16.0...v4.17.1) --- updated-dependencies: - dependency-name: jsonschema dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index e6068899d1..2bc759f7b2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -162,7 +162,7 @@ jmespath==0.10.0 # botocore jsonfield==3.1.0 # via -r requirements.in -jsonschema==4.16.0 +jsonschema==4.17.1 # via -r requirements.in kombu==5.2.4 # via celery From 9b10ad419710dbeeda6dbb324e315fd2b298f598 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 23 Nov 2022 10:26:45 -0800 Subject: [PATCH 033/555] Revert "Revert server side ordering to fix performance issues." This reverts commit 23021fb87d2f8ba31ae040a2c046664204160b24. 
--- .../views/ImportFromChannels/ChannelList.vue | 1 + .../contentcuration/viewsets/channel.py | 17 +++++++++++------ 2 files changed, 12 insertions(+), 6 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue b/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue index dfa961a3b6..58bb5390af 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/views/ImportFromChannels/ChannelList.vue @@ -124,6 +124,7 @@ [this.channelFilter]: true, page: this.$route.query.page || 1, exclude: this.currentChannelId, + ordering: this.channelFilter === 'public' ? 'name' : '-modified', }).then(page => { this.pageCount = page.total_pages; this.channels = page.results; diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py index 07d27b7281..4ab57c4595 100644 --- a/contentcuration/contentcuration/viewsets/channel.py +++ b/contentcuration/contentcuration/viewsets/channel.py @@ -400,6 +400,8 @@ class ChannelViewSet(ValuesViewset): serializer_class = ChannelSerializer pagination_class = ChannelListPagination filterset_class = ChannelFilter + ordering_fields = ["modified", "name"] + ordering = "-modified" field_map = channel_field_map values = base_channel_values + ("edit", "view", "unpublished_changes") @@ -412,6 +414,15 @@ def get_queryset(self): queryset = super(ChannelViewSet, self).get_queryset() user_id = not self.request.user.is_anonymous and self.request.user.id user_queryset = User.objects.filter(id=user_id) + # Add the last modified node modified value as the channel last modified + channel_main_tree_nodes = ContentNode.objects.filter( + tree_id=OuterRef("main_tree__tree_id") + ) + queryset = queryset.annotate( + modified=Subquery( + channel_main_tree_nodes.values("modified").order_by("-modified")[:1] + ) 
+ ) return queryset.annotate( edit=Exists(user_queryset.filter(editable_channels=OuterRef("id"))), @@ -434,12 +445,6 @@ def annotate_queryset(self, queryset): queryset = queryset.annotate( count=SQCount(non_topic_content_ids, field="content_id"), ) - # Add the last modified node modified value as the channel last modified - queryset = queryset.annotate( - modified=Subquery( - channel_main_tree_nodes.values("modified").order_by("-modified")[:1] - ) - ) queryset = queryset.annotate(unpublished_changes=Exists(_unpublished_changes_query(OuterRef("id")))) From 76e8ce5eb75d280a9980c434d23796b684231d38 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 23 Nov 2022 10:30:48 -0800 Subject: [PATCH 034/555] Optimize use of channel in unpublished_changes query --- contentcuration/contentcuration/viewsets/channel.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py index 4ab57c4595..9c739cbdfe 100644 --- a/contentcuration/contentcuration/viewsets/channel.py +++ b/contentcuration/contentcuration/viewsets/channel.py @@ -382,9 +382,18 @@ def format_demo_server_url(item): def _unpublished_changes_query(channel): + """ + :param channel: Either an `OuterRef` or `Channel` object + :type channel: Channel|OuterRef + :return: QuerySet for unpublished changes + """ + # double wrap the channel if it's an outer ref so that we can match the outermost channel + # to optimize query performance + channel_ref = OuterRef(channel) if isinstance(channel, OuterRef) else channel + return Change.objects.filter( server_rev__gt=Coalesce(Change.objects.filter( - channel=OuterRef("channel"), + channel=channel_ref, change_type=PUBLISHED, errored=False ).values("server_rev").order_by("-server_rev")[:1], Value(0)), @@ -493,7 +502,7 @@ def publish(self, pk, version_notes="", language=None): "publishing": False, "primary_token": channel.get_human_token().token, 
"last_published": channel.last_published, - "unpublished_changes": _unpublished_changes_query(channel.id).exists() + "unpublished_changes": _unpublished_changes_query(channel).exists() }, channel_id=channel.id ), ], applied=True) From 02216e960f50230704f1ff0d437fe8564662b590 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 23 Nov 2022 13:03:25 -0800 Subject: [PATCH 035/555] Consolidate all blank space wrapping in custom markdown field logic. --- .../MarkdownEditor/MarkdownEditor.vue | 38 +---------- .../plugins/registerCustomMarkdownField.js | 63 +++++++++++++++++++ 2 files changed, 64 insertions(+), 37 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue index 722c44f4f0..99def78b77 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue @@ -60,7 +60,6 @@ import * as Showdown from 'showdown'; import Editor from '@toast-ui/editor'; - import debounce from 'lodash/debounce'; import { stripHtml } from 'string-strip-html'; import imageUpload, { paramsToImageFieldHTML } from '../plugins/image-upload'; @@ -83,7 +82,6 @@ registerMarkdownFormulaField(); registerMarkdownImageField(); - const wrapWithSpaces = html => ` ${html} `; const AnalyticsActionMap = { Bold: 'Bold', @@ -164,7 +162,6 @@ markdown(newMd, previousMd) { if (newMd !== previousMd && newMd !== this.editor.getMarkdown()) { this.editor.setMarkdown(newMd); - this.updateCustomNodeSpacers(); this.initImageFields(); } }, @@ -306,35 +303,6 @@ this.keyDownEventListener = this.$el.addEventListener('keydown', this.onKeyDown, true); this.clickEventListener = this.$el.addEventListener('click', this.onClick); this.editImageEventListener = 
this.$el.addEventListener('editImage', this.handleEditImage); - - // Make sure all custom nodes have spacers around them. - // Note: this is debounced because it's called every keystroke - const editorEl = this.$refs.editor; - this.updateCustomNodeSpacers = debounce(() => { - editorEl.querySelectorAll('span[is]').forEach(el => { - el.editing = true; - const hasLeftwardSpace = el => { - return ( - el.previousSibling && - el.previousSibling.textContent && - /\s$/.test(el.previousSibling.textContent) - ); - }; - const hasRightwardSpace = el => { - return ( - el.nextSibling && el.nextSibling.textContent && /^\s/.test(el.nextSibling.textContent) - ); - }; - if (!hasLeftwardSpace(el)) { - el.insertAdjacentText('beforebegin', '\xa0'); - } - if (!hasRightwardSpace(el)) { - el.insertAdjacentText('afterend', '\xa0'); - } - }); - }, 150); - - this.updateCustomNodeSpacers(); }, activated() { this.editor.focus(); @@ -413,8 +381,6 @@ event.preventDefault(); event.stopPropagation(); } - - this.updateCustomNodeSpacers(); }, onPaste(event) { const fragment = clearNodeFormat({ @@ -791,7 +757,6 @@ } else { let squire = this.editor.getSquire(); squire.insertHTML(formulaHTML); - this.updateCustomNodeSpacers(); } }, resetFormulasMenu() { @@ -876,8 +841,7 @@ const mdImageEl = template.content.firstElementChild; mdImageEl.setAttribute('editing', true); - // insert non-breaking spaces to allow users to write text before and after - this.editor.getSquire().insertHTML(wrapWithSpaces(mdImageEl.outerHTML)); + this.editor.getSquire().insertHTML(mdImageEl.outerHTML); this.initImageFields(); } diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js index 9cfccb5ec6..1870e9cf35 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js +++ 
b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js @@ -2,6 +2,54 @@ import Vue from 'vue'; import vueCustomElement from 'vue-custom-element'; import { v4 as uuidv4 } from 'uuid'; +const leftwardSpaceRegex = /\s$/; + +const leftwardDoubleSpaceRegex = /\s\s$/; + +const hasLeftwardSpace = el => { + return ( + // Has a previous sibling + el.previousSibling && + // Which has text content + el.previousSibling.textContent && + // The text content has white space right before this element + leftwardSpaceRegex.test(el.previousSibling.textContent) && + ( + // And either this sibling doesn't have a previous sibling + !el.previousSibling.previousSibling || + // Or the previous sibling is not another custom field + !el.previousSibling.previousSibling.hasAttribute('is') || + // Or the previous sibling has two white spaces, one for each + // of the custom fields on either side. + leftwardDoubleSpaceRegex.test(el.previousSibling.textContent) + ) + ); +}; + +const rightwardSpaceRegex = /^\s/; + +const rightwardDoubleSpaceRegex = /^\s\s/; + +const hasRightwardSpace = el => { + return ( + // Has a next sibling + el.nextSibling && + // Which has text content + el.nextSibling.textContent && + // The text content has white space right after this element + rightwardSpaceRegex.test(el.nextSibling.textContent) && + ( + // And either this sibling doesn't have a next sibling + !el.nextSibling.nextSibling || + // Or the next sibling is not another custom field + !el.nextSibling.nextSibling.hasAttribute('is') || + // Or the next sibling has two white spaces, one for each + // of the custom fields on either side. 
+ rightwardDoubleSpaceRegex.test(el.nextSibling.textContent) + ) + ); +}; + export default VueComponent => { const dashed = camel => camel.replace(/([a-zA-Z])(?=[A-Z])/g, '$1-').toLowerCase(); const name = dashed(VueComponent.name); @@ -42,9 +90,24 @@ export default VueComponent => { this.observer.observe(this, { characterData: true, childList: true }); this.addEventListener('remove', () => { + if (hasLeftwardSpace(this)) { + this.previousSibling.textContent = this.previousSibling.textContent.replace(leftwardSpaceRegex, ''); + } + if (hasRightwardSpace(this)) { + this.nextSibling.textContent = this.nextSibling.textContent.replace(rightwardSpaceRegex, ''); + } this.parentNode.removeChild(this); }); + this.editing = true; + + if (!hasLeftwardSpace(this)) { + this.insertAdjacentText('beforebegin', '\xa0'); + } + if (!hasRightwardSpace(this)) { + this.insertAdjacentText('afterend', '\xa0'); + } + // initialize the `markdown` property this.getVueInstance().$root.markdown = this.innerHTML; }, From d5b981ef873a9ab0d8aae2e89920227ad8557f02 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 23 Nov 2022 13:04:15 -0800 Subject: [PATCH 036/555] Clean up squire keydown handling. 
--- .../MarkdownEditor/MarkdownEditor.vue | 40 +++++----- .../MarkdownEditor/keyHandlers.js | 74 ------------------- 2 files changed, 18 insertions(+), 96 deletions(-) delete mode 100644 contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/keyHandlers.js diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue index 99def78b77..f99c016c5d 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue @@ -74,7 +74,6 @@ import { registerMarkdownFormulaField } from '../plugins/formulas/MarkdownFormulaField'; import { registerMarkdownImageField } from '../plugins/image-upload/MarkdownImageField'; import { clearNodeFormat, getExtensionMenuPosition } from './utils'; - import keyHandlers from './keyHandlers'; import FormulasMenu from './FormulasMenu/FormulasMenu'; import ImagesMenu from './ImagesMenu/ImagesMenu'; import ClickOutside from 'shared/directives/click-outside'; @@ -82,7 +81,6 @@ registerMarkdownFormulaField(); registerMarkdownImageField(); - const AnalyticsActionMap = { Bold: 'Bold', Italic: 'Italicize', @@ -326,15 +324,9 @@ * a recommended solution here https://github.com/neilj/Squire/issues/107 */ onKeyDown(event) { - const squire = this.editor.getSquire(); - // Apply squire selection workarounds this.fixSquireSelectionOnKeyDown(event); - if (event.key in keyHandlers) { - keyHandlers[event.key](squire); - } - // ESC should close menus if any are open // or close the editor if none are open if (event.key === 'Escape') { @@ -473,7 +465,7 @@ const getRightwardElement = selection => getElementAtRelativeOffset(selection, 1); const getCharacterAtRelativeOffset = (selection, relativeOffset) => { - let { element, 
offset } = squire.getSelectionInfoByOffset( + const { element, offset } = squire.getSelectionInfoByOffset( selection.startContainer, selection.startOffset + relativeOffset ); @@ -495,27 +487,31 @@ /\s$/.test(getCharacterAtRelativeOffset(selection, 0)); const moveCursor = (selection, amount) => { - let { element, offset } = squire.getSelectionInfoByOffset( - selection.startContainer, - selection.startOffset + amount - ); - if (amount > 0) { - selection.setStart(element, offset); - } else { - selection.setEnd(element, offset); - } + const element = getElementAtRelativeOffset(selection, amount); + selection.setStart(element, 0); + selection.setEnd(element, 0); return selection; }; - // make sure Squire doesn't delete rightward custom nodes when 'backspace' is pressed - if (event.key !== 'ArrowRight' && event.key !== 'Delete') { - if (isCustomNode(getRightwardElement(selection))) { + const rightwardElement = getRightwardElement(selection); + const leftwardElement = getLeftwardElement(selection); + + if (event.key === 'ArrowRight') { + if (isCustomNode(rightwardElement)) { + squire.setSelection(moveCursor(selection, 1)); + } else if (spacerAndCustomElementAreRightward(selection)) { + squire.setSelection(moveCursor(selection, 2)); + } + } + if (event.key === 'ArrowLeft') { + if (isCustomNode(leftwardElement)) { squire.setSelection(moveCursor(selection, -1)); + } else if (spacerAndCustomElementAreLeftward(selection)) { + squire.setSelection(moveCursor(selection, -2)); } } // make sure Squire doesn't get stuck with a broken cursor position when deleting // elements with `contenteditable="false"` in FireFox - let leftwardElement = getLeftwardElement(selection); if (event.key === 'Backspace') { if (selection.startContainer.tagName === 'DIV') { // This happens normally when deleting from the beginning of an empty line... 
diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/keyHandlers.js b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/keyHandlers.js deleted file mode 100644 index 7cb2b93597..0000000000 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/keyHandlers.js +++ /dev/null @@ -1,74 +0,0 @@ -import { CLASS_MATH_FIELD, KEY_ARROW_RIGHT, KEY_ARROW_LEFT, KEY_BACKSPACE } from '../constants'; - -/** - * When arrow right pressed and next element is a math field - * then move cursor to a first position after the math field - * - * @param {Object} squire Squire instance - */ -const onArrowRight = squire => { - const selection = squire.getSelection(); - - if ( - selection && - selection.startContainer.nextSibling && - selection.startOffset === selection.startContainer.length && - selection.startContainer.nextSibling.classList.contains(CLASS_MATH_FIELD) - ) { - const rangeAfterMathField = new Range(); - rangeAfterMathField.setStartAfter(selection.startContainer.nextSibling); - - squire.setSelection(rangeAfterMathField); - } -}; - -/** - * When arrow left pressed and previous element is a math field - * then move cursor to a last position before the math field - * - * @param {Object} squire Squire instance - */ -const onArrowLeft = squire => { - const selection = squire.getSelection(); - - if ( - selection && - selection.startContainer.previousSibling && - selection.startOffset === 1 && - selection.startContainer.previousSibling.classList.contains(CLASS_MATH_FIELD) - ) { - const rangeBeforeMathField = new Range(); - rangeBeforeMathField.setStartBefore(selection.startContainer.previousSibling); - - squire.setSelection(rangeBeforeMathField); - } -}; - -/** - * When backspace pressed and previous element is a math field - * then remove the math field - * - * @param {Object} squire Squire instance - */ -const onBackspace = squire => { - const selection = 
squire.getSelection(); - - if ( - selection && - selection.startContainer.previousSibling && - selection.startOffset === 1 && - selection.startContainer.previousSibling.classList.contains(CLASS_MATH_FIELD) - ) { - const mathFieldRange = new Range(); - mathFieldRange.setStartBefore(selection.startContainer.previousSibling); - mathFieldRange.setEndBefore(selection.startContainer); - - mathFieldRange.deleteContents(); - } -}; - -export default { - [KEY_ARROW_RIGHT]: onArrowRight, - [KEY_ARROW_LEFT]: onArrowLeft, - [KEY_BACKSPACE]: onBackspace, -}; From ff87c48db2b6df94591fae2de5e52fdfadd3e4a8 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 23 Nov 2022 13:40:02 -0800 Subject: [PATCH 037/555] Modify prop values using web component interface rather than trying to do prohibited direct prop modification on component. Lint. --- .../plugins/registerCustomMarkdownField.js | 31 ++++++++++--------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js index 1870e9cf35..f544eb5fa6 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js @@ -14,15 +14,13 @@ const hasLeftwardSpace = el => { el.previousSibling.textContent && // The text content has white space right before this element leftwardSpaceRegex.test(el.previousSibling.textContent) && - ( - // And either this sibling doesn't have a previous sibling - !el.previousSibling.previousSibling || + // And either this sibling doesn't have a previous sibling + (!el.previousSibling.previousSibling || // Or the previous sibling is not another custom field !el.previousSibling.previousSibling.hasAttribute('is') || // Or the 
previous sibling has two white spaces, one for each // of the custom fields on either side. - leftwardDoubleSpaceRegex.test(el.previousSibling.textContent) - ) + leftwardDoubleSpaceRegex.test(el.previousSibling.textContent)) ); }; @@ -38,15 +36,13 @@ const hasRightwardSpace = el => { el.nextSibling.textContent && // The text content has white space right after this element rightwardSpaceRegex.test(el.nextSibling.textContent) && - ( - // And either this sibling doesn't have a next sibling - !el.nextSibling.nextSibling || + // And either this sibling doesn't have a next sibling + (!el.nextSibling.nextSibling || // Or the next sibling is not another custom field !el.nextSibling.nextSibling.hasAttribute('is') || // Or the next sibling has two white spaces, one for each // of the custom fields on either side. - rightwardDoubleSpaceRegex.test(el.nextSibling.textContent) - ) + rightwardDoubleSpaceRegex.test(el.nextSibling.textContent)) ); }; @@ -83,7 +79,7 @@ export default VueComponent => { this.innerHTML = textNodesRemoved.map(n => n.nodeValue).join(); } else { // otherwise, pass the innerHTML to inner Vue component as `markdown` prop - this.getVueInstance().markdown = this.innerHTML; + this.markdown = this.innerHTML; } }); }); @@ -91,10 +87,16 @@ export default VueComponent => { this.addEventListener('remove', () => { if (hasLeftwardSpace(this)) { - this.previousSibling.textContent = this.previousSibling.textContent.replace(leftwardSpaceRegex, ''); + this.previousSibling.textContent = this.previousSibling.textContent.replace( + leftwardSpaceRegex, + '' + ); } if (hasRightwardSpace(this)) { - this.nextSibling.textContent = this.nextSibling.textContent.replace(rightwardSpaceRegex, ''); + this.nextSibling.textContent = this.nextSibling.textContent.replace( + rightwardSpaceRegex, + '' + ); } this.parentNode.removeChild(this); }); @@ -107,9 +109,8 @@ export default VueComponent => { if (!hasRightwardSpace(this)) { this.insertAdjacentText('afterend', '\xa0'); } - // 
initialize the `markdown` property - this.getVueInstance().$root.markdown = this.innerHTML; + this.markdown = this.innerHTML; }, shadowCss: VueComponent.shadowCSS, shadow: true, From f1f0a5c5fafcdbcf5ab95e4344d2a9f258c96e95 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 23 Nov 2022 14:30:07 -0800 Subject: [PATCH 038/555] Add extra edge case handling for sibling checking. --- .../MarkdownEditor/plugins/registerCustomMarkdownField.js | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js index f544eb5fa6..c9a3f51940 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/plugins/registerCustomMarkdownField.js @@ -16,6 +16,8 @@ const hasLeftwardSpace = el => { leftwardSpaceRegex.test(el.previousSibling.textContent) && // And either this sibling doesn't have a previous sibling (!el.previousSibling.previousSibling || + // Or it doesn't have a hasAttribute function + typeof el.previousSibling.previousSibling.hasAttribute !== 'function' || // Or the previous sibling is not another custom field !el.previousSibling.previousSibling.hasAttribute('is') || // Or the previous sibling has two white spaces, one for each @@ -38,6 +40,8 @@ const hasRightwardSpace = el => { rightwardSpaceRegex.test(el.nextSibling.textContent) && // And either this sibling doesn't have a next sibling (!el.nextSibling.nextSibling || + // Or it doesn't have a hasAttribute function + typeof el.nextSibling.nextSibling.hasAttribute !== 'function' || // Or the next sibling is not another custom field !el.nextSibling.nextSibling.hasAttribute('is') || // Or the next sibling has two white spaces, one for each From 
48a97be1c70fce67ef83fb34999ab87f3e2736d8 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Wed, 23 Nov 2022 14:30:35 -0800 Subject: [PATCH 039/555] Tweak markdown conversion to prevent addition of excess line breaks. --- .../MarkdownEditor/MarkdownEditor/MarkdownEditor.vue | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue index f99c016c5d..a70673add2 100644 --- a/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue +++ b/contentcuration/contentcuration/frontend/shared/views/MarkdownEditor/MarkdownEditor/MarkdownEditor.vue @@ -177,12 +177,14 @@ const Convertor = tmpEditor.convertor.constructor; class CustomConvertor extends Convertor { toMarkdown(content) { - content = showdown.makeMarkdown(content); content = imagesHtmlToMd(content); content = formulaHtmlToMd(content); - content = content.replaceAll(' ', ' '); - + content = showdown.makeMarkdown(content); // TUI.editor sprinkles in extra `
` tags that Kolibri renders literally + // When showdown has already added linebreaks to render these in markdown + // so we just remove these here. + content = content.replaceAll('
', ''); + // any copy pasted rich text that renders as HTML but does not get converted // will linger here, so remove it as Kolibri will render it literally also. content = stripHtml(content).result; From d79cc65103389f6c3ecf194ff5f4e59aef5efd5f Mon Sep 17 00:00:00 2001 From: AllanOXDi Date: Fri, 25 Nov 2022 19:08:24 +0300 Subject: [PATCH 040/555] disabled double submit while creating a channel --- .../frontend/shared/views/channel/ChannelModal.vue | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/contentcuration/contentcuration/frontend/shared/views/channel/ChannelModal.vue b/contentcuration/contentcuration/frontend/shared/views/channel/ChannelModal.vue index b76c6782b2..d2eb9623f6 100644 --- a/contentcuration/contentcuration/frontend/shared/views/channel/ChannelModal.vue +++ b/contentcuration/contentcuration/frontend/shared/views/channel/ChannelModal.vue @@ -72,7 +72,7 @@ v-model="contentDefaults" /> - + {{ isNew ? $tr('createButton') : $tr('saveChangesButton' ) }} @@ -153,6 +153,7 @@ showUnsavedDialog: false, diffTracker: {}, dialog: true, + isDisable: false, }; }, computed: { @@ -287,21 +288,25 @@ ...mapActions('channel', ['updateChannel', 'loadChannel', 'commitChannel']), ...mapMutations('channel', ['REMOVE_CHANNEL']), saveChannel() { + this.isDisable = true; if (this.$refs.detailsform.validate()) { this.changed = false; if (this.isNew) { return this.commitChannel({ id: this.channelId, ...this.diffTracker }).then(() => { // TODO: Make sure channel gets created before navigating to channel window.location = window.Urls.channel(this.channelId); + this.isDisable = false; }); } else { return this.updateChannel({ id: this.channelId, ...this.diffTracker }).then(() => { this.$store.dispatch('showSnackbarSimple', this.$tr('changesSaved')); this.header = this.channel.name; + this.isDisable = false; }); } } else if (this.$refs.detailsform.$el.scrollIntoView) { this.$refs.detailsform.$el.scrollIntoView({ behavior: 'smooth' }); + this.isDisable = 
false; } }, updateTitleForPage() { From c3d58f77efaa4ce2065c18fe98af2706f374cbcb Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Sat, 26 Nov 2022 13:05:50 +0530 Subject: [PATCH 041/555] Short circuit query evaluation --- contentcuration/contentcuration/models.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/contentcuration/contentcuration/models.py b/contentcuration/contentcuration/models.py index 67fc6730b4..3ce8f0bf5c 100644 --- a/contentcuration/contentcuration/models.py +++ b/contentcuration/contentcuration/models.py @@ -1784,8 +1784,8 @@ def make_content_id_unique(self): and a contentnode with same content_id exists then we update self's content_id. """ is_node_original = self.original_source_node_id is None or self.original_source_node_id == self.node_id - does_same_content_exists = ContentNode.objects.exclude(pk=self.pk).filter(content_id=self.content_id).exists() - if (not is_node_original) and does_same_content_exists: + node_same_content_id = ContentNode.objects.exclude(pk=self.pk).filter(content_id=self.content_id) + if (not is_node_original) and node_same_content_id.exists(): ContentNode.objects.filter(pk=self.pk).update(content_id=uuid.uuid4().hex) def on_create(self): From b74a5c739cce0b3ab7f6f82683fb26fc6adce057 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 28 Nov 2022 16:04:10 +0000 Subject: [PATCH 042/555] Bump django-s3-storage from 0.13.9 to 0.13.11 Bumps [django-s3-storage](https://github.com/etianen/django-s3-storage) from 0.13.9 to 0.13.11. - [Release notes](https://github.com/etianen/django-s3-storage/releases) - [Changelog](https://github.com/etianen/django-s3-storage/blob/master/CHANGELOG.rst) - [Commits](https://github.com/etianen/django-s3-storage/compare/0.13.9...0.13.11) --- updated-dependencies: - dependency-name: django-s3-storage dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- requirements.in | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.in b/requirements.in index 2704539269..7f1bc54d5f 100644 --- a/requirements.in +++ b/requirements.in @@ -20,7 +20,7 @@ Django==3.2.14 django-webpack-loader==0.7.0 google-cloud-error-reporting google-cloud-storage -django-s3-storage==0.13.9 +django-s3-storage==0.13.11 requests>=2.20.0 google-cloud-core django-db-readonly==0.7.0 diff --git a/requirements.txt b/requirements.txt index e6068899d1..ee2a46cd00 100644 --- a/requirements.txt +++ b/requirements.txt @@ -97,7 +97,7 @@ django-redis==5.2.0 # via -r requirements.in django-registration==3.3 # via -r requirements.in -django-s3-storage==0.13.9 +django-s3-storage==0.13.11 # via -r requirements.in django-webpack-loader==0.7.0 # via -r requirements.in From c849a27cc6f2b0f542af675cff647b1d3b16b83c Mon Sep 17 00:00:00 2001 From: AllanOXDi Date: Mon, 28 Nov 2022 19:40:18 +0300 Subject: [PATCH 043/555] handling the double create in the backend --- contentcuration/contentcuration/viewsets/base.py | 8 +++++++- 1 file changed, 7 insertions(+), 1 deletion(-) diff --git a/contentcuration/contentcuration/viewsets/base.py b/contentcuration/contentcuration/viewsets/base.py index f2e786ab07..e159880d67 100644 --- a/contentcuration/contentcuration/viewsets/base.py +++ b/contentcuration/contentcuration/viewsets/base.py @@ -6,6 +6,7 @@ from celery import states from django.core.exceptions import ObjectDoesNotExist from django.db.models import Q +from django.db.utils import IntegrityError from django.http import Http404 from django.http.request import HttpRequest from django_bulk_update.helper import bulk_update @@ -659,7 +660,12 @@ def create_from_changes(self, changes): def create(self, request, *args, **kwargs): serializer = self.get_serializer(data=request.data) serializer.is_valid(raise_exception=True) - self.perform_create(serializer) + + try: + 
self.perform_create(serializer) + + except IntegrityError as e: + return Response({"error": str(e)}, status=409) instance = serializer.instance return Response(self.serialize_object(pk=instance.pk), status=HTTP_201_CREATED) From 6129de679c27f05a3957dbf3e87595f3574c6856 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Nov 2022 16:01:28 +0000 Subject: [PATCH 044/555] Bump redis from 4.3.4 to 4.3.5 Bumps [redis](https://github.com/redis/redis-py) from 4.3.4 to 4.3.5. - [Release notes](https://github.com/redis/redis-py/releases) - [Changelog](https://github.com/redis/redis-py/blob/master/CHANGES) - [Commits](https://github.com/redis/redis-py/compare/v4.3.4...v4.3.5) --- updated-dependencies: - dependency-name: redis dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/requirements.txt b/requirements.txt index f927214388..b02eeefecc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -54,8 +54,6 @@ click-repl==0.2.0 # via celery confusable-homoglyphs==3.2.0 # via django-registration -deprecated==1.2.13 - # via redis django==3.2.14 # via # -r requirements.in @@ -229,7 +227,7 @@ pytz==2022.1 # django # django-postmark # google-api-core -redis==4.3.4 +redis==4.3.5 # via # -r requirements.in # django-redis @@ -275,8 +273,6 @@ wcwidth==0.2.5 # via prompt-toolkit webencodings==0.5.1 # via html5lib -wrapt==1.14.1 - # via deprecated zipp==3.4.1 # via importlib-metadata From 4ffa3546863fd6edc7f51296751478b76b275309 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Nov 2022 16:03:46 +0000 Subject: [PATCH 045/555] Bump django-model-utils from 4.2.0 to 4.3.1 Bumps [django-model-utils](https://github.com/jazzband/django-model-utils) from 4.2.0 to 4.3.1. 
- [Release notes](https://github.com/jazzband/django-model-utils/releases) - [Changelog](https://github.com/jazzband/django-model-utils/blob/master/CHANGES.rst) - [Commits](https://github.com/jazzband/django-model-utils/compare/4.2.0...4.3.1) --- updated-dependencies: - dependency-name: django-model-utils dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- requirements.in | 2 +- requirements.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.in b/requirements.in index 293cf6383b..6de2c0090c 100644 --- a/requirements.in +++ b/requirements.in @@ -29,7 +29,7 @@ django-mathfilters google-cloud-kms==1.1.0 backoff backports-abc==0.5 -django-model-utils==4.2.0 +django-model-utils==4.3.1 django-redis django-prometheus future diff --git a/requirements.txt b/requirements.txt index f927214388..b578dbc81a 100644 --- a/requirements.txt +++ b/requirements.txt @@ -85,7 +85,7 @@ django-js-reverse==0.9.1 # via -r requirements.in django-mathfilters==1.0.0 # via -r requirements.in -django-model-utils==4.2.0 +django-model-utils==4.3.1 # via -r requirements.in django-mptt==0.14.0 # via -r requirements.in From cd004ae1146eeedbf94a1a7d50511151a8982c76 Mon Sep 17 00:00:00 2001 From: Samson Akol Date: Tue, 29 Nov 2022 21:59:54 +0300 Subject: [PATCH 046/555] Updates jsPDF dependency url --- package.json | 2 +- yarn.lock | 14 +++++++------- 2 files changed, 8 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index 6457bfe0ed..e27b8b1e67 100644 --- a/package.json +++ b/package.json @@ -69,7 +69,7 @@ "i18n-iso-countries": "^7.5.0", "intl": "1.2.5", "jquery": "^2.2.4", - "jspdf": "https://github.com/MrRio/jsPDF.git#b7a1d8239c596292ce86dafa77f05987bcfa2e6e", + "jspdf": "https://github.com/parallax/jsPDF.git#b7a1d8239c596292ce86dafa77f05987bcfa2e6e", "kolibri-constants": "^0.1.41", "kolibri-design-system": 
"https://github.com/learningequality/kolibri-design-system#e9a2ff34716bb6412fe99f835ded5b17345bab94", "lodash": "^4.17.21", diff --git a/yarn.lock b/yarn.lock index 0c074104d7..b535884415 100644 --- a/yarn.lock +++ b/yarn.lock @@ -7906,9 +7906,9 @@ jsonpointer@^5.0.0: resolved "https://registry.yarnpkg.com/jsonpointer/-/jsonpointer-5.0.0.tgz#f802669a524ec4805fa7389eadbc9921d5dc8072" integrity sha512-PNYZIdMjVIvVgDSYKTT63Y+KZ6IZvGRNNWcxwD+GNnUz1MKPfv30J8ueCjdwcN0nDx2SlshgyB7Oy0epAzVRRg== -"jspdf@https://github.com/MrRio/jsPDF.git#b7a1d8239c596292ce86dafa77f05987bcfa2e6e": +"jspdf@https://github.com/parallax/jsPDF.git#b7a1d8239c596292ce86dafa77f05987bcfa2e6e": version "2.1.1" - resolved "https://github.com/MrRio/jsPDF.git#b7a1d8239c596292ce86dafa77f05987bcfa2e6e" + resolved "https://github.com/parallax/jsPDF.git#b7a1d8239c596292ce86dafa77f05987bcfa2e6e" dependencies: atob "^2.1.2" btoa "^1.2.1" @@ -11112,7 +11112,7 @@ stackblur-canvas@2.2.0: stackblur-canvas@^1.4.1: version "1.4.1" resolved "https://registry.yarnpkg.com/stackblur-canvas/-/stackblur-canvas-1.4.1.tgz#849aa6f94b272ff26f6471fa4130ed1f7e47955b" - integrity sha1-hJqm+UsnL/JvZHH6QTDtH35HlVs= + integrity sha512-TfbTympL5C1K+F/RizDkMBqH18EkUKU8V+4PphIXR+fWhZwwRi3bekP04gy2TOwOT3R6rJQJXAXFrbcZde7wow== static-extend@^0.1.1: version "0.1.2" @@ -11858,7 +11858,7 @@ tr46@^2.1.0: tr46@~0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a" - integrity sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o= + integrity sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw== trim-newlines@^3.0.0: version "3.0.1" @@ -12373,7 +12373,7 @@ web-streams-polyfill@^3.2.1: webidl-conversions@^3.0.0, webidl-conversions@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-3.0.1.tgz#24534275e2a7bc6be7bc86611cc16ae0a5654871" - integrity sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE= + integrity 
sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ== webidl-conversions@^4.0.2: version "4.0.2" @@ -12533,7 +12533,7 @@ whatwg-mimetype@^2.1.0, whatwg-mimetype@^2.2.0, whatwg-mimetype@^2.3.0: whatwg-url@^2.0.1: version "2.0.1" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-2.0.1.tgz#5396b2043f020ee6f704d9c45ea8519e724de659" - integrity sha1-U5ayBD8CDub3BNnEXqhRnnJN5lk= + integrity sha512-sX+FT4N6iR0ZiqGqyDEKklyfMGR99zvxZD+LQ8IGae5uVGswQ7DOeLPB5KgJY8FzkwSzwqOXLQeVQvtOTSQU9Q== dependencies: tr46 "~0.0.3" webidl-conversions "^3.0.0" @@ -12874,7 +12874,7 @@ xhr@^2.0.1: "xml-name-validator@>= 2.0.1 < 3.0.0": version "2.0.1" resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-2.0.1.tgz#4d8b8f1eccd3419aa362061becef515e1e559635" - integrity sha1-TYuPHszTQZqjYgYb7O9RXh5VljU= + integrity sha512-jRKe/iQYMyVJpzPH+3HL97Lgu5HrCfii+qSo+TfjKHtOnvbnvdVfMYrn9Q34YV81M2e5sviJlI6Ko9y+nByzvA== xml-name-validator@^3.0.0: version "3.0.0" From 1a1cf8e6c1a39aaf8bae96f4addc2b681cd21137 Mon Sep 17 00:00:00 2001 From: Samson Akol Date: Tue, 29 Nov 2022 22:11:04 +0300 Subject: [PATCH 047/555] Removes misleading offline indicator on sign-in page --- .../frontend/accounts/pages/Main.vue | 71 +++++++++++++------ 1 file changed, 50 insertions(+), 21 deletions(-) diff --git a/contentcuration/contentcuration/frontend/accounts/pages/Main.vue b/contentcuration/contentcuration/frontend/accounts/pages/Main.vue index c15a96fca1..dc05d5204c 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/Main.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/Main.vue @@ -6,12 +6,12 @@ justify-center class="main pt-5" > -

- -

- + {{ $tr('kolibriStudio') }} - - + + - - - + + +

- +

- + {{ $tr('signInButton') }} - + {{ $tr('createAccountButton') }}

- +

@@ -90,7 +127,6 @@ import PolicyModals from 'shared/views/policies/PolicyModals'; import { policies } from 'shared/constants'; import LanguageSwitcherList from 'shared/languageSwitcher/LanguageSwitcherList'; - import OfflineText from 'shared/views/OfflineText'; export default { name: 'Main', @@ -100,7 +136,6 @@ LanguageSwitcherList, PasswordField, PolicyModals, - OfflineText, }, data() { return { @@ -191,10 +226,4 @@ content: '•'; } - .corner { - position: absolute; - top: 1em; - left: 1em; - } - From b6e08bab724c095e9e9b42d728855a87dab011c9 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 30 Nov 2022 07:12:49 -0800 Subject: [PATCH 048/555] Only enqueue storage calculation task for non-admins --- contentcuration/contentcuration/utils/user.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/contentcuration/contentcuration/utils/user.py b/contentcuration/contentcuration/utils/user.py index 673b0634c8..9f9dbab75a 100644 --- a/contentcuration/contentcuration/utils/user.py +++ b/contentcuration/contentcuration/utils/user.py @@ -16,6 +16,7 @@ def calculate_user_storage(user_id): if user_id is None: raise User.DoesNotExist user = User.objects.get(pk=user_id) - calculate_user_storage_task.fetch_or_enqueue(user, user_id=user_id) + if not user.is_admin: + calculate_user_storage_task.fetch_or_enqueue(user, user_id=user_id) except User.DoesNotExist: logging.error("Tried to calculate user storage for user with id {} but they do not exist".format(user_id)) From d2d19210b9d43eb1e4478b45d65d1bb280c99b01 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 30 Nov 2022 07:13:53 -0800 Subject: [PATCH 049/555] Add helpers to celery app class for production intervention --- .../contentcuration/utils/celery/app.py | 40 +++++++++++++++++++ 1 file changed, 40 insertions(+) diff --git a/contentcuration/contentcuration/utils/celery/app.py b/contentcuration/contentcuration/utils/celery/app.py index 5870947bad..f95304bc8d 100644 --- 
a/contentcuration/contentcuration/utils/celery/app.py +++ b/contentcuration/contentcuration/utils/celery/app.py @@ -45,6 +45,46 @@ def get_queued_tasks(self, queue_name="celery"): return decoded_tasks + def count_queued_tasks(self, queue_name="celery"): + """ + :param queue_name: The queue name, defaults to the default "celery" queue + :return: int + """ + with self.pool.acquire(block=True) as conn: + count = conn.default_channel.client.llen(queue_name) + return count + + def _revoke_matching(self, tasks, task_name, matching_kwargs=None): + count = 0 + for task in tasks: + if task['name'] == task_name and (matching_kwargs is None or task['kwargs'] == matching_kwargs): + print('Revoking task {}'.format(task)) + count += 1 + self.control.revoke(task['id'], terminate=True) + return count + + def revoke_reserved_tasks_by_name(self, task_name, matching_kwargs=None): + """ + :param task_name: The string name of the task + :param matching_kwargs: A dict of kwargs to match to the task + :return: The count of revoked tasks + """ + count = 0 + for worker_name, tasks in self.control.inspect().reserved().items(): + count += self._revoke_matching(tasks, task_name, matching_kwargs) + return count + + def terminate_active_tasks_by_name(self, task_name, matching_kwargs=None): + """ + :param task_name: The string name of the task + :param matching_kwargs: A dict of kwargs to match to the task + :return: The count of terminated tasks + """ + count = 0 + for worker_name, tasks in self.control.inspect().active().items(): + count += self._revoke_matching(tasks, task_name, matching_kwargs) + return count + def decode_result(self, result, status=None): """ Decodes the celery result, like the raw result from the database, using celery tools From 43378a58283dea4e18a969c5e80d88dd2d60b79e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 30 Nov 2022 16:01:54 +0000 Subject: [PATCH 050/555] Bump jsonschema from 4.17.1 to 4.17.3 
Bumps [jsonschema](https://github.com/python-jsonschema/jsonschema) from 4.17.1 to 4.17.3. - [Release notes](https://github.com/python-jsonschema/jsonschema/releases) - [Changelog](https://github.com/python-jsonschema/jsonschema/blob/main/CHANGELOG.rst) - [Commits](https://github.com/python-jsonschema/jsonschema/compare/v4.17.1...v4.17.3) --- updated-dependencies: - dependency-name: jsonschema dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index f927214388..3cf6c8c5cd 100644 --- a/requirements.txt +++ b/requirements.txt @@ -162,7 +162,7 @@ jmespath==0.10.0 # botocore jsonfield==3.1.0 # via -r requirements.in -jsonschema==4.17.1 +jsonschema==4.17.3 # via -r requirements.in kombu==5.2.4 # via celery From 94ae2acfad6ce2f259cb4ffff799a8ee1e18390a Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 30 Nov 2022 10:28:57 -0800 Subject: [PATCH 051/555] Add calculation delay to ricecooker bulk endpoint, update tests --- contentcuration/contentcuration/decorators.py | 4 ++++ .../contentcuration/tests/test_decorators.py | 11 ++++++++--- contentcuration/contentcuration/utils/user.py | 2 +- contentcuration/contentcuration/views/internal.py | 4 ++-- 4 files changed, 15 insertions(+), 6 deletions(-) diff --git a/contentcuration/contentcuration/decorators.py b/contentcuration/contentcuration/decorators.py index e8a2dd5a0d..9c51e83b7a 100644 --- a/contentcuration/contentcuration/decorators.py +++ b/contentcuration/contentcuration/decorators.py @@ -76,6 +76,10 @@ class DelayUserStorageCalculation(ContextDecorator): def is_active(self): return self.depth > 0 + def add(self, user_id): + if user_id not in self.queue: + self.queue.append(user_id) + def __enter__(self): self.depth += 1 diff --git a/contentcuration/contentcuration/tests/test_decorators.py 
b/contentcuration/contentcuration/tests/test_decorators.py index c3fb08e0eb..2c795716d7 100644 --- a/contentcuration/contentcuration/tests/test_decorators.py +++ b/contentcuration/contentcuration/tests/test_decorators.py @@ -2,17 +2,22 @@ from contentcuration.decorators import delay_user_storage_calculation from contentcuration.tests.base import StudioTestCase +from contentcuration.tests.base import testdata from contentcuration.utils.user import calculate_user_storage class DecoratorsTestCase(StudioTestCase): + def setUp(self): + super(DecoratorsTestCase, self).setUp() + self.user = testdata.user() + @mock.patch("contentcuration.utils.user.calculate_user_storage_task") def test_delay_storage_calculation(self, mock_task): @delay_user_storage_calculation def do_test(): - calculate_user_storage(self.admin_user.id) - calculate_user_storage(self.admin_user.id) + calculate_user_storage(self.user.id) + calculate_user_storage(self.user.id) mock_task.fetch_or_enqueue.assert_not_called() do_test() - mock_task.fetch_or_enqueue.assert_called_once_with(self.admin_user, user_id=self.admin_user.id) + mock_task.fetch_or_enqueue.assert_called_once_with(self.user, user_id=self.user.id) diff --git a/contentcuration/contentcuration/utils/user.py b/contentcuration/contentcuration/utils/user.py index 9f9dbab75a..aeeffbf5b5 100644 --- a/contentcuration/contentcuration/utils/user.py +++ b/contentcuration/contentcuration/utils/user.py @@ -9,7 +9,7 @@ def calculate_user_storage(user_id): from contentcuration.decorators import delay_user_storage_calculation if delay_user_storage_calculation.is_active: - delay_user_storage_calculation.queue.append(user_id) + delay_user_storage_calculation.add(user_id) return try: diff --git a/contentcuration/contentcuration/views/internal.py b/contentcuration/contentcuration/views/internal.py index 754c8fb740..ccbc358e5e 100644 --- a/contentcuration/contentcuration/views/internal.py +++ b/contentcuration/contentcuration/views/internal.py @@ -37,6 +37,7 @@ 
from contentcuration.api import activate_channel from contentcuration.api import write_file_to_storage from contentcuration.constants import completion_criteria +from contentcuration.decorators import delay_user_storage_calculation from contentcuration.models import AssessmentItem from contentcuration.models import Change from contentcuration.models import Channel @@ -54,7 +55,6 @@ from contentcuration.utils.nodes import map_files_to_node from contentcuration.utils.nodes import map_files_to_slideshow_slide_item from contentcuration.utils.sentry import report_exception -from contentcuration.utils.tracing import trace from contentcuration.viewsets.sync.constants import CHANNEL from contentcuration.viewsets.sync.utils import generate_publish_event from contentcuration.viewsets.sync.utils import generate_update_event @@ -565,7 +565,7 @@ def __init__(self, node, errors): super(IncompleteNodeError, self).__init__(self.message) -@trace +@delay_user_storage_calculation def convert_data_to_nodes(user, content_data, parent_node): """ Parse dict and create nodes accordingly """ try: From a997d0cd0b537a31220e4f95ff8ece9018f8f843 Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 30 Nov 2022 10:29:32 -0800 Subject: [PATCH 052/555] Reorganize task revocation logic --- .../contentcuration/tests/helpers.py | 2 ++ .../contentcuration/tests/test_asynctask.py | 17 ++++++++++ .../contentcuration/utils/celery/app.py | 31 ------------------- .../contentcuration/utils/celery/tasks.py | 19 ++++++++++++ 4 files changed, 38 insertions(+), 31 deletions(-) diff --git a/contentcuration/contentcuration/tests/helpers.py b/contentcuration/contentcuration/tests/helpers.py index 8e3172fcd4..eeddd11c5d 100644 --- a/contentcuration/contentcuration/tests/helpers.py +++ b/contentcuration/contentcuration/tests/helpers.py @@ -2,6 +2,7 @@ from importlib import import_module import mock +from celery import states from contentcuration.models import TaskResult @@ -20,6 +21,7 @@ def 
clear_tasks(except_task_id=None): qs = qs.exclude(task_id=except_task_id) for task_id in qs.values_list("task_id", flat=True): app.control.revoke(task_id, terminate=True) + qs.update(status=states.REVOKED) def mock_class_instance(target): diff --git a/contentcuration/contentcuration/tests/test_asynctask.py b/contentcuration/contentcuration/tests/test_asynctask.py index 85b40b6237..e24e04933e 100644 --- a/contentcuration/contentcuration/tests/test_asynctask.py +++ b/contentcuration/contentcuration/tests/test_asynctask.py @@ -142,6 +142,9 @@ def _wait_for(self, async_result, timeout=30): with allow_join_result(): return async_result.get(timeout=timeout) + def test_app_count_queued_tasks(self): + self.assertIsInstance(app.count_queued_tasks(), int) + def test_asynctask_reports_success(self): """ Tests that when an async task is created and completed, the Task object has a status of 'SUCCESS' and @@ -245,3 +248,17 @@ def test_requeue_task(self): second_result = self._wait_for(second_async_result) self.assertIsNone(second_result) self.assertTrue(second_async_result.successful()) + + def test_revoke_task(self): + channel_id = uuid.uuid4() + async_result = test_task.enqueue(self.user, channel_id=channel_id) + test_task.revoke(channel_id=channel_id) + + # this should raise an exception, even though revoked, because the task is in ready state but not success + with self.assertRaises(Exception): + self._wait_for(async_result) + + try: + TaskResult.objects.get(task_id=async_result.task_id, status=states.REVOKED) + except TaskResult.DoesNotExist: + self.fail('Missing revoked task result') diff --git a/contentcuration/contentcuration/utils/celery/app.py b/contentcuration/contentcuration/utils/celery/app.py index f95304bc8d..521584bd91 100644 --- a/contentcuration/contentcuration/utils/celery/app.py +++ b/contentcuration/contentcuration/utils/celery/app.py @@ -54,37 +54,6 @@ def count_queued_tasks(self, queue_name="celery"): count = conn.default_channel.client.llen(queue_name) 
return count - def _revoke_matching(self, tasks, task_name, matching_kwargs=None): - count = 0 - for task in tasks: - if task['name'] == task_name and (matching_kwargs is None or task['kwargs'] == matching_kwargs): - print('Revoking task {}'.format(task)) - count += 1 - self.control.revoke(task['id'], terminate=True) - return count - - def revoke_reserved_tasks_by_name(self, task_name, matching_kwargs=None): - """ - :param task_name: The string name of the task - :param matching_kwargs: A dict of kwargs to match to the task - :return: The count of revoked tasks - """ - count = 0 - for worker_name, tasks in self.control.inspect().reserved().items(): - count += self._revoke_matching(tasks, task_name, matching_kwargs) - return count - - def terminate_active_tasks_by_name(self, task_name, matching_kwargs=None): - """ - :param task_name: The string name of the task - :param matching_kwargs: A dict of kwargs to match to the task - :return: The count of terminated tasks - """ - count = 0 - for worker_name, tasks in self.control.inspect().active().items(): - count += self._revoke_matching(tasks, task_name, matching_kwargs) - return count - def decode_result(self, result, status=None): """ Decodes the celery result, like the raw result from the database, using celery tools diff --git a/contentcuration/contentcuration/utils/celery/tasks.py b/contentcuration/contentcuration/utils/celery/tasks.py index 0d066d3329..630fa92d0b 100644 --- a/contentcuration/contentcuration/utils/celery/tasks.py +++ b/contentcuration/contentcuration/utils/celery/tasks.py @@ -246,6 +246,25 @@ def requeue(self, **kwargs): logging.info(f"Re-queuing task {self.name} for user {task_result.user.pk} from {request.id} | {task_kwargs}") return self.enqueue(task_result.user, **task_kwargs) + def revoke(self, exclude_task_ids=None, **kwargs): + """ + Revokes and terminates all unready tasks matching the kwargs + :param exclude_task_ids: Any task ids to exclude from this behavior + :param kwargs: Task keyword 
arguments that will be used to match against tasks + :return: The number of tasks revoked + """ + task_ids = self.find_incomplete_ids(**self.backend.decode(self._prepare_kwargs(kwargs))) + if exclude_task_ids is not None: + task_ids = task_ids.exclude(task_id__in=task_ids) + count = 0 + for task_id in task_ids: + logging.info(f"Revoking task {task_id}") + self.app.control.revoke(task_id, terminate=True) + count += 1 + # be sure the database backend has these marked appropriately + self.TaskModel.objects.filter(task_id__in=task_ids).update(status=states.REVOKED) + return count + class CeleryAsyncResult(AsyncResult): """ From a331538a92acbdeec3e5b32c803493a9e3600d2d Mon Sep 17 00:00:00 2001 From: Blaine Jester Date: Wed, 30 Nov 2022 10:30:05 -0800 Subject: [PATCH 053/555] Add management command that reconciles tasks for changes --- .../commands/reconcile_change_tasks.py | 48 +++++++++++++++++++ 1 file changed, 48 insertions(+) create mode 100644 contentcuration/contentcuration/management/commands/reconcile_change_tasks.py diff --git a/contentcuration/contentcuration/management/commands/reconcile_change_tasks.py b/contentcuration/contentcuration/management/commands/reconcile_change_tasks.py new file mode 100644 index 0000000000..333ca221a8 --- /dev/null +++ b/contentcuration/contentcuration/management/commands/reconcile_change_tasks.py @@ -0,0 +1,48 @@ +import logging + +from django.core.management.base import BaseCommand + +from contentcuration.celery import app +from contentcuration.models import Change +from contentcuration.models import User + +logging.basicConfig() +logger = logging.getLogger('command') + + +class Command(BaseCommand): + """ + Reconciles that unready tasks are marked as reserved or active according to celery control + """ + + def handle(self, *args, **options): + from contentcuration.tasks import apply_channel_changes_task + from contentcuration.tasks import apply_user_changes_task + + active_task_ids = [] + for worker_name, tasks in 
app.control.inspect().active().items(): + active_task_ids.extend(task['id'] for task in tasks) + for worker_name, tasks in app.control.inspect().reserved().items(): + active_task_ids.extend(task['id'] for task in tasks) + + channel_changes = Change.objects.filter(channel_id__isnull=False, applied=False, errored=False) \ + .order_by('channel_id', 'created_by_id') \ + .values('channel_id', 'created_by_id') \ + .distinct() + for channel_change in channel_changes: + apply_channel_changes_task.revoke(exclude_task_ids=active_task_ids, channel_id=channel_change['channel_id']) + apply_channel_changes_task.fetch_or_enqueue( + User.objects.get(pk=channel_change['created_by_id']), + channel_id=channel_change['channel_id'] + ) + + user_changes = Change.objects.filter(channel_id__isnull=True, user_id__isnull=False, applied=False, errored=False) \ + .order_by('user_id', 'created_by_id') \ + .values('user_id', 'created_by_id') \ + .distinct() + for user_change in user_changes: + apply_user_changes_task.revoke(exclude_task_ids=active_task_ids, user_id=user_change['user_id']) + apply_user_changes_task.fetch_or_enqueue( + User.objects.get(pk=user_change['created_by_id']), + user_id=user_change['user_id'] + ) From 85f86306e3216b97ed9e4da8ac76e5c6df2f50ec Mon Sep 17 00:00:00 2001 From: Liana Harris <46411498+LianaHarris360@users.noreply.github.com> Date: Thu, 1 Dec 2022 08:57:38 -0600 Subject: [PATCH 054/555] Added bullet points between languages on sign-in page --- .../languageSwitcher/LanguageSwitcherList.vue | 33 ++----------------- 1 file changed, 3 insertions(+), 30 deletions(-) diff --git a/contentcuration/contentcuration/frontend/shared/languageSwitcher/LanguageSwitcherList.vue b/contentcuration/contentcuration/frontend/shared/languageSwitcher/LanguageSwitcherList.vue index 30cc36563b..e95f955e6e 100644 --- a/contentcuration/contentcuration/frontend/shared/languageSwitcher/LanguageSwitcherList.vue +++ 
b/contentcuration/contentcuration/frontend/shared/languageSwitcher/LanguageSwitcherList.vue @@ -1,7 +1,7 @@ + + + + + + diff --git a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue index b3866f5ccd..ae43d92fbc 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/resetPassword/ResetPassword.vue @@ -16,9 +16,12 @@ :label="$tr('passwordConfirmLabel')" :additionalRules="passwordConfirmRules" /> - - {{ $tr('submitButton') }} - + @@ -84,3 +87,11 @@ }; + + \ No newline at end of file From 295bde72882a5ea02062d0c528224556c4a770af Mon Sep 17 00:00:00 2001 From: Alex Velez Date: Tue, 10 Jan 2023 07:37:16 -0500 Subject: [PATCH 159/555] Bring back raised buttons for account created/deleted message --- .../accounts/pages/accountDeleted/AccountDeleted.vue | 9 ++++++++- .../accounts/pages/activateAccount/AccountCreated.vue | 9 ++++++++- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue b/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue index 325482ddfd..032cfc506a 100644 --- a/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/accountDeleted/AccountDeleted.vue @@ -2,7 +2,13 @@ + > + + @@ -18,6 +24,7 @@ }, $trs: { accountDeletedTitle: 'Account successfully deleted', + backToLogin: 'Continue to sign-in page', }, }; diff --git a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue index b8edb22ee2..14e106c232 100644 --- 
a/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue +++ b/contentcuration/contentcuration/frontend/accounts/pages/activateAccount/AccountCreated.vue @@ -2,7 +2,13 @@ + > + + @@ -18,6 +24,7 @@ }, $trs: { accountCreatedTitle: 'Account successfully created', + backToLogin: 'Continue to sign-in page', }, }; From fafba94dfbbd74d47d1cd92df99d44e643ff9a09 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Mon, 20 Mar 2023 17:48:36 -0700 Subject: [PATCH 160/555] Migrate channel databases before import to guarantee compatible schema. --- .../management/commands/export_channels_to_kolibri_public.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py b/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py index 7f64f11b88..f816c2cfc9 100644 --- a/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py +++ b/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py @@ -5,7 +5,9 @@ from django.conf import settings from django.core.files.storage import default_storage as storage +from django.core.management import call_command from django.core.management.base import BaseCommand +from kolibri_content.apps import KolibriContentConfig from kolibri_content.models import ChannelMetadata as ExportedChannelMetadata from kolibri_content.router import using_content_database from kolibri_public.models import ChannelMetadata @@ -26,6 +28,8 @@ def _export_channel(self, channel_id): shutil.copyfileobj(storage_file, db_file) db_file.seek(0) with using_content_database(db_file.name): + # Run migration to handle old content databases published prior to current fields being added. 
+ call_command("migrate", app_label=KolibriContentConfig.label, database=db_file.name) channel = ExportedChannelMetadata.objects.get(id=channel_id) logger.info("Found channel {} for id: {} mapping now".format(channel.name, channel_id)) mapper = ChannelMapper(channel) From 2f0f13d63ed74cefdde33460f8564e89a2db2720 Mon Sep 17 00:00:00 2001 From: ozer550 Date: Tue, 21 Mar 2023 22:01:45 +0530 Subject: [PATCH 161/555] add tests for deploy change event --- .../contentcuration/tests/viewsets/base.py | 7 ++ .../tests/viewsets/test_channel.py | 72 +++++++++++++++++++ .../contentcuration/viewsets/channel.py | 48 +++++++++++++ .../contentcuration/viewsets/sync/base.py | 2 + .../viewsets/sync/constants.py | 2 + .../contentcuration/viewsets/sync/utils.py | 6 ++ 6 files changed, 137 insertions(+) diff --git a/contentcuration/contentcuration/tests/viewsets/base.py b/contentcuration/contentcuration/tests/viewsets/base.py index 48ddd1c995..7dd55c9622 100644 --- a/contentcuration/contentcuration/tests/viewsets/base.py +++ b/contentcuration/contentcuration/tests/viewsets/base.py @@ -11,6 +11,7 @@ from contentcuration.viewsets.sync.utils import generate_copy_event as base_generate_copy_event from contentcuration.viewsets.sync.utils import generate_create_event as base_generate_create_event from contentcuration.viewsets.sync.utils import generate_delete_event as base_generate_delete_event +from contentcuration.viewsets.sync.utils import generate_deploy_event as base_generate_deploy_event from contentcuration.viewsets.sync.utils import generate_update_event as base_generate_update_event @@ -48,6 +49,12 @@ def generate_sync_channel_event(channel_id, attributes, tags, files, assessment_ return event +def generate_deploy_channel_event(channel_id, user_id): + event = base_generate_deploy_event(channel_id, user_id=user_id) + event["rev"] = random.randint(1, 10000000) + return event + + class SyncTestMixin(object): celery_task_always_eager = None diff --git 
a/contentcuration/contentcuration/tests/viewsets/test_channel.py b/contentcuration/contentcuration/tests/viewsets/test_channel.py index b88f54ad98..02e8b6571f 100644 --- a/contentcuration/contentcuration/tests/viewsets/test_channel.py +++ b/contentcuration/contentcuration/tests/viewsets/test_channel.py @@ -4,12 +4,15 @@ import mock from django.urls import reverse +from le_utils.constants import content_kinds from contentcuration import models +from contentcuration import models as cc from contentcuration.tests import testdata from contentcuration.tests.base import StudioAPITestCase from contentcuration.tests.viewsets.base import generate_create_event from contentcuration.tests.viewsets.base import generate_delete_event +from contentcuration.tests.viewsets.base import generate_deploy_channel_event from contentcuration.tests.viewsets.base import generate_sync_channel_event from contentcuration.tests.viewsets.base import generate_update_event from contentcuration.tests.viewsets.base import SyncTestMixin @@ -299,6 +302,75 @@ def test_sync_channel_called_correctly(self, sync_channel_mock): self.assertEqual(sync_channel_mock.call_args.args[i], True) sync_channel_mock.assert_called_once() + def test_deploy_channel_event(self): + channel = testdata.channel() + user = testdata.user() + channel.editors.add(user) + self.client.force_authenticate( + user + ) # This will skip all authentication checks + channel.main_tree.refresh_from_db() + + channel.staging_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="aaa" + ) + channel.staging_tree.save() + channel.previous_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="bbb" + ) + channel.previous_tree.save() + channel.chef_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="ccc" + ) + channel.chef_tree.save() + channel.save() + + self.contentnode = cc.ContentNode.objects.create(kind_id="video") + + response = self.sync_changes( + [ + 
generate_deploy_channel_event(channel.id, user.id) + ] + ) + + self.assertEqual(response.status_code, 200) + modified_channel = models.Channel.objects.get(id=channel.id) + self.assertEqual(modified_channel.main_tree, channel.staging_tree) + self.assertEqual(modified_channel.staging_tree, None) + self.assertEqual(modified_channel.previous_tree, channel.main_tree) + + def test_deploy_with_staging_tree_None(self): + channel = testdata.channel() + user = testdata.user() + channel.editors.add(user) + self.client.force_authenticate( + user + ) # This will skip all authentication checks + channel.main_tree.refresh_from_db() + + channel.staging_tree = None + channel.previous_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="bbb" + ) + channel.previous_tree.save() + channel.chef_tree = cc.ContentNode( + kind_id=content_kinds.TOPIC, title="test", node_id="ccc" + ) + channel.chef_tree.save() + channel.save() + + self.contentnode = cc.ContentNode.objects.create(kind_id="video") + response = self.sync_changes( + [ + generate_deploy_channel_event(channel.id, user.id) + ] + ) + # Should raise validation error as staging tree was set to NONE + self.assertEqual(len(response.json()["errors"]), 1, response.content) + modified_channel = models.Channel.objects.get(id=channel.id) + self.assertNotEqual(modified_channel.main_tree, channel.staging_tree) + self.assertNotEqual(modified_channel.previous_tree, channel.main_tree) + class CRUDTestCase(StudioAPITestCase): @property diff --git a/contentcuration/contentcuration/viewsets/channel.py b/contentcuration/contentcuration/viewsets/channel.py index 773bc0b740..bd416acafa 100644 --- a/contentcuration/contentcuration/viewsets/channel.py +++ b/contentcuration/contentcuration/viewsets/channel.py @@ -28,6 +28,7 @@ from search.models import ContentNodeFullTextSearch from search.utils import get_fts_search_query +import contentcuration.models as models from contentcuration.decorators import cache_no_user_data from 
contentcuration.models import Change from contentcuration.models import Channel @@ -36,6 +37,7 @@ from contentcuration.models import generate_storage_url from contentcuration.models import SecretToken from contentcuration.models import User +from contentcuration.utils.garbage_collect import get_deleted_chefs_root from contentcuration.utils.pagination import CachedListPagination from contentcuration.utils.pagination import ValuesViewsetPageNumberPagination from contentcuration.utils.publish import publish_channel @@ -558,6 +560,52 @@ def sync(self, pk, attributes=False, tags=False, files=False, assessment_items=F progress_tracker=progress_tracker, ) + def deploy_from_changes(self, changes): + errors = [] + for deploy in changes: + try: + self.deploy(self.request.user, deploy["key"]) + except Exception as e: + log_sync_exception(e, user=self.request.user, change=deploy) + deploy["errors"] = [str(e)] + errors.append(deploy) + return 1 + + def deploy(self, user, pk): + + channel = self.get_edit_queryset().get(pk=pk) + + if channel.staging_tree is None: + raise ValidationError("Cannot deploy a channel without staging tree") + + user.check_channel_space(channel) + + if channel.previous_tree and channel.previous_tree != channel.main_tree: + # IMPORTANT: Do not remove this block, MPTT updating the deleted chefs block could hang the server + with models.ContentNode.objects.disable_mptt_updates(): + garbage_node = get_deleted_chefs_root() + channel.previous_tree.parent = garbage_node + channel.previous_tree.title = "Previous tree for channel {}".format(channel.pk) + channel.previous_tree.save() + + channel.previous_tree = channel.main_tree + channel.main_tree = channel.staging_tree + channel.staging_tree = None + channel.save() + + user.staged_files.all().delete() + user.set_space_used() + + models.Change.create_change(generate_update_event( + channel.id, + CHANNEL, + { + "root_id": channel.main_tree.id, + "staging_root_id": None + }, + channel_id=channel.id, + ), 
applied=True, created_by_id=user.id) + @method_decorator( cache_page( diff --git a/contentcuration/contentcuration/viewsets/sync/base.py b/contentcuration/contentcuration/viewsets/sync/base.py index 455a97e4aa..f11a8f4729 100644 --- a/contentcuration/contentcuration/viewsets/sync/base.py +++ b/contentcuration/contentcuration/viewsets/sync/base.py @@ -22,6 +22,7 @@ from contentcuration.viewsets.sync.constants import COPIED from contentcuration.viewsets.sync.constants import CREATED from contentcuration.viewsets.sync.constants import DELETED +from contentcuration.viewsets.sync.constants import DEPLOYED from contentcuration.viewsets.sync.constants import EDITOR_M2M from contentcuration.viewsets.sync.constants import FILE from contentcuration.viewsets.sync.constants import INVITATION @@ -92,6 +93,7 @@ def get_change_type(obj): COPIED: "copy_from_changes", PUBLISHED: "publish_from_changes", SYNCED: "sync_from_changes", + DEPLOYED: "deploy_from_changes", } diff --git a/contentcuration/contentcuration/viewsets/sync/constants.py b/contentcuration/contentcuration/viewsets/sync/constants.py index 6e553b6ccd..84c2b5aad7 100644 --- a/contentcuration/contentcuration/viewsets/sync/constants.py +++ b/contentcuration/contentcuration/viewsets/sync/constants.py @@ -6,6 +6,7 @@ COPIED = 5 PUBLISHED = 6 SYNCED = 7 +DEPLOYED = 8 ALL_CHANGES = set([ @@ -16,6 +17,7 @@ COPIED, PUBLISHED, SYNCED, + DEPLOYED, ]) # Client-side table constants diff --git a/contentcuration/contentcuration/viewsets/sync/utils.py b/contentcuration/contentcuration/viewsets/sync/utils.py index 1d3718b5e2..47b5a17e54 100644 --- a/contentcuration/contentcuration/viewsets/sync/utils.py +++ b/contentcuration/contentcuration/viewsets/sync/utils.py @@ -6,6 +6,7 @@ from contentcuration.viewsets.sync.constants import COPIED from contentcuration.viewsets.sync.constants import CREATED from contentcuration.viewsets.sync.constants import DELETED +from contentcuration.viewsets.sync.constants import DEPLOYED from 
contentcuration.viewsets.sync.constants import MOVED from contentcuration.viewsets.sync.constants import PUBLISHED from contentcuration.viewsets.sync.constants import UPDATED @@ -74,6 +75,11 @@ def generate_publish_event( return event +def generate_deploy_event(key, user_id): + event = _generate_event(key, CHANNEL, DEPLOYED, channel_id=key, user_id=user_id) + return event + + def log_sync_exception(e, user=None, change=None, changes=None): # Capture exception and report, but allow sync # to complete properly. From 33e9c5e1aad16d219f8fa68d2ab2f8b99dda8fd3 Mon Sep 17 00:00:00 2001 From: Liana Harris <46411498+LianaHarris360@users.noreply.github.com> Date: Tue, 21 Mar 2023 14:01:32 -0500 Subject: [PATCH 162/555] Updates mp3 resource previewer metadata to include has captions and subtitles --- .../frontend/channelEdit/components/ResourcePanel.vue | 6 +++++- .../channelEdit/components/edit/DetailsTabView.vue | 7 +------ .../edit/__tests__/accessibilityOptions.spec.js | 11 +++++++++++ .../contentcuration/frontend/shared/constants.js | 2 +- 4 files changed, 18 insertions(+), 8 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/ResourcePanel.vue b/contentcuration/contentcuration/frontend/channelEdit/components/ResourcePanel.vue index e651764293..0c505ddbfa 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/components/ResourcePanel.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/components/ResourcePanel.vue @@ -310,7 +310,11 @@ inline /> - + diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue b/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue index e319fc452a..3c899602a7 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/components/edit/DetailsTabView.vue @@ -554,12 +554,7 @@ return 
this.firstNode.original_channel_name; }, requiresAccessibility() { - return ( - this.oneSelected && - this.nodes.every( - node => node.kind !== ContentKindsNames.AUDIO && node.kind !== ContentKindsNames.TOPIC - ) - ); + return this.oneSelected && this.nodes.every(node => node.kind !== ContentKindsNames.TOPIC); }, audioAccessibility() { return this.oneSelected && this.firstNode.kind === 'audio'; diff --git a/contentcuration/contentcuration/frontend/channelEdit/components/edit/__tests__/accessibilityOptions.spec.js b/contentcuration/contentcuration/frontend/channelEdit/components/edit/__tests__/accessibilityOptions.spec.js index 050f389c6a..9bc5d81c04 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/components/edit/__tests__/accessibilityOptions.spec.js +++ b/contentcuration/contentcuration/frontend/channelEdit/components/edit/__tests__/accessibilityOptions.spec.js @@ -73,6 +73,17 @@ describe('AccessibilityOptions', () => { expect(wrapper.find('[data-test="checkbox-audioDescription"]').exists()).toBe(false); }); + it('should display the correct list of accessibility options if resource is an audio', () => { + const wrapper = mount(AccessibilityOptions, { + propsData: { + kind: 'audio', + }, + }); + + expect(wrapper.find('[data-test="checkbox-captionsSubtitles"]').exists()).toBe(true); + expect(wrapper.find('[data-test="tooltip-captionsSubtitles"]').exists()).toBe(false); + }); + it('should render appropriate tooltips along with the checkbox', () => { const wrapper = mount(AccessibilityOptions, { propsData: { diff --git a/contentcuration/contentcuration/frontend/shared/constants.js b/contentcuration/contentcuration/frontend/shared/constants.js index 21a14afc08..005be9a69c 100644 --- a/contentcuration/contentcuration/frontend/shared/constants.js +++ b/contentcuration/contentcuration/frontend/shared/constants.js @@ -199,11 +199,11 @@ export const ContentModalities = { }; export const AccessibilityCategoriesMap = { - // Note: audio is not included, as it 
is rendered in the UI differently. document: ['ALT_TEXT', 'HIGH_CONTRAST', 'TAGGED_PDF'], video: ['CAPTIONS_SUBTITLES', 'AUDIO_DESCRIPTION', 'SIGN_LANGUAGE'], exercise: ['ALT_TEXT'], html5: ['ALT_TEXT', 'HIGH_CONTRAST'], + audio: ['CAPTIONS_SUBTITLES'], }; export const CompletionDropdownMap = { From 385cda06f5b167541c65d6829e2c513764529799 Mon Sep 17 00:00:00 2001 From: Richard Tibbles Date: Tue, 21 Mar 2023 13:31:37 -0700 Subject: [PATCH 163/555] Use get_active_content_database to get alias --- .../management/commands/export_channels_to_kolibri_public.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py b/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py index f816c2cfc9..2f20a0b78c 100644 --- a/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py +++ b/contentcuration/kolibri_public/management/commands/export_channels_to_kolibri_public.py @@ -9,6 +9,7 @@ from django.core.management.base import BaseCommand from kolibri_content.apps import KolibriContentConfig from kolibri_content.models import ChannelMetadata as ExportedChannelMetadata +from kolibri_content.router import get_active_content_database from kolibri_content.router import using_content_database from kolibri_public.models import ChannelMetadata from kolibri_public.utils.mapper import ChannelMapper @@ -29,7 +30,7 @@ def _export_channel(self, channel_id): db_file.seek(0) with using_content_database(db_file.name): # Run migration to handle old content databases published prior to current fields being added. 
- call_command("migrate", app_label=KolibriContentConfig.label, database=db_file.name) + call_command("migrate", app_label=KolibriContentConfig.label, database=get_active_content_database()) channel = ExportedChannelMetadata.objects.get(id=channel_id) logger.info("Found channel {} for id: {} mapping now".format(channel.name, channel_id)) mapper = ChannelMapper(channel) From 36c10752a7773b5b321fd6b486a5ccfff0f210eb Mon Sep 17 00:00:00 2001 From: Vivek Agrawal Date: Wed, 22 Mar 2023 16:16:52 +0530 Subject: [PATCH 164/555] Put deploy event into indexedDB --- .../channelEdit/pages/StagingTreePage/index.vue | 2 +- .../channelEdit/vuex/currentChannel/actions.js | 10 +--------- .../frontend/shared/data/constants.js | 1 + .../frontend/shared/data/resources.js | 13 +++++++++++++ 4 files changed, 16 insertions(+), 10 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/pages/StagingTreePage/index.vue b/contentcuration/contentcuration/frontend/channelEdit/pages/StagingTreePage/index.vue index 21f5fd82b0..dcf0cb6e50 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/pages/StagingTreePage/index.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/pages/StagingTreePage/index.vue @@ -509,7 +509,7 @@ async onDeployChannelClick() { this.submitDisabled = true; try { - await this.deployCurrentChannel(); + this.deployCurrentChannel(); } catch (e) { this.submitDisabled = false; throw e; diff --git a/contentcuration/contentcuration/frontend/channelEdit/vuex/currentChannel/actions.js b/contentcuration/contentcuration/frontend/channelEdit/vuex/currentChannel/actions.js index dd5bb6139a..9dca09a0a2 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/vuex/currentChannel/actions.js +++ b/contentcuration/contentcuration/frontend/channelEdit/vuex/currentChannel/actions.js @@ -48,15 +48,7 @@ export function reloadCurrentChannelStagingDiff(context) { } export function deployCurrentChannel(context) { - let payload = { - channel_id: 
context.state.currentChannelId, - }; - return client.post(window.Urls.activate_channel(), payload).catch(e => { - // If response is 'Bad request', channel must already be activated - if (e.response && e.response.status === 400) { - return Promise.resolve(); - } - }); + return Channel.deploy(context.state.currentChannelId); } export function publishChannel(context, version_notes) { diff --git a/contentcuration/contentcuration/frontend/shared/data/constants.js b/contentcuration/contentcuration/frontend/shared/data/constants.js index 8e658c0ecb..bbc35582a3 100644 --- a/contentcuration/contentcuration/frontend/shared/data/constants.js +++ b/contentcuration/contentcuration/frontend/shared/data/constants.js @@ -6,6 +6,7 @@ export const CHANGE_TYPES = { COPIED: 5, PUBLISHED: 6, SYNCED: 7, + DEPLOYED: 8, }; /** * An array of change types that directly result in the creation of nodes diff --git a/contentcuration/contentcuration/frontend/shared/data/resources.js b/contentcuration/contentcuration/frontend/shared/data/resources.js index ac51f15330..2fb4405443 100644 --- a/contentcuration/contentcuration/frontend/shared/data/resources.js +++ b/contentcuration/contentcuration/frontend/shared/data/resources.js @@ -1085,6 +1085,19 @@ export const Channel = new Resource({ }); }, + deploy(id) { + const change = { + key: id, + source: CLIENTID, + table: this.tableName, + type: CHANGE_TYPES.DEPLOYED, + channel_id: id, + }; + return this.transaction({ mode: 'rw', source: IGNORED_SOURCE }, CHANGES_TABLE, () => { + return db[CHANGES_TABLE].put(change); + }); + }, + sync(id, { attributes = false, tags = false, files = false, assessment_items = false } = {}) { const change = { key: id, From b87156022ec081a74cd598e45471d18ed1304737 Mon Sep 17 00:00:00 2001 From: Liana Harris <46411498+LianaHarris360@users.noreply.github.com> Date: Wed, 22 Mar 2023 14:21:55 -0500 Subject: [PATCH 165/555] Updates command to automatically set accessibility metadata for audio nodes with subtitles --- 
.../management/commands/set_orm_based_has_captions.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/contentcuration/contentcuration/management/commands/set_orm_based_has_captions.py b/contentcuration/contentcuration/management/commands/set_orm_based_has_captions.py index 29769e4389..38865f6b89 100644 --- a/contentcuration/contentcuration/management/commands/set_orm_based_has_captions.py +++ b/contentcuration/contentcuration/management/commands/set_orm_based_has_captions.py @@ -22,13 +22,13 @@ class Command(BaseCommand): def handle(self, *args, **options): start = time.time() - logging.info("Setting 'has captions' for video kinds") + logging.info("Setting 'has captions' for audio kinds") has_captions_subquery = Exists(File.objects.filter(contentnode=OuterRef("id"), language=OuterRef("language"), preset_id=format_presets.VIDEO_SUBTITLE)) - # Only try to update video nodes which have not had any accessibility labels set on them + # Only try to update audio nodes which have not had any accessibility labels set on them # this will allow this management command to be rerun and resume from where it left off # and also prevent stomping previous edits to the accessibility_labels field. 
- updateable_nodes = ContentNode.objects.filter(has_captions_subquery, kind=content_kinds.VIDEO, accessibility_labels__isnull=True) + updateable_nodes = ContentNode.objects.filter(has_captions_subquery, kind=content_kinds.AUDIO, accessibility_labels__isnull=True) updateable_node_slice = updateable_nodes.values_list("id", flat=True)[0:CHUNKSIZE] From c46f34b5d58f4410a6a70156fbe884a9d7ef0c68 Mon Sep 17 00:00:00 2001 From: Liana Harris <46411498+LianaHarris360@users.noreply.github.com> Date: Fri, 24 Mar 2023 10:18:22 -0500 Subject: [PATCH 166/555] Updates nested folder order in breadcrumb --- .../frontend/channelEdit/views/CurrentTopicView.vue | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue b/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue index 644ccd629d..19223ffcab 100644 --- a/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue +++ b/contentcuration/contentcuration/frontend/channelEdit/views/CurrentTopicView.vue @@ -5,11 +5,11 @@ -