
Commit 1ca57ec

Authored by bradenmacdonald, with ChrisChV, pomegranite, and drpenido
Basic CRUD REST Endpoints for units in content libraries [FC-0083] (#36371)
* refactor: convert libraries API from attr.s to dataclass, fix types
* fix: make corresponding updates to 'search' code
* feat: use new version of openedx-learning with containers support
* temp: Use opencraft branch of opaquekeys
* refactor: Use LibraryElementKey instead of LibraryCollectionKey
* refactor: split libraries API & REST API up into smaller modules
* feat: new REST API for units in content libraries (a usage sketch of the container API follows the change summary below)
* feat: python+REST API to get a unit
* feat: auto-generate slug/key/ID from title of units
* feat: generate search index documents for containers
* refactor: rename LibraryElementKey to LibraryItemKey
* fix: lint error
* feat: adds new units to search index on create/update and when running reindex_studio. Updates requirements for openedx-events and openedx-learning to support these changes.
* fix: pylint
* fix: temp requirement
* fix: search index container events/tasks
* feat: add get_library_container_usage_key to libraries API and use it when search indexing containers
* fix: index all containers during reindex_studio
* chore: bump openedx-events requirement
* fix: address review comments
* chore: bumps openedx-learning to 0.19.1
* fix: rename api method to library_container_locator since container keys are locators, not usage keys
* chore: bumps opaque-keys dependency
* test: fix misnamed unit_usage_key
* feat: adds APIs to update or delete a container (#757)
* feat: adds python and REST APIs to update a container's display_name
* refactor: adds _get_container method to api to reduce code duplication
* feat: adds python and REST APIs to delete a container
* test: add container permission tests

---------

Co-authored-by: XnpioChV <[email protected]>
Co-authored-by: Jillian Vogel <[email protected]>
Co-authored-by: Rômulo Penido <[email protected]>
1 parent 79f33a6 commit 1ca57ec

37 files changed (+1240, −143 lines)
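To make the new Python API concrete, here is a minimal sketch of creating a unit container, mirroring the setUp() code added to test_api.py in this commit. The import path for the libraries API and the example key strings are assumptions inferred from how the changed modules reference lib_api / library_api; this is not code taken verbatim from the diff.

# Hedged sketch: create a "unit" container in a content library (mirrors the test setUp below).
# Assumption: the libraries API lives at openedx.core.djangoapps.content.libraries.api.
from opaque_keys.edx.locator import LibraryLocatorV2
from openedx.core.djangoapps.content.libraries import api as library_api

library_key = LibraryLocatorV2.from_string("lib:org1:lib")  # example library key from the tests

unit = library_api.create_container(
    library_key=library_key,
    container_type=library_api.ContainerType.Unit,  # units for now; sections/subsections may follow
    slug="unit-1",      # becomes part of the container key, e.g. "lct:org1:lib:unit:unit-1"
    title="Unit 1",
    user_id=None,       # or the id of the acting user
)

# The container is addressed by a LibraryContainerLocator, built via the new API helper:
container_key = library_api.library_container_locator(library_key, unit)

The resulting container key string follows the pattern lct:<org>:<library>:<container_type>:<slug>, as seen in the test fixture value "lct:org1:lib:unit:unit-1".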

Diff for: cms/envs/common.py (+1)

@@ -1879,6 +1879,7 @@
     "openedx_learning.apps.authoring.components",
     "openedx_learning.apps.authoring.contents",
     "openedx_learning.apps.authoring.publishing",
+    "openedx_learning.apps.authoring.units",
 ]

Diff for: lms/envs/common.py (+1)

@@ -3369,6 +3369,7 @@ def _make_locale_paths(settings):  # pylint: disable=missing-function-docstring
     "openedx_learning.apps.authoring.components",
     "openedx_learning.apps.authoring.contents",
     "openedx_learning.apps.authoring.publishing",
+    "openedx_learning.apps.authoring.units",
 ]

Diff for: openedx/core/djangoapps/content/search/api.py (+67, −1)

@@ -18,7 +18,7 @@
 from meilisearch.errors import MeilisearchApiError, MeilisearchError
 from meilisearch.models.task import TaskInfo
 from opaque_keys.edx.keys import UsageKey
-from opaque_keys.edx.locator import LibraryLocatorV2, LibraryCollectionLocator
+from opaque_keys.edx.locator import LibraryContainerLocator, LibraryLocatorV2, LibraryCollectionLocator
 from openedx_learning.api import authoring as authoring_api
 from common.djangoapps.student.roles import GlobalStaff
 from rest_framework.request import Request
@@ -40,6 +40,7 @@
     meili_id_from_opaque_key,
     searchable_doc_for_course_block,
     searchable_doc_for_collection,
+    searchable_doc_for_container,
     searchable_doc_for_library_block,
     searchable_doc_for_usage_key,
     searchable_doc_collections,
@@ -475,6 +476,31 @@ def index_collection_batch(batch, num_done, library_key) -> int:
                 status_cb(f"Error indexing collection batch {p}: {err}")
         return num_done

+    ############## Containers ##############
+    def index_container_batch(batch, num_done, library_key) -> int:
+        docs = []
+        for container in batch:
+            try:
+                container_key = lib_api.library_container_locator(
+                    library_key,
+                    container,
+                )
+                doc = searchable_doc_for_container(container_key)
+                # TODO: when we add container tags
+                # doc.update(searchable_doc_tags_for_container(container_key))
+                docs.append(doc)
+            except Exception as err:  # pylint: disable=broad-except
+                status_cb(f"Error indexing container {container.key}: {err}")
+            num_done += 1
+
+        if docs:
+            try:
+                # Add docs in batch of 100 at once (usually faster than adding one at a time):
+                _wait_for_meili_task(client.index(index_name).add_documents(docs))
+            except (TypeError, KeyError, MeilisearchError) as err:
+                status_cb(f"Error indexing container batch {p}: {err}")
+        return num_done
+
     for lib_key in lib_keys:
         status_cb(f"{num_contexts_done + 1}/{num_contexts}. Now indexing blocks in library {lib_key}")
         lib_docs = index_library(lib_key)
@@ -497,6 +523,22 @@ def index_collection_batch(batch, num_done, library_key) -> int:
             IncrementalIndexCompleted.objects.get_or_create(context_key=lib_key)
         status_cb(f"{num_collections_done}/{num_collections} collections indexed for library {lib_key}")

+        # Similarly, batch process Containers (units, sections, etc) in pages of 100
+        containers = authoring_api.get_containers(library.learning_package_id)
+        num_containers = containers.count()
+        num_containers_done = 0
+        status_cb(f"{num_containers_done}/{num_containers}. Now indexing containers in library {lib_key}")
+        paginator = Paginator(containers, 100)
+        for p in paginator.page_range:
+            num_containers_done = index_container_batch(
+                paginator.page(p).object_list,
+                num_containers_done,
+                lib_key,
+            )
+        status_cb(f"{num_containers_done}/{num_containers} containers indexed for library {lib_key}")
+        if incremental:
+            IncrementalIndexCompleted.objects.get_or_create(context_key=lib_key)
+
         num_contexts_done += 1

     ############## Courses ##############
@@ -732,6 +774,30 @@ def update_library_components_collections(
     _update_index_docs(docs)


+def upsert_library_container_index_doc(container_key: LibraryContainerLocator) -> None:
+    """
+    Creates, updates, or deletes the document for the given Library Container in the search index.
+
+    TODO: add support for indexing a container's components, like upsert_library_collection_index_doc does.
+    """
+    doc = searchable_doc_for_container(container_key)
+
+    # Soft-deleted/disabled containers are removed from the index
+    # and their components updated.
+    if doc.get('_disabled'):
+
+        _delete_index_doc(doc[Fields.id])
+
+    # Hard-deleted containers are also deleted from the index
+    elif not doc.get(Fields.type):
+
+        _delete_index_doc(doc[Fields.id])
+
+    # Otherwise, upsert the container.
+    else:
+        _update_index_docs([doc])
+
+
 def upsert_content_library_index_docs(library_key: LibraryLocatorV2) -> None:
     """
     Creates or updates the documents for the given Content Library in the search index
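As a rough usage sketch (assuming Meilisearch is configured), the new upsert function can also be called directly with a parsed container key, which is what the new Celery task in tasks.py below does. The key string is an example taken from the tests in this commit.

# Hedged sketch: refresh the search document for a single container.
from opaque_keys.edx.locator import LibraryContainerLocator

from openedx.core.djangoapps.content.search import api as search_api

# Container key format from the tests: lct:<org>:<library>:<container_type>:<slug>
container_key = LibraryContainerLocator.from_string("lct:org1:lib:unit:unit-1")

# Creates, updates, or deletes the container's document depending on its state
# (soft-deleted/disabled and hard-deleted containers are removed from the index).
search_api.upsert_library_container_index_doc(container_key)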

Diff for: openedx/core/djangoapps/content/search/documents.py (+57, −1)

@@ -10,7 +10,7 @@
 from django.core.exceptions import ObjectDoesNotExist
 from opaque_keys.edx.keys import LearningContextKey, UsageKey
 from openedx_learning.api import authoring as authoring_api
-from opaque_keys.edx.locator import LibraryLocatorV2
+from opaque_keys.edx.locator import LibraryLocatorV2, LibraryContainerLocator
 from rest_framework.exceptions import NotFound

 from openedx.core.djangoapps.content.search.models import SearchAccess
@@ -100,6 +100,7 @@ class DocType:
     """
     course_block = "course_block"
     library_block = "library_block"
+    library_container = "library_container"
     collection = "collection"

@@ -546,3 +547,58 @@ def searchable_doc_for_collection(
         doc['_disabled'] = True

     return doc
+
+
+def searchable_doc_for_container(
+    container_key: LibraryContainerLocator,
+) -> dict:
+    """
+    Generate a dictionary document suitable for ingestion into a search engine
+    like Meilisearch or Elasticsearch, so that the given collection can be
+    found using faceted search.
+
+    If no container is found for the given container key, the returned document
+    will contain only basic information derived from the container key, and no
+    Fields.type value will be included in the returned dict.
+    """
+    doc = {
+        Fields.id: meili_id_from_opaque_key(container_key),
+        Fields.context_key: str(container_key.library_key),
+        Fields.org: str(container_key.org),
+        # In the future, this may be either course_container or library_container
+        Fields.type: DocType.library_container,
+        # To check if it is "unit", "section", "subsection", etc..
+        Fields.block_type: container_key.container_type,
+        Fields.usage_key: str(container_key),  # Field name isn't exact but this is the closest match
+        Fields.block_id: container_key.container_id,  # Field name isn't exact but this is the closest match
+        Fields.access_id: _meili_access_id_from_context_key(container_key.library_key),
+    }
+
+    try:
+        container = lib_api.get_container(container_key)
+    except lib_api.ContentLibraryCollectionNotFound:
+        # Container not found, so we can only return the base doc
+        pass
+
+    if container:
+        # TODO: check if there's a more efficient way to load these num_children counts?
+        draft_num_children = len(lib_api.get_container_children(container_key, published=False))
+
+        doc.update({
+            Fields.display_name: container.display_name,
+            Fields.created: container.created.timestamp(),
+            Fields.modified: container.modified.timestamp(),
+            Fields.num_children: draft_num_children,
+        })
+        library = lib_api.get_library(container_key.library_key)
+        if library:
+            doc[Fields.breadcrumbs] = [{"display_name": library.title}]
+
+        if container.published_version_num is not None:
+            published_num_children = len(lib_api.get_container_children(container_key, published=True))
+            doc[Fields.published] = {
+                # Fields.published_display_name: container_published.title, TODO: set the published title
+                Fields.published_num_children: published_num_children,
+            }
+
+    return doc

Diff for: openedx/core/djangoapps/content/search/handlers.py (+33)

@@ -14,6 +14,7 @@
     ContentObjectChangedData,
     LibraryBlockData,
     LibraryCollectionData,
+    LibraryContainerData,
     XBlockData,
 )
 from openedx_events.content_authoring.signals import (
@@ -25,6 +26,9 @@
     LIBRARY_COLLECTION_CREATED,
     LIBRARY_COLLECTION_DELETED,
     LIBRARY_COLLECTION_UPDATED,
+    LIBRARY_CONTAINER_CREATED,
+    LIBRARY_CONTAINER_DELETED,
+    LIBRARY_CONTAINER_UPDATED,
     XBLOCK_CREATED,
     XBLOCK_DELETED,
     XBLOCK_UPDATED,
@@ -45,6 +49,7 @@
     delete_xblock_index_doc,
     update_content_library_index_docs,
     update_library_collection_index_doc,
+    update_library_container_index_doc,
     upsert_library_block_index_doc,
     upsert_xblock_index_doc,
 )
@@ -225,3 +230,31 @@ def content_object_associations_changed_handler(**kwargs) -> None:
         upsert_block_tags_index_docs(usage_key)
     if not content_object.changes or "collections" in content_object.changes:
         upsert_block_collections_index_docs(usage_key)
+
+
+@receiver(LIBRARY_CONTAINER_CREATED)
+@receiver(LIBRARY_CONTAINER_DELETED)
+@receiver(LIBRARY_CONTAINER_UPDATED)
+@only_if_meilisearch_enabled
+def library_container_updated_handler(**kwargs) -> None:
+    """
+    Create or update the index for the content library container
+    """
+    library_container = kwargs.get("library_container", None)
+    if not library_container or not isinstance(library_container, LibraryContainerData):  # pragma: no cover
+        log.error("Received null or incorrect data for event")
+        return
+
+    if library_container.background:
+        update_library_container_index_doc.delay(
+            str(library_container.library_key),
+            library_container.container_key,
+        )
+    else:
+        # Update container index synchronously to make sure that search index is updated before
+        # the frontend invalidates/refetches index.
+        # See content_library_updated_handler for more details.
+        update_library_container_index_doc.apply(args=[
+            str(library_container.library_key),
+            library_container.container_key,
+        ])
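For completeness, a hedged sketch of how a producer might emit one of these events to exercise the handler above. The LibraryContainerData field names (library_key, container_key, background) are inferred from the handler's attribute accesses and are assumptions, not something this diff defines.

# Hedged sketch: emit LIBRARY_CONTAINER_UPDATED so the handler reindexes the container.
from opaque_keys.edx.locator import LibraryContainerLocator, LibraryLocatorV2
from openedx_events.content_authoring.data import LibraryContainerData
from openedx_events.content_authoring.signals import LIBRARY_CONTAINER_UPDATED

library_key = LibraryLocatorV2.from_string("lib:org1:lib")
container_key = LibraryContainerLocator.from_string("lct:org1:lib:unit:unit-1")

LIBRARY_CONTAINER_UPDATED.send_event(
    library_container=LibraryContainerData(
        library_key=library_key,           # assumed field, read by the handler
        container_key=str(container_key),  # passed through and re-parsed by the Celery task
        background=True,                   # True -> index via .delay(); False -> synchronous .apply()
    )
)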

Diff for: openedx/core/djangoapps/content/search/tasks.py (+15, −1)

@@ -11,7 +11,7 @@
 from edx_django_utils.monitoring import set_code_owner_attribute
 from meilisearch.errors import MeilisearchError
 from opaque_keys.edx.keys import UsageKey
-from opaque_keys.edx.locator import LibraryLocatorV2, LibraryUsageLocatorV2
+from opaque_keys.edx.locator import LibraryContainerLocator, LibraryLocatorV2, LibraryUsageLocatorV2

 from . import api

@@ -110,3 +110,17 @@ def update_library_components_collections(library_key_str: str, collection_key:
     log.info("Updating document.collections for library %s collection %s components", library_key, collection_key)

     api.update_library_components_collections(library_key, collection_key)
+
+
+@shared_task(base=LoggedTask, autoretry_for=(MeilisearchError, ConnectionError))
+@set_code_owner_attribute
+def update_library_container_index_doc(library_key_str: str, container_key_str: str) -> None:
+    """
+    Celery task to update the content index document for a library container
+    """
+    library_key = LibraryLocatorV2.from_string(library_key_str)
+    container_key = LibraryContainerLocator.from_string(container_key_str)
+
+    log.info("Updating content index documents for container %s in library%s", container_key, library_key)
+
+    api.upsert_library_container_index_doc(container_key)

Diff for: openedx/core/djangoapps/content/search/tests/test_api.py (+54, −5)

@@ -208,6 +208,35 @@ def setUp(self):
             "breadcrumbs": [{"display_name": "Library"}],
         }

+        # Create a unit:
+        with freeze_time(created_date):
+            self.unit = library_api.create_container(
+                library_key=self.library.key,
+                container_type=library_api.ContainerType.Unit,
+                slug="unit-1",
+                title="Unit 1",
+                user_id=None,
+            )
+        self.unit_key = "lct:org1:lib:unit:unit-1"
+        self.unit_dict = {
+            "id": "lctorg1libunitunit-1-e4527f7c",
+            "block_id": "unit-1",
+            "block_type": "unit",
+            "usage_key": self.unit_key,
+            "type": "library_container",
+            "display_name": "Unit 1",
+            # description is not set for containers
+            "num_children": 0,
+            "context_key": "lib:org1:lib",
+            "org": "org1",
+            "created": created_date.timestamp(),
+            "modified": created_date.timestamp(),
+            "access_id": lib_access.id,
+            "breadcrumbs": [{"display_name": "Library"}],
+            # "tags" should be here but we haven't implemented them yet
+            # "published" is not set since we haven't published it yet
+        }
+
     @override_settings(MEILISEARCH_ENABLED=False)
     def test_reindex_meilisearch_disabled(self, mock_meilisearch):
         with self.assertRaises(RuntimeError):
@@ -231,14 +260,16 @@ def test_reindex_meilisearch(self, mock_meilisearch):
         doc_problem2["collections"] = {'display_name': [], 'key': []}
         doc_collection = copy.deepcopy(self.collection_dict)
         doc_collection["tags"] = {}
+        doc_unit = copy.deepcopy(self.unit_dict)

         api.rebuild_index()
-        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 3
+        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 4
         mock_meilisearch.return_value.index.return_value.add_documents.assert_has_calls(
             [
                 call([doc_sequential, doc_vertical]),
                 call([doc_problem1, doc_problem2]),
                 call([doc_collection]),
+                call([doc_unit]),
             ],
             any_order=True,
         )
@@ -259,14 +290,16 @@ def test_reindex_meilisearch_incremental(self, mock_meilisearch):
         doc_problem2["collections"] = {"display_name": [], "key": []}
         doc_collection = copy.deepcopy(self.collection_dict)
         doc_collection["tags"] = {}
+        doc_unit = copy.deepcopy(self.unit_dict)

         api.rebuild_index(incremental=True)
-        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 3
+        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 4
         mock_meilisearch.return_value.index.return_value.add_documents.assert_has_calls(
             [
                 call([doc_sequential, doc_vertical]),
                 call([doc_problem1, doc_problem2]),
                 call([doc_collection]),
+                call([doc_unit]),
             ],
             any_order=True,
         )
@@ -280,13 +313,13 @@ def simulated_interruption(message):
         with pytest.raises(Exception, match="Simulated interruption"):
             api.rebuild_index(simulated_interruption, incremental=True)

-        # two more calls due to collections
-        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 5
+        # three more calls due to collections and containers
+        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 7
         assert IncrementalIndexCompleted.objects.all().count() == 1
         api.rebuild_index(incremental=True)
         assert IncrementalIndexCompleted.objects.all().count() == 0
         # one missing course indexed
-        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 6
+        assert mock_meilisearch.return_value.index.return_value.add_documents.call_count == 8

     @override_settings(MEILISEARCH_ENABLED=True)
     def test_reset_meilisearch_index(self, mock_meilisearch):
@@ -340,6 +373,22 @@ def test_reindex_meilisearch_collection_error(self, mock_meilisearch):
             f"Error indexing collection {self.collection}: Failed to generate document"
         )

+    @override_settings(MEILISEARCH_ENABLED=True)
+    @patch(
+        "openedx.core.djangoapps.content.search.api.searchable_doc_for_container",
+        Mock(side_effect=Exception("Failed to generate document")),
+    )
+    def test_reindex_meilisearch_container_error(self, mock_meilisearch):
+
+        mock_logger = Mock()
+        api.rebuild_index(mock_logger)
+        assert call(
+            [self.unit_dict]
+        ) not in mock_meilisearch.return_value.index.return_value.add_documents.mock_calls
+        mock_logger.assert_any_call(
+            "Error indexing container unit-1: Failed to generate document"
+        )
+
     @override_settings(MEILISEARCH_ENABLED=True)
     def test_reindex_meilisearch_library_block_error(self, mock_meilisearch):

0 commit comments
