diff --git a/openedx/core/djangoapps/content/search/api.py b/openedx/core/djangoapps/content/search/api.py index d81128f65950..389f6d21161f 100644 --- a/openedx/core/djangoapps/content/search/api.py +++ b/openedx/core/djangoapps/content/search/api.py @@ -51,8 +51,7 @@ searchable_doc_for_library_block, searchable_doc_for_key, searchable_doc_tags, - searchable_doc_tags_for_collection, - searchable_doc_units, + searchable_doc_containers, ) log = logging.getLogger(__name__) @@ -452,7 +451,7 @@ def index_library(lib_key: LibraryLocatorV2) -> list: doc.update(searchable_doc_for_library_block(metadata)) doc.update(searchable_doc_tags(metadata.usage_key)) doc.update(searchable_doc_collections(metadata.usage_key)) - doc.update(searchable_doc_units(metadata.usage_key)) + doc.update(searchable_doc_containers(metadata.usage_key, "units")) docs.append(doc) except Exception as err: # pylint: disable=broad-except status_cb(f"Error indexing library component {component}: {err}") @@ -471,7 +470,7 @@ def index_collection_batch(batch, num_done, library_key) -> int: try: collection_key = lib_api.library_collection_locator(library_key, collection.key) doc = searchable_doc_for_collection(collection_key, collection=collection) - doc.update(searchable_doc_tags_for_collection(collection_key)) + doc.update(searchable_doc_tags(collection_key)) docs.append(doc) except Exception as err: # pylint: disable=broad-except status_cb(f"Error indexing collection {collection}: {err}") @@ -497,6 +496,12 @@ def index_container_batch(batch, num_done, library_key) -> int: doc = searchable_doc_for_container(container_key) doc.update(searchable_doc_tags(container_key)) doc.update(searchable_doc_collections(container_key)) + container_type = lib_api.ContainerType(container_key.container_type) + match container_type: + case lib_api.ContainerType.Unit: + doc.update(searchable_doc_containers(container_key, "subsections")) + case lib_api.ContainerType.Subsection: + doc.update(searchable_doc_containers(container_key, "sections")) docs.append(doc) except Exception as err: # pylint: disable=broad-except status_cb(f"Error indexing container {container.key}: {err}") @@ -696,7 +701,7 @@ def upsert_library_collection_index_doc(collection_key: LibraryCollectionLocator If the Collection is not found or disabled (i.e. soft-deleted), then delete it from the search index. """ doc = searchable_doc_for_collection(collection_key) - update_components = False + update_items = False # Soft-deleted/disabled collections are removed from the index # and their components updated. 
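# --- Editor's note: illustrative sketch, not part of the diff -----------------
# index_container_batch (in the hunk above) now picks which "parent containers"
# field to add to a container's search document based on its type: a unit's
# parents are indexed under "subsections", a subsection's under "sections".
# The enum and helper below are simplified stand-ins for lib_api.ContainerType
# and the real indexing code, shown only to make that dispatch explicit.
from enum import Enum


class ContainerType(Enum):
    Unit = "unit"
    Subsection = "subsection"
    Section = "section"


def parent_containers_field(container_type: ContainerType) -> str | None:
    """Name of the search-document field holding this container's parents."""
    match container_type:
        case ContainerType.Unit:
            return "subsections"
        case ContainerType.Subsection:
            return "sections"
        case _:
            # In this change, no parent-container field is indexed for sections.
            return None


assert parent_containers_field(ContainerType.Unit) == "subsections"
assert parent_containers_field(ContainerType.Subsection) == "sections"
# -------------------------------------------------------------------------------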
@@ -755,7 +760,8 @@ def update_library_components_collections( library_key, component, ) - doc = searchable_doc_collections(usage_key) + doc = searchable_doc_for_key(usage_key) + doc.update(searchable_doc_collections(usage_key)) docs.append(doc) log.info( @@ -790,7 +796,8 @@ def update_library_containers_collections( library_key, container, ) - doc = searchable_doc_collections(container_key) + doc = searchable_doc_for_key(container_key) + doc.update(searchable_doc_collections(container_key)) docs.append(doc) log.info( @@ -855,21 +862,12 @@ def upsert_item_collections_index_docs(opaque_key: OpaqueKey): _update_index_docs([doc]) -def upsert_item_units_index_docs(opaque_key: OpaqueKey): +def upsert_item_containers_index_docs(opaque_key: OpaqueKey, container_type: str): """ - Updates the units data in documents for the given Course/Library block + Updates the containers (units/subsections/sections) data in documents for the given Course/Library block """ doc = {Fields.id: meili_id_from_opaque_key(opaque_key)} - doc.update(searchable_doc_units(opaque_key)) - _update_index_docs([doc]) - - -def upsert_collection_tags_index_docs(collection_key: LibraryCollectionLocator): - """ - Updates the tags data in documents for the given library collection - """ - - doc = searchable_doc_tags_for_collection(collection_key) + doc.update(searchable_doc_containers(opaque_key, container_type)) _update_index_docs([doc]) diff --git a/openedx/core/djangoapps/content/search/documents.py b/openedx/core/djangoapps/content/search/documents.py index 34a3c86b375e..aaabb0b92a51 100644 --- a/openedx/core/djangoapps/content/search/documents.py +++ b/openedx/core/djangoapps/content/search/documents.py @@ -67,10 +67,15 @@ class Fields: collections = "collections" collections_display_name = "display_name" collections_key = "key" - # Units (dictionary) that this object belongs to. + # Containers (dictionaries) that this object belongs to. units = "units" - units_display_name = "display_name" - units_key = "key" + subsections = "subsections" + sections = "sections" + containers_display_name = "display_name" + containers_key = "key" + + sections_display_name = "display_name" + sections_key = "key" # The "content" field is a dictionary of arbitrary data, depending on the block_type. # It comes from each XBlock's index_dictionary() method (if present) plus some processing. @@ -97,6 +102,8 @@ class Fields: # List of children keys child_usage_keys = "child_usage_keys" + # List of children display names + child_display_names = "child_display_names" # Note: new fields or values can be added at any time, but if they need to be indexed for filtering or keyword # search, the index configuration will need to be changed, which is only done as part of the 'reindex_studio' @@ -255,7 +262,92 @@ class implementation returns only: return block_data -def _tags_for_content_object(object_id: OpaqueKey) -> dict: +def _published_data_from_block(block_published) -> dict: + """ + Given a library block, get the published data.
+ """ + result = { + Fields.published: { + Fields.published_display_name: xblock_api.get_block_display_name(block_published), + } + } + + try: + content_data = _get_content_from_block(block_published) + + description = _get_description_from_block_content( + block_published.scope_ids.block_type, + content_data, + ) + + if description: + result[Fields.published][Fields.published_description] = description + except Exception as err: # pylint: disable=broad-except + log.exception(f"Failed to process index_dictionary for {block_published.usage_key}: {err}") + + return result + + +def searchable_doc_for_library_block(xblock_metadata: lib_api.LibraryXBlockMetadata) -> dict: + """ + Generate a dictionary document suitable for ingestion into a search engine + like Meilisearch or Elasticsearch, so that the given library block can be + found using faceted search. + + Datetime fields (created, modified, last_published) are serialized to POSIX timestamps so that they can be used to + sort the search results. + """ + library_name = lib_api.get_library(xblock_metadata.usage_key.context_key).title + block = xblock_api.load_block(xblock_metadata.usage_key, user=None) + + publish_status = PublishStatus.published + try: + block_published = xblock_api.load_block(xblock_metadata.usage_key, user=None, version=LatestVersion.PUBLISHED) + if xblock_metadata.last_published and xblock_metadata.last_published < xblock_metadata.modified: + publish_status = PublishStatus.modified + except NotFound: + # Never published + block_published = None + publish_status = PublishStatus.never + + doc = searchable_doc_for_key(xblock_metadata.usage_key) + doc.update({ + Fields.type: DocType.library_block, + Fields.breadcrumbs: [], + Fields.created: xblock_metadata.created.timestamp(), + Fields.modified: xblock_metadata.modified.timestamp(), + Fields.last_published: xblock_metadata.last_published.timestamp() if xblock_metadata.last_published else None, + Fields.publish_status: publish_status, + }) + + doc.update(_fields_from_block(block)) + + if block_published: + doc.update(_published_data_from_block(block_published)) + + # Add the breadcrumbs. In v2 libraries, the library itself is not a "parent" of the XBlocks so we add it here: + doc[Fields.breadcrumbs] = [{"display_name": library_name}] + + return doc + + +def searchable_doc_for_course_block(block) -> dict: + """ + Generate a dictionary document suitable for ingestion into a search engine + like Meilisearch or Elasticsearch, so that the given course block can be + found using faceted search. + """ + doc = searchable_doc_for_key(block.usage_key) + doc.update({ + Fields.type: DocType.course_block, + }) + + doc.update(_fields_from_block(block)) + + return doc + + +def searchable_doc_tags(object_id: OpaqueKey) -> dict: """ Given an XBlock, course, library, etc., get the tag data for its index doc. @@ -320,7 +412,7 @@ def _tags_for_content_object(object_id: OpaqueKey) -> dict: return {Fields.tags: result} -def _collections_for_content_object(object_id: OpaqueKey) -> dict: +def searchable_doc_collections(object_id: OpaqueKey) -> dict: """ Given an XBlock, course, library, etc., get the collections for its index doc. @@ -376,9 +468,9 @@ def _collections_for_content_object(object_id: OpaqueKey) -> dict: return result -def _units_for_content_object(object_id: OpaqueKey) -> dict: +def searchable_doc_containers(object_id: OpaqueKey, container_type: str) -> dict: """ - Given an XBlock, course, library, etc., get the units for its index doc. 
+ Given an XBlock, course, library, etc., get the containers that it is part of for its index doc. e.g. for something in Units "UNIT_A" and "UNIT_B", this would return: { @@ -388,173 +480,43 @@ def _units_for_content_object(object_id: OpaqueKey) -> dict: } } - If the object is in no collections, returns: + If the object is in no containers, returns: { - "collections": { + "sections": { "display_name": [], "key": [], }, } """ + container_field = getattr(Fields, container_type) result = { - Fields.units: { - Fields.units_display_name: [], - Fields.units_key: [], + container_field: { + Fields.containers_display_name: [], + Fields.containers_key: [], } } # Gather the units associated with this object - units = None + containers = None try: - if isinstance(object_id, UsageKey): - units = lib_api.get_containers_contains_component(object_id) + if isinstance(object_id, OpaqueKey): + containers = lib_api.get_containers_contains_item(object_id) else: log.warning(f"Unexpected key type for {object_id}") except ObjectDoesNotExist: log.warning(f"No library item found for {object_id}") - if not units: + if not containers: return result - for unit in units: - result[Fields.units][Fields.units_display_name].append(unit.display_name) - result[Fields.units][Fields.units_key].append(str(unit.container_key)) + for container in containers: + result[container_field][Fields.containers_display_name].append(container.display_name) + result[container_field][Fields.containers_key].append(str(container.container_key)) return result -def _published_data_from_block(block_published) -> dict: - """ - Given an library block get the published data. - """ - result = { - Fields.published: { - Fields.published_display_name: xblock_api.get_block_display_name(block_published), - } - } - - try: - content_data = _get_content_from_block(block_published) - - description = _get_description_from_block_content( - block_published.scope_ids.block_type, - content_data, - ) - - if description: - result[Fields.published][Fields.published_description] = description - except Exception as err: # pylint: disable=broad-except - log.exception(f"Failed to process index_dictionary for {block_published.usage_key}: {err}") - - return result - - -def searchable_doc_for_library_block(xblock_metadata: lib_api.LibraryXBlockMetadata) -> dict: - """ - Generate a dictionary document suitable for ingestion into a search engine - like Meilisearch or Elasticsearch, so that the given library block can be - found using faceted search. - - Datetime fields (created, modified, last_published) are serialized to POSIX timestamps so that they can be used to - sort the search results. 
- """ - library_name = lib_api.get_library(xblock_metadata.usage_key.context_key).title - block = xblock_api.load_block(xblock_metadata.usage_key, user=None) - - publish_status = PublishStatus.published - try: - block_published = xblock_api.load_block(xblock_metadata.usage_key, user=None, version=LatestVersion.PUBLISHED) - if xblock_metadata.last_published and xblock_metadata.last_published < xblock_metadata.modified: - publish_status = PublishStatus.modified - except NotFound: - # Never published - block_published = None - publish_status = PublishStatus.never - - doc = searchable_doc_for_key(xblock_metadata.usage_key) - doc.update({ - Fields.type: DocType.library_block, - Fields.breadcrumbs: [], - Fields.created: xblock_metadata.created.timestamp(), - Fields.modified: xblock_metadata.modified.timestamp(), - Fields.last_published: xblock_metadata.last_published.timestamp() if xblock_metadata.last_published else None, - Fields.publish_status: publish_status, - }) - - doc.update(_fields_from_block(block)) - - if block_published: - doc.update(_published_data_from_block(block_published)) - - # Add the breadcrumbs. In v2 libraries, the library itself is not a "parent" of the XBlocks so we add it here: - doc[Fields.breadcrumbs] = [{"display_name": library_name}] - - return doc - - -def searchable_doc_tags(key: OpaqueKey) -> dict: - """ - Generate a dictionary document suitable for ingestion into a search engine - like Meilisearch or Elasticsearch, with the tags data for the given content object. - """ - doc = searchable_doc_for_key(key) - doc.update(_tags_for_content_object(key)) - - return doc - - -def searchable_doc_collections(opaque_key: OpaqueKey) -> dict: - """ - Generate a dictionary document suitable for ingestion into a search engine - like Meilisearch or Elasticsearch, with the collections data for the given content object. - """ - doc = searchable_doc_for_key(opaque_key) - doc.update(_collections_for_content_object(opaque_key)) - - return doc - - -def searchable_doc_units(opaque_key: OpaqueKey) -> dict: - """ - Generate a dictionary document suitable for ingestion into a search engine - like Meilisearch or Elasticsearch, with the units data for the given content object. - """ - doc = searchable_doc_for_key(opaque_key) - doc.update(_units_for_content_object(opaque_key)) - - return doc - - -def searchable_doc_tags_for_collection( - collection_key: LibraryCollectionLocator -) -> dict: - """ - Generate a dictionary document suitable for ingestion into a search engine - like Meilisearch or Elasticsearch, with the tags data for the given library collection. - """ - doc = searchable_doc_for_key(collection_key) - doc.update(_tags_for_content_object(collection_key)) - - return doc - - -def searchable_doc_for_course_block(block) -> dict: - """ - Generate a dictionary document suitable for ingestion into a search engine - like Meilisearch or Elasticsearch, so that the given course block can be - found using faceted search. 
- """ - doc = searchable_doc_for_key(block.usage_key) - doc.update({ - Fields.type: DocType.course_block, - }) - - doc.update(_fields_from_block(block)) - - return doc - - def searchable_doc_for_collection( collection_key: LibraryCollectionLocator, *, @@ -674,13 +636,17 @@ def get_child_keys(children) -> list[str]: for child in children ] + def get_child_names(children) -> list[str]: + return [child.display_name for child in children] + doc.update({ Fields.display_name: container.display_name, Fields.created: container.created.timestamp(), Fields.modified: container.modified.timestamp(), Fields.num_children: len(draft_children), Fields.content: { - Fields.child_usage_keys: get_child_keys(draft_children) + Fields.child_usage_keys: get_child_keys(draft_children), + Fields.child_display_names: get_child_names(draft_children), }, Fields.publish_status: publish_status, Fields.last_published: container.last_published.timestamp() if container.last_published else None, @@ -699,6 +665,7 @@ def get_child_keys(children) -> list[str]: Fields.published_num_children: len(published_children), Fields.published_content: { Fields.child_usage_keys: get_child_keys(published_children), + Fields.child_display_names: get_child_names(published_children), }, } diff --git a/openedx/core/djangoapps/content/search/handlers.py b/openedx/core/djangoapps/content/search/handlers.py index fdc52a968e5e..1b0f3f199616 100644 --- a/openedx/core/djangoapps/content/search/handlers.py +++ b/openedx/core/djangoapps/content/search/handlers.py @@ -44,9 +44,8 @@ from .api import ( only_if_meilisearch_enabled, upsert_content_object_tags_index_doc, - upsert_collection_tags_index_docs, upsert_item_collections_index_docs, - upsert_item_units_index_docs, + upsert_item_containers_index_docs, ) from .tasks import ( delete_library_block_index_doc, @@ -258,14 +257,15 @@ def content_object_associations_changed_handler(**kwargs) -> None: # This event's changes may contain both "tags" and "collections", but this will happen rarely, if ever. # So we allow a potential double "upsert" here. 
if not content_object.changes or "tags" in content_object.changes: - if isinstance(opaque_key, LibraryCollectionLocator): - upsert_collection_tags_index_docs(opaque_key) - else: - upsert_content_object_tags_index_doc(opaque_key) + upsert_content_object_tags_index_doc(opaque_key) if not content_object.changes or "collections" in content_object.changes: upsert_item_collections_index_docs(opaque_key) if not content_object.changes or "units" in content_object.changes: - upsert_item_units_index_docs(opaque_key) + upsert_item_containers_index_docs(opaque_key, "units") + if not content_object.changes or "sections" in content_object.changes: + upsert_item_containers_index_docs(opaque_key, "sections") + if not content_object.changes or "subsections" in content_object.changes: + upsert_item_containers_index_docs(opaque_key, "subsections") @receiver(LIBRARY_CONTAINER_CREATED) diff --git a/openedx/core/djangoapps/content/search/tests/test_api.py b/openedx/core/djangoapps/content/search/tests/test_api.py index 3b22ce389d0f..b2b0cd17169a 100644 --- a/openedx/core/djangoapps/content/search/tests/test_api.py +++ b/openedx/core/djangoapps/content/search/tests/test_api.py @@ -256,7 +256,10 @@ def setUp(self) -> None: "display_name": "Unit 1", # description is not set for containers "num_children": 0, - "content": {"child_usage_keys": []}, + "content": { + "child_usage_keys": [], + "child_display_names": [], + }, "publish_status": "never", "context_key": "lib:org1:lib", "org": "org1", @@ -276,7 +279,10 @@ def setUp(self) -> None: "display_name": "Subsection 1", # description is not set for containers "num_children": 0, - "content": {"child_usage_keys": []}, + "content": { + "child_usage_keys": [], + "child_display_names": [], + }, "publish_status": "never", "context_key": "lib:org1:lib", "org": "org1", @@ -296,7 +302,10 @@ def setUp(self) -> None: "display_name": "Section 1", # description is not set for containers "num_children": 0, - "content": {"child_usage_keys": []}, + "content": { + "child_usage_keys": [], + "child_display_names": [], + }, "publish_status": "never", "context_key": "lib:org1:lib", "org": "org1", @@ -336,9 +345,11 @@ def test_reindex_meilisearch(self, mock_meilisearch) -> None: doc_unit = copy.deepcopy(self.unit_dict) doc_unit["tags"] = {} doc_unit["collections"] = {'display_name': [], 'key': []} + doc_unit["subsections"] = {"display_name": [], "key": []} doc_subsection = copy.deepcopy(self.subsection_dict) doc_subsection["tags"] = {} doc_subsection["collections"] = {'display_name': [], 'key': []} + doc_subsection["sections"] = {'display_name': [], 'key': []} doc_section = copy.deepcopy(self.section_dict) doc_section["tags"] = {} doc_section["collections"] = {'display_name': [], 'key': []} @@ -376,9 +387,11 @@ def test_reindex_meilisearch_incremental(self, mock_meilisearch) -> None: doc_unit = copy.deepcopy(self.unit_dict) doc_unit["tags"] = {} doc_unit["collections"] = {"display_name": [], "key": []} + doc_unit["subsections"] = {"display_name": [], "key": []} doc_subsection = copy.deepcopy(self.subsection_dict) doc_subsection["tags"] = {} doc_subsection["collections"] = {'display_name': [], 'key': []} + doc_subsection["sections"] = {'display_name': [], 'key': []} doc_section = copy.deepcopy(self.section_dict) doc_section["tags"] = {} doc_section["collections"] = {'display_name': [], 'key': []} @@ -983,14 +996,21 @@ def test_delete_index_container(self, container_type, mock_meilisearch) -> None: container_dict["id"], ) + @ddt.data( + "unit", + "subsection", + "section", + ) 
@override_settings(MEILISEARCH_ENABLED=True) - def test_index_library_container_metadata(self, mock_meilisearch) -> None: + def test_index_library_container_metadata(self, container_type, mock_meilisearch) -> None: """ Test indexing a Library Container. """ - api.upsert_library_container_index_doc(self.unit.container_key) + container = getattr(self, container_type) + container_dict = getattr(self, f"{container_type}_dict") + api.upsert_library_container_index_doc(container.container_key) - mock_meilisearch.return_value.index.return_value.update_documents.assert_called_once_with([self.unit_dict]) + mock_meilisearch.return_value.index.return_value.update_documents.assert_called_once_with([container_dict]) @ddt.data( ("unit", "lctorg1libunitunit-1-e4527f7c"), @@ -1050,7 +1070,10 @@ def test_block_in_units(self, mock_meilisearch) -> None: new_unit_dict = { **self.unit_dict, "num_children": 1, - 'content': {'child_usage_keys': [self.doc_problem1["usage_key"]]} + 'content': { + 'child_usage_keys': [self.doc_problem1["usage_key"]], + 'child_display_names': [self.doc_problem1["display_name"]], + } } assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2 @@ -1071,16 +1094,25 @@ def test_units_in_subsection(self, mock_meilisearch) -> None: None, ) - # TODO verify subsections in units - + doc_block_with_subsections = { + "id": self.unit_dict["id"], + "subsections": { + "display_name": [self.subsection.display_name], + "key": [self.subsection_key], + }, + } new_subsection_dict = { **self.subsection_dict, "num_children": 1, - 'content': {'child_usage_keys': [self.unit_key]} + 'content': { + 'child_usage_keys': [self.unit_key], + 'child_display_names': [self.unit.display_name] + } } - assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 1 + assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2 mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls( [ + call([doc_block_with_subsections]), call([new_subsection_dict]), ], any_order=True, @@ -1095,16 +1127,25 @@ def test_section_in_usbsections(self, mock_meilisearch) -> None: None, ) - # TODO verify section in subsections - + doc_block_with_sections = { + "id": self.subsection_dict["id"], + "sections": { + "display_name": [self.section.display_name], + "key": [self.section_key], + }, + } new_section_dict = { **self.section_dict, "num_children": 1, - 'content': {'child_usage_keys': [self.subsection_key]} + 'content': { + 'child_usage_keys': [self.subsection_key], + 'child_display_names': [self.subsection.display_name], + } } - assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 1 + assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2 mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls( [ + call([doc_block_with_sections]), call([new_section_dict]), ], any_order=True, diff --git a/openedx/core/djangoapps/content/search/tests/test_documents.py b/openedx/core/djangoapps/content/search/tests/test_documents.py index 0ff45fa53e9b..5a8a2231dcdd 100644 --- a/openedx/core/djangoapps/content/search/tests/test_documents.py +++ b/openedx/core/djangoapps/content/search/tests/test_documents.py @@ -26,13 +26,11 @@ searchable_doc_for_course_block, searchable_doc_for_library_block, searchable_doc_tags, - searchable_doc_tags_for_collection, ) from ..models import SearchAccess except RuntimeError: searchable_doc_for_course_block = lambda x: x 
searchable_doc_tags = lambda x: x - searchable_doc_tags_for_collection = lambda x: x searchable_doc_for_collection = lambda x: x searchable_doc_for_container = lambda x: x searchable_doc_for_library_block = lambda x: x @@ -484,7 +482,7 @@ def test_html_published_library_block(self): def test_collection_with_library(self): doc = searchable_doc_for_collection(self.collection_key) - doc.update(searchable_doc_tags_for_collection(self.collection_key)) + doc.update(searchable_doc_tags(self.collection_key)) assert doc == { "id": "lib-collectionedx2012_falltoy_collection-d1d907a4", @@ -513,7 +511,7 @@ def test_collection_with_published_library(self): library_api.publish_changes(self.library.key) doc = searchable_doc_for_collection(self.collection_key) - doc.update(searchable_doc_tags_for_collection(self.collection_key)) + doc.update(searchable_doc_tags(self.collection_key)) assert doc == { "id": "lib-collectionedx2012_falltoy_collection-d1d907a4", @@ -564,6 +562,7 @@ def test_draft_container(self, container_name, container_slug, container_type, d "num_children": 0, "content": { "child_usage_keys": [], + "child_display_names": [], }, "publish_status": "never", "context_key": "lib:edX:2012_Fall", @@ -609,6 +608,9 @@ def test_published_container(self): "child_usage_keys": [ "lb:edX:2012_Fall:html:text2", ], + "child_display_names": [ + "Text", + ], }, "publish_status": "published", "context_key": "lib:edX:2012_Fall", @@ -628,6 +630,9 @@ def test_published_container(self): "child_usage_keys": [ "lb:edX:2012_Fall:html:text2", ], + "child_display_names": [ + "Text", + ], }, }, } @@ -676,6 +681,10 @@ def test_published_container_with_changes(self): "lb:edX:2012_Fall:html:text2", "lb:edX:2012_Fall:html:text3", ], + "child_display_names": [ + "Text", + "Text", + ], }, "publish_status": "modified", "context_key": "lib:edX:2012_Fall", @@ -695,6 +704,9 @@ def test_published_container_with_changes(self): "child_usage_keys": [ "lb:edX:2012_Fall:html:text2", ], + "child_display_names": [ + "Text", + ], }, }, } diff --git a/openedx/core/djangoapps/content_libraries/api/blocks.py b/openedx/core/djangoapps/content_libraries/api/blocks.py index 3f6867e72daa..42974f6ff7aa 100644 --- a/openedx/core/djangoapps/content_libraries/api/blocks.py +++ b/openedx/core/djangoapps/content_libraries/api/blocks.py @@ -58,7 +58,7 @@ from .containers import ( create_container, get_container, - get_containers_contains_component, + get_containers_contains_item, update_container_children, ContainerMetadata, ContainerType, @@ -229,7 +229,7 @@ def set_library_block_olx(usage_key: LibraryUsageLocatorV2, new_olx_str: str) -> # For each container, trigger LIBRARY_CONTAINER_UPDATED signal and set background=True to trigger # container indexing asynchronously. 
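# --- Editor's note: illustrative sketch, not part of the diff -----------------
# Alongside child_usage_keys, container documents now also index the children's
# display names (see get_child_names in the documents.py hunk above and the
# expected documents in test_documents.py). This self-contained helper shows
# the resulting "content" payload shape; ``Child`` is a stand-in for the real
# child entries returned by the authoring API.
from dataclasses import dataclass


@dataclass
class Child:
    usage_key: str
    display_name: str


def container_content_payload(children: list[Child]) -> dict:
    """Build the ``content`` sub-document indexed for a container."""
    return {
        "child_usage_keys": [child.usage_key for child in children],
        "child_display_names": [child.display_name for child in children],
    }


payload = container_content_payload([Child("lb:edX:2012_Fall:html:text2", "Text")])
assert payload == {
    "child_usage_keys": ["lb:edX:2012_Fall:html:text2"],
    "child_display_names": ["Text"],
}
# -------------------------------------------------------------------------------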
- affected_containers = get_containers_contains_component(usage_key) + affected_containers = get_containers_contains_item(usage_key) for container in affected_containers: LIBRARY_CONTAINER_UPDATED.send_event( library_container=LibraryContainerData( @@ -585,7 +585,7 @@ def delete_library_block( component = get_component_from_usage_key(usage_key) library_key = usage_key.context_key affected_collections = authoring_api.get_entity_collections(component.learning_package_id, component.key) - affected_containers = get_containers_contains_component(usage_key) + affected_containers = get_containers_contains_item(usage_key) authoring_api.soft_delete_draft(component.pk, deleted_by=user_id) @@ -673,7 +673,7 @@ def restore_library_block(usage_key: LibraryUsageLocatorV2, user_id: int | None # container indexing asynchronously. # # To update the components count in containers - affected_containers = get_containers_contains_component(usage_key) + affected_containers = get_containers_contains_item(usage_key) for container in affected_containers: LIBRARY_CONTAINER_UPDATED.send_event( library_container=LibraryContainerData( diff --git a/openedx/core/djangoapps/content_libraries/api/containers.py b/openedx/core/djangoapps/content_libraries/api/containers.py index 8e2e486377da..4ec1922fb975 100644 --- a/openedx/core/djangoapps/content_libraries/api/containers.py +++ b/openedx/core/djangoapps/content_libraries/api/containers.py @@ -24,7 +24,7 @@ LIBRARY_CONTAINER_UPDATED, ) from openedx_learning.api import authoring as authoring_api -from openedx_learning.api.authoring_models import Container, ContainerVersion +from openedx_learning.api.authoring_models import Container, ContainerVersion, Component from openedx.core.djangoapps.content_libraries.api.collections import library_collection_locator from openedx.core.djangoapps.xblock.api import get_component_from_usage_key @@ -50,7 +50,7 @@ "delete_container", "restore_container", "update_container_children", - "get_containers_contains_component", + "get_containers_contains_item", "publish_container_changes", ] @@ -513,7 +513,13 @@ def update_container_children( entities_action=entities_action, ) - # TODO add CONTENT_OBJECT_ASSOCIATIONS_CHANGED for subsections + for key in children_ids: + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event( + content_object=ContentObjectChangedData( + object_id=str(key), + changes=["subsections"], + ), + ) case ContainerType.Section: subsections = [_get_container_from_key(key).subsection for key in children_ids] # type: ignore[arg-type] new_version = authoring_api.create_next_section_version( @@ -524,7 +530,13 @@ def update_container_children( entities_action=entities_action, ) - # TODO add CONTENT_OBJECT_ASSOCIATIONS_CHANGED for sections + for key in children_ids: + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event( + content_object=ContentObjectChangedData( + object_id=str(key), + changes=["sections"], + ), + ) case _: raise ValueError(f"Invalid container type: {container_type}") @@ -537,19 +549,26 @@ def update_container_children( return ContainerMetadata.from_container(library_key, new_version.container) -def get_containers_contains_component( - usage_key: LibraryUsageLocatorV2 +def get_containers_contains_item( + key: LibraryUsageLocatorV2 | LibraryContainerLocator ) -> list[ContainerMetadata]: """ - Get containers that contains the component. + Get containers that contains the item, + that can be a component or another container. 
""" - assert isinstance(usage_key, LibraryUsageLocatorV2) - component = get_component_from_usage_key(usage_key) + item: Component | Container + + if isinstance(key, LibraryUsageLocatorV2): + item = get_component_from_usage_key(key) + + elif isinstance(key, LibraryContainerLocator): + item = _get_container_from_key(key) + containers = authoring_api.get_containers_with_entity( - component.publishable_entity.pk, + item.publishable_entity.pk, ) return [ - ContainerMetadata.from_container(usage_key.context_key, container) + ContainerMetadata.from_container(key.lib_key, container) for container in containers ] diff --git a/openedx/core/djangoapps/content_libraries/library_context.py b/openedx/core/djangoapps/content_libraries/library_context.py index 34a5a90269fe..4ab66706088d 100644 --- a/openedx/core/djangoapps/content_libraries/library_context.py +++ b/openedx/core/djangoapps/content_libraries/library_context.py @@ -121,7 +121,7 @@ def send_container_updated_events(self, usage_key: UsageKeyV2): with the given usage_key. """ assert isinstance(usage_key, LibraryUsageLocatorV2) - affected_containers = api.get_containers_contains_component(usage_key) + affected_containers = api.get_containers_contains_item(usage_key) for container in affected_containers: LIBRARY_CONTAINER_UPDATED.send_event( library_container=LibraryContainerData( diff --git a/openedx/core/djangoapps/content_libraries/tasks.py b/openedx/core/djangoapps/content_libraries/tasks.py index b472126e8ce7..72d2d1b0884f 100644 --- a/openedx/core/djangoapps/content_libraries/tasks.py +++ b/openedx/core/djangoapps/content_libraries/tasks.py @@ -99,7 +99,7 @@ def send_events_after_publish(publish_log_pk: int, library_key_str: str) -> None # Publishing a container will auto-publish its children, but publishing a single component or all changes # in the library will NOT usually include any parent containers. But we do need to notify listeners that the # parent container(s) have changed, e.g. so the search index can update the "has_unpublished_changes" - for parent_container in api.get_containers_contains_component(usage_key): + for parent_container in api.get_containers_contains_item(usage_key): affected_containers.add(parent_container.container_key) # TODO: should this be a CONTAINER_CHILD_PUBLISHED event instead of CONTAINER_PUBLISHED ? elif hasattr(record.entity, "container"): @@ -181,7 +181,7 @@ def send_events_after_revert(draft_change_log_id: int, library_key_str: str) -> # If any containers contain this component, their child list / component count may need to be updated # e.g. if this was a newly created component in the container and is now deleted, or this was deleted and # is now restored. 
- for parent_container in api.get_containers_contains_component(usage_key): + for parent_container in api.get_containers_contains_item(usage_key): updated_container_keys.add(parent_container.container_key) # TODO: do we also need to send CONTENT_OBJECT_ASSOCIATIONS_CHANGED for this component, or is diff --git a/openedx/core/djangoapps/content_libraries/tests/test_api.py b/openedx/core/djangoapps/content_libraries/tests/test_api.py index 9517e5d0d2f6..0715848c6424 100644 --- a/openedx/core/djangoapps/content_libraries/tests/test_api.py +++ b/openedx/core/djangoapps/content_libraries/tests/test_api.py @@ -746,6 +746,39 @@ def setUp(self) -> None: # Create Units self.unit1 = api.create_container(self.lib1.library_key, api.ContainerType.Unit, 'unit-1', 'Unit 1', None) self.unit2 = api.create_container(self.lib1.library_key, api.ContainerType.Unit, 'unit-2', 'Unit 2', None) + self.unit3 = api.create_container(self.lib1.library_key, api.ContainerType.Unit, 'unit-3', 'Unit 3', None) + + # Create Subsections + self.subsection1 = api.create_container( + self.lib1.library_key, + api.ContainerType.Subsection, + 'subsection-1', + 'Subsection 1', + None, + ) + self.subsection2 = api.create_container( + self.lib1.library_key, + api.ContainerType.Subsection, + 'subsection-2', + 'Subsection 2', + None, + ) + + # Create Sections + self.section1 = api.create_container( + self.lib1.library_key, + api.ContainerType.Section, + 'section-1', + 'Section 1', + None, + ) + self.section2 = api.create_container( + self.lib1.library_key, + api.ContainerType.Section, + 'section-2', + 'Section 2', + None, + ) # Create XBlocks # Create some library blocks in lib1 @@ -753,6 +786,9 @@ def setUp(self) -> None: self.lib1.library_key, "problem", "problem1", ) self.problem_block_usage_key = LibraryUsageLocatorV2.from_string(self.problem_block["id"]) + self.problem_block_2 = self._add_block_to_library( + self.lib1.library_key, "problem", "problem2", + ) self.html_block = self._add_block_to_library( self.lib1.library_key, "html", "html1", ) @@ -770,9 +806,37 @@ def setUp(self) -> None: None, ) - def test_get_containers_contains_component(self) -> None: - problem_block_containers = api.get_containers_contains_component(self.problem_block_usage_key) - html_block_containers = api.get_containers_contains_component(self.html_block_usage_key) + # Add units to subsections + api.update_container_children( + self.subsection1.container_key, + [self.unit1.container_key, self.unit2.container_key], + None, + ) + api.update_container_children( + self.subsection2.container_key, + [self.unit1.container_key], + None, + ) + + # Add subsections to sections + api.update_container_children( + self.section1.container_key, + [self.subsection1.container_key, self.subsection2.container_key], + None, + ) + api.update_container_children( + self.section2.container_key, + [self.subsection1.container_key], + None, + ) + + def test_get_containers_contains_item(self): + problem_block_containers = api.get_containers_contains_item(self.problem_block_usage_key) + html_block_containers = api.get_containers_contains_item(self.html_block_usage_key) + unit_1_containers = api.get_containers_contains_item(self.unit1.container_key) + unit_2_containers = api.get_containers_contains_item(self.unit2.container_key) + subsection_1_containers = api.get_containers_contains_item(self.subsection1.container_key) + subsection_2_containers = api.get_containers_contains_item(self.subsection2.container_key) assert len(problem_block_containers) == 1 assert 
problem_block_containers[0].container_key == self.unit1.container_key @@ -781,7 +845,21 @@ def test_get_containers_contains_component(self) -> None: assert html_block_containers[0].container_key == self.unit1.container_key assert html_block_containers[1].container_key == self.unit2.container_key - def _validate_calls_of_html_block(self, event_mock) -> None: + assert len(unit_1_containers) == 2 + assert unit_1_containers[0].container_key == self.subsection1.container_key + assert unit_1_containers[1].container_key == self.subsection2.container_key + + assert len(unit_2_containers) == 1 + assert unit_2_containers[0].container_key == self.subsection1.container_key + + assert len(subsection_1_containers) == 2 + assert subsection_1_containers[0].container_key == self.section1.container_key + assert subsection_1_containers[1].container_key == self.section2.container_key + + assert len(subsection_2_containers) == 1 + assert subsection_2_containers[0].container_key == self.section1.container_key + + def _validate_calls_of_html_block(self, event_mock): """ Validate that the `event_mock` has been called twice using the `LIBRARY_CONTAINER_UPDATED` signal. @@ -875,6 +953,76 @@ def test_call_object_changed_signal_when_remove_component(self) -> None: event_reciver.call_args_list[0].kwargs, ) + def test_call_object_changed_signal_when_remove_unit(self) -> None: + unit4 = api.create_container(self.lib1.library_key, api.ContainerType.Unit, 'unit-4', 'Unit 4', None) + + api.update_container_children( + self.subsection2.container_key, + [unit4.container_key], + None, + entities_action=authoring_api.ChildrenEntitiesAction.APPEND, + ) + + event_reciver = mock.Mock() + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_reciver) + api.update_container_children( + self.subsection2.container_key, + [unit4.container_key], + None, + entities_action=authoring_api.ChildrenEntitiesAction.REMOVE, + ) + + assert event_reciver.call_count == 1 + self.assertDictContainsSubset( + { + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + "sender": None, + "content_object": ContentObjectChangedData( + object_id=str(unit4.container_key), + changes=["subsections"], + ), + }, + event_reciver.call_args_list[0].kwargs, + ) + + def test_call_object_changed_signal_when_remove_subsection(self) -> None: + subsection3 = api.create_container( + self.lib1.library_key, + api.ContainerType.Subsection, + 'subsection-3', + 'Subsection 3', + None, + ) + + api.update_container_children( + self.section2.container_key, + [subsection3.container_key], + None, + entities_action=authoring_api.ChildrenEntitiesAction.APPEND, + ) + + event_reciver = mock.Mock() + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_reciver) + api.update_container_children( + self.section2.container_key, + [subsection3.container_key], + None, + entities_action=authoring_api.ChildrenEntitiesAction.REMOVE, + ) + + assert event_reciver.call_count == 1 + self.assertDictContainsSubset( + { + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + "sender": None, + "content_object": ContentObjectChangedData( + object_id=str(subsection3.container_key), + changes=["sections"], + ), + }, + event_reciver.call_args_list[0].kwargs, + ) + def test_call_object_changed_signal_when_add_component(self) -> None: event_reciver = mock.Mock() CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_reciver) @@ -919,19 +1067,104 @@ def test_call_object_changed_signal_when_add_component(self) -> None: event_reciver.call_args_list[1].kwargs, ) + def test_call_object_changed_signal_when_add_unit(self) -> None: + 
event_reciver = mock.Mock() + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_reciver) + + unit4 = api.create_container(self.lib1.library_key, api.ContainerType.Unit, 'unit-4', 'Unit 4', None) + unit5 = api.create_container(self.lib1.library_key, api.ContainerType.Unit, 'unit-5', 'Unit 5', None) + + api.update_container_children( + self.subsection2.container_key, + [unit4.container_key, unit5.container_key], + None, + entities_action=authoring_api.ChildrenEntitiesAction.APPEND, + ) + assert event_reciver.call_count == 2 + self.assertDictContainsSubset( + { + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + "sender": None, + "content_object": ContentObjectChangedData( + object_id=str(unit4.container_key), + changes=["subsections"], + ), + }, + event_reciver.call_args_list[0].kwargs, + ) + self.assertDictContainsSubset( + { + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + "sender": None, + "content_object": ContentObjectChangedData( + object_id=str(unit5.container_key), + changes=["subsections"], + ), + }, + event_reciver.call_args_list[1].kwargs, + ) + + def test_call_object_changed_signal_when_add_subsection(self) -> None: + event_reciver = mock.Mock() + CONTENT_OBJECT_ASSOCIATIONS_CHANGED.connect(event_reciver) + + subsection3 = api.create_container( + self.lib1.library_key, + api.ContainerType.Subsection, + 'subsection-3', + 'Subsection 3', + None, + ) + subsection4 = api.create_container( + self.lib1.library_key, + api.ContainerType.Subsection, + 'subsection-4', + 'Subsection 4', + None, + ) + api.update_container_children( + self.section2.container_key, + [subsection3.container_key, subsection4.container_key], + None, + entities_action=authoring_api.ChildrenEntitiesAction.APPEND, + ) + assert event_reciver.call_count == 2 + self.assertDictContainsSubset( + { + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + "sender": None, + "content_object": ContentObjectChangedData( + object_id=str(subsection3.container_key), + changes=["sections"], + ), + }, + event_reciver.call_args_list[0].kwargs, + ) + self.assertDictContainsSubset( + { + "signal": CONTENT_OBJECT_ASSOCIATIONS_CHANGED, + "sender": None, + "content_object": ContentObjectChangedData( + object_id=str(subsection4.container_key), + changes=["sections"], + ), + }, + event_reciver.call_args_list[1].kwargs, + ) + def test_delete_component_and_revert(self) -> None: """ When a component is deleted and then the delete is reverted, signals will be emitted to update any containing containers. """ # Add components and publish - api.update_container_children(self.unit1.container_key, [ - LibraryUsageLocatorV2.from_string(self.problem_block["id"]), + api.update_container_children(self.unit3.container_key, [ + LibraryUsageLocatorV2.from_string(self.problem_block_2["id"]), ], user_id=None) api.publish_changes(self.lib1.library_key) # Delete component and revert - api.delete_library_block(LibraryUsageLocatorV2.from_string(self.problem_block["id"])) + api.delete_library_block(LibraryUsageLocatorV2.from_string(self.problem_block_2["id"])) container_event_receiver = mock.Mock() LIBRARY_CONTAINER_UPDATED.connect(container_event_receiver) @@ -942,5 +1175,5 @@ def test_delete_component_and_revert(self) -> None: assert { "signal": LIBRARY_CONTAINER_UPDATED, "sender": None, - "library_container": LibraryContainerData(container_key=self.unit1.container_key), + "library_container": LibraryContainerData(container_key=self.unit3.container_key), }.items() <= container_event_receiver.call_args_list[0].kwargs.items()
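# --- Editor's note: illustrative sketch, not part of the diff -----------------
# Net effect exercised by the tests above: update_container_children now emits
# one CONTENT_OBJECT_ASSOCIATIONS_CHANGED event per child whose parent list
# changed, with ``changes`` naming the parent's kind -- children of a
# subsection get ["subsections"], children of a section get ["sections"] (the
# pre-existing unit branch presumably already does the same with ["units"]).
# The generator below models that pairing; names and keys are toy values.
def association_change_events(parent_type: str, child_keys: list[str]):
    """Yield (object_id, changes) pairs for an update to ``parent_type``'s children."""
    changes_by_parent = {
        "unit": ["units"],
        "subsection": ["subsections"],
        "section": ["sections"],
    }
    for key in child_keys:
        yield key, changes_by_parent[parent_type]


events = list(association_change_events("section", ["subsection-3-key"]))
assert events == [("subsection-3-key", ["sections"])]

events = list(association_change_events("subsection", ["unit-4-key", "unit-5-key"]))
assert events == [("unit-4-key", ["subsections"]), ("unit-5-key", ["subsections"])]
# -------------------------------------------------------------------------------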