26 changes: 26 additions & 0 deletions openedx/core/djangoapps/content/search/api.py
@@ -53,6 +53,8 @@
searchable_doc_tags,
searchable_doc_tags_for_collection,
searchable_doc_units,
searchable_doc_subsections,
searchable_doc_sections,
)

log = logging.getLogger(__name__)
@@ -497,6 +499,12 @@ def index_container_batch(batch, num_done, library_key) -> int:
doc = searchable_doc_for_container(container_key)
doc.update(searchable_doc_tags(container_key))
doc.update(searchable_doc_collections(container_key))
container_type = lib_api.ContainerType(container_key.container_type)
match container_type:
case lib_api.ContainerType.Unit:
doc.update(searchable_doc_subsections(container_key))
case lib_api.ContainerType.Subsection:
doc.update(searchable_doc_sections(container_key))
docs.append(doc)
except Exception as err: # pylint: disable=broad-except
status_cb(f"Error indexing container {container.key}: {err}")
@@ -864,6 +872,24 @@ def upsert_item_units_index_docs(opaque_key: OpaqueKey):
_update_index_docs([doc])


def upsert_item_subsections_index_docs(opaque_key: OpaqueKey):
"""
Updates the subsections data in documents for the given Course/Library block
"""
doc = {Fields.id: meili_id_from_opaque_key(opaque_key)}
doc.update(searchable_doc_subsections(opaque_key))
_update_index_docs([doc])


def upsert_item_sections_index_docs(opaque_key: OpaqueKey):
"""
Updates the sections data in documents for the given Course/Library block
"""
doc = {Fields.id: meili_id_from_opaque_key(opaque_key)}
doc.update(searchable_doc_sections(opaque_key))
_update_index_docs([doc])


def upsert_collection_tags_index_docs(collection_key: LibraryCollectionLocator):
"""
Updates the tags data in documents for the given library collection
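For reference, a minimal standalone sketch (not the real openedx helpers) of the partial documents that upsert_item_subsections_index_docs / upsert_item_sections_index_docs hand to _update_index_docs: only the Meilisearch document id plus the one container field being refreshed. The key strings below are hypothetical examples.

def partial_container_doc(doc_id: str, field: str, names: list[str], keys: list[str]) -> dict:
    """Shape of the partial document passed to _update_index_docs([doc]) in the sketch above."""
    return {
        "id": doc_id,      # Fields.id in the real code, built by meili_id_from_opaque_key()
        field: {           # "subsections" or "sections"
            "display_name": names,
            "key": keys,
        },
    }

# Example: a unit that belongs to a single subsection (hypothetical id and key).
doc = partial_container_doc(
    "lctorg1libunitunit-1-e4527f7c",
    "subsections",
    ["Subsection 1"],
    ["lct:org1:lib:subsection:subsection-1"],
)
assert set(doc) == {"id", "subsections"}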
63 changes: 48 additions & 15 deletions openedx/core/djangoapps/content/search/documents.py
@@ -71,6 +71,14 @@ class Fields:
units = "units"
units_display_name = "display_name"
units_key = "key"
# Subsections (dictionary) that this object belongs to.
subsections = "subsections"
subsections_display_name = "display_name"
subsections_key = "key"
# Sections (dictionary) that this object belongs to.
sections = "sections"
sections_display_name = "display_name"
sections_key = "key"

# The "content" field is a dictionary of arbitrary data, depending on the block_type.
# It comes from each XBlock's index_dictionary() method (if present) plus some processing.
@@ -376,9 +384,9 @@ def _collections_for_content_object(object_id: OpaqueKey) -> dict:
return result


def _units_for_content_object(object_id: OpaqueKey) -> dict:
def _containers_for_content_object(object_id: OpaqueKey, container_type: str) -> dict:
"""
Given an XBlock, course, library, etc., get the units for its index doc.
Given an XBlock, course, library, etc., get the containers it belongs to, for use in its index doc.

e.g. for something in Units "UNIT_A" and "UNIT_B", this would return:
{
@@ -388,38 +396,41 @@ def _units_for_content_object(object_id: OpaqueKey) -> dict:
}
}

If the object is in no collections, returns:
If the object is in no containers, returns:
{
"collections": {
"units": {
"display_name": [],
"key": [],
},
}
"""
container_field = getattr(Fields, container_type)
container_display_name_field = getattr(Fields, f'{container_type}_display_name')
container_key_field = getattr(Fields, f'{container_type}_key')
result = {
Fields.units: {
Fields.units_display_name: [],
Fields.units_key: [],
container_field: {
container_display_name_field: [],
container_key_field: [],
}
}

# Gather the containers associated with this object
units = None
containers = None
try:
if isinstance(object_id, UsageKey):
units = lib_api.get_containers_contains_component(object_id)
if isinstance(object_id, OpaqueKey):
containers = lib_api.get_containers_contains_item(object_id)
else:
log.warning(f"Unexpected key type for {object_id}")

except ObjectDoesNotExist:
log.warning(f"No library item found for {object_id}")

if not units:
if not containers:
return result

for unit in units:
result[Fields.units][Fields.units_display_name].append(unit.display_name)
result[Fields.units][Fields.units_key].append(str(unit.container_key))
for container in containers:
result[container_field][container_display_name_field].append(container.display_name)
result[container_field][container_key_field].append(str(container.container_key))

return result

@@ -521,7 +532,29 @@ def searchable_doc_units(opaque_key: OpaqueKey) -> dict:
like Meilisearch or Elasticsearch, with the units data for the given content object.
"""
doc = searchable_doc_for_key(opaque_key)
doc.update(_units_for_content_object(opaque_key))
doc.update(_containers_for_content_object(opaque_key, "units"))

return doc


def searchable_doc_sections(opaque_key: OpaqueKey) -> dict:
"""
Generate a dictionary document suitable for ingestion into a search engine
like Meilisearch or Elasticsearch, with the sections data for the given content object.
"""
doc = searchable_doc_for_key(opaque_key)
doc.update(_containers_for_content_object(opaque_key, "sections"))

return doc


def searchable_doc_subsections(opaque_key: OpaqueKey) -> dict:
"""
Generate a dictionary document suitable for ingestion into a search engine
like Meilisearch or Elasticsearch, with the subsections data for the given content object.
"""
doc = searchable_doc_for_key(opaque_key)
doc.update(_containers_for_content_object(opaque_key, "subsections"))

return doc

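As a sanity check on the getattr-based field lookup in _containers_for_content_object, here is a trimmed standalone sketch (the Fields class below is reduced to the relevant attributes; it is not the real module) showing how container_type selects the field names and what the empty result looks like:

class Fields:
    units = "units"
    units_display_name = "display_name"
    units_key = "key"
    subsections = "subsections"
    subsections_display_name = "display_name"
    subsections_key = "key"
    sections = "sections"
    sections_display_name = "display_name"
    sections_key = "key"


def empty_container_result(container_type: str) -> dict:
    # "subsections" -> Fields.subsections, Fields.subsections_display_name, Fields.subsections_key
    container_field = getattr(Fields, container_type)
    display_name_field = getattr(Fields, f"{container_type}_display_name")
    key_field = getattr(Fields, f"{container_type}_key")
    return {container_field: {display_name_field: [], key_field: []}}


assert empty_container_result("units") == {"units": {"display_name": [], "key": []}}
assert empty_container_result("sections") == {"sections": {"display_name": [], "key": []}}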
6 changes: 6 additions & 0 deletions openedx/core/djangoapps/content/search/handlers.py
@@ -47,6 +47,8 @@
upsert_collection_tags_index_docs,
upsert_item_collections_index_docs,
upsert_item_units_index_docs,
upsert_item_sections_index_docs,
upsert_item_subsections_index_docs,
)
from .tasks import (
delete_library_block_index_doc,
@@ -266,6 +268,10 @@ def content_object_associations_changed_handler(**kwargs) -> None:
upsert_item_collections_index_docs(opaque_key)
if not content_object.changes or "units" in content_object.changes:
upsert_item_units_index_docs(opaque_key)
if not content_object.changes or "sections" in content_object.changes:
upsert_item_sections_index_docs(opaque_key)
if not content_object.changes or "subsections" in content_object.changes:
upsert_item_subsections_index_docs(opaque_key)


@receiver(LIBRARY_CONTAINER_CREATED)
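A rough sketch of the dispatch rule the handler now follows, using stand-in types rather than the real signal plumbing: an empty or missing changes list means every association type is re-indexed, otherwise only the listed ones are.

from __future__ import annotations

from dataclasses import dataclass


@dataclass
class ContentObjectStub:
    changes: list[str] | None


def associations_to_reindex(content_object: ContentObjectStub) -> list[str]:
    # Mirrors the if-chain in content_object_associations_changed_handler, with the
    # upsert_* calls replaced by the names of the associations that would be updated.
    selected = []
    for change in ("collections", "units", "sections", "subsections"):
        if not content_object.changes or change in content_object.changes:
            selected.append(change)
    return selected


assert associations_to_reindex(ContentObjectStub(changes=None)) == [
    "collections", "units", "sections", "subsections",
]
assert associations_to_reindex(ContentObjectStub(changes=["sections"])) == ["sections"]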
41 changes: 32 additions & 9 deletions openedx/core/djangoapps/content/search/tests/test_api.py
@@ -336,9 +336,11 @@ def test_reindex_meilisearch(self, mock_meilisearch) -> None:
doc_unit = copy.deepcopy(self.unit_dict)
doc_unit["tags"] = {}
doc_unit["collections"] = {'display_name': [], 'key': []}
doc_unit["subsections"] = {"display_name": [], "key": []}
doc_subsection = copy.deepcopy(self.subsection_dict)
doc_subsection["tags"] = {}
doc_subsection["collections"] = {'display_name': [], 'key': []}
doc_subsection["sections"] = {'display_name': [], 'key': []}
doc_section = copy.deepcopy(self.section_dict)
doc_section["tags"] = {}
doc_section["collections"] = {'display_name': [], 'key': []}
@@ -376,9 +378,11 @@ def test_reindex_meilisearch_incremental(self, mock_meilisearch) -> None:
doc_unit = copy.deepcopy(self.unit_dict)
doc_unit["tags"] = {}
doc_unit["collections"] = {"display_name": [], "key": []}
doc_unit["subsections"] = {"display_name": [], "key": []}
doc_subsection = copy.deepcopy(self.subsection_dict)
doc_subsection["tags"] = {}
doc_subsection["collections"] = {'display_name': [], 'key': []}
doc_subsection["sections"] = {'display_name': [], 'key': []}
doc_section = copy.deepcopy(self.section_dict)
doc_section["tags"] = {}
doc_section["collections"] = {'display_name': [], 'key': []}
@@ -983,14 +987,21 @@ def test_delete_index_container(self, container_type, mock_meilisearch) -> None:
container_dict["id"],
)

@ddt.data(
"unit",
"subsection",
"section",
)
@override_settings(MEILISEARCH_ENABLED=True)
def test_index_library_container_metadata(self, mock_meilisearch) -> None:
def test_index_library_container_metadata(self, container_type, mock_meilisearch) -> None:
"""
Test indexing a Library Container.
"""
api.upsert_library_container_index_doc(self.unit.container_key)
container = getattr(self, container_type)
container_dict = getattr(self, f"{container_type}_dict")
api.upsert_library_container_index_doc(container.container_key)

mock_meilisearch.return_value.index.return_value.update_documents.assert_called_once_with([self.unit_dict])
mock_meilisearch.return_value.index.return_value.update_documents.assert_called_once_with([container_dict])

@ddt.data(
("unit", "lctorg1libunitunit-1-e4527f7c"),
@@ -1071,16 +1082,22 @@ def test_units_in_subsection(self, mock_meilisearch) -> None:
None,
)

# TODO verify subsections in units

doc_block_with_subsections = {
"id": self.unit_dict["id"],
"subsections": {
"display_name": [self.subsection.display_name],
"key": [self.subsection_key],
},
}
new_subsection_dict = {
**self.subsection_dict,
"num_children": 1,
'content': {'child_usage_keys': [self.unit_key]}
}
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 1
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2
mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls(
[
call([doc_block_with_subsections]),
call([new_subsection_dict]),
],
any_order=True,
@@ -1095,16 +1112,22 @@ def test_section_in_usbsections(self, mock_meilisearch) -> None:
None,
)

# TODO verify section in subsections

doc_block_with_sections = {
"id": self.subsection_dict["id"],
"sections": {
"display_name": [self.section.display_name],
"key": [self.section_key],
},
}
new_section_dict = {
**self.section_dict,
"num_children": 1,
'content': {'child_usage_keys': [self.subsection_key]}
}
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 1
assert mock_meilisearch.return_value.index.return_value.update_documents.call_count == 2
mock_meilisearch.return_value.index.return_value.update_documents.assert_has_calls(
[
call([doc_block_with_sections]),
call([new_section_dict]),
],
any_order=True,
8 changes: 4 additions & 4 deletions openedx/core/djangoapps/content_libraries/api/blocks.py
@@ -58,7 +58,7 @@
from .containers import (
create_container,
get_container,
get_containers_contains_component,
get_containers_contains_item,
update_container_children,
ContainerMetadata,
ContainerType,
@@ -229,7 +229,7 @@ def set_library_block_olx(usage_key: LibraryUsageLocatorV2, new_olx_str: str) ->

# For each container, trigger LIBRARY_CONTAINER_UPDATED signal and set background=True to trigger
# container indexing asynchronously.
affected_containers = get_containers_contains_component(usage_key)
affected_containers = get_containers_contains_item(usage_key)
for container in affected_containers:
LIBRARY_CONTAINER_UPDATED.send_event(
library_container=LibraryContainerData(
@@ -585,7 +585,7 @@ def delete_library_block(
component = get_component_from_usage_key(usage_key)
library_key = usage_key.context_key
affected_collections = authoring_api.get_entity_collections(component.learning_package_id, component.key)
affected_containers = get_containers_contains_component(usage_key)
affected_containers = get_containers_contains_item(usage_key)

authoring_api.soft_delete_draft(component.pk, deleted_by=user_id)

@@ -673,7 +673,7 @@ def restore_library_block(usage_key: LibraryUsageLocatorV2, user_id: int | None
# container indexing asynchronously.
#
# To update the components count in containers
affected_containers = get_containers_contains_component(usage_key)
affected_containers = get_containers_contains_item(usage_key)
for container in affected_containers:
LIBRARY_CONTAINER_UPDATED.send_event(
library_container=LibraryContainerData(
41 changes: 30 additions & 11 deletions openedx/core/djangoapps/content_libraries/api/containers.py
@@ -24,7 +24,7 @@
LIBRARY_CONTAINER_UPDATED,
)
from openedx_learning.api import authoring as authoring_api
from openedx_learning.api.authoring_models import Container, ContainerVersion
from openedx_learning.api.authoring_models import Container, ContainerVersion, Component
from openedx.core.djangoapps.content_libraries.api.collections import library_collection_locator

from openedx.core.djangoapps.xblock.api import get_component_from_usage_key
@@ -50,7 +50,7 @@
"delete_container",
"restore_container",
"update_container_children",
"get_containers_contains_component",
"get_containers_contains_item",
"publish_container_changes",
]

@@ -513,7 +513,13 @@ def update_container_children(
entities_action=entities_action,
)

# TODO add CONTENT_OBJECT_ASSOCIATIONS_CHANGED for subsections
for key in children_ids:
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
content_object=ContentObjectChangedData(
object_id=str(key),
changes=["subsections"],
),
)
case ContainerType.Section:
subsections = [_get_container_from_key(key).subsection for key in children_ids] # type: ignore[arg-type]
new_version = authoring_api.create_next_section_version(
@@ -524,7 +530,13 @@
entities_action=entities_action,
)

# TODO add CONTENT_OBJECT_ASSOCIATIONS_CHANGED for sections
for key in children_ids:
CONTENT_OBJECT_ASSOCIATIONS_CHANGED.send_event(
content_object=ContentObjectChangedData(
object_id=str(key),
changes=["sections"],
),
)
case _:
raise ValueError(f"Invalid container type: {container_type}")

@@ -537,19 +549,26 @@ def update_container_children(
return ContainerMetadata.from_container(library_key, new_version.container)


def get_containers_contains_component(
usage_key: LibraryUsageLocatorV2
def get_containers_contains_item(
key: LibraryUsageLocatorV2 | LibraryContainerLocator
) -> list[ContainerMetadata]:
"""
Get containers that contains the component.
Get the containers that contain the given item,
which can be a component or another container.
"""
assert isinstance(usage_key, LibraryUsageLocatorV2)
component = get_component_from_usage_key(usage_key)
item: Component | Container

if isinstance(key, LibraryUsageLocatorV2):
item = get_component_from_usage_key(key)

elif isinstance(key, LibraryContainerLocator):
item = _get_container_from_key(key)

containers = authoring_api.get_containers_with_entity(
component.publishable_entity.pk,
item.publishable_entity.pk,
)
return [
ContainerMetadata.from_container(usage_key.context_key, container)
ContainerMetadata.from_container(key.lib_key, container)
for container in containers
]

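To make the new per-child signalling in update_container_children concrete, here is a small standalone sketch, using plain dicts instead of ContentObjectChangedData / CONTENT_OBJECT_ASSOCIATIONS_CHANGED, of which change list each child receives per container type; the unit entry reflects the pre-existing unit handling assumed from the handler code, and the key string in the assertion is hypothetical.

def child_change_events(container_type: str, children_ids: list[str]) -> list[dict]:
    # When a container's children change, each child is told which parent-type
    # association to refresh in the search index.
    changes_for = {
        "unit": ["units"],              # components inside a unit (assumed pre-existing behaviour)
        "subsection": ["subsections"],  # units inside a subsection (added in this change)
        "section": ["sections"],        # subsections inside a section (added in this change)
    }
    return [
        {"object_id": str(key), "changes": changes_for[container_type]}
        for key in children_ids
    ]


assert child_change_events("section", ["lct:org1:lib:subsection:subsection-1"]) == [
    {"object_id": "lct:org1:lib:subsection:subsection-1", "changes": ["sections"]},
]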