diff --git a/RELEASE_NOTES b/RELEASE_NOTES
index afecdef..088a773 100644
--- a/RELEASE_NOTES
+++ b/RELEASE_NOTES
@@ -1,3 +1,16 @@
+esper 3.5
+=========
+Performance-focused release with major internal optimizations.
+
+Changes
+-------
+- Optimized `get_components` to iterate over the smallest candidate set of entities, drastically speeding up queries that include rare components.
+- Implemented lazy cache invalidation to reduce overhead from frequent entity modifications.
+- Sped up `get_processor` and `remove_processor` to be constant-time (O(1)) operations.
+- Minor performance improvements to entity creation and deletion routines.
+- Expanded the benchmark suite with rare-component and dynamic-world scenarios.
+
+
 esper 3.4
 =========
 Maintenance release
diff --git a/esper/__init__.py b/esper/__init__.py
index 7abac4c..ddfdd88 100644
--- a/esper/__init__.py
+++ b/esper/__init__.py
@@ -21,9 +21,11 @@
 from weakref import ref as _ref
 from weakref import WeakMethod as _WeakMethod
 
+from math import inf as _inf
+
 from itertools import count as _count
 
-__version__ = version = '3.4'
+__version__ = version = '3.5'
 
 
 ###################
@@ -140,13 +142,12 @@ def process(self, *args: _Any, **kwargs: _Any) -> None:
 _get_component_cache: _Dict[_Type[_Any], _List[_Any]] = {}
 _get_components_cache: _Dict[_Tuple[_Type[_Any], ...], _List[_Any]] = {}
 _processors: _List[Processor] = []
+_processors_dict: _Dict[_Type[Processor], Processor] = {}
+_cache_dirty: bool = False
 event_registry: _Dict[str, _Any] = {}
 process_times: _Dict[str, int] = {}
 current_world: str = "default"
 
-
-# {context_name: (entity_count, components, entities, dead_entities,
-#                 comp_cache, comps_cache, processors, process_times, event_registry)}
 _context_map: _Dict[str, _Tuple[
     "_count[int]",
     _Dict[_Type[_Any], _Set[_Any]],
@@ -155,9 +156,11 @@ def process(self, *args: _Any, **kwargs: _Any) -> None:
     _Dict[_Type[_Any], _List[_Any]],
     _Dict[_Tuple[_Type[_Any], ...], _List[_Any]],
     _List[Processor],
+    _Dict[_Type[Processor], Processor],
+    bool,
     _Dict[str, int],
     _Dict[str, _Any]
-]] = {"default": (_entity_count, {}, {}, set(), {}, {}, [], {}, {})}
+]] = {"default": (_entity_count, {}, {}, set(), {}, {}, [], {}, False, {}, {})}
 
 
 def clear_cache() -> None:
@@ -166,8 +169,16 @@ def clear_cache() -> None:
 
     Clearing the cache is not necessary to do manually, but may be useful
     for benchmarking or debugging.
""" + global _cache_dirty + _cache_dirty = True + + +def _clear_cache_now() -> None: + """Actually clear the cache (internal use).""" + global _cache_dirty _get_component_cache.clear() _get_components_cache.clear() + _cache_dirty = False def clear_database() -> None: @@ -181,7 +192,7 @@ def clear_database() -> None: _components.clear() _entities.clear() _dead_entities.clear() - clear_cache() + _clear_cache_now() def add_processor(processor_instance: Processor, priority: int = 0) -> None: @@ -196,6 +207,7 @@ def add_processor(processor_instance: Processor, priority: int = 0) -> None: processor_instance.priority = priority _processors.append(processor_instance) _processors.sort(key=lambda proc: proc.priority, reverse=True) + _processors_dict[type(processor_instance)] = processor_instance def remove_processor(processor_type: _Type[Processor]) -> None: @@ -210,9 +222,9 @@ def remove_processor(processor_type: _Type[Processor]) -> None: self.world.remove_processor(my_processor_instance) """ - for processor in _processors: - if type(processor) is processor_type: - _processors.remove(processor) + processor = _processors_dict.pop(processor_type, None) + if processor: + _processors.remove(processor) def get_processor(processor_type: _Type[Processor]) -> _Optional[Processor]: @@ -222,11 +234,7 @@ def get_processor(processor_type: _Type[Processor]) -> _Optional[Processor]: useful in certain situations, such as wanting to call a method on a Processor, from within another Processor. """ - for processor in _processors: - if type(processor) is processor_type: - return processor - else: - return None + return _processors_dict.get(processor_type) def create_entity(*components: _C) -> int: @@ -238,21 +246,19 @@ def create_entity(*components: _C) -> int: added later with the :py:func:`esper.add_component` function. """ entity = next(_entity_count) - - if entity not in _entities: - _entities[entity] = {} - + entity_dict = {} + for component_instance in components: - component_type = type(component_instance) if component_type not in _components: _components[component_type] = set() _components[component_type].add(entity) + entity_dict[component_type] = component_instance - _entities[entity][component_type] = component_instance - clear_cache() + _entities[entity] = entity_dict + clear_cache() return entity @@ -270,15 +276,16 @@ def delete_entity(entity: int, immediate: bool = False) -> None: Raises a KeyError if the given entity does not exist in the database. 
""" if immediate: - for component_type in _entities[entity]: - _components[component_type].discard(entity) + entity_comps = _entities[entity] + for component_type in entity_comps: + comp_set = _components[component_type] + comp_set.discard(entity) - if not _components[component_type]: + if not comp_set: del _components[component_type] del _entities[entity] clear_cache() - else: _dead_entities.add(entity) @@ -325,8 +332,8 @@ def has_component(entity: int, component_type: _Type[_C]) -> bool: def has_components(entity: int, *component_types: _Type[_C]) -> bool: """Check if an Entity has all the specified Component types.""" - components_dict = _entities[entity] - return all(comp_type in components_dict for comp_type in component_types) + entity_comps = _entities[entity] + return all(comp_type in entity_comps for comp_type in component_types) def add_component(entity: int, component_instance: _C, type_alias: _Optional[_Type[_C]] = None) -> None: @@ -345,7 +352,6 @@ def add_component(entity: int, component_instance: _C, type_alias: _Optional[_Ty _components[component_type] = set() _components[component_type].add(entity) - _entities[entity][component_type] = component_instance clear_cache() @@ -360,9 +366,10 @@ def remove_component(entity: int, component_type: _Type[_C]) -> _C: Raises a KeyError if either the given entity or Component type does not exist in the database. """ - _components[component_type].discard(entity) + comp_set = _components[component_type] + comp_set.discard(entity) - if not _components[component_type]: + if not comp_set: del _components[component_type] clear_cache() @@ -371,28 +378,70 @@ def remove_component(entity: int, component_type: _Type[_C]) -> _C: def _get_component(component_type: _Type[_C]) -> _Iterable[_Tuple[int, _C]]: entity_db = _entities - - for entity in _components.get(component_type, []): + comp_set = _components.get(component_type) + + if comp_set is None: + return + + for entity in comp_set: yield entity, entity_db[entity][component_type] -def _get_components(*component_types: _Type[_C]) -> _Iterable[_Tuple[int, _List[_C]]]: +def _get_components(*component_types: _Type[_C]) -> _Iterable[_Tuple[int, _Tuple[_C, ...]]]: + if not component_types: + return + entity_db = _entities comp_db = _components - try: - for entity in set.intersection(*[comp_db[ct] for ct in component_types]): - yield entity, [entity_db[entity][ct] for ct in component_types] - except KeyError: - pass + min_set = None + min_size = _inf + other_types = [] + + for ct in component_types: + comp_set = comp_db.get(ct) + if comp_set is None: + return + set_size = len(comp_set) + if set_size < min_size: + if min_set is not None: + other_types.append(component_types[len(other_types)]) + min_size = set_size + min_set = comp_set + else: + other_types.append(ct) + + if min_set is None: + return + + if not other_types: + for entity in min_set: + entity_comps = entity_db[entity] + yield entity, tuple(entity_comps[ct] for ct in component_types) + else: + for entity in min_set: + entity_comps = entity_db[entity] + has_all = True + for ct in other_types: + if ct not in entity_comps: + has_all = False + break + if has_all: + yield entity, tuple(entity_comps[ct] for ct in component_types) def get_component(component_type: _Type[_C]) -> _List[_Tuple[int, _C]]: """Get an iterator for Entity, Component pairs.""" - try: - return _get_component_cache[component_type] - except KeyError: - return _get_component_cache.setdefault(component_type, list(_get_component(component_type))) + if _cache_dirty: + 
+        _clear_cache_now()
+
+    cached = _get_component_cache.get(component_type)
+    if cached is not None:
+        return cached
+
+    result = list(_get_component(component_type))
+    _get_component_cache[component_type] = result
+    return result
 
 
 @_overload
@@ -411,12 +460,18 @@ def get_components(__c1: _Type[_C], __c2: _Type[_C2], __c3: _Type[_C3], __c4: _T
     ...
 
 
-def get_components(*component_types: _Type[_Any]) -> _Iterable[_Tuple[int, _Tuple[_Any, ...]]]:
+def get_components(*component_types: _Type[_Any]) -> _List[_Tuple[int, _Tuple[_Any, ...]]]:
     """Get an iterator for Entity and multiple Component sets."""
-    try:
-        return _get_components_cache[component_types]
-    except KeyError:
-        return _get_components_cache.setdefault(component_types, list(_get_components(*component_types)))
+    if _cache_dirty:
+        _clear_cache_now()
+
+    cached = _get_components_cache.get(component_types)
+    if cached is not None:
+        return cached
+
+    result = list(_get_components(*component_types))
+    _get_components_cache[component_types] = result
+    return result
 
 
 def try_component(entity: int, component_type: _Type[_C]) -> _Optional[_C]:
@@ -427,8 +482,9 @@ def try_component(entity: int, component_type: _Type[_C]) -> _Optional[_C]:
     that may or may not exist, without having to first query if the Entity
     has the Component type.
     """
-    if component_type in _entities[entity]:
-        return _entities[entity][component_type]  # type: ignore[no-any-return]
+    entity_comps = _entities.get(entity)
+    if entity_comps and component_type in entity_comps:
+        return entity_comps[component_type]  # type: ignore[no-any-return]
     return None
 
 
@@ -455,8 +511,9 @@ def try_components(entity: int, *component_types: _Type[_C]) -> _Optional[_Tuple
     that may or may not exist, without first having to query if the Entity
     has the Component types.
    """
-    if all(comp_type in _entities[entity] for comp_type in component_types):
-        return [_entities[entity][comp_type] for comp_type in component_types]  # type: ignore[return-value]
+    entity_comps = _entities.get(entity)
+    if entity_comps and all(comp_type in entity_comps for comp_type in component_types):
+        return tuple(entity_comps[comp_type] for comp_type in component_types)
     return None
 
 
@@ -471,12 +528,17 @@ def clear_dead_entities() -> None:
     # In the interest of performance, this function duplicates code from the
     # `delete_entity` function. If that function is changed, those changes should
     # be duplicated here as well.
+    if not _dead_entities:
+        return
+
     for entity in _dead_entities:
+        entity_comps = _entities[entity]
 
-        for component_type in _entities[entity]:
-            _components[component_type].discard(entity)
+        for component_type in entity_comps:
+            comp_set = _components[component_type]
+            comp_set.discard(entity)
 
-            if not _components[component_type]:
+            if not comp_set:
                 del _components[component_type]
 
         del _entities[entity]
@@ -553,8 +615,7 @@ def switch_world(name: str) -> None:
 
     .. note:: At startup, a "default" World context is active.
""" if name not in _context_map: - # Create a new context if the name does not already exist: - _context_map[name] = (_count(start=1), {}, {}, set(), {}, {}, [], {}, {}) + _context_map[name] = (_count(start=1), {}, {}, set(), {}, {}, [], {}, False, {}, {}) global _current_world global _entity_count @@ -564,11 +625,13 @@ def switch_world(name: str) -> None: global _get_component_cache global _get_components_cache global _processors + global _processors_dict + global _cache_dirty global process_times global event_registry global current_world - # switch the references to the objects in the named context_map: (_entity_count, _components, _entities, _dead_entities, _get_component_cache, - _get_components_cache, _processors, process_times, event_registry) = _context_map[name] + _get_components_cache, _processors, _processors_dict, _cache_dirty, + process_times, event_registry) = _context_map[name] _current_world = current_world = name diff --git a/examples/benchmark.py b/examples/benchmark.py index c8a33eb..647496d 100644 --- a/examples/benchmark.py +++ b/examples/benchmark.py @@ -2,6 +2,7 @@ # -*- coding: utf-8 -*- import gc +import random import sys import time import optparse @@ -90,6 +91,11 @@ class Brain: smarts: int = 9000 +@component +class IsPlayer: + pass + + ############################# # Set up some dummy entities: ############################# @@ -99,6 +105,14 @@ def create_entities(number): esper.create_entity(Position(), Health(), Damageable()) +def create_mixed_entities(number): + for _ in range(number - 5): + esper.create_entity(Position(), Velocity()) + + for _ in range(5): + esper.create_entity(Position(), Velocity(), IsPlayer()) + + ############################# # Some timed query functions: ############################# @@ -120,10 +134,56 @@ def three_comp_query(): pass +@timing +def rare_comp_query(): + """ + Benchmark a query involving a common and a rare component. + + This scenario is designed to highlight the performance gain from the + "iterate over the smallest set" optimization. The query for + (Position, Velocity, IsPlayer) should be extremely fast, as it only + needs to iterate over the few entities that have the rare `IsPlayer` + component, instead of all entities with `Position`. + """ + for _, (_, _, _) in esper.get_components(Position, Velocity, IsPlayer): + pass + + +@timing +def dynamic_world_frame(entities_to_kill, new_entities_to_create): + """ + Benchmark a single frame in a dynamic world with entity churn. + + This function simulates a typical game loop frame to measure performance + under dynamic conditions. It tests the combined cost of: + 1. Running common queries. + 2. Deleting a batch of existing entities (testing `delete_entity`). + 3. Creating a batch of new entities (testing `create_entity`). + 4. Cleaning up dead entities (testing `clear_dead_entities`). + + This is a good test for the lazy cache invalidation and optimized + entity cleanup mechanisms. 
+ """ + for _, (_, _) in esper.get_components(Position, Velocity): + pass + for _, (_, _, _) in esper.get_components(Position, Damageable, Health): + pass + + for ent_id in entities_to_kill: + if esper.entity_exists(ent_id): + esper.delete_entity(ent_id, immediate=False) + + create_entities(new_entities_to_create) + + esper.clear_dead_entities() + + ################################################# # Perform several queries, and print the results: ################################################# results = {1: {}, 2: {}, 3: {}} +# result_times = [] +new_results = {"Rare Comp": {}, "Dynamic World": {}} result_times = [] for amount in range(500, MAX_ENTITIES, MAX_ENTITIES//50): @@ -163,6 +223,40 @@ def three_comp_query(): gc.collect() +print("\n--- Benchmarking: Optimized Scenarios ---") + +for amount in range(500, MAX_ENTITIES, MAX_ENTITIES//50): + create_mixed_entities(amount) + for _ in range(50): + rare_comp_query() + result_min = min(result_times) + print("Query rare component, {} Entities: {:f} ms".format(amount, result_min)) + new_results["Rare Comp"][amount] = result_min + result_times = [] + esper.clear_database() + gc.collect() + +for amount in range(500, MAX_ENTITIES, MAX_ENTITIES//50): + create_entities(amount) + all_entities = list(esper._entities.keys()) + k = min(10, len(all_entities)) + entities_to_kill_per_frame = random.sample(all_entities, k=k) + + for _ in range(50): + dynamic_world_frame(entities_to_kill_per_frame, 10) + all_entities = list(esper._entities.keys()) + k = min(10, len(all_entities)) + if k > 0: + entities_to_kill_per_frame = random.sample(all_entities, k=k) + + result_min = min(result_times) + print("Dynamic world frame, {} Entities: {:f} ms".format(amount, result_min)) + new_results["Dynamic World"][amount] = result_min + result_times = [] + esper.clear_database() + gc.collect() + + ############################################# # Save the results to disk, or plot directly: ############################################# @@ -177,6 +271,7 @@ def three_comp_query(): print("\nThe matplotlib module is required for plotting results.") sys.exit(1) + plt.figure(1) lines = [] for num, result in results.items(): x, y = zip(*sorted(result.items())) @@ -185,5 +280,19 @@ def three_comp_query(): plt.ylabel("Query Time (ms)") plt.xlabel("Number of Entities") + plt.title("Basic Component Queries") plt.legend(handles=lines, bbox_to_anchor=(0.5, 1)) + + plt.figure(2) + lines = [] + for name, result in new_results.items(): + if result: + x, y = zip(*sorted(result.items())) + lines.extend(plt.plot(x, y, label=name, marker='o')) + + plt.ylabel("Query Time (ms)") + plt.xlabel("Number of Entities") + plt.title("Optimized Scenarios") + plt.legend(handles=lines, bbox_to_anchor=(0.5, 1)) + plt.show() diff --git a/tests/test_world.py b/tests/test_world.py index ab1ff7d..ecbafa4 100644 --- a/tests/test_world.py +++ b/tests/test_world.py @@ -118,7 +118,7 @@ def test_get_two_components(): for ent, comps in esper.get_components(ComponentD, ComponentE): assert isinstance(ent, int) - assert isinstance(comps, list) + assert isinstance(comps, tuple) assert len(comps) == 2 for ent, (d, e) in esper.get_components(ComponentD, ComponentE): @@ -133,7 +133,7 @@ def test_get_three_components(): for ent, comps in esper.get_components(ComponentC, ComponentD, ComponentE): assert isinstance(ent, int) - assert isinstance(comps, list) + assert isinstance(comps, tuple) assert len(comps) == 3 for ent, (c, d, e) in esper.get_components(ComponentC, ComponentD, ComponentE): @@ -157,7 +157,7 @@ def 
     entity1 = esper.create_entity(ComponentA(), ComponentB())
 
     one_item = esper.try_components(entity1, ComponentA, ComponentB)
-    assert isinstance(one_item, list)
+    assert isinstance(one_item, tuple)
     assert len(one_item) == 2
     assert isinstance(one_item[0], ComponentA)
     assert isinstance(one_item[1], ComponentB)
@@ -516,6 +516,174 @@ def handler():
     assert esper.event_registry == {}
 
 
+##################################################
+# Advanced Feature Tests                         #
+##################################################
+
+
+def test_delayed_delete_entity():
+    """
+    Verify that delayed entity deletion works as expected.
+
+    This test checks the default deletion behavior (`immediate=False`).
+    It ensures that:
+    1. An entity marked for deletion is immediately considered non-existent
+       by the public `entity_exists()` function.
+    2. The entity's data, however, remains in the internal database
+       structures until the cleanup process is run.
+    3. After `clear_dead_entities()` is called, the entity is completely
+       purged from the database.
+    """
+    entity = esper.create_entity(ComponentA())
+    esper.delete_entity(entity, immediate=False)
+
+    assert esper.entity_exists(entity) is False
+    assert entity in esper._entities
+
+    esper.clear_dead_entities()
+
+    assert entity not in esper._entities
+    with pytest.raises(KeyError):
+        esper.components_for_entity(entity)
+
+
+def test_cache_invalidation_on_add_component():
+    """
+    Verify that adding a component correctly invalidates the query cache.
+
+    This test ensures that:
+    1. A query is run to populate the cache (making it "hot").
+    2. A new component is added to an existing entity, which should trigger
+       the lazy cache invalidation mechanism by setting the dirty flag.
+    3. A subsequent query correctly rebuilds the cache and returns the
+       expected results.
+    """
+    entity = esper.create_entity(ComponentA())
+
+    result1 = esper.get_components(ComponentA, ComponentB)
+    assert len(result1) == 0
+
+    esper.add_component(entity, ComponentB())
+
+    result2 = esper.get_components(ComponentA, ComponentB)
+    assert len(result2) == 1
+
+
+def test_cache_invalidation_on_remove_component():
+    """
+    Verify that removing a component correctly invalidates the query cache.
+
+    This test ensures that:
+    1. A query for a specific component combination is run to populate the cache.
+    2. One of the components is removed from the entity, which should
+       trigger the lazy cache invalidation by setting the dirty flag.
+    3. A subsequent query for the same component combination correctly
+       returns an empty result and clears the dirty flag.
+    """
+    entity = esper.create_entity(ComponentA(), ComponentB())
+
+    result1 = esper.get_components(ComponentA, ComponentB)
+    assert len(result1) == 1
+
+    esper.remove_component(entity, ComponentB)
+
+    result2 = esper.get_components(ComponentA, ComponentB)
+    assert len(result2) == 0
+
+
+def test_processor_priority():
+    """
+    Verify that processors are executed in the correct order based on priority.
+
+    This test adds two processors, A and B, with different priority values
+    (A has a higher priority of 10, B has a lower priority of 5). It then
+    confirms that when `esper.process()` is called, the processor with the
+    higher numerical priority (Processor A) is executed before the one with
+    the lower priority.
+ """ + + class PriorityProcessorA(esper.Processor): + priority = 10 + + def process(self, order_list): + order_list.append('A') + + class PriorityProcessorB(esper.Processor): + priority = 5 + + def process(self, order_list): + order_list.append('B') + + proc_b = PriorityProcessorB() + proc_a = PriorityProcessorA() + + esper.add_processor(proc_b, priority=proc_b.priority) + esper.add_processor(proc_a, priority=proc_a.priority) + + order = [] + esper.process(order) + + assert order == ['A', 'B'] + + +def test_weak_reference_handler_removal(): + """ + Verify that event handlers are automatically unregistered when garbage collected. + + The event system uses weak references to handlers to prevent memory leaks. + This test confirms that: + 1. A method from a temporary object instance is registered as an event handler. + 2. After the only strong reference to the instance is deleted, the garbage + collector reclaims the object. + 3. The weak reference in the event registry becomes dead, and the handler + is automatically removed via its callback. + 4. Dispatching the event no longer calls the handler, and the event name + is removed from the registry. + """ + called = 0 + + class TempHandler: + def handle(self): + nonlocal called + called += 1 + + temp_instance = TempHandler() + esper.set_handler("temp_event", temp_instance.handle) + + assert "temp_event" in esper.event_registry + + del temp_instance + + import gc + gc.collect() + + esper.dispatch_event("temp_event") + assert called == 0 + + assert "temp_event" not in esper.event_registry + + +def test_delete_world(): + """ + Verify the functionality of creating and deleting world contexts. + + This test checks that: + 1. A new world context can be created implicitly via `switch_world`. + 2. The `list_worlds` function correctly reports the existence of the new world. + 3. The `delete_world` function successfully removes the specified world. + 4. Attempting to delete a non-existent world correctly raises a KeyError. + """ + esper.switch_world("temp_world") + esper.switch_world("default") + + assert "temp_world" in esper.list_worlds() + esper.delete_world("temp_world") + assert "temp_world" not in esper.list_worlds() + + with pytest.raises(KeyError): + esper.delete_world("non_existent") + ################################################## # Some helper functions and Component templates: ##################################################