NestedProcessingTransformation only gets ProcessingItems instead of whole pipeline
thomaspatzke committed Sep 1, 2024
1 parent 513e352 commit ff10cc6
Showing 5 changed files with 74 additions and 45 deletions.
4 changes: 2 additions & 2 deletions docs/Processing_Pipelines.rst
@@ -285,7 +285,7 @@ definitions are available:
 "convert_type", "ConvertTypeTransformation"
 "rule_failure", "RuleFailureTransformation"
 "detection_item_failure", "DetectionItemFailureTransformation"
-"nested_pipeline", "NestedPipelineTransformation"
+"nest", "NestedProcessingTransformation"
 
 .. autoclass:: sigma.processing.transformations.FieldMappingTransformation
 
@@ -326,7 +326,7 @@ This is useful if different data models are used in the same system.
 .. autoclass:: sigma.processing.transformations.ConvertTypeTransformation
 .. autoclass:: sigma.processing.transformations.RuleFailureTransformation
 .. autoclass:: sigma.processing.transformations.DetectionItemFailureTransformation
-.. autoclass:: sigma.processing.transformations.NestedPipelineTransformation
+.. autoclass:: sigma.processing.transformations.NestedProcessingTransformation
 
 YAML example:
 
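For illustration, a dict-based pipeline definition using the renamed "nest" identifier might look as follows. This is a hedged sketch: the "name", "priority", and "transformations" keys mirror the pipeline dict used in the tests below, while the inner "field_name_mapping" identifier and its "mapping" parameter are assumptions and not part of this commit.

from sigma.processing.pipeline import ProcessingPipeline

# Hypothetical pipeline dict; only the "nest" identifier and its "items" key
# are introduced by this commit.
pipeline = ProcessingPipeline.from_dict(
    {
        "name": "Example",
        "priority": 10,
        "transformations": [
            {
                "id": "nested_example",
                "type": "nest",  # renamed from "nested_pipeline"
                "items": [
                    {
                        "id": "map_image",
                        "type": "field_name_mapping",  # assumed identifier
                        "mapping": {"Image": "process.executable"},
                    }
                ],
            }
        ],
    }
)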
5 changes: 5 additions & 0 deletions sigma/processing/tracking.py
@@ -64,3 +64,8 @@ def add_mapping(self, source: str, target: Union[str, List[str]]) -> None:
         self[source].update(target)
         for t in target:
             self.target_fields[t].add(source)
+
+    def merge(self, other: "FieldMappingTracking") -> None:
+        """Merge another FieldMappingTracking into this one."""
+        for source, target_set in other.items():
+            self.add_mapping(source, list(target_set))
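A minimal usage sketch of the new merge() method, assuming FieldMappingTracking is importable from sigma.processing.tracking (the module changed above) and behaves as the tests further below exercise it:

from sigma.processing.tracking import FieldMappingTracking

local = FieldMappingTracking()
local.add_mapping("fieldA", "mappedA")    # fieldA -> {"mappedA"}

nested = FieldMappingTracking()
nested.add_mapping("mappedA", "mappedB")  # remaps the intermediate target

local.merge(nested)
print(local["fieldA"])                    # {"mappedB"}: merging chains through mappedA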
31 changes: 23 additions & 8 deletions sigma/processing/transformations.py
@@ -965,24 +965,34 @@ def apply_detection_item(self, detection_item: SigmaDetectionItem) -> None:
 
 
 @dataclass
-class NestedPipelineTransformation(Transformation):
-    """Executes a whole nested processing pipeline as transformation. Main purpose is to apply a
+class NestedProcessingTransformation(Transformation):
+    """Executes a nested processing pipeline as transformation. Main purpose is to apply a
     whole set of transformations that match the given conditions of the enclosng processing item.
     """
 
-    pipeline: "sigma.processing.pipeline.ProcessingPipeline"
+    items: List["sigma.processing.pipeline.ProcessingItem"]
+    _nested_pipeline: "sigma.processing.pipeline.ProcessingPipeline" = field(
+        init=False, compare=False, repr=False
+    )
 
+    def __post_init__(self):
+        from sigma.processing.pipeline import (
+            ProcessingPipeline,
+        ) # TODO: move to top-level after restructuring code
+
+        self._nested_pipeline = ProcessingPipeline(items=self.items)
+
     @classmethod
     def from_dict(cls, d: Dict) -> Transformation:
         from sigma.processing.pipeline import (
             ProcessingPipeline,
+            ProcessingItem,
         ) # TODO: move to top-level after restructuring code
 
         try:
-            return cls(pipeline=ProcessingPipeline.from_dict(d["pipeline"]))
+            return cls(items=[ProcessingItem.from_dict(item) for item in d["items"]])
         except KeyError:
             raise SigmaConfigurationError(
-                "Nested pipeline transformation requires a 'pipeline' key."
+                "Nested processing transformation requires a 'items' key."
             )
 
     def apply(
@@ -991,7 +1001,12 @@ def apply(
         rule: Union[SigmaRule, SigmaCorrelationRule],
     ) -> None:
         super().apply(pipeline, rule)
-        self.pipeline.apply(rule)
+        self._nested_pipeline.apply(rule)
+        pipeline.applied.extend(self._nested_pipeline.applied)
+        pipeline.applied_ids.update(self._nested_pipeline.applied_ids)
+        pipeline.field_name_applied_ids.update(self._nested_pipeline.field_name_applied_ids)
+        pipeline.field_mappings.merge(self._nested_pipeline.field_mappings)
+        pipeline.state.update(self._nested_pipeline.state)
 
 
 transformations: Dict[str, Transformation] = {
@@ -1017,5 +1032,5 @@ def apply(
     "convert_type": ConvertTypeTransformation,
     "rule_failure": RuleFailureTransformation,
     "detection_item_failure": DetectionItemFailureTransformation,
-    "nested_pipeline": NestedPipelineTransformation,
+    "nest": NestedProcessingTransformation,
 }
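To make the interface change concrete: the transformation now takes a list of ProcessingItem objects and builds its internal pipeline in __post_init__, instead of receiving a whole ProcessingPipeline. A hedged sketch of constructing it directly; FieldMappingTransformation and its mapping dict are taken from the documentation table above as a plausible inner transformation, not from this diff.

from sigma.processing.pipeline import ProcessingItem
from sigma.processing.transformations import (
    FieldMappingTransformation,
    NestedProcessingTransformation,
)

nested = NestedProcessingTransformation(
    items=[
        ProcessingItem(
            # FieldMappingTransformation(mapping) is assumed; any transformation works here.
            transformation=FieldMappingTransformation({"Image": "process.executable"}),
            identifier="map_image",
        )
    ]
)

Note that apply() now also merges the nested pipeline's applied-item tracking, field mappings, and state back into the enclosing pipeline, which the previous self.pipeline.apply(rule) call did not do.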
17 changes: 17 additions & 0 deletions tests/test_processing_tracking.py
@@ -25,3 +25,20 @@ def test_field_mapping_tracking_listitem_replaced(
     field_mapping_tracking.add_mapping("fieldA", ["mappedA", "mappedB", "mappedC"])
     field_mapping_tracking.add_mapping("mappedB", "mappedD")
     assert field_mapping_tracking["fieldA"] == {"mappedA", "mappedC", "mappedD"}
+
+
+def test_field_mapping_tracking_merge():
+    field_mapping_tracking = FieldMappingTracking()
+    field_mapping_tracking.add_mapping("fieldA", "mappedA")
+    field_mapping_tracking.add_mapping("mappedA", "mappedB")
+    field_mapping_tracking.add_mapping("fieldB", "mappedC")
+
+    other_field_mapping_tracking = FieldMappingTracking()
+    other_field_mapping_tracking.add_mapping("mappedB", "mappedD")
+    other_field_mapping_tracking.add_mapping("fieldC", "mappedE")
+
+    field_mapping_tracking.merge(other_field_mapping_tracking)
+
+    assert field_mapping_tracking["fieldA"] == {"mappedD"}
+    assert field_mapping_tracking["fieldB"] == {"mappedC"}
+    assert field_mapping_tracking["fieldC"] == {"mappedE"}
62 changes: 27 additions & 35 deletions tests/test_processing_transformations.py
@@ -15,7 +15,7 @@
 from sigma.processing.transformations import (
     AddFieldTransformation,
     ConvertTypeTransformation,
-    NestedPipelineTransformation,
+    NestedProcessingTransformation,
     RemoveFieldTransformation,
     SetFieldTransformation,
     SetValueTransformation,
@@ -1575,45 +1575,37 @@ def test_detection_item_failure_transformation(dummy_pipeline, sigma_rule):
 
 @pytest.fixture
 def nested_pipeline_transformation():
-    return NestedPipelineTransformation(
-        pipeline=ProcessingPipeline(
-            name="Test",
-            priority=10,
-            items=[
-                ProcessingItem(
-                    transformation=TransformationAppend(s="Test"),
-                    rule_condition_linking=any,
-                    rule_conditions=[
-                        RuleConditionTrue(dummy="test-true"),
-                        RuleConditionFalse(dummy="test-false"),
-                    ],
-                    identifier="test",
-                )
-            ],
-        )
+    return NestedProcessingTransformation(
+        items=[
+            ProcessingItem(
+                transformation=TransformationAppend(s="Test"),
+                rule_condition_linking=any,
+                rule_conditions=[
+                    RuleConditionTrue(dummy="test-true"),
+                    RuleConditionFalse(dummy="test-false"),
+                ],
+                identifier="test",
+            )
+        ],
     )
 
 
 def test_nested_pipeline_transformation_from_dict(nested_pipeline_transformation):
     assert (
-        NestedPipelineTransformation.from_dict(
+        NestedProcessingTransformation.from_dict(
             {
-                "pipeline": {
-                    "name": "Test",
-                    "priority": 10,
-                    "transformations": [
-                        {
-                            "id": "test",
-                            "rule_conditions": [
-                                {"type": "true", "dummy": "test-true"},
-                                {"type": "false", "dummy": "test-false"},
-                            ],
-                            "rule_cond_op": "or",
-                            "type": "append",
-                            "s": "Test",
-                        }
-                    ],
-                }
+                "items": [
+                    {
+                        "id": "test",
+                        "rule_conditions": [
+                            {"type": "true", "dummy": "test-true"},
+                            {"type": "false", "dummy": "test-false"},
+                        ],
+                        "rule_cond_op": "or",
+                        "type": "append",
+                        "s": "Test",
+                    }
+                ],
             }
         )
         == nested_pipeline_transformation
@@ -1630,7 +1622,7 @@ def test_nested_pipeline_transformation_from_dict_apply(
 
 def test_nested_pipeline_transformation_no_pipeline():
     with pytest.raises(SigmaConfigurationError, match="requires a 'pipeline' key"):
-        NestedPipelineTransformation.from_dict({"test": "fails"})
+        NestedProcessingTransformation.from_dict({"test": "fails"})
 
 
 def test_transformation_identifier_completeness():
