Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/workflows/test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
strategy:
matrix:
os: [ 'ubuntu-24.04' ]
python-version: ['3.9', '3.10', '3.11', '3.12' ]
python-version: ['3.10', '3.11', '3.12' ]
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v4
Expand Down
973 changes: 566 additions & 407 deletions poetry.lock

Large diffs are not rendered by default.

6 changes: 3 additions & 3 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[tool.poetry]
name = "pysigma-backend-logpoint"
version = "0.3.0"
version = "0.4.0"
description = "pySigma Logpoint backend"
readme="README.md"
authors = ["Surya Majhi <[email protected]>", "Swachchhanda Shrawan Poudel <[email protected]>"]
Expand All @@ -11,8 +11,8 @@ packages = [
]

[tool.poetry.dependencies]
python = "^3.9"
pysigma = "^0.11.23"
python = "^3.10"
pysigma = "^1.0.0"

[tool.poetry.group.dev.dependencies]
black = "^24.4.2"
Expand Down
2 changes: 2 additions & 0 deletions sigma/pipelines/logpoint/__init__.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from .windows import logpoint_windows_pipeline
from .m365 import logpoint_m365_pipeline
from .azure import logpoint_azure_pipeline
from .linux import logpoint_linux_pipeline

# Registry of Logpoint pipelines exposed for pySigma plugin discovery.
# Keys are the public pipeline identifiers; values are zero-argument
# factories returning a ProcessingPipeline.
pipelines = {
    "logpoint_windows": logpoint_windows_pipeline,
    # NOTE(review): key says "o365" while the module/function are named
    # m365 — presumably kept for backward compatibility; confirm.
    "logpoint_o365": logpoint_m365_pipeline,
    "logpoint_azure": logpoint_azure_pipeline,
    "logpoint_linux": logpoint_linux_pipeline,
}
26 changes: 17 additions & 9 deletions sigma/pipelines/logpoint/azure.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from sigma.processing.conditions import (
LogsourceCondition,
)
import re
from sigma.processing.conditions import LogsourceCondition
from sigma.processing.pipeline import ProcessingItem, ProcessingPipeline
from sigma.processing.transformations import (
FieldMappingTransformation,
Expand All @@ -14,10 +13,19 @@
)


def azure_field_mapping(field: str) -> str:
    """Map an Azure log field name to the Logpoint taxonomy.

    Fields under the ``TargetResources.ModifiedProperties`` prefix are
    namespaced with ``target_modified_property.`` (the original field name,
    casing included, is kept after the prefix); every other field is
    converted to snake_case.

    :param field: Sigma rule field name (may be empty).
    :return: Logpoint field name.
    """
    # Guard once up front so str methods are never called on None/empty.
    if not field:
        return field

    # 1. Specific prefix logic: keep the original name but namespace it so
    #    Logpoint can address the nested modified-property value.
    if field.lower().startswith("targetresources.modifiedproperties"):
        return "target_modified_property." + field

    # 2. Fallback: dynamic snake_case conversion — same regex as the linux
    #    and windows pipelines so generated field names stay consistent.
    words = re.findall(r"([a-z0-9]+|[A-Z][a-z0-9]+|[A-Z0-9]+)", field)
    if len(words) > 1:
        return "_".join(words).lower()
    return words[0].lower() if words else field.lower()


def logpoint_azure_pipeline() -> ProcessingPipeline:
Expand Down Expand Up @@ -78,9 +86,9 @@ def logpoint_azure_pipeline() -> ProcessingPipeline:
],
),
ProcessingItem(
identifier="logpoint_azure_activity_enrich",
transformation=(
FieldFunctionTransformation(transform_func=azure_field_mapping)
identifier="logpoint_azure_custom_field_mapping",
transformation=FieldFunctionTransformation(
mapping={}, transform_func=azure_field_mapping
),
rule_conditions=[LogsourceCondition(product="azure")],
),
Expand Down
53 changes: 53 additions & 0 deletions sigma/pipelines/logpoint/linux.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import re
from typing import Dict, Union, List

from sigma.processing.conditions import LogsourceCondition
from sigma.processing.pipeline import ProcessingItem, ProcessingPipeline
from sigma.processing.transformations import (
FieldMappingTransformation,
FieldFunctionTransformation,
)

from sigma.pipelines.logpoint.logpoint_mapping import (
logpoint_linux_auditd_mapping,
logpoint_linux_common_taxonomy,
)


def to_snake_case(field: str) -> str:
    """Convert a field name (CamelCase, UPPER, mixed) to snake_case."""
    if not field:
        return field

    # Split on lowercase runs, Capitalized runs, and UPPER/digit runs.
    tokens = re.findall(r"([a-z0-9]+|[A-Z][a-z0-9]+|[A-Z0-9]+)", field)
    if not tokens:
        # No alphanumeric runs at all — just lowercase whatever was given.
        return field.lower()
    return "_".join(token.lower() for token in tokens)


def logpoint_linux_pipeline() -> ProcessingPipeline:
    """Build the Logpoint processing pipeline for Linux log sources."""
    # Auditd-specific record keys are mapped first; fields on generic Linux
    # rules fall through to the common taxonomy / snake_case conversion.
    auditd_item = ProcessingItem(
        identifier="logpoint_linux_auditd_mapping",
        transformation=FieldMappingTransformation(logpoint_linux_auditd_mapping),
        rule_conditions=[
            LogsourceCondition(product="linux", service="auditd"),
        ],
    )
    generic_item = ProcessingItem(
        identifier="logpoint_linux_generic_mapping",
        transformation=FieldFunctionTransformation(
            transform_func=to_snake_case,
            mapping=logpoint_linux_common_taxonomy,
        ),
        rule_conditions=[
            LogsourceCondition(product="linux"),
        ],
    )
    return ProcessingPipeline(
        name="Logpoint Linux",
        allowed_backends={"logpoint"},
        priority=20,
        items=[auditd_item, generic_item],
    )
44 changes: 44 additions & 0 deletions sigma/pipelines/logpoint/logpoint_mapping.py
Original file line number Diff line number Diff line change
Expand Up @@ -2503,3 +2503,47 @@
"OperationId": "operation_id",
"Operation": "operation",
}

# Mapping of Linux auditd record keys to Logpoint taxonomy field names.
# Applied by the logpoint_linux pipeline for rules with service=auditd.
logpoint_linux_auditd_mapping = {
    "type": "event_type",
    "proctitle": "command",
    # NOTE(review): auid (audit/login uid) and uid both map to "user_id" —
    # confirm the taxonomy intentionally collapses these two.
    "auid": "user_id",
    "uid": "user_id",
    "gid": "group_id",
    "euid": "effective_user_id",
    "egid": "effective_group_id",
    "fsuid": "filesystem_user_id",
    "fsgid": "filesystem_group_id",
    "suid": "saved_user_id",
    "sgid": "saved_group_id",
    "pid": "process_id",
    "ppid": "parent_process_id",
    "ses": "session_id",
    "comm": "command_name",
    "exe": "process",
    "key": "key",
    "cwd": "path",
    "name": "file",
    "nametype": "file_type",
    "addr": "address",
    "hostname": "host",
    "terminal": "terminal",
    "res": "result",
    "msg": "message",
    "acct": "account",
}

# Explicit Sigma-taxonomy -> Logpoint field overrides for generic Linux
# rules. Fields not listed here are converted via the pipeline's dynamic
# snake_case fallback (to_snake_case), so only names whose mapping differs
# from plain snake_case need an entry.
logpoint_linux_common_taxonomy = {
    "Image": "process",
    "CommandLine": "command",
    "ParentImage": "parent_process",
    "ParentCommandLine": "parent_command",
    "User": "user",
    "TargetUser": "target_user",
    "SourceIp": "source_address",
    "DestinationIp": "destination_address",
    "SourcePort": "source_port",
    "DestinationPort": "destination_port",
    "EventID": "event_id",
}

17 changes: 14 additions & 3 deletions sigma/pipelines/logpoint/m365.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import re
from sigma.processing.conditions import (
LogsourceCondition,
)
Expand All @@ -12,9 +13,16 @@


def m365_field_mapping(field: str) -> str:
    """Map a Microsoft 365 log field name to the Logpoint taxonomy.

    Fields under the ``ModifiedProperties`` prefix are namespaced with
    ``modified_property.`` (original casing kept after the prefix); every
    other field is converted to snake_case.

    :param field: Sigma rule field name (may be empty).
    :return: Logpoint field name.
    """
    # Guard once up front so str methods are never called on None/empty
    # (consistent with azure_field_mapping).
    if not field:
        return field

    # 1. Specific prefix logic: keep the original name but namespace it so
    #    Logpoint can address the nested modified-property value.
    if field.lower().startswith("modifiedproperties"):
        return "modified_property." + field

    # 2. Fallback: dynamic snake_case conversion — same regex as the other
    #    Logpoint pipelines so generated field names stay consistent.
    words = re.findall(r"([a-z0-9]+|[A-Z][a-z0-9]+|[A-Z0-9]+)", field)
    if len(words) > 1:
        return "_".join(words).lower()
    return words[0].lower() if words else field.lower()


def logpoint_m365_pipeline() -> ProcessingPipeline:
Expand Down Expand Up @@ -43,9 +51,12 @@ def logpoint_m365_pipeline() -> ProcessingPipeline:
rule_conditions=[LogsourceCondition(product="m365")],
),
ProcessingItem(
identifier="logpoint_azure_activity_enrich",
# Renamed identifier to be specific to this pipeline
identifier="logpoint_m365_custom_mapping",
transformation=(
FieldFunctionTransformation(transform_func=m365_field_mapping)
FieldFunctionTransformation(
mapping={}, transform_func=m365_field_mapping
)
),
rule_conditions=[LogsourceCondition(product="m365")],
),
Expand Down
84 changes: 15 additions & 69 deletions sigma/pipelines/logpoint/windows.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,6 @@
import dataclasses
import re
from dataclasses import dataclass
from typing import Dict, Union, List, ClassVar, Pattern

from sigma.conditions import ConditionOR
from sigma.pipelines.common import (
logsource_windows_process_creation,
logsource_windows_registry_add,
Expand Down Expand Up @@ -32,12 +29,11 @@
)
from sigma.processing.pipeline import ProcessingItem, ProcessingPipeline
from sigma.processing.transformations import (
FieldMappingTransformationBase,
FieldMappingTransformation,
AddConditionTransformation,
HashesFieldsDetectionItemTransformation,
FieldFunctionTransformation,
)
from sigma.rule import SigmaDetectionItem, SigmaDetection

from sigma.pipelines.logpoint.logpoint_mapping import (
logpoint_windows_sysmon_variable_mappings,
Expand All @@ -49,77 +45,26 @@
)


def to_snake_case(field: str) -> str:
    """Convert a field name (CamelCase, UPPER, mixed) to snake_case.

    Module-level replacement for the former SnakeCaseMappingTransformation
    class: the pipeline now passes this as ``transform_func`` to pySigma's
    built-in FieldFunctionTransformation instead of maintaining a custom
    FieldMappingTransformationBase subclass.

    :param field: field name in any casing (may be empty).
    :return: the field name converted to snake_case.
    """
    if not field:
        return field

    # Split into lowercase runs, Capitalized runs, and UPPER/digit runs,
    # then join with underscores — same regex as the other pipelines.
    words = re.findall(r"([a-z0-9]+|[A-Z][a-z0-9]+|[A-Z0-9]+)", field)
    if len(words) > 1:
        return "_".join(words).lower()
    return words[0].lower() if words else field.lower()


def generate_windows_sysmon_enriched_query(
identifier_template: str = "windows_sysmon_{category}",
) -> List[ProcessingItem]:
"""Generate processing items for all Windows sysmon mappings for addition of labels.
:param identifier_template: Template for processing item identifier. Usually, the defaults are
fine. Should contain service placeholder if changed.
:param identifier_template: Template for processing item identifier. Usually, the defaults are
fine. Should contain service placeholder if changed.
:type identifier_template: str
:return: List of ProcessingItem that can be used in the items attribute of a ProcessingPipeline
object. Usually, an additional field name mapping between the Sigma taxonomy and the target
object. Usually, an additional field name mapping between the Sigma taxonomy and the target
system field names is required.
:rtype: List[ProcessingItem]
"""
Expand Down Expand Up @@ -255,8 +200,9 @@ def logpoint_windows_pipeline() -> ProcessingPipeline:
),
ProcessingItem( # Generic Field mappings
identifier="logpoint_windows_generic_field_mapping",
transformation=SnakeCaseMappingTransformation(
logpoint_windows_common_taxonomy
transformation=FieldFunctionTransformation(
transform_func=to_snake_case,
mapping=logpoint_windows_common_taxonomy,
),
field_name_condition_negation=True,
field_name_condition_linking=any,
Expand Down
Loading