diff --git a/dynamite_nsm/services/base/tasks.py b/dynamite_nsm/services/base/tasks.py index 25849fb7..b2b32d1a 100644 --- a/dynamite_nsm/services/base/tasks.py +++ b/dynamite_nsm/services/base/tasks.py @@ -1,19 +1,30 @@ import os import sys +import logging import requests import subprocess from time import sleep import crontab + +from typing import List, Optional, Tuple + from dynamite_nsm import const from dynamite_nsm import utilities -from typing import List, Optional, Tuple +from dynamite_nsm.logger import get_logger class BaseTask: - def __init__(self, name: str, package_link: Optional[str] = None, description: Optional[str] = None): + def __init__(self, name: str, package_link: Optional[str] = None, description: Optional[str] = None, + verbose: Optional[bool] = False, stdout: Optional[bool] = True): self.name = name self.package_link = package_link self.description = description + log_level = logging.INFO + if verbose: + log_level = logging.DEBUG + self.stdout = stdout + self.verbose = verbose + self.logger = get_logger(str(name), level=log_level, stdout=stdout) def download_and_install(self): raise NotImplemented() @@ -24,11 +35,13 @@ def invoke(self): class BaseShellCommandsTask(BaseTask): - def __init__(self, name: str, package_link: str, commands: List[List[str]], description: Optional[str] = None): - super().__init__(name, package_link, description) + def __init__(self, name: str, package_link: str, commands: List[List[str]], description: Optional[str] = None, + verbose: Optional[bool] = False, stdout: Optional[bool] = True): + super().__init__(name, package_link, description, stdout=stdout, verbose=verbose) self.commands = commands - def invoke(self, shell: Optional[bool] = False, cwd: Optional[str] = os.getcwd()) -> List[Tuple[List, bytes, bytes]]: + def invoke(self, shell: Optional[bool] = False, cwd: Optional[str] = os.getcwd()) -> List[ + Tuple[List, bytes, bytes]]: results = [] for command in self.commands: if not shell: @@ -62,13 +75,15 @@ def 
remove_cronjob(self): class BaseShellCommandTask(BaseShellCommandsTask): - def __init__(self, name: str, package_link: str, command: str, args: List[str], description: Optional[str] = None): + def __init__(self, name: str, package_link: str, command: str, args: List[str], description: Optional[str] = None, + verbose: Optional[bool] = False, stdout: Optional[bool] = True): command = [ command ] command.extend(args) - super().__init__(name, commands=[command], package_link=package_link, description=description) + super().__init__(name, commands=[command], package_link=package_link, description=description, verbose=verbose, + stdout=stdout) self.command = command self.args = args @@ -92,8 +107,8 @@ def __init__(self, name: str, kibana_package_link: Optional[str] = None, usernam password: Optional[str] = 'admin', target: Optional[str] = f'http://{utilities.get_primary_ip_address()}:5601', tenant: Optional[str] = '', - description: Optional[str] = ''): - super().__init__(name, kibana_package_link, description) + description: Optional[str] = '', verbose: Optional[bool] = False, stdout: Optional[bool] = True): + super().__init__(name, kibana_package_link, description, verbose=verbose, stdout=stdout) self.username = username self.password = password self.target = target diff --git a/dynamite_nsm/services/suricata/config.py b/dynamite_nsm/services/suricata/config.py index b68c8f68..d0fb5bc0 100644 --- a/dynamite_nsm/services/suricata/config.py +++ b/dynamite_nsm/services/suricata/config.py @@ -7,15 +7,36 @@ from yaml import Loader from yaml import load +from suricata.update import config +from suricata.update import sources +from suricata.update.commands.enablesource import write_source_config -from dynamite_nsm import exceptions as general_exceptions from dynamite_nsm import const, utilities from dynamite_nsm.services.base import install -from dynamite_nsm.services.base.config import YamlConfigManager +from dynamite_nsm import exceptions as general_exceptions +from 
dynamite_nsm.services.base.config import YamlConfigManager, GenericConfigManager from dynamite_nsm.services.base.config_objects.suricata import misc, rules +class SourceAlreadyExists(Exception): + def __init__(self, name): + msg = f"This source ({name}) already exists. You must first remove it before it can be added again." + super(SourceAlreadyExists, self).__init__(msg) + + +class SourceUrlMissing(Exception): + def __init__(self, name): + msg = f"You must specify a URL for this source ({name})." + super(SourceUrlMissing, self).__init__(msg) + + +class SourceSecretMissing(Exception): + def __init__(self, name): + msg = f"You must specify a secret for this source ({name})." + super(SourceSecretMissing, self).__init__(msg) + + def lookup_rule_definition(rule_id: str) -> Dict: """Return the definition, categories, and friendly_name of a given script Args: @@ -280,3 +301,170 @@ def commit(self, out_file_path: Optional[str] = None, backup_directory: Optional self._af_packet_interfaces_raw = self.af_packet_interfaces.get_raw() self._threading_raw = self.threading.get_raw() super(ConfigManager, self).commit(out_file_path, backup_directory, top_text=top_text) + + +class UpdateConfigManager(YamlConfigManager): + + def __init__(self, configuration_directory: str, verbose: Optional[bool] = False, stdout: Optional[bool] = True): + extract_tokens = { + 'disable_conf': ('disable-conf',), + 'enable_conf': ('enable-conf',), + 'modify_conf': ('modify-conf',), + 'ignore': ('ignore',), + 'sources': ('sources',), + 'local': ('local',) + } + self.disable_conf = None + self.enable_conf = None + self.modify_conf = None + self.ignore = None + self.sources = None + self.local = None + + self.configuration_directory = configuration_directory + self.suricata_config_file = os.path.join(self.configuration_directory, 'update.yaml') + try: + with open(self.suricata_config_file, 'r') as configyaml: + self.config_data_raw = load(configyaml, Loader=Loader) + except (IOError, ValueError): + raise 
general_exceptions.ReadConfigError(f'Failed to read or parse {self.suricata_config_file}.') + + super().__init__(self.config_data_raw, name='suricata.update.config', verbose=verbose, stdout=stdout, + **extract_tokens) + + self.parse_yaml_file() + + def commit(self, out_file_path: Optional[str] = None, backup_directory: Optional[str] = None, + top_text: Optional[str] = None) -> None: + if not out_file_path: + out_file_path = f'{self.configuration_directory}/update.yaml' + super(UpdateConfigManager, self).commit(out_file_path, backup_directory, top_text=top_text) + + +class SourcesConfigManager(GenericConfigManager): + + DEFAULT_SOURCE = 'et/open' + + def __init__(self, configuration_directory: str, verbose: Optional[bool] = False, + stdout: Optional[bool] = True): + self.configuration_directory = configuration_directory + config.DEFAULT_DATA_DIRECTORY = f'{self.configuration_directory}/data/' + config.DEFAULT_UPDATE_YAML_PATH = f'{self.configuration_directory}/update.yaml' + config.DEFAULT_SURICATA_YAML_PATH = [f'{self.configuration_directory}/suricata.yaml'] + self.config = config + self.source_index = sources.load_source_index(config) + super().__init__({}, 'suricata.update.sources', verbose, stdout) + + @staticmethod + def _format_sources_as_list(raw_sources: Dict) -> List[Dict]: + new_sources = [] + for k, v in raw_sources.items(): + v.update({'name': k}) + if v.get('min-version'): + v['min_version'] = v.pop('min-version') + if v.get('support-url'): + v['support-url'] = v.pop('support-url') + new_sources.append(v) + return new_sources + + def _enable_index_source(self, name: str, secret: Optional[str] = None): + source_directory = sources.get_source_directory() + source = self.source_index.get_sources()[name] + source_parameters = source.get('parameters', {}) + if 'secret-code' in source_parameters: + if not secret: + raise SourceSecretMissing(name) + source_parameters['secret-code'] = secret + if 'checksum' in source: + checksum = source["checksum"] + else: + 
checksum = source.get("checksum", True) + new_source = sources.SourceConfiguration( + name, params=source_parameters, checksum=checksum) + if not os.path.exists(source_directory): + utilities.makedirs(source_directory) + if "replaces" in source and self.DEFAULT_SOURCE in source["replaces"]: + self.logger.debug("Not enabling default source as selected source replaces it") + elif new_source.name == self.DEFAULT_SOURCE: + self.logger.debug( + "Not enabling default source as selected source is the default") + else: + self.logger.info(f"Enabling default source {self.DEFAULT_SOURCE}") + if not self.source_index.get_source_by_name(self.DEFAULT_SOURCE): + self.logger.error(f"Default source {self.DEFAULT_SOURCE} not in index") + else: + default_source_config = sources.SourceConfiguration(self.DEFAULT_SOURCE) + write_source_config(default_source_config, True) + write_source_config(new_source, True) + self.logger.info(f'Source {str(new_source)} enabled.') + if "replaces" in source: + for replaces in source["replaces"]: + filename = sources.get_enabled_source_filename(replaces) + if os.path.exists(filename): + os.unlink(filename) + + def add_source(self, name: str, url: Optional[str] = None, secret: Optional[str] = None, + header: Optional[str] = None) -> None: + """Add a source from an index of known public sources, or add a source from a custom URL + Args: + name: The name of the source to add, if not found within the index a new one will be created + url: The url where the rules can be downloaded + secret: A secret key required to retrieve some commercial rule-sets + header: An http header sometimes required when basic HTTP authentication is used + + Returns: + None + """ + enabled_source_filename = sources.get_enabled_source_filename(name) + if os.path.exists(enabled_source_filename): + raise SourceAlreadyExists(name) + + if name not in self.source_index.get_sources(): + if not url: + raise SourceUrlMissing(name) + checksum = None + if sources.source_name_exists(name): + 
raise SourceAlreadyExists(name) + source_config = sources.SourceConfiguration( + name, header=header, url=url, checksum=checksum) + sources.save_source_config(source_config) + else: + self._enable_index_source(name, secret) + + def list_enabled_sources(self) -> List[Dict]: + """Get enabled sources + Returns: + A list of enabled sources + """ + self.logger.debug(f'Fetching enabled sources from {sources.get_source_directory()}') + return self._format_sources_as_list(sources.get_enabled_sources()) + + def list_available_sources(self) -> List[Dict]: + """Get all available sources + Returns: + A list of available sources + """ + return self._format_sources_as_list(sources.load_source_index(self.config).get_sources()) + + def remove_source(self, name: str) -> None: + """Remove a source + Args: + name: The name of the source + Returns: + None + """ + enabled_source_filename = sources.get_enabled_source_filename(name) + if os.path.exists(enabled_source_filename): + self.logger.debug(f"Deleting file {enabled_source_filename}.") + os.remove(enabled_source_filename) + self.logger.info(f"Source {name} removed, previously enabled.") + disabled_source_filename = sources.get_disabled_source_filename(name) + if os.path.exists(disabled_source_filename): + self.logger.debug(f"Deleting file {disabled_source_filename}.", ) + os.remove(disabled_source_filename) + self.logger.info(f"Source {name} removed, previously disabled.") + + +if __name__ == '__main__': + s = SourcesConfigManager('/etc/dynamite/suricata/') + s.remove_source('et/open') \ No newline at end of file diff --git a/dynamite_nsm/services/suricata/install.py b/dynamite_nsm/services/suricata/install.py index ddd38319..9550a0be 100644 --- a/dynamite_nsm/services/suricata/install.py +++ b/dynamite_nsm/services/suricata/install.py @@ -167,6 +167,8 @@ def setup(self, inspect_interfaces: List[str]): self.create_update_suricata_environment_variables() self.logger.debug(f'Creating directory: {self.configuration_directory}') 
utilities.makedirs(self.configuration_directory) + self.logger.debug(f'Creating directory: {self.configuration_directory}/data') + utilities.makedirs(f'{self.configuration_directory}/data') self.logger.debug(f'Creating directory: {self.install_directory}') utilities.makedirs(self.install_directory) self.logger.debug(f'Creating directory: {self.log_directory}') @@ -181,6 +183,10 @@ def setup(self, inspect_interfaces: List[str]): f'{const.DEFAULT_CONFIGS}/suricata/suricata.yaml', self.configuration_directory ) + self.copy_file_or_directory_to_destination( + f'{const.DEFAULT_CONFIGS}/suricata/update.yaml', + self.configuration_directory + ) suricata_config = config.ConfigManager(self.configuration_directory, stdout=self.stdout, verbose=self.verbose) suricata_config.default_log_directory = self.log_directory @@ -212,6 +218,8 @@ def setup(self, inspect_interfaces: List[str]): utilities.set_ownership_of_file(self.install_directory, user='dynamite', group='dynamite') utilities.set_ownership_of_file(self.log_directory, user='dynamite', group='dynamite') utilities.set_permissions_of_file(f'{self.configuration_directory}/suricata.yaml', 660) + utilities.set_permissions_of_file(f'{self.configuration_directory}/update.yaml', 660) + utilities.set_permissions_of_file(f'{self.configuration_directory}/data', 770) post_install_bootstrap_updater(self.install_directory, stdout=self.stdout, verbose=self.verbose) self.logger.info('Setting up Suricata capture rules for dynamite user.') diff --git a/dynamite_nsm/services/suricata/rules/__init__.py b/dynamite_nsm/services/suricata/rules/__init__.py new file mode 100644 index 00000000..fd40910d --- /dev/null +++ b/dynamite_nsm/services/suricata/rules/__init__.py @@ -0,0 +1,4 @@ + + + + diff --git a/dynamite_nsm/services/suricata/rules/objects.py b/dynamite_nsm/services/suricata/rules/objects.py new file mode 100644 index 00000000..1b6bd301 --- /dev/null +++ b/dynamite_nsm/services/suricata/rules/objects.py @@ -0,0 +1,741 @@ +from 
__future__ import annotations + +import os.path +import re +from random import randint +from typing import Dict, List, Optional, Tuple, Union + +import sqlalchemy.exc +from sqlalchemy import create_engine +from sqlalchemy.ext.declarative import declarative_base +from sqlalchemy import Column, Boolean, Integer, String +from sqlalchemy.orm import scoped_session, sessionmaker + +from dynamite_nsm import utilities +from dynamite_nsm.services.suricata.rules import validators +from dynamite_nsm.services.base.config import GenericConfigManager + +env = utilities.get_environment_file_dict() + +SURICATA_CONFIGURATION = env.get('SURICATA_CONFIG') +Model = declarative_base(name='Model') + + +class MissingSid(Exception): + def __init__(self): + msg = f"This rule is missing an 'sid'." + super(MissingSid, self).__init__(msg) + + +class InvalidRule(Exception): + def __init__(self, sid, msg): + msg = f"SID: {sid} is invalid: {msg}." + super(InvalidRule, self).__init__(msg) + + +class MissingRule(Exception): + def __init__(self, sid): + msg = f"Rule with SID: {sid} does not exist." 
+ super(MissingRule, self).__init__(msg) + + +def parse_suricata_rule_options_blob(opts: str) -> List[Union[Tuple, str]]: + """Parses the options section of Suricata Rules + Args: + opts: A valid set of ";" separated options + + Returns: + A List of options + """ + options = [] + + # split by ; excluding those found inside quotations + tokenized_opts = re.split(r";(?![(\"]*[\")])", opts) + for opt in tokenized_opts: + opt = opt.strip() + if not opt: + continue + tokenized_opt = opt.split(':') + if opt.startswith('pcre:'): + k = tokenized_opt[0] + v = opt[5:] + options.append((k, v)) + elif len(tokenized_opt) == 2: + k, v = tokenized_opt + v = v.replace(';', '') + options.append((k, v)) + elif len(tokenized_opt) == 1: + options.append(tokenized_opt[0].replace(';', '')) + else: + k = tokenized_opt[0] + v = ':'.join(tokenized_opt[1:]) + options.append((k, v)) + return options + + +def serialize_suricata_rule(rule: str) -> Rule: + """Convert a plaintext Suricata rule into a Rule object + Args: + rule: A plaintext Suricata rule + + Returns: + A Suricata Rule object + + """ + + def format_bracketed_text(s: str) -> str: + """Remove spaces from strings inside of brackets + Args: + s: A String that includes brackets + + Returns: + A string where the spaces have been removed inside of bracketed content + + """ + new_string = '' + inside_bracket = False + for c in s: + if c == '[': + inside_bracket = True + elif c == ']': + inside_bracket = False + if inside_bracket and c == ' ': + continue + new_string += c + return new_string + + enabled = True + if rule.startswith('#'): + enabled = False + rule = rule[1:].strip() + o_paren_index = rule.index('(') + 1 + c_paren_index = max([i for i, c in enumerate(rule) if c == ')']) + # Get rid of extra spaces + action_header = re.sub(r'\s+', ' ', rule[0: o_paren_index - 1]).strip() + + # Remove spaces inside of brackets + action_header = format_bracketed_text(action_header) + rule_options = rule[o_paren_index:c_paren_index] + action = 
action_header.split(' ')[0].strip() + header = action_header.replace(action, '').strip() + header_proto = header.split(' ')[0].strip() + header_source = header.split(' ')[1].strip() + header_source_port = header.split(' ')[2].strip() + header_direction = header.split(' ')[3].strip() + header_destination = header.split(' ')[4].strip() + header_destination_port = header.split(' ')[5].strip() + options = parse_suricata_rule_options_blob(rule_options) + return Rule(enabled, action, header_proto, header_source, header_source_port, header_direction, header_destination, + header_destination_port, options) + + +class Rule: + + def __init__(self, enabled: bool, action: str, proto: str, source: str, source_port: str, direction: str, + destination: str, destination_port: str, options: List): + extracted_options = self.extract_options(options) + self.sid = extracted_options.get('sid') + self.class_type = extracted_options.get('class_type') + self.enabled = enabled + self.action = action + self.proto = proto + self.source = source + self.source_port = source_port + self.direction = direction + self.destination = destination + self.destination_port = destination_port + self.options = options + + def __str__(self): + enabled = '#' if not self.enabled else '' + return f'{enabled}{self.action} {self.proto} {self.source} {self.source_port} {self.direction} ' \ + f'{self.destination} {self.destination_port} ({self.options_blob()})' + + @classmethod + def create_from_ruleset_entry(cls, ruleset: Ruleset) -> Rule: + """Create an instance of this class using `models.Ruleset` entry + Args: + ruleset: A `models.Ruleset` instance + + Returns: + An instance of this class + """ + return Rule(enabled=ruleset.enabled, + action=ruleset.action, + proto=ruleset.proto, + source=ruleset.source, + source_port=ruleset.source_port, + direction=ruleset.direction, + destination=ruleset.destination, + destination_port=ruleset.destination_port, + 
options=parse_suricata_rule_options_blob(ruleset.options_blob) + ) + + @staticmethod + def generate_sid(): + return randint(10 ** 5, 10 ** 6) + + @staticmethod + def extract_options(options: List) -> Dict: + """Parse out the required sid and classtype fields from given options + Returns: + The SID of the rule. + """ + key_val_opts = dict([opt for opt in options if isinstance(opt, tuple)]) + sid = key_val_opts.get('sid').strip() + if not sid: + sid = Rule.generate_sid() + class_type = key_val_opts.get('classtype').strip() + if not class_type: + class_type = 'unknown' + return dict(sid=sid, class_type=class_type) + + def compare(self, rule: Rule): + if self.action != rule.action: + return False + elif self.source != rule.source: + return False + elif self.source_port != rule.source_port: + return False + elif self.direction != rule.direction: + return False + elif self.destination != rule.destination: + return False + elif self.destination_port != rule.destination_port: + return False + elif self.options != rule.options: + return False + return True + + def header(self) -> str: + """Retrieve the rule header + Returns: + The rule header (E.G tcp 192.168.0.5 any -> 192.168.0.13 3389) + """ + return f'{self.action} {self.proto} {self.source} {self.source_port} {self.direction} {self.destination_port}' + + def options_blob(self) -> str: + """Retrieve the rule options as a string + Returns: + A String representation of the rule options + """ + options = [] + found_sid = False + found_class_type = False + for opt in self.options: + if isinstance(opt, tuple): + k, v = opt + # PCRE options have lots of string literals that often need to be escaped + if k.lower() == 'pcre': + # repr function attempts to escape single quote characters which is not what we want as all pcre + # options are encapsulated in double quotes. 
+ v = v.replace("'", "singlequotechar") + v = repr(v)[1:-1] + v = v.replace('singlequotechar', "'") + # replace double backslashes with single backslashes + v = v.replace('\\\\', '\\') + elif k.lower() == 'sid': + v = self.sid + found_sid = True + elif k.lower() == 'classtype': + v = self.class_type + found_class_type = True + options.append(f'{k}:{v}') + elif isinstance(opt, str): + options.append(opt) + if not found_class_type: + options.append(f'classtype:{self.class_type}') + if not found_sid: + options.append(f'sid:{self.sid}') + return '; '.join(options) + ';' + + def validate(self) -> Dict: + """Determine if the rule is valid, and return metadata associated with it + Returns: + A dictionary containing the sid, options count, and rule character count + """ + if not validators.validate_suricata_address_group_values(self.source): + raise InvalidRule(sid=self.sid, msg=f'source is invalid: {self.source}') + elif not validators.validate_suricata_port_group_values(self.source_port): + raise InvalidRule(sid=self.sid, msg=f'source_port is invalid: {self.source_port}') + elif not validators.validate_suricata_address_group_values(self.destination): + raise InvalidRule(sid=self.sid, msg=f'destination is invalid: {self.destination}') + if not validators.validate_suricata_port_group_values(self.destination_port): + raise InvalidRule(sid=self.sid, msg=f'destination_port is invalid: {self.destination_port}') + return { + 'enabled': self.enabled, + 'sid': self.sid, + 'class_type': self.class_type, + 'options': len(self.options), + 'characters': len(self.__str__()) + } + + +class Ruleset(Model): + __tablename__ = 'ruleset' + id = Column('id', Integer, primary_key=True, autoincrement=True, nullable=False) + sid = Column('sid', Integer, unique=True, index=True) + class_type = Column('class_type', String(254), index=True) + lineno = Column('lineno', Integer, index=True) + lineos = Column('lineos', Integer, index=True) + enabled = Column('enabled', Boolean) + action = 
Column('action', String(12)) + proto = Column('proto', String(12)) + source = Column('source', String(2048)) + source_port = Column('source_port', String(2048)) + direction = Column('direction', String(2)) + destination = Column('destination', String(2048)) + destination_port = Column('destination_port', String(2048)) + options_blob = Column('options', String(4096)) + + def __init__(self, sid: int, class_type: str, lineno: int, lineos: int, enabled: bool, action: str, proto: str, + source: str, source_port: str, direction: str, destination: str, destination_port: str, + options_blob: str): + self.sid = sid + self.class_type = class_type + self.lineno = lineno + self.lineos = lineos + self.enabled = enabled + self.action = action + self.proto = proto + self.source = source + self.source_port = source_port + self.direction = direction + self.destination = destination + self.destination_port = destination_port + self.options_blob = options_blob + + @classmethod + def create_from_rule(cls, rule: Rule, sid: Optional[int] = None, lineno: Optional[int] = -1, + lineos: Optional[int] = -1) -> Ruleset: + if sid: + rule.sid = sid + + return cls( + sid=rule.sid, + class_type=rule.class_type, + enabled=rule.enabled, + action=rule.action, + proto=rule.proto, + source=rule.source, + source_port=rule.source_port, + direction=rule.direction, + destination=rule.destination, + destination_port=rule.destination_port, + options_blob=rule.options_blob(), + lineno=lineno, + lineos=lineos, + ) + + +class RuleFile(GenericConfigManager): + + def __init__(self, rule_file_path: str): + self._idx = 1 + super().__init__({}, 'suricata.rules.manager') + first_init = False + db_path = f"{SURICATA_CONFIGURATION}/{os.path.basename(rule_file_path).replace('.rules', '.db')}" + if not os.path.exists(db_path): + first_init = True + self.cache_path = db_path + self.engine = create_engine(f'sqlite:///{self.cache_path}') + self.db_session = scoped_session(sessionmaker(autocommit=False, autoflush=False, 
bind=self.engine)) + self.rule_file_path = rule_file_path + self.suricata_configuration_root = env['SURICATA_CONFIG'] + if first_init: + self.logger.info('First init detected, building cache.') + self.build_cache() + + def __iter__(self): + return self + + def __next__(self): + _raw = self.db_session.query(Ruleset).get(self._idx) + self._idx += 1 + if not _raw: + raise StopIteration() + return Rule.create_from_ruleset_entry(_raw) + + def init_cache(self) -> None: + """Initialize the sqlite3 database; removing any old ones + Returns: + None + """ + utilities.safely_remove_file(self.cache_path) + Model.metadata.create_all(bind=self.engine) + + def build_cache(self): + """Populate our database with the rules found in suricata.rules file. + Returns: + None + """ + self.init_cache() + with open(self.rule_file_path, 'r') as rule_file_in: + lineno = 1 + while True: + line = rule_file_in.readline() + if line == '': + break + rule = serialize_suricata_rule(line) + if not validators.validate_suricata_address_group_values(rule.source): + self.logger.warning(f'{rule.sid} source ({rule.source}) is not valid.') + elif not validators.validate_suricata_port_group_values(rule.source_port): + self.logger.warning(f'{rule.sid} source_port ({rule.source_port}) is not valid.') + elif not validators.validate_suricata_address_group_values(rule.destination): + self.logger.warning(f'{rule.sid} destination ({rule.destination}) is not valid.') + elif not validators.validate_suricata_port_group_values(rule.destination_port): + self.logger.warning(f'{rule.sid} destination_port ({rule.destination_port}) is not valid.') + + rs = Ruleset( + sid=rule.sid, + class_type=rule.class_type, + lineno=lineno + 1, + lineos=rule_file_in.tell(), + enabled=rule.enabled, + action=rule.action, + proto=rule.proto, + source=rule.source, + source_port=rule.source_port, + direction=rule.direction, + destination=rule.destination, + destination_port=rule.destination_port, + options_blob=rule.options_blob() + ) + 
self.db_session.add(rs) + lineno += 1 + self.db_session.commit() + + def add_rule(self, new_rule: Rule) -> None: + """Add a new custom rule + Args: + new_rule: A `Rule` instance + Returns: + None + """ + new_rule.validate() + self.logger.debug(f'Adding rule {new_rule.sid} -> {new_rule}') + with open(f'{self.suricata_configuration_root}/.deltas', 'a') as deltas_f_out: + deltas_f_out.write( + f'{new_rule.sid},add,{new_rule}\n' + ) + + def delete_rule(self, sid: int) -> None: + """Remove a custom rule if it was previously added via the `add_rule` method. + Args: + sid: The sid of the rule to delete + Returns: + None + """ + new_content = '' + with open(f'{self.suricata_configuration_root}/.deltas', 'r') as deltas_f_in: + for line in deltas_f_in.readlines(): + line_tokens = line.split(',') + parsed_sid = line_tokens[0] + if parsed_sid.strip() == str(sid): + new_content += f'{sid},delete\n' + else: + new_content += f'{line.strip()}\n' + + with open(f'{self.suricata_configuration_root}/.deltas', 'w') as deltas_f_out: + deltas_f_out.write(new_content) + + def disable_rule(self, sid: int) -> None: + """Disable a rule + Args: + sid: The sid of the rule to enable + Returns: + None + """ + self.get_rule(sid) + with open(f'{self.suricata_configuration_root}/.deltas', 'a') as deltas_f_out: + deltas_f_out.write( + f'{sid},disable\n' + ) + + def enable_rule(self, sid: int) -> None: + """Enable a rule + Args: + sid: The sid of the rule to enable + Returns: + None + """ + self.get_rule(sid) + with open(f'{self.suricata_configuration_root}/.deltas', 'a') as deltas_f_out: + deltas_f_out.write( + f'{sid},enable\n' + ) + + def enable_rule_by_class_type(self, class_type: str): + rules = self.get_rules_by_class_type(class_type) + with open(f'{self.suricata_configuration_root}/.deltas', 'a') as deltas_f_out: + for rule in rules: + deltas_f_out.write( + f'{rule.sid},enable\n' + ) + + def disable_rule_by_class_type(self, class_type: str): + rules = self.get_rules_by_class_type(class_type) + 
with open(f'{self.suricata_configuration_root}/.deltas', 'a') as deltas_f_out: + for rule in rules: + deltas_f_out.write( + f'{rule.sid},disable\n' + ) + + def edit_rule(self, sid: int, new_rule: Rule) -> None: + """Replace an existing rule with a new one + Args: + sid: The sid of the rule to delete + new_rule: A `Rule` instance + Returns: + None + """ + new_rule.sid = sid + self.get_rule(sid) + new_rule.validate() + self.logger.debug(f'Editing rule {new_rule.sid} -> {new_rule}') + with open(f'{self.suricata_configuration_root}/.deltas', 'a') as deltas_f_out: + deltas_f_out.write( + f'{new_rule.sid},edit,{new_rule}\n' + ) + + def get_rule_class_types(self) -> List[str]: + """Get a distinct list of available rule classtypes + Returns: + A list of available rule-types + """ + return sorted([c[0] for c in self.db_session.query(Ruleset.class_type).distinct().all()]) + + def get_rule(self, sid: int) -> Optional[Rule]: + """Given the sid for a cached rule, returns the corresponding `Rule` instance + Args: + sid: The sid of the rule to fetch + + Returns: + A `Rule` instance + """ + rules = self.get_rules([sid]) + if not rules: + raise MissingRule(sid) + return rules[0] + + def get_rules(self, sids: List[int]) -> List[Rule]: + """Given multiple sids for cached rules, returns the corresponding `Rule` instance for every id that is found + Args: + sids: The list of sids the rules to fetch + + Returns: + A list of `Rule` instances + """ + rules = [] + rule_records = self.db_session.query(Ruleset).filter(Ruleset.sid.in_(sids)).all() + for rule_record in rule_records: + rule = Rule( + enabled=rule_record.enabled, + action=rule_record.action, + proto=rule_record.proto, + source=rule_record.source, + source_port=rule_record.source_port, + direction=rule_record.direction, + destination=rule_record.destination, + destination_port=rule_record.destination_port, + options=parse_suricata_rule_options_blob(rule_record.options_blob) + ) + rules.append(rule) + return rules + + def 
get_rules_by_class_type(self, class_type: str) -> List[Rule]: + """Given a rule classtype returns the corresponding `Rule` instance for every match + Args: + class_type: A suricata classtype + Returns: + A list of `Rule` instances + """ + rules = [] + rule_records = self.db_session.query(Ruleset).filter_by(class_type=class_type).all() + for rule_record in rule_records: + rule = Rule( + enabled=rule_record.enabled, + action=rule_record.action, + proto=rule_record.proto, + source=rule_record.source, + source_port=rule_record.source_port, + direction=rule_record.direction, + destination=rule_record.destination, + destination_port=rule_record.destination_port, + options=parse_suricata_rule_options_blob(rule_record.options_blob) + ) + rules.append(rule) + return rules + + def merge(self): + """Perform operations found in the .deltas file against the cache: + - Add custom rules + - Remove custom rules + - Edit any rule + - Enable any rule + - Disable any rule + The `commit` method must be called to apply the changes to the suricata.rules file. + """ + change_set_map = {} + with open(f'{self.suricata_configuration_root}/.deltas', 'r') as deltas_f_in: + # Loop through the .deltas file and parse out the sid, action, and data + # Create a change_set_map that maps a rule sid to the actions to perform on that rule + # {sid: [(action, data), ...]} + for line in deltas_f_in.readlines(): + tokenized_line = line.split(',') + sid = tokenized_line[0] + action = tokenized_line[1].strip() + data = ','.join(tokenized_line[2:]).strip() + if not change_set_map.get(sid): + change_set_map[sid] = [(action, data)] + else: + change_set_map[sid].append((action, data)) + + # Loop through change_set_map, each iteration will inspect a rule mapped to one or more changes. + # Changes are applied to the database in order. 
+ for sid, changes in change_set_map.items(): + # Loop through all the changes that are applied to a particular rule + for change in changes: + action, data = change + action, data = action.strip(), data.strip() + + # Add the rule to our ruleset database cache. + if action == 'add': + self.logger.info(f'Adding {sid} -> {data} to cache.') + rule = serialize_suricata_rule(data) + rs = Ruleset.create_from_rule(rule, sid=int(sid)) + self.db_session.add(rs) + try: + self.db_session.commit() + except sqlalchemy.exc.IntegrityError as e: + if 'UNIQUE constraint failed' in str(e): + self.db_session.rollback() + self.logger.info(f'{sid} already exists in the cache, skipping add.') + + # Remove the rule from our ruleset database cache. + elif action == 'delete': + self.logger.info(f'Deleting {sid} from cache.') + try: + ruleset = self.db_session.query(Ruleset).filter_by(sid=sid).one() + except sqlalchemy.exc.NoResultFound: + ruleset = None + if ruleset: + self.db_session.delete(ruleset) + self.db_session.commit() + else: + self.logger.info(f'{sid} does not exists in the cache, skipping delete.') + elif action == 'disable': + self.logger.debug(f'Disabling {sid} in cache.') + try: + ruleset = self.db_session.query(Ruleset).filter_by(sid=sid).one() + except sqlalchemy.exc.NoResultFound: + ruleset = None + if ruleset: + ruleset.enabled = False + self.db_session.commit() + else: + self.logger.info(f'{sid} does not exists in the cache, skipping disable.') + elif action == 'enable': + self.logger.debug(f'Enabling {sid} in cache.') + try: + ruleset = self.db_session.query(Ruleset).filter_by(sid=sid).one() + except sqlalchemy.exc.NoResultFound: + ruleset = None + if ruleset: + ruleset.enabled = True + self.db_session.commit() + else: + self.logger.info(f'{sid} does not exists in the cache, skipping enable.') + elif action == 'edit': + self.logger.info(f'Editing {sid} in cache.') + rule = serialize_suricata_rule(data) + try: + ruleset = 
self.db_session.query(Ruleset).filter_by(sid=sid).one() + except sqlalchemy.exc.NoResultFound: + ruleset = None + if not ruleset: + self.logger.info(f'{sid} does not exists in the cache, skipping edit.') + continue + if rule.action != ruleset.action: + self.logger.debug(f'Updating action {ruleset.action} -> {rule.action}') + ruleset.action = rule.action + if rule.enabled != ruleset.enabled: + self.logger.debug(f'Updating enabled {ruleset.enabled} -> {rule.enabled}') + ruleset.enabled = rule.enabled + if rule.source != ruleset.source: + self.logger.debug(f'Updating source {ruleset.source} -> {rule.source}') + ruleset.source = rule.source + if rule.source_port != ruleset.source_port: + self.logger.debug(f'Updating source_port {ruleset.source_port} -> {rule.source_port}') + ruleset.source_port = rule.source_port + if rule.direction != ruleset.direction: + self.logger.debug(f'Updating direction {ruleset.direction} -> {rule.direction}') + ruleset.direction = rule.direction + if rule.destination != ruleset.destination: + self.logger.debug(f'Updating destination {ruleset.destination} -> {rule.destination}') + ruleset.destination = rule.destination + if rule.destination_port != ruleset.destination_port: + self.logger.debug(f'Updating destination_port {ruleset.destination_port} -> ' + f'{rule.destination_port}') + ruleset.destination_port = rule.destination_port + if rule.options_blob() != ruleset.options_blob: + self.logger.debug(f'Updating destination_port {ruleset.options_blob} -> {rule.options_blob()}') + ruleset.options_blob = rule.options_blob() + + def prune(self) -> None: + """Removes changes from .deltas file that are no longer applicable, should only be run after a merge operation + Returns: + None + """ + self.logger.info('Pruning .deltas file.') + change_set_map = {} + with open(f'{self.suricata_configuration_root}/.deltas', 'r') as deltas_f_in: + # Loop through the .deltas file and parse out the sid, action, and data + # Create a change_set_map that maps a rule 
sid to the actions to perform on that rule + # {sid: [(action, data), ...]} + for line in deltas_f_in.readlines(): + tokenized_line = line.split(',') + sid = tokenized_line[0] + action = tokenized_line[1].strip() + data = ','.join(tokenized_line[2:]).strip() + if not change_set_map.get(sid): + change_set_map[sid] = [(action, data)] + else: + change_set_map[sid].append((action, data)) + with open(f'{self.suricata_configuration_root}/.deltas', 'w') as deltas_f_out: + for sid, changes in change_set_map.items(): + if changes[-1][0] == 'disable': + for change in changes[0:-1]: + action = change[0] + if action != 'edit': + self.logger.info(f'Pruning {sid}.{action} from .deltas file.') + else: + deltas_f_out.write(f'{sid},edit,{action.strip()}\n') + else: + for change in changes: + action, new_rule = change + if not new_rule: + deltas_f_out.write(f'{sid},{action.strip()}\n') + else: + deltas_f_out.write(f'{sid},{action.strip()},{new_rule.strip()}\n') + + def commit(self, out_file_path: Optional[str] = None, backup_directory: Optional[str] = None) -> None: + """Merge in our deltas; Dump the database to a suricata.rules file; prune the .deltas file""" + self.merge() + if not out_file_path: + out_file_path = self.rule_file_path + row_count = self.db_session.query(Ruleset.sid).count() + self.logger.info(f'Dumping {row_count} rules to {out_file_path}.') + with open(out_file_path, 'w') as rule_file_out: + for row in self.db_session.query(Ruleset).order_by(Ruleset.lineno): + rule = Rule.create_from_ruleset_entry(row) + rule_file_out.write(str(rule) + '\n') + self.prune() + + +if __name__ == '__main__': + rf = RuleFile('/etc/dynamite/suricata/data/rules/suricata.rules') + print(rf.get_rule_class_types()) + rf.enable_rule_by_class_type('unknown') + rf.enable_rule_by_class_type('denial-of-service') + rf.commit() diff --git a/dynamite_nsm/services/suricata/rules/tests/__init__.py b/dynamite_nsm/services/suricata/rules/tests/__init__.py new file mode 100644 index 00000000..e69de29b 
import os
import unittest
from dynamite_nsm.services.suricata.rules import objects
from dynamite_nsm.utilities import get_environment_file_dict, safely_remove_file


class TestSuricataRuleCache(unittest.TestCase):
    """
    Test the logic that handles Suricata rule caching
    """
    env_file = get_environment_file_dict()
    test_rules_directory = '/tmp/'

    suricata_config = env_file.get('SURICATA_CONFIG')
    suricata_rules = objects.RuleFile(f'{suricata_config}/data/rules/suricata.rules')
    delta_file = f'{suricata_config}/.deltas'

    def test_caches_are_equivalent(self):
        # Serialize the live suricata.rules file into a temporary sqlite3
        # cache, dump that cache back out to /tmp/suricata-test.rules, and
        # verify every rule round-trips unchanged.
        dump_path = f'{self.test_rules_directory}/suricata-test.rules'
        self.suricata_rules.commit(dump_path)
        dumped_rule_file = objects.RuleFile(dump_path)
        mismatched = next(
            (rule for rule in self.suricata_rules if not rule.compare(dumped_rule_file.get_rule(rule.sid))),
            None
        )
        assert (mismatched is None)

    def test_add_rule(self):
        temp_rules_path = f'{self.test_rules_directory}/suricata-test.rules'
        # Stash the live .deltas file so this test's delta entries do not
        # leak into the real configuration; restored below (and again in
        # tearDownClass if an assertion aborts early).
        os.rename(self.delta_file, f'{self.delta_file}.original')
        self.suricata_rules.commit(temp_rules_path)
        temp_rule_file = objects.RuleFile(temp_rules_path)
        temp_rule_file.build_cache()
        candidate = objects.Rule(
            enabled=True,
            action='alert',
            proto='tcp',
            source='192.168.0.5/24',
            direction='->',
            destination='$EXTERNAL_NET',
            source_port='any',
            destination_port='[53,5353]',
            options=[('sid', 333333333)]
        )
        temp_rule_file.add_rule(candidate)
        temp_rule_file.merge()
        temp_rule_file.commit()
        os.rename(f'{self.delta_file}.original', self.delta_file)
        # NOTE(review): assumes the freshly added rule is dumped first in the
        # committed file — confirm against RuleFile.commit's ordering.
        with open(temp_rules_path) as new_rules_in:
            new_rule_sid = objects.serialize_suricata_rule(new_rules_in.readline()).sid
        safely_remove_file(f'{self.test_rules_directory}/suricata-test.rules')
        assert (str(new_rule_sid) == str(333333333))

    @classmethod
    def tearDownClass(cls) -> None:
        # Best-effort cleanup: restore the stashed .deltas file and remove the
        # scratch database if a failed test left them behind.
        stashed_deltas = f'{cls.delta_file}.original'
        if os.path.exists(stashed_deltas):
            os.rename(stashed_deltas, cls.delta_file)
        scratch_db = f'{cls.suricata_config}/suricata-test.db'
        if os.path.exists(scratch_db):
            safely_remove_file(scratch_db)
import re


# Anchored matchers for dotted-quad IPv4 addresses and IPv4/IPv6 CIDR pairs.
ipv4_address_pattern = re.compile('^(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1'
                                  '[0-9]{2}|2[0-4][0-9]|25[0-5])$')
ipv4_cidr_pattern = re.compile(f'^{ipv4_address_pattern.pattern[1:-1]}/((?:[0-9])|(?:[1-2][0-9])|(?:3[0-2]))$')

ipv6_address_pattern = re.compile('^(?:(?:[0-9A-Fa-f]{1,4}:){6}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]'
                                  '|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]'
                                  '{2}|2[0-4][0-9]|25[0-5]))|::(?:[0-9A-Fa-f]{1,4}:){5}(?:[0-9A-Fa-f]{1,4}:'
                                  '[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.)'
                                  '{3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:[0-9A-Fa-f]{1,4})?::'
                                  '(?:[0-9A-Fa-f]{1,4}:){4}(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]'
                                  '|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25'
                                  '[0-5]))|(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}(?:'
                                  '[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]'
                                  '|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:'
                                  '[0-9A-Fa-f]{1,4}:){,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}(?:[0-9A-Fa-f]'
                                  '{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.)'
                                  '{3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:)'
                                  '{,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|'
                                  '(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|'
                                  '1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:[0-9A-Fa-f]{1,4}:){,4}[0-9A-Fa-f]{1,4})?::'
                                  '(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|(?:(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]'
                                  '|25[0-5])\\.){3}(?:[0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5]))|(?:(?:'
                                  '[0-9A-Fa-f]{1,4}:){,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|(?:(?:[0-9A-Fa-f]{1,4}:)'
                                  '{,6}[0-9A-Fa-f]{1,4})?::)$')

ipv6_cidr_pattern = re.compile(
    f'^{ipv6_address_pattern.pattern[1:-1]}/((?:[0-9])|(?:[1-9][0-9])|(?:10[0-9])|(?:11[0-9])|(?:12[0-8]))$')


def validate_suricata_address_group_values(s):
    """Determine if a string is a valid Suricata address group.

    Must be like the following:
        !1.1.1.1                     (Every IP address but 1.1.1.1)
        ![1.1.1.1, 1.1.1.2]          (Every IP address but 1.1.1.1 and 1.1.1.2)
        $HOME_NET                    (Your setting of HOME_NET in yaml)
        [$EXTERNAL_NET, !$HOME_NET]  (EXTERNAL_NET and not HOME_NET)
        [10.0.0.0/24, !10.0.0.5]     (10.0.0.0/24 except for 10.0.0.5)

    Args:
        s: Test String
    Returns:
        True if meets the suricata_address_group_value conditions
    """

    def validate_token(token):
        # A leading '!' negates the token but does not affect validity.
        if str(token).startswith('!'):
            token = token[1:]

        def token_is_cidr(tok):
            # BUG FIX: the previous implementation only range-checked the
            # prefix length (0-128 for *both* families) and never validated
            # the address part, so e.g. 'garbage/24' and '10.0.0.0/64' were
            # accepted. Match against the full compiled CIDR patterns instead.
            return bool(ipv4_cidr_pattern.match(tok) or ipv6_cidr_pattern.match(tok))

        def token_is_ip(tok):
            return bool(ipv4_address_pattern.match(tok) or ipv6_address_pattern.match(tok))

        def token_is_list(tok):
            tok = str(tok)
            if '[' in tok and ']' in tok:
                # Negation is valid against sets as well (E.G ![ $HOME_NET, 192.168.0.0/24])
                if tok.startswith('!'):
                    tok = tok[1:]
                return validate_suricata_address_group_values(tok)
            return False

        return token_is_cidr(token) or token_is_ip(token) or token_is_list(token)

    s = str(s).replace(' ', '')
    valid_group_value_vars = ['$HOME_NET', '$EXTERNAL_NET', '$HTTP_SERVERS', '$SQL_SERVERS',
                              '$DNS_SERVERS', '$TELNET_SERVERS', '$AIM_SERVERS', '$DC_SERVERS',
                              '$SMTP_SERVERS', '$MODBUS_SERVER', '$MODBUS_CLIENT', '$ENIP_CLIENT', '$ENIP_SERVER']
    valid_neg_group_value_vars = ['!' + g for g in valid_group_value_vars]

    valid_var_subs = valid_group_value_vars + valid_neg_group_value_vars

    # List Formatting
    if '[' in s and ']' in s:
        # Negation is valid against sets as well (E.G ![ $HOME_NET, 192.168.0.0/24])
        if s.startswith('!'):
            s = s[1:]
        # split on comma, but exclude values in square brackets
        tokenized_list = re.split(r",(?![^(\[]*[\])])", s[1:-1])
        for t in tokenized_list:
            # Check if token in string is valid variable substitution, IP, or
            # CIDR. 'any' is rejected here too — it cannot be combined with
            # other values inside a list.
            if t not in valid_var_subs and not validate_token(t):
                return False
        return True
    # String formatting
    else:
        if 'any' == s:
            return True
        # Check if string is valid variable substitution, IP, or CIDR
        return validate_token(s) or s in valid_var_subs


def validate_suricata_port_group_values(s):
    """Determine if a string is a valid Suricata port group.

    Must be like the following:
        [80, 81, 82]   (port 80, 81 and 82)
        [80: 82]       (Range from 80 till 82)
        [1024: ]       (From 1024 till the highest port-number)
        !80            (Every port but 80)
        [80:100,!99]   (Range from 80 till 100 but 99 excluded)
        [1:80,![2,4]]

    Args:
        s: Test String
    Returns:
        True if meets the suricata_port_group_value conditions
    """

    def validate_token(token):
        # A leading '!' negates the token but does not affect validity.
        if str(token).startswith('!'):
            token = token[1:]

        def token_is_port(tok):
            tok = str(tok)
            try:
                if int(tok) < 0 or int(tok) > 65535:
                    return False
            except ValueError:
                return False
            # Reject float-looking strings such as '8.5'.
            return '.' not in str(tok)

        def token_is_range(tok):
            tok = str(tok)
            if ':' in str(tok):
                port_range = [p for p in tok.split(':') if p.strip()]
                if len(port_range) == 1:
                    # Open-ended range, e.g. '1024:' or ':1024'
                    return token_is_port(port_range[0])
                elif len(port_range) == 2:
                    r1, r2 = port_range
                    return token_is_port(r1) and token_is_port(r2) and int(r1) < int(r2)
            return False

        def token_is_list(tok):
            tok = str(tok)
            if '[' in tok and ']' in tok:
                # Negation is valid against sets as well
                if tok.startswith('!'):
                    tok = tok[1:]
                return validate_suricata_port_group_values(tok)
            return False

        return token_is_range(token) or token_is_port(token) or token_is_list(token)

    s = str(s).replace(' ', '')
    valid_group_value_vars = ['$HTTP_PORTS', '$SHELLCODE_PORTS', '$ORACLE_PORTS', '$SSH_PORTS',
                              '$DNP3_PORTS', '$MODBUS_PORTS', '$FILE_DATA_PORTS', '$FTP_PORTS']

    valid_neg_group_value_vars = ['!' + g for g in valid_group_value_vars]

    valid_var_subs = valid_group_value_vars + valid_neg_group_value_vars
    # List Formatting
    if '[' in s and ']' in s:
        # Negation is valid against sets as well
        if s.startswith('!'):
            s = s[1:]
        # split on comma, but exclude values in square brackets
        tokenized_list = re.split(r",(?![^(\[]*[\])])", s[1:-1])
        for t in tokenized_list:
            # 'any' is rejected here too — it cannot be combined with other
            # values inside a list.
            if t not in valid_var_subs and not validate_token(t):
                return False
        return True
    else:
        if 'any' == s:
            return True
        return validate_token(s) or s in valid_var_subs
suricata_install_directory: str): def invoke(self, shell: Optional[bool] = False, cwd: Optional[str] = os.getcwd()) -> List[Tuple[List, bytes, bytes]]: return super().invoke(shell, cwd) + + +if __name__ == '__main__': + SetCapturePermissions('/opt/dynamite/suricata/').invoke() \ No newline at end of file diff --git a/dynamite_nsm/services/suricata/tasks/update_rules.py b/dynamite_nsm/services/suricata/tasks/update_rules.py new file mode 100644 index 00000000..762b596f --- /dev/null +++ b/dynamite_nsm/services/suricata/tasks/update_rules.py @@ -0,0 +1,272 @@ +import io +import os +import re +import shutil +import subprocess +from typing import Optional +from datetime import datetime + +from suricata.update import net +from suricata.update import util +from suricata.update import config +from suricata.update import engine + +from dynamite_nsm import utilities +from dynamite_nsm import exceptions +from dynamite_nsm.services.base import tasks +from dynamite_nsm.services.suricata.rules.objects import RuleFile + + +class DummyArgs: + config = None + offline = False + force = False + quiet = True + url = [] + now = datetime.now() + + +class UpdateRules(tasks.BaseTask): + + def __init__(self, no_merge: Optional[bool] = False, sid_msg_map_file: Optional[str] = None, + sid_msg_map_2_file: Optional[str] = None, threshold_in_file: Optional[str] = None, + threshold_out_file: Optional[str] = None, + yaml_fragment_file: Optional[str] = None, force: Optional[bool] = False, + verbose: Optional[bool] = False, stdout: Optional[bool] = True): + + env = utilities.get_environment_file_dict() + self.configuration_directory = env.get('SURICATA_CONFIG') + self.install_directory = env.get('SURICATA_HOME') + self.no_merge = no_merge + self.threshold_in_file = threshold_in_file + self.threshold_out_file = threshold_out_file + self.sid_msg_map_file = sid_msg_map_file + self.sid_msg_map_2_file = sid_msg_map_2_file + self.force = force + self.yaml_fragment_file = yaml_fragment_file + 
config.DEFAULT_DATA_DIRECTORY = f'{self.configuration_directory}/data/' + config.DEFAULT_UPDATE_YAML_PATH = f'{self.configuration_directory}/update.yaml' + config.DEFAULT_SURICATA_YAML_PATH = [f'{self.configuration_directory}/suricata.yaml'] + self.config = config + super().__init__('update_suricata_rules', verbose=verbose, stdout=stdout) + + def invoke(self): + from suricata.update import main + from suricata.update.main import FileTracker, ThresholdProcessor + from suricata.update.main import DEFAULT_OUTPUT_RULE_FILENAME + + from suricata.update.main import build_rule_map, check_vars, check_output_directory, copytree, \ + copytree_ignore_backup, disable_ja3, ignore_file, load_matchers, load_filters, load_drop_filters, \ + load_sources, load_dist_rules, matchers_mod, manage_classification, notes, rule_mod, resolve_flowbits, \ + test_suricata, write_merged, write_yaml_fragment, write_sid_msg_map, write_to_directory + + main.args = DummyArgs() + config.init(DummyArgs()) + self.logger.info('Beginning Suricata Rule Update') + suricata_path = f'{self.install_directory}/bin/suricata' + suricata_conf_path = f'{self.configuration_directory}/suricata.yaml' + suricata_version = engine.get_version(suricata_path) + net.set_user_agent_suricata_version(suricata_version.full) + file_tracker = FileTracker() + + disable_matchers = [] + enable_matchers = [] + modify_filters = [] + drop_filters = [] + + # Load user provided disable filters. + disable_conf_filename = config.get("disable-conf") + if disable_conf_filename and os.path.exists(disable_conf_filename): + self.logger.info(f"Loading {disable_conf_filename}.") + disable_matchers += load_matchers(disable_conf_filename) + + # Load user provided enable filters. + enable_conf_filename = config.get("enable-conf") + if enable_conf_filename and os.path.exists(enable_conf_filename): + self.logger.info(f"Loading {enable_conf_filename}.") + enable_matchers += load_matchers(enable_conf_filename) + + # Load user provided modify filters. 
+ modify_conf_filename = config.get("modify-conf") + if modify_conf_filename and os.path.exists(modify_conf_filename): + modify_filters += load_filters(modify_conf_filename) + + # Load user provided drop filters. + drop_conf_filename = config.get("drop-conf") + if drop_conf_filename and os.path.exists(drop_conf_filename): + drop_filters += load_drop_filters(drop_conf_filename) + + # Load the Suricata configuration if we can. + suriconf = None + + if suricata_conf_path and \ + os.path.exists(suricata_conf_path) and \ + suricata_path and os.path.exists(suricata_path): + try: + suriconf = engine.Configuration.load( + suricata_conf_path, suricata_path=suricata_path) + except subprocess.CalledProcessError: + return exceptions.CallProcessError(f'Could not invoke {suricata_path}') + # Disable rule that are for app-layers that are not enabled. + if suriconf: + for key in suriconf.keys(): + m = re.match("app-layer\.protocols\.([^\.]+)\.enabled", key) + if m: + proto = m.group(1) + if not suriconf.is_true(key, ["detection-only"]): + disable_matchers.append(matchers_mod.ProtoRuleMatcher(proto)) + elif proto == "smb" and suriconf.build_info: + # Special case for SMB rules. For versions less + # than 5, disable smb rules if Rust is not + # available. + if suriconf.build_info["version"].major < 5: + if not "RUST" in suriconf.build_info["features"]: + disable_matchers.append(matchers_mod.ProtoRuleMatcher(proto)) + # Check that the cache directory exists and is writable. 
+ if not os.path.exists(config.get_cache_dir()): + try: + os.makedirs(config.get_cache_dir(), mode=0o770) + except Exception: + config.set_cache_dir("/var/tmp") + + files = load_sources(suricata_version) + + load_dist_rules(files) + + rules = [] + classification_files = [] + dep_files = {} + for entry in sorted(files, key=lambda e: e.filename): + if "classification.config" in entry.filename: + classification_files.append((entry.filename, entry.content)) + continue + if not entry.filename.endswith(".rules"): + dep_files.update({entry.filename: entry.content}) + continue + if ignore_file(config.get("ignore"), entry.filename): + continue + rules += rule_mod.parse_fileobj(io.BytesIO(entry.content), entry.filename) + + rulemap = build_rule_map(rules) + + # Counts of user enabled and modified rules. + enable_count = 0 + modify_count = 0 + drop_count = 0 + + # List of rules disabled by user. Used for counting, and to log + # rules that are re-enabled to meet flowbit requirements. + disabled_rules = [] + for key, rule in rulemap.items(): + + # To avoid duplicate counts when a rule has more than one modification + # to it, we track the actions here then update the counts at the end. + enabled = False + modified = False + dropped = False + + for matcher in disable_matchers: + if rule.enabled and matcher.match(rule): + rule.enabled = False + disabled_rules.append(rule) + + for matcher in enable_matchers: + if not rule.enabled and matcher.match(rule): + rule.enabled = True + enabled = True + + for fltr in drop_filters: + if fltr.match(rule): + rule = fltr.run(rule) + dropped = True + + for fltr in modify_filters: + if fltr.match(rule): + rule = fltr.run(rule) + modified = True + + if enabled: + enable_count += 1 + if modified: + modify_count += 1 + if dropped: + drop_count += 1 + + rulemap[key] = rule + + # Check if we should disable ja3 rules. 
+ try: + disable_ja3(suriconf, rulemap, disabled_rules) + except Exception as err: + self.logger.error("Failed to dynamically disable ja3 rules: %s" % (err)) + + # Check rule vars, disabling rules that use unknown vars. + check_vars(suriconf, rulemap) + + self.logger.info("Disabled %d rules." % (len(disabled_rules))) + self.logger.info("Enabled %d rules." % (enable_count)) + self.logger.info("Modified %d rules." % (modify_count)) + self.logger.info("Dropped %d rules." % (drop_count)) + + # Fixup flowbits. + resolve_flowbits(rulemap, disabled_rules) + + # Check that output directory exists, creating it if needed. + check_output_directory(config.get_output_dir()) + + # Check that output directory is writable. + if not os.access(config.get_output_dir(), os.W_OK): + self.logger.error(f"Output directory is not writable: {config.get_output_dir()}") + raise PermissionError(config.get_output_dir()) + + # Backup the output directory. + self.logger.info("Backing up current rules.") + backup_directory = util.mktempdir() + shutil.copytree(config.get_output_dir(), os.path.join( + backup_directory, "backup"), ignore=copytree_ignore_backup) + + if not self.no_merge: + # The default, write out a merged file. 
+ output_filename = os.path.join( + config.get_output_dir(), DEFAULT_OUTPUT_RULE_FILENAME) + file_tracker.add(output_filename) + write_merged(os.path.join(output_filename), rulemap, dep_files) + else: + for file in files: + file_tracker.add( + os.path.join( + config.get_output_dir(), os.path.basename(file.filename))) + write_to_directory(config.get_output_dir(), files, rulemap, dep_files) + + manage_classification(suriconf, classification_files) + + if self.yaml_fragment_file: + file_tracker.add(self.yaml_fragment_file) + write_yaml_fragment(self.yaml_fragment_file, files) + + if self.sid_msg_map_file: + write_sid_msg_map(self.sid_msg_map_file, rulemap, version=1) + if self.sid_msg_map_2_file: + write_sid_msg_map(self.sid_msg_map_2_file, rulemap, version=2) + + if self.threshold_in_file and self.threshold_out_file: + file_tracker.add(self.threshold_out_file) + threshold_processor = ThresholdProcessor() + threshold_processor.process( + open(self.threshold_in_file), open(self.threshold_out_file, "w"), rulemap) + + self.logger.info('Merging in changes.') + rule_file = RuleFile(f'{self.configuration_directory}/data/rules/suricata.rules') + rule_file.build_cache() + rule_file.merge() + rule_file.commit() + + if not test_suricata(suricata_path): + self.logger.error("Suricata test failed, aborting.") + self.logger.error("Restoring previous rules.") + copytree( + os.path.join(backup_directory, "backup"), config.get_output_dir()) + + +if __name__ == '__main__': + UpdateRules().invoke() diff --git a/dynamite_nsm/services/suricata/update.py b/dynamite_nsm/services/suricata/update.py index 9c0a17b1..abdb972d 100644 --- a/dynamite_nsm/services/suricata/update.py +++ b/dynamite_nsm/services/suricata/update.py @@ -1,5 +1,5 @@ from typing import Optional -from dynamite_nsm.services.suricata.oinkmaster import update_suricata_rules +from dynamite_nsm.services.suricata.tasks import update_rules from dynamite_nsm.services.base.install import BaseInstallManager @@ -21,4 +21,4 @@ def 
update(self): Returns: None """ - update_suricata_rules(stdout=self.stdout, verbose=self.verbose) + update_rules.UpdateRules().invoke() diff --git a/setup.py b/setup.py index b60df7ef..2f9baae3 100644 --- a/setup.py +++ b/setup.py @@ -1,5 +1,20 @@ +import os +from typing import List from setuptools import setup, find_packages + +def package_files(directory: str) -> List[str]: + paths = [] + for (path, directories, filenames) in os.walk(directory): + for filename in filenames: + paths.append(os.path.join('..', path, filename)) + return paths + + +package_data = ['bin/*'] +package_data.extend(package_files('dynamite_nsm/confs')) + + setup( name='dynamite-nsm', version='1.1.4', @@ -15,7 +30,7 @@ 'make securing your network environment simple and intuitive.', include_package_data=True, package_data={ - 'dynamite_nsm': ['bin/*'] + 'dynamite_nsm': package_data }, install_requires=[ 'bcrypt==3.2.0',