diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 9802ad91d3b..784ee42b676 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -2,12 +2,18 @@ name: Integration tests on: workflow_call: + inputs: + auditlog_type: + type: string + default: "django-auditlog" jobs: integration_tests: # run tests with docker compose name: User Interface Tests runs-on: ubuntu-latest + env: + AUDITLOG_TYPE: ${{ inputs.auditlog_type }} strategy: matrix: test-case: [ diff --git a/.github/workflows/rest-framework-tests.yml b/.github/workflows/rest-framework-tests.yml index 0b222fed842..5df066ec486 100644 --- a/.github/workflows/rest-framework-tests.yml +++ b/.github/workflows/rest-framework-tests.yml @@ -6,11 +6,16 @@ on: platform: type: string default: "linux/amd64" + auditlog_type: + type: string + default: "django-auditlog" jobs: unit_tests: name: Rest Framework Unit Tests runs-on: ${{ inputs.platform == 'linux/arm64' && 'ubuntu-24.04-arm' || 'ubuntu-latest' }} + env: + AUDITLOG_TYPE: ${{ inputs.auditlog_type }} strategy: matrix: diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index e16990520df..cbda2b40caf 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -25,18 +25,26 @@ jobs: strategy: matrix: platform: ['linux/amd64', 'linux/arm64'] + auditlog_type: ['django-auditlog', 'django-pghistory'] fail-fast: false needs: build-docker-containers uses: ./.github/workflows/rest-framework-tests.yml secrets: inherit with: platform: ${{ matrix.platform}} + auditlog_type: ${{ matrix.auditlog_type }} # only run integration tests for linux/amd64 (default) test-user-interface: needs: build-docker-containers uses: ./.github/workflows/integration-tests.yml secrets: inherit + strategy: + matrix: + auditlog_type: ['django-auditlog', 'django-pghistory'] + fail-fast: false + with: + auditlog_type: ${{ matrix.auditlog_type }} # only run k8s tests for linux/amd64 (default) test-k8s: diff --git a/docker/entrypoint-initializer.sh b/docker/entrypoint-initializer.sh index 52650b036bf..ec193ef6f06 100755 --- a/docker/entrypoint-initializer.sh +++ b/docker/entrypoint-initializer.sh @@ -110,6 +110,8 @@ python3 manage.py makemigrations --no-input --check --dry-run --verbosity 3 || { cat <<-EOF ******************************************************************************** +WARNING: Missing Database Migrations Detected +******************************************************************************** You made changes to the models without creating a DB migration for them. @@ -119,15 +121,25 @@ If you're not familiar with migrations in Django, please read the great documentation thoroughly: https://docs.djangoproject.com/en/5.0/topics/migrations/ +This is now a WARNING and the container will continue to start. +However, you should create the necessary migrations as soon as possible using: +docker compose exec uwsgi bash -c 'python manage.py makemigrations -v2' + ******************************************************************************** EOF - exit 1 + echo "WARNING: Continuing startup despite missing migrations..." } echo "Migrating" python3 manage.py migrate +echo "Configuring pghistory triggers based on audit settings" +cat < tuple[int, int, bool]: + """ + Generic batched deletion by timestamp for a set of models. + + Returns (deleted_or_would_delete_total, batches_done_or_needed, reached_limit) + """ + # Use a timestamp and not a date. 
This allows for efficient database index use. + cutoff_dt = timezone.now() - relativedelta(months=retention_period) + logger.info("Audit flush cutoff datetime: %s (retention_period=%s months)", cutoff_dt, retention_period) + + total_deleted = 0 + total_batches = 0 + reached_any_limit = False + + for Model in models_to_flush: + deleted_total = 0 + batches_done = 0 + filter_kwargs = {f"{timestamp_field}__lt": cutoff_dt} + last_pk = None + verb = "Would delete" if dry_run else "Deleted" + + while batches_done < max_batches: + batch_qs = Model.objects.filter(**filter_kwargs) + if last_pk is not None: + batch_qs = batch_qs.filter(pk__gt=last_pk) + batch_qs = batch_qs.order_by("pk") + + pks = list(batch_qs.values_list("pk", flat=True)[:batch_size]) + if not pks: + if batches_done == 0: + logger.info("No outdated %s entries found", Model._meta.object_name) + break + + if dry_run: + deleted_count = len(pks) + else: + qs = Model.objects.filter(pk__in=pks) + deleted_count = int(qs._raw_delete(qs.db)) + + deleted_total += deleted_count + batches_done += 1 + last_pk = pks[-1] + + logger.info( + "%s %s batch %s (size ~%s), total %s: %s", + verb, + Model._meta.object_name, + batches_done, + batch_size, + verb.lower(), + deleted_total, + ) + + total_deleted += deleted_total + total_batches += batches_done + if batches_done >= max_batches: + reached_any_limit = True + + return total_deleted, total_batches, reached_any_limit + + +def _flush_django_auditlog(retention_period: int, batch_size: int, max_batches: int, *, dry_run: bool = False) -> tuple[int, int, bool]: + # Import inside to avoid model import issues at startup + from auditlog.models import LogEntry # noqa: PLC0415 + + return _flush_models_in_batches([LogEntry], "timestamp", retention_period, batch_size, max_batches, dry_run=dry_run) + + +def _iter_pghistory_event_models(): + """Yield pghistory Event models registered under the dojo app.""" + for model in apps.get_app_config("dojo").get_models(): + if model._meta.object_name.endswith("Event"): + # Ensure the model has a pgh_created_at field + if any(f.name == "pgh_created_at" for f in model._meta.fields): + yield model + + +def _flush_pghistory_events(retention_period: int, batch_size: int, max_batches: int, *, dry_run: bool = False) -> tuple[int, int, bool]: + models_to_flush = list(_iter_pghistory_event_models()) + return _flush_models_in_batches(models_to_flush, "pgh_created_at", retention_period, batch_size, max_batches, dry_run=dry_run) + + +def run_flush_auditlog(retention_period: int | None = None, + batch_size: int | None = None, + max_batches: int | None = None, + *, + dry_run: bool = False) -> tuple[int, int, bool]: + """ + Deletes audit log entries older than the configured retention period from both + django-auditlog and django-pghistory. + + Returns a tuple of (deleted_total, batches_done, reached_limit).
+ """ + retention_period = retention_period if retention_period is not None else getattr(settings, "AUDITLOG_FLUSH_RETENTION_PERIOD", -1) + if retention_period < 0: + logger.info("Flushing audit logs is disabled") + return 0, 0, False + + batch_size = batch_size if batch_size is not None else getattr(settings, "AUDITLOG_FLUSH_BATCH_SIZE", 1000) + max_batches = max_batches if max_batches is not None else getattr(settings, "AUDITLOG_FLUSH_MAX_BATCHES", 100) + + phase = "DRY RUN" if dry_run else "Cleanup" + logger.info("Running %s for django-auditlog entries with %d Months retention across all backends", phase, retention_period) + d_deleted, d_batches, d_limit = _flush_django_auditlog(retention_period, batch_size, max_batches, dry_run=dry_run) + logger.info("Running %s for django-pghistory entries with %d Months retention across all backends", phase, retention_period) + p_deleted, p_batches, p_limit = _flush_pghistory_events(retention_period, batch_size, max_batches, dry_run=dry_run) + + total_deleted = d_deleted + p_deleted + total_batches = d_batches + p_batches + reached_limit = bool(d_limit or p_limit) + + verb = "would delete" if dry_run else "deleted" + logger.info("Audit flush summary: django-auditlog %s=%s batches=%s; pghistory %s=%s batches=%s; total_%s=%s total_batches=%s", + verb, d_deleted, d_batches, verb, p_deleted, p_batches, verb.replace(" ", "_"), total_deleted, total_batches) + + return total_deleted, total_batches, reached_limit + + +def enable_django_auditlog(): + """Enable django-auditlog by registering models.""" + # Import inside function to avoid AppRegistryNotReady errors + from auditlog.registry import auditlog # noqa: PLC0415 + + from dojo.models import ( # noqa: PLC0415 + Cred_User, + Dojo_User, + Endpoint, + Engagement, + Finding, + Finding_Group, + Finding_Template, + Notification_Webhooks, + Product, + Product_Type, + Risk_Acceptance, + Test, + ) + + logger.info("Enabling django-auditlog: Registering models") + auditlog.register(Dojo_User, exclude_fields=["password"]) + auditlog.register(Endpoint) + auditlog.register(Engagement) + auditlog.register(Finding, m2m_fields={"reviewers"}) + auditlog.register(Finding_Group) + auditlog.register(Product_Type) + auditlog.register(Product) + auditlog.register(Test) + auditlog.register(Risk_Acceptance) + auditlog.register(Finding_Template) + auditlog.register(Cred_User, exclude_fields=["password"]) + auditlog.register(Notification_Webhooks, exclude_fields=["header_name", "header_value"]) + logger.info("Successfully enabled django-auditlog") + + +def disable_django_auditlog(): + """Disable django-auditlog by unregistering models.""" + # Import inside function to avoid AppRegistryNotReady errors + from auditlog.registry import auditlog # noqa: PLC0415 + + from dojo.models import ( # noqa: PLC0415 + Cred_User, + Dojo_User, + Endpoint, + Engagement, + Finding, + Finding_Group, + Finding_Template, + Notification_Webhooks, + Product, + Product_Type, + Risk_Acceptance, + Test, + ) + + # Only log during actual application startup, not during shell commands + if "shell" not in sys.argv: + logger.info("Django-auditlog disabled - unregistering models") + + # Unregister all models from auditlog + models_to_unregister = [ + Dojo_User, Endpoint, Engagement, Finding, Finding_Group, + Product_Type, Product, Test, Risk_Acceptance, Finding_Template, + Cred_User, Notification_Webhooks, + ] + + for model in models_to_unregister: + with contextlib.suppress(Exception): + # Model might not be registered, ignore the error + 
auditlog.unregister(model) + + +def register_django_pghistory_models(): + """ + Register models with django-pghistory (always called to avoid spurious migrations). + + Note: This function is always called regardless of audit logging settings because: + 1. Django migrations are generated based on model registration at import time + 2. If pghistory models are not registered, Django will try to create migrations + to remove the pghistory tables when the models are not found + 3. This would cause migration conflicts and database inconsistencies + 4. By always registering the models, we ensure the database schema remains + stable while controlling audit behavior through trigger enable/disable + So we always register the models and make migrations for them. + Then we control audit behavior by enabling/disabling the underlying database + triggers. + """ + # Import models inside function to avoid AppRegistryNotReady errors + from dojo.models import ( # noqa: PLC0415 + Cred_User, + Dojo_User, + Endpoint, + Engagement, + Finding, + Finding_Group, + Finding_Template, + Notification_Webhooks, + Product, + Product_Type, + Risk_Acceptance, + Test, + ) + + # Only log during actual application startup, not during shell commands + if "shell" not in sys.argv: + logger.info("Registering models with django-pghistory") + + # Register models with pghistory for tracking changes + # Using pghistory.track() as a decorator factory: + # the call returns a decorator that is applied to the model class + + # Track Dojo_User with excluded fields + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + exclude=["password"], + # Add some indexes manually so we don't have to define a custom pghistory Event model with overridden fields.
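+ # pgh_created_at gets an index because the retention flush in _flush_pghistory_events filters on it.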
+ meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Dojo_User) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Endpoint) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Engagement) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Finding) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Finding_Group) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Product_Type) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Product) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Test) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Risk_Acceptance) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Finding_Template) + + pghistory.track( + 
pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + exclude=["password"], + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Cred_User) + + pghistory.track( + pghistory.InsertEvent(), + pghistory.UpdateEvent(condition=pghistory.AnyChange(exclude_auto=True)), + pghistory.DeleteEvent(), + pghistory.ManualEvent(label="initial_import"), + exclude=["header_name", "header_value"], + meta={ + "indexes": [ + models.Index(fields=["pgh_created_at"]), + models.Index(fields=["pgh_label"]), + models.Index(fields=["pgh_context_id"]), + ], + }, + )(Notification_Webhooks) + + # Only log during actual application startup, not during shell commands + if "shell" not in sys.argv: + logger.info("Successfully registered models with django-pghistory") + + +def enable_django_pghistory(): + """Enable django-pghistory by enabling triggers.""" + logger.info("Enabling django-pghistory: Enabling triggers") + + # Enable pghistory triggers + try: + call_command("pgtrigger", "enable") + logger.info("Successfully enabled pghistory triggers") + except Exception as e: + logger.warning(f"Failed to enable pgtrigger triggers: {e}") + # Don't raise the exception as this shouldn't prevent Django from starting + + +def disable_django_pghistory(): + """Disable django-pghistory by disabling triggers.""" + logger.info("Disabling django-pghistory: Disabling triggers") + try: + call_command("pgtrigger", "disable") + logger.info("Successfully disabled pghistory triggers") + except Exception as e: + logger.warning(f"Failed to disable pgtrigger triggers: {e}") + # Don't raise the exception as this shouldn't prevent Django from starting + + +def configure_pghistory_triggers(): + """ + Configure pghistory triggers based on audit settings. + + This function should be called after Django startup and migrations to properly + enable/disable pghistory triggers without database access warnings. + """ + if not settings.ENABLE_AUDITLOG: + logger.info("Audit logging disabled - disabling pghistory triggers") + try: + call_command("pgtrigger", "disable") + logger.info("Successfully disabled pghistory triggers") + except Exception as e: + logger.error(f"Failed to disable pghistory triggers: {e}") + raise + elif settings.AUDITLOG_TYPE == "django-pghistory": + try: + call_command("pgtrigger", "enable") + logger.info("Successfully enabled pghistory triggers") + except Exception as e: + logger.error(f"Failed to enable pghistory triggers: {e}") + raise + else: + try: + call_command("pgtrigger", "disable") + logger.info("Successfully disabled pghistory triggers") + except Exception as e: + logger.error(f"Failed to disable pghistory triggers: {e}") + raise + + +def configure_audit_system(): + """ + Configure the audit system based on settings. + + Note: This function only handles auditlog registration. pghistory model registration + is handled in apps.py, and trigger management should be done via the + configure_pghistory_triggers() function to avoid database access during initialization. 
+ """ + # Only log during actual application startup, not during shell commands + log_enabled = "shell" not in sys.argv + + if not settings.ENABLE_AUDITLOG: + if log_enabled: + logger.info("Audit logging disabled") + disable_django_auditlog() + return + + if settings.AUDITLOG_TYPE == "django-auditlog": + if log_enabled: + logger.info("Configuring audit system: django-auditlog enabled") + enable_django_auditlog() + else: + if log_enabled: + logger.info("django-auditlog disabled (pghistory or other audit type selected)") + disable_django_auditlog() diff --git a/dojo/db_migrations/0243_pghistory_models.py b/dojo/db_migrations/0243_pghistory_models.py new file mode 100644 index 00000000000..57d3c723eab --- /dev/null +++ b/dojo/db_migrations/0243_pghistory_models.py @@ -0,0 +1,938 @@ +# Generated by Django 5.1.11 on 2025-09-12 16:57 + +import django.contrib.auth.validators +import django.core.validators +import django.db.models.deletion +import django.db.models.manager +import django.utils.timezone +import django_extensions.db.fields +import dojo.models +import dojo.validators +import pgtrigger.compiler +import pgtrigger.migrations +from decimal import Decimal +from django.db import migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('dojo', '0242_file_upload_cleanup'), + ('pghistory', '0007_auto_20250421_0444'), + ] + + operations = [ + migrations.CreateModel( + name='Cred_UserEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(max_length=200)), + ('username', models.CharField(max_length=200)), + ('role', models.CharField(max_length=200)), + ('authentication', models.CharField(choices=[('Form', 'Form Authentication'), ('SSO', 'SSO Redirect')], default='Form', max_length=15)), + ('http_authentication', models.CharField(blank=True, choices=[('Basic', 'Basic'), ('NTLM', 'NTLM')], max_length=15, null=True)), + ('description', models.CharField(blank=True, max_length=2000, null=True)), + ('url', models.URLField(max_length=2000)), + ('login_regex', models.CharField(blank=True, max_length=200, null=True)), + ('logout_regex', models.CharField(blank=True, max_length=200, null=True)), + ('is_valid', models.BooleanField(default=True, verbose_name='Login is valid')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Dojo_UserEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')), + ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')), + ('username', models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. 
Letters, digits and @/./+/-/_ only.', max_length=150, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username')), + ('first_name', models.CharField(blank=True, max_length=150, verbose_name='first name')), + ('last_name', models.CharField(blank=True, max_length=150, verbose_name='last name')), + ('email', models.EmailField(blank=True, max_length=254, verbose_name='email address')), + ('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')), + ('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')), + ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='EndpointEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('protocol', models.CharField(blank=True, help_text="The communication protocol/scheme such as 'http', 'ftp', 'dns', etc.", max_length=20, null=True)), + ('userinfo', models.CharField(blank=True, help_text="User info as 'alice', 'bob', etc.", max_length=500, null=True)), + ('host', models.CharField(blank=True, help_text="The host name or IP address. It must not include the port number. For example '127.0.0.1', 'localhost', 'yourdomain.com'.", max_length=500, null=True)), + ('port', models.IntegerField(blank=True, help_text='The network port associated with the endpoint.', null=True)), + ('path', models.CharField(blank=True, help_text="The location of the resource, it must not start with a '/'. For example endpoint/420/edit", max_length=500, null=True)), + ('query', models.CharField(blank=True, help_text="The query string, the question mark should be omitted.For example 'group=4&team=8'", max_length=1000, null=True)), + ('fragment', models.CharField(blank=True, help_text="The fragment identifier which follows the hash mark. The hash mark should be omitted. 
For example 'section-13', 'paragraph-2'.", max_length=500, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='EngagementEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(blank=True, max_length=300, null=True)), + ('description', models.CharField(blank=True, max_length=2000, null=True)), + ('version', models.CharField(blank=True, help_text='Version of the product the engagement tested.', max_length=100, null=True)), + ('first_contacted', models.DateField(blank=True, null=True)), + ('target_start', models.DateField()), + ('target_end', models.DateField()), + ('reason', models.CharField(blank=True, max_length=2000, null=True)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ('active', models.BooleanField(default=True, editable=False)), + ('tracker', models.URLField(blank=True, help_text='Link to epic or ticket system with changes to version.', null=True)), + ('test_strategy', models.URLField(blank=True, null=True)), + ('threat_model', models.BooleanField(default=True)), + ('api_test', models.BooleanField(default=True)), + ('pen_test', models.BooleanField(default=True)), + ('check_list', models.BooleanField(default=True)), + ('status', models.CharField(choices=[('Not Started', 'Not Started'), ('Blocked', 'Blocked'), ('Cancelled', 'Cancelled'), ('Completed', 'Completed'), ('In Progress', 'In Progress'), ('On Hold', 'On Hold'), ('Waiting for Resource', 'Waiting for Resource')], default='', max_length=2000, null=True)), + ('progress', models.CharField(default='threat_model', editable=False, max_length=100)), + ('tmodel_path', models.CharField(blank=True, default='none', editable=False, max_length=1000, null=True)), + ('done_testing', models.BooleanField(default=False, editable=False)), + ('engagement_type', models.CharField(choices=[('Interactive', 'Interactive'), ('CI/CD', 'CI/CD')], default='Interactive', max_length=30, null=True)), + ('build_id', models.CharField(blank=True, help_text='Build ID of the product the engagement tested.', max_length=150, null=True, verbose_name='Build ID')), + ('commit_hash', models.CharField(blank=True, help_text='Commit hash from repo', max_length=150, null=True, verbose_name='Commit Hash')), + ('branch_tag', models.CharField(blank=True, help_text='Tag or branch of the product the engagement tested.', max_length=150, null=True, verbose_name='Branch/Tag')), + ('source_code_management_uri', models.URLField(blank=True, help_text='Resource link to source code', max_length=600, null=True, verbose_name='Repo')), + ('deduplication_on_engagement', models.BooleanField(default=False, help_text='If enabled deduplication will only mark a finding in this engagement as duplicate of another finding if both findings are in this engagement. 
If disabled, deduplication is on the product level.', verbose_name='Deduplication within this engagement only')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Finding_GroupEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('created', django_extensions.db.fields.CreationDateTimeField(auto_now_add=True, verbose_name='created')), + ('modified', django_extensions.db.fields.ModificationDateTimeField(auto_now=True, verbose_name='modified')), + ('name', models.CharField(max_length=255)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Finding_TemplateEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('title', models.TextField(max_length=1000)), + ('cwe', models.IntegerField(blank=True, default=None, null=True)), + ('cve', models.CharField(help_text='An id of a vulnerability in a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Id')), + ('cvssv3', models.TextField(help_text='Common Vulnerability Scoring System version 3 (CVSSv3) score associated with this finding.', max_length=117, null=True, validators=[dojo.validators.cvss3_validator], verbose_name='CVSS v3 vector')), + ('severity', models.CharField(blank=True, max_length=200, null=True)), + ('description', models.TextField(blank=True, null=True)), + ('mitigation', models.TextField(blank=True, null=True)), + ('impact', models.TextField(blank=True, null=True)), + ('references', models.TextField(blank=True, db_column='refs', null=True)), + ('last_used', models.DateTimeField(editable=False, null=True)), + ('numerical_severity', models.CharField(blank=True, editable=False, max_length=4, null=True)), + ('template_match', models.BooleanField(default=False, help_text='Enables this template for matching remediation advice. Match will be applied to all active, verified findings by CWE.', verbose_name='Template Match Enabled')), + ('template_match_title', models.BooleanField(default=False, help_text='Matches by title text (contains search) and CWE.', verbose_name='Match Template by Title and CWE')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='FindingEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('title', models.CharField(help_text='A short description of the flaw.', max_length=511, verbose_name='Title')), + ('date', models.DateField(default=dojo.models.get_current_date, help_text='The date the flaw was discovered.', verbose_name='Date')), + ('sla_start_date', models.DateField(blank=True, help_text="(readonly)The date used as start date for SLA calculation. Set by expiring risk acceptances. Empty by default, causing a fallback to 'date'.", null=True, verbose_name='SLA Start Date')), + ('sla_expiration_date', models.DateField(blank=True, help_text="(readonly)The date SLA expires for this finding. 
Empty by default, causing a fallback to 'date'.", null=True, verbose_name='SLA Expiration Date')), + ('cwe', models.IntegerField(blank=True, default=0, help_text='The CWE number associated with this flaw.', null=True, verbose_name='CWE')), + ('cve', models.CharField(help_text='An id of a vulnerability in a security advisory associated with this finding. Can be a Common Vulnerabilities and Exposures (CVE) or from other sources.', max_length=50, null=True, verbose_name='Vulnerability Id')), + ('epss_score', models.FloatField(blank=True, default=None, help_text='EPSS score for the CVE. Describes how likely it is the vulnerability will be exploited in the next 30 days.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(1.0)], verbose_name='EPSS Score')), + ('epss_percentile', models.FloatField(blank=True, default=None, help_text='EPSS percentile for the CVE. Describes how many CVEs are scored at or below this one.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(1.0)], verbose_name='EPSS percentile')), + ('known_exploited', models.BooleanField(default=False, help_text='Whether this vulnerability is known to have been exploited in the wild.', verbose_name='Known Exploited')), + ('ransomware_used', models.BooleanField(default=False, help_text='Whether this vulnerability is known to have been leveraged as part of a ransomware campaign.', verbose_name='Used in Ransomware')), + ('kev_date', models.DateField(blank=True, help_text='The date the vulnerability was added to the KEV catalog.', null=True, validators=[django.core.validators.MaxValueValidator(dojo.models.tomorrow)], verbose_name='KEV Date Added')), + ('cvssv3', models.TextField(help_text='Common Vulnerability Scoring System version 3 (CVSS3) score associated with this finding.', max_length=117, null=True, validators=[dojo.validators.cvss3_validator], verbose_name='CVSS3 Vector')), + ('cvssv3_score', models.FloatField(blank=True, help_text='Numerical CVSSv3 score for the vulnerability. If the vector is given, the score is updated while saving the finding. The value must be between 0-10.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(10.0)], verbose_name='CVSS3 Score')), + ('cvssv4', models.TextField(help_text='Common Vulnerability Scoring System version 4 (CVSS4) score associated with this finding.', max_length=255, null=True, validators=[dojo.validators.cvss4_validator], verbose_name='CVSS4 vector')), + ('cvssv4_score', models.FloatField(blank=True, help_text='Numerical CVSSv4 score for the vulnerability. If the vector is given, the score is updated while saving the finding. 
The value must be between 0-10.', null=True, validators=[django.core.validators.MinValueValidator(0.0), django.core.validators.MaxValueValidator(10.0)], verbose_name='CVSSv4 Score')), + ('url', models.TextField(blank=True, editable=False, help_text='External reference that provides more information about this flaw.', null=True, verbose_name='URL')), + ('severity', models.CharField(help_text='The severity level of this flaw (Critical, High, Medium, Low, Info).', max_length=200, verbose_name='Severity')), + ('description', models.TextField(help_text='Longer more descriptive information about the flaw.', verbose_name='Description')), + ('mitigation', models.TextField(blank=True, help_text='Text describing how to best fix the flaw.', null=True, verbose_name='Mitigation')), + ('fix_available', models.BooleanField(default=None, help_text='Denotes if there is a fix available for this flaw.', null=True, verbose_name='Fix Available')), + ('impact', models.TextField(blank=True, help_text='Text describing the impact this flaw has on systems, products, enterprise, etc.', null=True, verbose_name='Impact')), + ('steps_to_reproduce', models.TextField(blank=True, help_text='Text describing the steps that must be followed in order to reproduce the flaw / bug.', null=True, verbose_name='Steps to Reproduce')), + ('severity_justification', models.TextField(blank=True, help_text='Text describing why a certain severity was associated with this flaw.', null=True, verbose_name='Severity Justification')), + ('references', models.TextField(blank=True, db_column='refs', help_text='The external documentation available for this flaw.', null=True, verbose_name='References')), + ('active', models.BooleanField(default=True, help_text='Denotes if this flaw is active or not.', verbose_name='Active')), + ('verified', models.BooleanField(default=False, help_text='Denotes if this flaw has been manually verified by the tester.', verbose_name='Verified')), + ('false_p', models.BooleanField(default=False, help_text='Denotes if this flaw has been deemed a false positive by the tester.', verbose_name='False Positive')), + ('duplicate', models.BooleanField(default=False, help_text='Denotes if this flaw is a duplicate of other flaws reported.', verbose_name='Duplicate')), + ('out_of_scope', models.BooleanField(default=False, help_text='Denotes if this flaw falls outside the scope of the test and/or engagement.', verbose_name='Out Of Scope')), + ('risk_accepted', models.BooleanField(default=False, help_text='Denotes if this finding has been marked as an accepted risk.', verbose_name='Risk Accepted')), + ('under_review', models.BooleanField(default=False, help_text='Denotes is this flaw is currently being reviewed.', verbose_name='Under Review')), + ('last_status_update', models.DateTimeField(auto_now_add=True, help_text='Timestamp of latest status update (change in status related fields).', null=True, verbose_name='Last Status Update')), + ('under_defect_review', models.BooleanField(default=False, help_text='Denotes if this finding is under defect review.', verbose_name='Under Defect Review')), + ('is_mitigated', models.BooleanField(default=False, help_text='Denotes if this flaw has been fixed.', verbose_name='Is Mitigated')), + ('thread_id', models.IntegerField(default=0, editable=False, verbose_name='Thread ID')), + ('mitigated', models.DateTimeField(blank=True, editable=False, help_text='Denotes if this flaw has been fixed by storing the date it was fixed.', null=True, verbose_name='Mitigated')), + ('numerical_severity', 
models.CharField(help_text='The numerical representation of the severity (S0, S1, S2, S3, S4).', max_length=4, verbose_name='Numerical Severity')), + ('last_reviewed', models.DateTimeField(editable=False, help_text="Provides the date the flaw was last 'touched' by a tester.", null=True, verbose_name='Last Reviewed')), + ('param', models.TextField(blank=True, editable=False, help_text='Parameter used to trigger the issue (DAST).', null=True, verbose_name='Parameter')), + ('payload', models.TextField(blank=True, editable=False, help_text='Payload used to attack the service / application and trigger the bug / problem.', null=True, verbose_name='Payload')), + ('hash_code', models.CharField(blank=True, editable=False, help_text='A hash over a configurable set of fields that is used for findings deduplication.', max_length=64, null=True, verbose_name='Hash Code')), + ('line', models.IntegerField(blank=True, help_text='Source line number of the attack vector.', null=True, verbose_name='Line number')), + ('file_path', models.CharField(blank=True, help_text='Identified file(s) containing the flaw.', max_length=4000, null=True, verbose_name='File path')), + ('component_name', models.CharField(blank=True, help_text='Name of the affected component (library name, part of a system, ...).', max_length=500, null=True, verbose_name='Component name')), + ('component_version', models.CharField(blank=True, help_text='Version of the affected component.', max_length=100, null=True, verbose_name='Component version')), + ('static_finding', models.BooleanField(default=False, help_text='Flaw has been detected from a Static Application Security Testing tool (SAST).', verbose_name='Static finding (SAST)')), + ('dynamic_finding', models.BooleanField(default=True, help_text='Flaw has been detected from a Dynamic Application Security Testing tool (DAST).', verbose_name='Dynamic finding (DAST)')), + ('created', models.DateTimeField(auto_now_add=True, help_text='The date the finding was created inside DefectDojo.', null=True, verbose_name='Created')), + ('scanner_confidence', models.IntegerField(blank=True, default=None, editable=False, help_text='Confidence level of vulnerability which is supplied by the scanner.', null=True, verbose_name='Scanner confidence')), + ('unique_id_from_tool', models.CharField(blank=True, help_text='Vulnerability technical id from the source tool. Allows to track unique vulnerabilities over time across subsequent scans.', max_length=500, null=True, verbose_name='Unique ID from tool')), + ('vuln_id_from_tool', models.CharField(blank=True, help_text='Non-unique technical id from the source tool associated with the vulnerability type.', max_length=500, null=True, verbose_name='Vulnerability ID from tool')), + ('sast_source_object', models.CharField(blank=True, help_text='Source object (variable, function...) of the attack vector.', max_length=500, null=True, verbose_name='SAST Source Object')), + ('sast_sink_object', models.CharField(blank=True, help_text='Sink object (variable, function...) 
of the attack vector.', max_length=500, null=True, verbose_name='SAST Sink Object')), + ('sast_source_line', models.IntegerField(blank=True, help_text='Source line number of the attack vector.', null=True, verbose_name='SAST Source Line number')), + ('sast_source_file_path', models.CharField(blank=True, help_text='Source file path of the attack vector.', max_length=4000, null=True, verbose_name='SAST Source File Path')), + ('nb_occurences', models.IntegerField(blank=True, help_text='Number of occurences in the source tool when several vulnerabilites were found and aggregated by the scanner.', null=True, verbose_name='Number of occurences')), + ('publish_date', models.DateField(blank=True, help_text='Date when this vulnerability was made publicly available.', null=True, verbose_name='Publish date')), + ('service', models.CharField(blank=True, help_text='A service is a self-contained piece of functionality within a Product. This is an optional field which is used in deduplication of findings when set.', max_length=200, null=True, verbose_name='Service')), + ('planned_remediation_date', models.DateField(help_text='The date the flaw is expected to be remediated.', null=True, verbose_name='Planned Remediation Date')), + ('planned_remediation_version', models.CharField(blank=True, help_text='The target version when the vulnerability should be fixed / remediated', max_length=99, null=True, verbose_name='Planned remediation version')), + ('effort_for_fixing', models.CharField(blank=True, help_text='Effort for fixing / remediating the vulnerability (Low, Medium, High)', max_length=99, null=True, verbose_name='Effort for fixing')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Notification_WebhooksEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(default='', help_text='Name of the incoming webhook', max_length=100)), + ('url', models.URLField(default='', help_text='The full URL of the incoming webhook')), + ('status', models.CharField(choices=[('active', 'Active'), ('active_tmp', 'Active but 5xx (or similar) error detected'), ('inactive_tmp', 'Temporary inactive because of 5xx (or similar) error'), ('inactive_permanent', 'Permanently inactive')], default='active', editable=False, help_text='Status of the incoming webhook', max_length=20)), + ('first_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened first time', null=True)), + ('last_error', models.DateTimeField(blank=True, editable=False, help_text='If endpoint is active, when error happened last time', null=True)), + ('note', models.CharField(blank=True, default='', editable=False, help_text='Description of the latest error', max_length=1000, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Product_TypeEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(max_length=255)), + ('description', models.CharField(blank=True, max_length=4000, null=True)), + ('critical_product', models.BooleanField(default=False)), + ('key_product', models.BooleanField(default=False)), + 
('updated', models.DateTimeField(auto_now=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='ProductEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(max_length=255)), + ('description', models.CharField(max_length=4000)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('tid', models.IntegerField(default=0, editable=False)), + ('prod_numeric_grade', models.IntegerField(blank=True, null=True)), + ('business_criticality', models.CharField(blank=True, choices=[('very high', 'Very High'), ('high', 'High'), ('medium', 'Medium'), ('low', 'Low'), ('very low', 'Very Low'), ('none', 'None')], max_length=9, null=True)), + ('platform', models.CharField(blank=True, choices=[('web service', 'API'), ('desktop', 'Desktop'), ('iot', 'Internet of Things'), ('mobile', 'Mobile'), ('web', 'Web')], max_length=11, null=True)), + ('lifecycle', models.CharField(blank=True, choices=[('construction', 'Construction'), ('production', 'Production'), ('retirement', 'Retirement')], max_length=12, null=True)), + ('origin', models.CharField(blank=True, choices=[('third party library', 'Third Party Library'), ('purchased', 'Purchased'), ('contractor', 'Contractor Developed'), ('internal', 'Internally Developed'), ('open source', 'Open Source'), ('outsourced', 'Outsourced')], max_length=19, null=True)), + ('user_records', models.PositiveIntegerField(blank=True, help_text='Estimate the number of user records within the application.', null=True)), + ('revenue', models.DecimalField(blank=True, decimal_places=2, help_text="Estimate the application's revenue.", max_digits=15, null=True, validators=[django.core.validators.MinValueValidator(Decimal('0.00'))])), + ('external_audience', models.BooleanField(default=False, help_text='Specify if the application is used by people outside the organization.')), + ('internet_accessible', models.BooleanField(default=False, help_text='Specify if the application is accessible from the public internet.')), + ('enable_product_tag_inheritance', models.BooleanField(default=False, help_text='Enables product tag inheritance. 
Any tags added on a product will automatically be added to all Engagements, Tests, and Findings', verbose_name='Enable Product Tag Inheritance')), + ('enable_simple_risk_acceptance', models.BooleanField(default=False, help_text='Allows simple risk acceptance by checking/unchecking a checkbox.')), + ('enable_full_risk_acceptance', models.BooleanField(default=True, help_text='Allows full risk acceptance using a risk acceptance form, expiration date, uploaded proof, etc.')), + ('disable_sla_breach_notifications', models.BooleanField(default=False, help_text='Disable SLA breach notifications if configured in the global settings', verbose_name='Disable SLA breach notifications')), + ('async_updating', models.BooleanField(default=False, help_text='Findings under this Product or SLA configuration are asynchronously being updated')), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='Risk_AcceptanceEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('name', models.CharField(help_text='Descriptive name which in the future may also be used to group risk acceptances together across engagements and products', max_length=300)), + ('recommendation', models.CharField(choices=[('A', 'Accept (The risk is acknowledged, yet remains)'), ('V', 'Avoid (Do not engage with whatever creates the risk)'), ('M', 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'), ('F', 'Fix (The risk is eradicated)'), ('T', 'Transfer (The risk is transferred to a 3rd party)')], default='F', help_text='Recommendation from the security team.', max_length=2, verbose_name='Security Recommendation')), + ('recommendation_details', models.TextField(blank=True, help_text='Explanation of security recommendation', null=True, verbose_name='Security Recommendation Details')), + ('decision', models.CharField(choices=[('A', 'Accept (The risk is acknowledged, yet remains)'), ('V', 'Avoid (Do not engage with whatever creates the risk)'), ('M', 'Mitigate (The risk still exists, yet compensating controls make it less of a threat)'), ('F', 'Fix (The risk is eradicated)'), ('T', 'Transfer (The risk is transferred to a 3rd party)')], default='A', help_text='Risk treatment decision by risk owner', max_length=2)), + ('decision_details', models.TextField(blank=True, default=None, help_text='If a compensating control exists to mitigate the finding or reduce risk, then list the compensating control(s).', null=True)), + ('accepted_by', models.CharField(blank=True, default=None, help_text='The person that accepts the risk, can be outside of DefectDojo.', max_length=200, null=True, verbose_name='Accepted By')), + ('path', models.FileField(blank=True, null=True, upload_to='risk/%Y/%m/%d', verbose_name='Proof')), + ('expiration_date', models.DateTimeField(blank=True, default=None, help_text='When the risk acceptance expires, the findings will be reactivated (unless disabled below).', null=True)), + ('expiration_date_warned', models.DateTimeField(blank=True, default=None, help_text='(readonly) Date at which notice about the risk acceptance expiration was sent.', null=True)), + ('expiration_date_handled', models.DateTimeField(blank=True, default=None, help_text='(readonly) When the risk acceptance expiration was handled (manually or by the daily job).', null=True)), + ('reactivate_expired', 
models.BooleanField(default=True, help_text='Reactivate findings when risk acceptance expires?', verbose_name='Reactivate findings on expiration')), + ('restart_sla_expired', models.BooleanField(default=False, help_text='When enabled, the SLA for findings is restarted when the risk acceptance expires.', verbose_name='Restart SLA on expiration')), + ('created', models.DateTimeField(auto_now_add=True)), + ('updated', models.DateTimeField(auto_now=True)), + ], + options={ + 'abstract': False, + }, + ), + migrations.CreateModel( + name='TestEvent', + fields=[ + ('pgh_id', models.AutoField(primary_key=True, serialize=False)), + ('pgh_created_at', models.DateTimeField(auto_now_add=True)), + ('pgh_label', models.TextField(help_text='The event label.')), + ('id', models.IntegerField()), + ('scan_type', models.TextField(null=True)), + ('title', models.CharField(blank=True, max_length=255, null=True)), + ('description', models.TextField(blank=True, null=True)), + ('target_start', models.DateTimeField()), + ('target_end', models.DateTimeField()), + ('percent_complete', models.IntegerField(blank=True, null=True)), + ('updated', models.DateTimeField(auto_now=True, null=True)), + ('created', models.DateTimeField(auto_now_add=True, null=True)), + ('version', models.CharField(blank=True, max_length=100, null=True)), + ('build_id', models.CharField(blank=True, help_text='Build ID that was tested, a reimport may update this field.', max_length=150, null=True, verbose_name='Build ID')), + ('commit_hash', models.CharField(blank=True, help_text='Commit hash tested, a reimport may update this field.', max_length=150, null=True, verbose_name='Commit Hash')), + ('branch_tag', models.CharField(blank=True, help_text='Tag or branch that was tested, a reimport may update this field.', max_length=150, null=True, verbose_name='Branch/Tag')), + ], + options={ + 'abstract': False, + }, + ), + pgtrigger.migrations.AddTrigger( + model_name='cred_user', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_cred_userevent" ("authentication", "description", "environment_id", "http_authentication", "id", "is_valid", "login_regex", "logout_regex", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "role", "url", "username") VALUES (NEW."authentication", NEW."description", NEW."environment_id", NEW."http_authentication", NEW."id", NEW."is_valid", NEW."login_regex", NEW."logout_regex", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."role", NEW."url", NEW."username"); RETURN NULL;', hash='43f52f9845d27e920508439dbfbd3ec6e9597d25', operation='INSERT', pgid='pgtrigger_insert_insert_318ed', table='dojo_cred_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='cred_user', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."authentication" IS DISTINCT FROM (NEW."authentication") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."environment_id" IS DISTINCT FROM (NEW."environment_id") OR OLD."http_authentication" IS DISTINCT FROM (NEW."http_authentication") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."is_valid" IS DISTINCT FROM (NEW."is_valid") OR OLD."login_regex" IS DISTINCT FROM (NEW."login_regex") OR OLD."logout_regex" IS DISTINCT FROM (NEW."logout_regex") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."role" IS DISTINCT FROM (NEW."role") OR OLD."url" IS DISTINCT FROM (NEW."url") OR OLD."username" IS DISTINCT 
FROM (NEW."username"))', func='INSERT INTO "dojo_cred_userevent" ("authentication", "description", "environment_id", "http_authentication", "id", "is_valid", "login_regex", "logout_regex", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "role", "url", "username") VALUES (NEW."authentication", NEW."description", NEW."environment_id", NEW."http_authentication", NEW."id", NEW."is_valid", NEW."login_regex", NEW."logout_regex", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."role", NEW."url", NEW."username"); RETURN NULL;', hash='630b91cc38735df1a354209f05519c55538abd7a', operation='UPDATE', pgid='pgtrigger_update_update_3889b', table='dojo_cred_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='cred_user', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_cred_userevent" ("authentication", "description", "environment_id", "http_authentication", "id", "is_valid", "login_regex", "logout_regex", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "role", "url", "username") VALUES (OLD."authentication", OLD."description", OLD."environment_id", OLD."http_authentication", OLD."id", OLD."is_valid", OLD."login_regex", OLD."logout_regex", OLD."name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."role", OLD."url", OLD."username"); RETURN NULL;', hash='4947f3c486b6f7922a82690473cc5166506d79fc', operation='DELETE', pgid='pgtrigger_delete_delete_d8a8c', table='dojo_cred_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='dojo_user', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_dojo_userevent" ("date_joined", "email", "first_name", "id", "is_active", "is_staff", "is_superuser", "last_login", "last_name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "username") VALUES (NEW."date_joined", NEW."email", NEW."first_name", NEW."id", NEW."is_active", NEW."is_staff", NEW."is_superuser", NEW."last_login", NEW."last_name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."username"); RETURN NULL;', hash='f997de9e21fa9d350c04039bc6aa7ac450d72c3d', operation='INSERT', pgid='pgtrigger_insert_insert_f69f3', table='auth_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='dojo_user', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."date_joined" IS DISTINCT FROM (NEW."date_joined") OR OLD."email" IS DISTINCT FROM (NEW."email") OR OLD."first_name" IS DISTINCT FROM (NEW."first_name") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."is_active" IS DISTINCT FROM (NEW."is_active") OR OLD."is_staff" IS DISTINCT FROM (NEW."is_staff") OR OLD."is_superuser" IS DISTINCT FROM (NEW."is_superuser") OR OLD."last_login" IS DISTINCT FROM (NEW."last_login") OR OLD."last_name" IS DISTINCT FROM (NEW."last_name") OR OLD."username" IS DISTINCT FROM (NEW."username"))', func='INSERT INTO "dojo_dojo_userevent" ("date_joined", "email", "first_name", "id", "is_active", "is_staff", "is_superuser", "last_login", "last_name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "username") VALUES (NEW."date_joined", NEW."email", NEW."first_name", NEW."id", NEW."is_active", NEW."is_staff", NEW."is_superuser", NEW."last_login", NEW."last_name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."username"); RETURN NULL;', 
hash='4791d2880e30b42d862087ba0ad17e2f5f350fc3', operation='UPDATE', pgid='pgtrigger_update_update_1c25d', table='auth_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='dojo_user', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_dojo_userevent" ("date_joined", "email", "first_name", "id", "is_active", "is_staff", "is_superuser", "last_login", "last_name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "username") VALUES (OLD."date_joined", OLD."email", OLD."first_name", OLD."id", OLD."is_active", OLD."is_staff", OLD."is_superuser", OLD."last_login", OLD."last_name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."username"); RETURN NULL;', hash='0fc44952f13a80dfc9661150ebe99cdb865bf305', operation='DELETE', pgid='pgtrigger_delete_delete_37974', table='auth_user', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='endpoint', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_endpointevent" ("fragment", "host", "id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "port", "product_id", "protocol", "query", "userinfo") VALUES (NEW."fragment", NEW."host", NEW."id", NEW."path", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."port", NEW."product_id", NEW."protocol", NEW."query", NEW."userinfo"); RETURN NULL;', hash='68e589c514e437831f4da3e802ca18e4cced175d', operation='INSERT', pgid='pgtrigger_insert_insert_c7973', table='dojo_endpoint', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='endpoint', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "dojo_endpointevent" ("fragment", "host", "id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "port", "product_id", "protocol", "query", "userinfo") VALUES (NEW."fragment", NEW."host", NEW."id", NEW."path", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."port", NEW."product_id", NEW."protocol", NEW."query", NEW."userinfo"); RETURN NULL;', hash='fce28bcbd64850a383950a720a72f134da7989d9', operation='UPDATE', pgid='pgtrigger_update_update_2b19a', table='dojo_endpoint', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='endpoint', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_endpointevent" ("fragment", "host", "id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "port", "product_id", "protocol", "query", "userinfo") VALUES (OLD."fragment", OLD."host", OLD."id", OLD."path", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."port", OLD."product_id", OLD."protocol", OLD."query", OLD."userinfo"); RETURN NULL;', hash='5e88416165d3e62cc6717b7d7f5d82933929de32', operation='DELETE', pgid='pgtrigger_delete_delete_dd1f9', table='dojo_endpoint', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='engagement', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_engagementevent" ("active", "api_test", "branch_tag", "build_id", "build_server_id", "check_list", "commit_hash", "created", "deduplication_on_engagement", "description", "done_testing", "engagement_type", "first_contacted", "id", "lead_id", "name", "orchestration_engine_id", "pen_test", 
"pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "preset_id", "product_id", "progress", "reason", "report_type_id", "requester_id", "source_code_management_server_id", "source_code_management_uri", "status", "target_end", "target_start", "test_strategy", "threat_model", "tmodel_path", "tracker", "updated", "version") VALUES (NEW."active", NEW."api_test", NEW."branch_tag", NEW."build_id", NEW."build_server_id", NEW."check_list", NEW."commit_hash", NEW."created", NEW."deduplication_on_engagement", NEW."description", NEW."done_testing", NEW."engagement_type", NEW."first_contacted", NEW."id", NEW."lead_id", NEW."name", NEW."orchestration_engine_id", NEW."pen_test", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."preset_id", NEW."product_id", NEW."progress", NEW."reason", NEW."report_type_id", NEW."requester_id", NEW."source_code_management_server_id", NEW."source_code_management_uri", NEW."status", NEW."target_end", NEW."target_start", NEW."test_strategy", NEW."threat_model", NEW."tmodel_path", NEW."tracker", NEW."updated", NEW."version"); RETURN NULL;', hash='4155b326b45cff0de61bd509545845d4a9fa6a18', operation='INSERT', pgid='pgtrigger_insert_insert_125f1', table='dojo_engagement', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='engagement', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."active" IS DISTINCT FROM (NEW."active") OR OLD."api_test" IS DISTINCT FROM (NEW."api_test") OR OLD."branch_tag" IS DISTINCT FROM (NEW."branch_tag") OR OLD."build_id" IS DISTINCT FROM (NEW."build_id") OR OLD."build_server_id" IS DISTINCT FROM (NEW."build_server_id") OR OLD."check_list" IS DISTINCT FROM (NEW."check_list") OR OLD."commit_hash" IS DISTINCT FROM (NEW."commit_hash") OR OLD."deduplication_on_engagement" IS DISTINCT FROM (NEW."deduplication_on_engagement") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."done_testing" IS DISTINCT FROM (NEW."done_testing") OR OLD."engagement_type" IS DISTINCT FROM (NEW."engagement_type") OR OLD."first_contacted" IS DISTINCT FROM (NEW."first_contacted") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."lead_id" IS DISTINCT FROM (NEW."lead_id") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."orchestration_engine_id" IS DISTINCT FROM (NEW."orchestration_engine_id") OR OLD."pen_test" IS DISTINCT FROM (NEW."pen_test") OR OLD."preset_id" IS DISTINCT FROM (NEW."preset_id") OR OLD."product_id" IS DISTINCT FROM (NEW."product_id") OR OLD."progress" IS DISTINCT FROM (NEW."progress") OR OLD."reason" IS DISTINCT FROM (NEW."reason") OR OLD."report_type_id" IS DISTINCT FROM (NEW."report_type_id") OR OLD."requester_id" IS DISTINCT FROM (NEW."requester_id") OR OLD."source_code_management_server_id" IS DISTINCT FROM (NEW."source_code_management_server_id") OR OLD."source_code_management_uri" IS DISTINCT FROM (NEW."source_code_management_uri") OR OLD."status" IS DISTINCT FROM (NEW."status") OR OLD."target_end" IS DISTINCT FROM (NEW."target_end") OR OLD."target_start" IS DISTINCT FROM (NEW."target_start") OR OLD."test_strategy" IS DISTINCT FROM (NEW."test_strategy") OR OLD."threat_model" IS DISTINCT FROM (NEW."threat_model") OR OLD."tmodel_path" IS DISTINCT FROM (NEW."tmodel_path") OR OLD."tracker" IS DISTINCT FROM (NEW."tracker") OR OLD."version" IS DISTINCT FROM (NEW."version"))', func='INSERT INTO "dojo_engagementevent" ("active", "api_test", "branch_tag", "build_id", "build_server_id", "check_list", "commit_hash", "created", 
"deduplication_on_engagement", "description", "done_testing", "engagement_type", "first_contacted", "id", "lead_id", "name", "orchestration_engine_id", "pen_test", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "preset_id", "product_id", "progress", "reason", "report_type_id", "requester_id", "source_code_management_server_id", "source_code_management_uri", "status", "target_end", "target_start", "test_strategy", "threat_model", "tmodel_path", "tracker", "updated", "version") VALUES (NEW."active", NEW."api_test", NEW."branch_tag", NEW."build_id", NEW."build_server_id", NEW."check_list", NEW."commit_hash", NEW."created", NEW."deduplication_on_engagement", NEW."description", NEW."done_testing", NEW."engagement_type", NEW."first_contacted", NEW."id", NEW."lead_id", NEW."name", NEW."orchestration_engine_id", NEW."pen_test", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."preset_id", NEW."product_id", NEW."progress", NEW."reason", NEW."report_type_id", NEW."requester_id", NEW."source_code_management_server_id", NEW."source_code_management_uri", NEW."status", NEW."target_end", NEW."target_start", NEW."test_strategy", NEW."threat_model", NEW."tmodel_path", NEW."tracker", NEW."updated", NEW."version"); RETURN NULL;', hash='ef3645585741c02419e463ccd066ab5daf10eddd', operation='UPDATE', pgid='pgtrigger_update_update_65136', table='dojo_engagement', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='engagement', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_engagementevent" ("active", "api_test", "branch_tag", "build_id", "build_server_id", "check_list", "commit_hash", "created", "deduplication_on_engagement", "description", "done_testing", "engagement_type", "first_contacted", "id", "lead_id", "name", "orchestration_engine_id", "pen_test", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "preset_id", "product_id", "progress", "reason", "report_type_id", "requester_id", "source_code_management_server_id", "source_code_management_uri", "status", "target_end", "target_start", "test_strategy", "threat_model", "tmodel_path", "tracker", "updated", "version") VALUES (OLD."active", OLD."api_test", OLD."branch_tag", OLD."build_id", OLD."build_server_id", OLD."check_list", OLD."commit_hash", OLD."created", OLD."deduplication_on_engagement", OLD."description", OLD."done_testing", OLD."engagement_type", OLD."first_contacted", OLD."id", OLD."lead_id", OLD."name", OLD."orchestration_engine_id", OLD."pen_test", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."preset_id", OLD."product_id", OLD."progress", OLD."reason", OLD."report_type_id", OLD."requester_id", OLD."source_code_management_server_id", OLD."source_code_management_uri", OLD."status", OLD."target_end", OLD."target_start", OLD."test_strategy", OLD."threat_model", OLD."tmodel_path", OLD."tracker", OLD."updated", OLD."version"); RETURN NULL;', hash='7c91af532c0625d121388fb6d3fff7a0321d06b5', operation='DELETE', pgid='pgtrigger_delete_delete_9f4df', table='dojo_engagement', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_findingevent" ("active", "component_name", "component_version", "created", "cve", "cvssv3", "cvssv3_score", "cvssv4", "cvssv4_score", "cwe", "date", "defect_review_requested_by_id", "description", "duplicate", "duplicate_finding_id", 
"dynamic_finding", "effort_for_fixing", "epss_percentile", "epss_score", "false_p", "file_path", "fix_available", "hash_code", "id", "impact", "is_mitigated", "kev_date", "known_exploited", "last_reviewed", "last_reviewed_by_id", "last_status_update", "line", "mitigated", "mitigated_by_id", "mitigation", "nb_occurences", "numerical_severity", "out_of_scope", "param", "payload", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "planned_remediation_date", "planned_remediation_version", "publish_date", "ransomware_used", "refs", "reporter_id", "review_requested_by_id", "risk_accepted", "sast_sink_object", "sast_source_file_path", "sast_source_line", "sast_source_object", "scanner_confidence", "service", "severity", "severity_justification", "sla_expiration_date", "sla_start_date", "sonarqube_issue_id", "static_finding", "steps_to_reproduce", "test_id", "thread_id", "title", "under_defect_review", "under_review", "unique_id_from_tool", "url", "verified", "vuln_id_from_tool") VALUES (NEW."active", NEW."component_name", NEW."component_version", NEW."created", NEW."cve", NEW."cvssv3", NEW."cvssv3_score", NEW."cvssv4", NEW."cvssv4_score", NEW."cwe", NEW."date", NEW."defect_review_requested_by_id", NEW."description", NEW."duplicate", NEW."duplicate_finding_id", NEW."dynamic_finding", NEW."effort_for_fixing", NEW."epss_percentile", NEW."epss_score", NEW."false_p", NEW."file_path", NEW."fix_available", NEW."hash_code", NEW."id", NEW."impact", NEW."is_mitigated", NEW."kev_date", NEW."known_exploited", NEW."last_reviewed", NEW."last_reviewed_by_id", NEW."last_status_update", NEW."line", NEW."mitigated", NEW."mitigated_by_id", NEW."mitigation", NEW."nb_occurences", NEW."numerical_severity", NEW."out_of_scope", NEW."param", NEW."payload", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."planned_remediation_date", NEW."planned_remediation_version", NEW."publish_date", NEW."ransomware_used", NEW."refs", NEW."reporter_id", NEW."review_requested_by_id", NEW."risk_accepted", NEW."sast_sink_object", NEW."sast_source_file_path", NEW."sast_source_line", NEW."sast_source_object", NEW."scanner_confidence", NEW."service", NEW."severity", NEW."severity_justification", NEW."sla_expiration_date", NEW."sla_start_date", NEW."sonarqube_issue_id", NEW."static_finding", NEW."steps_to_reproduce", NEW."test_id", NEW."thread_id", NEW."title", NEW."under_defect_review", NEW."under_review", NEW."unique_id_from_tool", NEW."url", NEW."verified", NEW."vuln_id_from_tool"); RETURN NULL;', hash='4feb4f6a7e26a63edec0aed0646e539d83151bad', operation='INSERT', pgid='pgtrigger_insert_insert_2fbbb', table='dojo_finding', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."active" IS DISTINCT FROM (NEW."active") OR OLD."component_name" IS DISTINCT FROM (NEW."component_name") OR OLD."component_version" IS DISTINCT FROM (NEW."component_version") OR OLD."cve" IS DISTINCT FROM (NEW."cve") OR OLD."cvssv3" IS DISTINCT FROM (NEW."cvssv3") OR OLD."cvssv3_score" IS DISTINCT FROM (NEW."cvssv3_score") OR OLD."cvssv4" IS DISTINCT FROM (NEW."cvssv4") OR OLD."cvssv4_score" IS DISTINCT FROM (NEW."cvssv4_score") OR OLD."cwe" IS DISTINCT FROM (NEW."cwe") OR OLD."date" IS DISTINCT FROM (NEW."date") OR OLD."defect_review_requested_by_id" IS DISTINCT FROM (NEW."defect_review_requested_by_id") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."duplicate" IS DISTINCT FROM 
(NEW."duplicate") OR OLD."duplicate_finding_id" IS DISTINCT FROM (NEW."duplicate_finding_id") OR OLD."dynamic_finding" IS DISTINCT FROM (NEW."dynamic_finding") OR OLD."effort_for_fixing" IS DISTINCT FROM (NEW."effort_for_fixing") OR OLD."epss_percentile" IS DISTINCT FROM (NEW."epss_percentile") OR OLD."epss_score" IS DISTINCT FROM (NEW."epss_score") OR OLD."false_p" IS DISTINCT FROM (NEW."false_p") OR OLD."file_path" IS DISTINCT FROM (NEW."file_path") OR OLD."fix_available" IS DISTINCT FROM (NEW."fix_available") OR OLD."hash_code" IS DISTINCT FROM (NEW."hash_code") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."impact" IS DISTINCT FROM (NEW."impact") OR OLD."is_mitigated" IS DISTINCT FROM (NEW."is_mitigated") OR OLD."kev_date" IS DISTINCT FROM (NEW."kev_date") OR OLD."known_exploited" IS DISTINCT FROM (NEW."known_exploited") OR OLD."last_reviewed" IS DISTINCT FROM (NEW."last_reviewed") OR OLD."last_reviewed_by_id" IS DISTINCT FROM (NEW."last_reviewed_by_id") OR OLD."line" IS DISTINCT FROM (NEW."line") OR OLD."mitigated" IS DISTINCT FROM (NEW."mitigated") OR OLD."mitigated_by_id" IS DISTINCT FROM (NEW."mitigated_by_id") OR OLD."mitigation" IS DISTINCT FROM (NEW."mitigation") OR OLD."nb_occurences" IS DISTINCT FROM (NEW."nb_occurences") OR OLD."numerical_severity" IS DISTINCT FROM (NEW."numerical_severity") OR OLD."out_of_scope" IS DISTINCT FROM (NEW."out_of_scope") OR OLD."param" IS DISTINCT FROM (NEW."param") OR OLD."payload" IS DISTINCT FROM (NEW."payload") OR OLD."planned_remediation_date" IS DISTINCT FROM (NEW."planned_remediation_date") OR OLD."planned_remediation_version" IS DISTINCT FROM (NEW."planned_remediation_version") OR OLD."publish_date" IS DISTINCT FROM (NEW."publish_date") OR OLD."ransomware_used" IS DISTINCT FROM (NEW."ransomware_used") OR OLD."refs" IS DISTINCT FROM (NEW."refs") OR OLD."reporter_id" IS DISTINCT FROM (NEW."reporter_id") OR OLD."review_requested_by_id" IS DISTINCT FROM (NEW."review_requested_by_id") OR OLD."risk_accepted" IS DISTINCT FROM (NEW."risk_accepted") OR OLD."sast_sink_object" IS DISTINCT FROM (NEW."sast_sink_object") OR OLD."sast_source_file_path" IS DISTINCT FROM (NEW."sast_source_file_path") OR OLD."sast_source_line" IS DISTINCT FROM (NEW."sast_source_line") OR OLD."sast_source_object" IS DISTINCT FROM (NEW."sast_source_object") OR OLD."scanner_confidence" IS DISTINCT FROM (NEW."scanner_confidence") OR OLD."service" IS DISTINCT FROM (NEW."service") OR OLD."severity" IS DISTINCT FROM (NEW."severity") OR OLD."severity_justification" IS DISTINCT FROM (NEW."severity_justification") OR OLD."sla_expiration_date" IS DISTINCT FROM (NEW."sla_expiration_date") OR OLD."sla_start_date" IS DISTINCT FROM (NEW."sla_start_date") OR OLD."sonarqube_issue_id" IS DISTINCT FROM (NEW."sonarqube_issue_id") OR OLD."static_finding" IS DISTINCT FROM (NEW."static_finding") OR OLD."steps_to_reproduce" IS DISTINCT FROM (NEW."steps_to_reproduce") OR OLD."test_id" IS DISTINCT FROM (NEW."test_id") OR OLD."thread_id" IS DISTINCT FROM (NEW."thread_id") OR OLD."title" IS DISTINCT FROM (NEW."title") OR OLD."under_defect_review" IS DISTINCT FROM (NEW."under_defect_review") OR OLD."under_review" IS DISTINCT FROM (NEW."under_review") OR OLD."unique_id_from_tool" IS DISTINCT FROM (NEW."unique_id_from_tool") OR OLD."url" IS DISTINCT FROM (NEW."url") OR OLD."verified" IS DISTINCT FROM (NEW."verified") OR OLD."vuln_id_from_tool" IS DISTINCT FROM (NEW."vuln_id_from_tool"))', func='INSERT INTO "dojo_findingevent" ("active", "component_name", "component_version", "created", "cve", "cvssv3", 
"cvssv3_score", "cvssv4", "cvssv4_score", "cwe", "date", "defect_review_requested_by_id", "description", "duplicate", "duplicate_finding_id", "dynamic_finding", "effort_for_fixing", "epss_percentile", "epss_score", "false_p", "file_path", "fix_available", "hash_code", "id", "impact", "is_mitigated", "kev_date", "known_exploited", "last_reviewed", "last_reviewed_by_id", "last_status_update", "line", "mitigated", "mitigated_by_id", "mitigation", "nb_occurences", "numerical_severity", "out_of_scope", "param", "payload", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "planned_remediation_date", "planned_remediation_version", "publish_date", "ransomware_used", "refs", "reporter_id", "review_requested_by_id", "risk_accepted", "sast_sink_object", "sast_source_file_path", "sast_source_line", "sast_source_object", "scanner_confidence", "service", "severity", "severity_justification", "sla_expiration_date", "sla_start_date", "sonarqube_issue_id", "static_finding", "steps_to_reproduce", "test_id", "thread_id", "title", "under_defect_review", "under_review", "unique_id_from_tool", "url", "verified", "vuln_id_from_tool") VALUES (NEW."active", NEW."component_name", NEW."component_version", NEW."created", NEW."cve", NEW."cvssv3", NEW."cvssv3_score", NEW."cvssv4", NEW."cvssv4_score", NEW."cwe", NEW."date", NEW."defect_review_requested_by_id", NEW."description", NEW."duplicate", NEW."duplicate_finding_id", NEW."dynamic_finding", NEW."effort_for_fixing", NEW."epss_percentile", NEW."epss_score", NEW."false_p", NEW."file_path", NEW."fix_available", NEW."hash_code", NEW."id", NEW."impact", NEW."is_mitigated", NEW."kev_date", NEW."known_exploited", NEW."last_reviewed", NEW."last_reviewed_by_id", NEW."last_status_update", NEW."line", NEW."mitigated", NEW."mitigated_by_id", NEW."mitigation", NEW."nb_occurences", NEW."numerical_severity", NEW."out_of_scope", NEW."param", NEW."payload", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."planned_remediation_date", NEW."planned_remediation_version", NEW."publish_date", NEW."ransomware_used", NEW."refs", NEW."reporter_id", NEW."review_requested_by_id", NEW."risk_accepted", NEW."sast_sink_object", NEW."sast_source_file_path", NEW."sast_source_line", NEW."sast_source_object", NEW."scanner_confidence", NEW."service", NEW."severity", NEW."severity_justification", NEW."sla_expiration_date", NEW."sla_start_date", NEW."sonarqube_issue_id", NEW."static_finding", NEW."steps_to_reproduce", NEW."test_id", NEW."thread_id", NEW."title", NEW."under_defect_review", NEW."under_review", NEW."unique_id_from_tool", NEW."url", NEW."verified", NEW."vuln_id_from_tool"); RETURN NULL;', hash='e3a33f1fd38ee7d34a56dfaf4ca3706f986b5953', operation='UPDATE', pgid='pgtrigger_update_update_92175', table='dojo_finding', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_findingevent" ("active", "component_name", "component_version", "created", "cve", "cvssv3", "cvssv3_score", "cvssv4", "cvssv4_score", "cwe", "date", "defect_review_requested_by_id", "description", "duplicate", "duplicate_finding_id", "dynamic_finding", "effort_for_fixing", "epss_percentile", "epss_score", "false_p", "file_path", "fix_available", "hash_code", "id", "impact", "is_mitigated", "kev_date", "known_exploited", "last_reviewed", "last_reviewed_by_id", "last_status_update", "line", "mitigated", "mitigated_by_id", "mitigation", "nb_occurences", 
"numerical_severity", "out_of_scope", "param", "payload", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "planned_remediation_date", "planned_remediation_version", "publish_date", "ransomware_used", "refs", "reporter_id", "review_requested_by_id", "risk_accepted", "sast_sink_object", "sast_source_file_path", "sast_source_line", "sast_source_object", "scanner_confidence", "service", "severity", "severity_justification", "sla_expiration_date", "sla_start_date", "sonarqube_issue_id", "static_finding", "steps_to_reproduce", "test_id", "thread_id", "title", "under_defect_review", "under_review", "unique_id_from_tool", "url", "verified", "vuln_id_from_tool") VALUES (OLD."active", OLD."component_name", OLD."component_version", OLD."created", OLD."cve", OLD."cvssv3", OLD."cvssv3_score", OLD."cvssv4", OLD."cvssv4_score", OLD."cwe", OLD."date", OLD."defect_review_requested_by_id", OLD."description", OLD."duplicate", OLD."duplicate_finding_id", OLD."dynamic_finding", OLD."effort_for_fixing", OLD."epss_percentile", OLD."epss_score", OLD."false_p", OLD."file_path", OLD."fix_available", OLD."hash_code", OLD."id", OLD."impact", OLD."is_mitigated", OLD."kev_date", OLD."known_exploited", OLD."last_reviewed", OLD."last_reviewed_by_id", OLD."last_status_update", OLD."line", OLD."mitigated", OLD."mitigated_by_id", OLD."mitigation", OLD."nb_occurences", OLD."numerical_severity", OLD."out_of_scope", OLD."param", OLD."payload", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."planned_remediation_date", OLD."planned_remediation_version", OLD."publish_date", OLD."ransomware_used", OLD."refs", OLD."reporter_id", OLD."review_requested_by_id", OLD."risk_accepted", OLD."sast_sink_object", OLD."sast_source_file_path", OLD."sast_source_line", OLD."sast_source_object", OLD."scanner_confidence", OLD."service", OLD."severity", OLD."severity_justification", OLD."sla_expiration_date", OLD."sla_start_date", OLD."sonarqube_issue_id", OLD."static_finding", OLD."steps_to_reproduce", OLD."test_id", OLD."thread_id", OLD."title", OLD."under_defect_review", OLD."under_review", OLD."unique_id_from_tool", OLD."url", OLD."verified", OLD."vuln_id_from_tool"); RETURN NULL;', hash='af149137c005baecd86b57ceea9f19ca5cacb8b2', operation='DELETE', pgid='pgtrigger_delete_delete_72933', table='dojo_finding', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_group', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_groupevent" ("created", "creator_id", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "test_id") VALUES (NEW."created", NEW."creator_id", NEW."id", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."test_id"); RETURN NULL;', hash='ccce37d431a0be588c20aa39b570893c63be9b4b', operation='INSERT', pgid='pgtrigger_insert_insert_94bd2', table='dojo_finding_group', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_group', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."creator_id" IS DISTINCT FROM (NEW."creator_id") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."test_id" IS DISTINCT FROM (NEW."test_id"))', func='INSERT INTO "dojo_finding_groupevent" ("created", "creator_id", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "test_id") VALUES (NEW."created", 
NEW."creator_id", NEW."id", NEW."modified", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."test_id"); RETURN NULL;', hash='d5d94d19b54079f6c284d9448666eb408b20f245', operation='UPDATE', pgid='pgtrigger_update_update_37b5f', table='dojo_finding_group', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_group', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_groupevent" ("created", "creator_id", "id", "modified", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "test_id") VALUES (OLD."created", OLD."creator_id", OLD."id", OLD."modified", OLD."name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."test_id"); RETURN NULL;', hash='4d7241855d22804d01d455fb4228e750c5b11497', operation='DELETE', pgid='pgtrigger_delete_delete_24b41', table='dojo_finding_group', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_template', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_templateevent" ("cve", "cvssv3", "cwe", "description", "id", "impact", "last_used", "mitigation", "numerical_severity", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "refs", "severity", "template_match", "template_match_title", "title") VALUES (NEW."cve", NEW."cvssv3", NEW."cwe", NEW."description", NEW."id", NEW."impact", NEW."last_used", NEW."mitigation", NEW."numerical_severity", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."refs", NEW."severity", NEW."template_match", NEW."template_match_title", NEW."title"); RETURN NULL;', hash='1e871e8199f43721385ad52a22ab150158f7ee6e', operation='INSERT', pgid='pgtrigger_insert_insert_59092', table='dojo_finding_template', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_template', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD.* IS DISTINCT FROM NEW.*)', func='INSERT INTO "dojo_finding_templateevent" ("cve", "cvssv3", "cwe", "description", "id", "impact", "last_used", "mitigation", "numerical_severity", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "refs", "severity", "template_match", "template_match_title", "title") VALUES (NEW."cve", NEW."cvssv3", NEW."cwe", NEW."description", NEW."id", NEW."impact", NEW."last_used", NEW."mitigation", NEW."numerical_severity", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."refs", NEW."severity", NEW."template_match", NEW."template_match_title", NEW."title"); RETURN NULL;', hash='c59aaa841042474f2c9b84f779ce466d4f3f93bc', operation='UPDATE', pgid='pgtrigger_update_update_43036', table='dojo_finding_template', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='finding_template', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_finding_templateevent" ("cve", "cvssv3", "cwe", "description", "id", "impact", "last_used", "mitigation", "numerical_severity", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "refs", "severity", "template_match", "template_match_title", "title") VALUES (OLD."cve", OLD."cvssv3", OLD."cwe", OLD."description", OLD."id", OLD."impact", OLD."last_used", OLD."mitigation", OLD."numerical_severity", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."refs", OLD."severity", OLD."template_match", 
OLD."template_match_title", OLD."title"); RETURN NULL;', hash='75d1cfac63c77fa4b5edd4f0e2ed83b316713e3a', operation='DELETE', pgid='pgtrigger_delete_delete_3f3a6', table='dojo_finding_template', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='notification_webhooks', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_notification_webhooksevent" ("first_error", "id", "last_error", "name", "note", "owner_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "status", "url") VALUES (NEW."first_error", NEW."id", NEW."last_error", NEW."name", NEW."note", NEW."owner_id", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."status", NEW."url"); RETURN NULL;', hash='6e06e90d2d601262224f9a53d1965a0ddd65115e', operation='INSERT', pgid='pgtrigger_insert_insert_e0fa8', table='dojo_notification_webhooks', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='notification_webhooks', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."first_error" IS DISTINCT FROM (NEW."first_error") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."last_error" IS DISTINCT FROM (NEW."last_error") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."note" IS DISTINCT FROM (NEW."note") OR OLD."owner_id" IS DISTINCT FROM (NEW."owner_id") OR OLD."status" IS DISTINCT FROM (NEW."status") OR OLD."url" IS DISTINCT FROM (NEW."url"))', func='INSERT INTO "dojo_notification_webhooksevent" ("first_error", "id", "last_error", "name", "note", "owner_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "status", "url") VALUES (NEW."first_error", NEW."id", NEW."last_error", NEW."name", NEW."note", NEW."owner_id", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."status", NEW."url"); RETURN NULL;', hash='0474c577e4fb71352b9675dc736e9b6cd3075acd', operation='UPDATE', pgid='pgtrigger_update_update_6e00f', table='dojo_notification_webhooks', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='notification_webhooks', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_notification_webhooksevent" ("first_error", "id", "last_error", "name", "note", "owner_id", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "status", "url") VALUES (OLD."first_error", OLD."id", OLD."last_error", OLD."name", OLD."note", OLD."owner_id", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."status", OLD."url"); RETURN NULL;', hash='6348aaeba50ec158b4baca1b33611221e0e7b7cd', operation='DELETE', pgid='pgtrigger_delete_delete_21b9f', table='dojo_notification_webhooks', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_productevent" ("async_updating", "business_criticality", "created", "description", "disable_sla_breach_notifications", "enable_full_risk_acceptance", "enable_product_tag_inheritance", "enable_simple_risk_acceptance", "external_audience", "id", "internet_accessible", "lifecycle", "name", "origin", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "platform", "prod_numeric_grade", "prod_type_id", "product_manager_id", "revenue", "sla_configuration_id", "team_manager_id", "technical_contact_id", "tid", "updated", "user_records") VALUES (NEW."async_updating", 
NEW."business_criticality", NEW."created", NEW."description", NEW."disable_sla_breach_notifications", NEW."enable_full_risk_acceptance", NEW."enable_product_tag_inheritance", NEW."enable_simple_risk_acceptance", NEW."external_audience", NEW."id", NEW."internet_accessible", NEW."lifecycle", NEW."name", NEW."origin", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."platform", NEW."prod_numeric_grade", NEW."prod_type_id", NEW."product_manager_id", NEW."revenue", NEW."sla_configuration_id", NEW."team_manager_id", NEW."technical_contact_id", NEW."tid", NEW."updated", NEW."user_records"); RETURN NULL;', hash='71f5c7cfbba0f755e995508e10ef1bd8822667e1', operation='INSERT', pgid='pgtrigger_insert_insert_d5d32', table='dojo_product', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."async_updating" IS DISTINCT FROM (NEW."async_updating") OR OLD."business_criticality" IS DISTINCT FROM (NEW."business_criticality") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."disable_sla_breach_notifications" IS DISTINCT FROM (NEW."disable_sla_breach_notifications") OR OLD."enable_full_risk_acceptance" IS DISTINCT FROM (NEW."enable_full_risk_acceptance") OR OLD."enable_product_tag_inheritance" IS DISTINCT FROM (NEW."enable_product_tag_inheritance") OR OLD."enable_simple_risk_acceptance" IS DISTINCT FROM (NEW."enable_simple_risk_acceptance") OR OLD."external_audience" IS DISTINCT FROM (NEW."external_audience") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."internet_accessible" IS DISTINCT FROM (NEW."internet_accessible") OR OLD."lifecycle" IS DISTINCT FROM (NEW."lifecycle") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."origin" IS DISTINCT FROM (NEW."origin") OR OLD."platform" IS DISTINCT FROM (NEW."platform") OR OLD."prod_numeric_grade" IS DISTINCT FROM (NEW."prod_numeric_grade") OR OLD."prod_type_id" IS DISTINCT FROM (NEW."prod_type_id") OR OLD."product_manager_id" IS DISTINCT FROM (NEW."product_manager_id") OR OLD."revenue" IS DISTINCT FROM (NEW."revenue") OR OLD."sla_configuration_id" IS DISTINCT FROM (NEW."sla_configuration_id") OR OLD."team_manager_id" IS DISTINCT FROM (NEW."team_manager_id") OR OLD."technical_contact_id" IS DISTINCT FROM (NEW."technical_contact_id") OR OLD."tid" IS DISTINCT FROM (NEW."tid") OR OLD."user_records" IS DISTINCT FROM (NEW."user_records"))', func='INSERT INTO "dojo_productevent" ("async_updating", "business_criticality", "created", "description", "disable_sla_breach_notifications", "enable_full_risk_acceptance", "enable_product_tag_inheritance", "enable_simple_risk_acceptance", "external_audience", "id", "internet_accessible", "lifecycle", "name", "origin", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "platform", "prod_numeric_grade", "prod_type_id", "product_manager_id", "revenue", "sla_configuration_id", "team_manager_id", "technical_contact_id", "tid", "updated", "user_records") VALUES (NEW."async_updating", NEW."business_criticality", NEW."created", NEW."description", NEW."disable_sla_breach_notifications", NEW."enable_full_risk_acceptance", NEW."enable_product_tag_inheritance", NEW."enable_simple_risk_acceptance", NEW."external_audience", NEW."id", NEW."internet_accessible", NEW."lifecycle", NEW."name", NEW."origin", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."platform", NEW."prod_numeric_grade", NEW."prod_type_id", NEW."product_manager_id", NEW."revenue", 
NEW."sla_configuration_id", NEW."team_manager_id", NEW."technical_contact_id", NEW."tid", NEW."updated", NEW."user_records"); RETURN NULL;', hash='ac04dd898b94200f9795a19fcf097d74b493aa51', operation='UPDATE', pgid='pgtrigger_update_update_e7040', table='dojo_product', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_productevent" ("async_updating", "business_criticality", "created", "description", "disable_sla_breach_notifications", "enable_full_risk_acceptance", "enable_product_tag_inheritance", "enable_simple_risk_acceptance", "external_audience", "id", "internet_accessible", "lifecycle", "name", "origin", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "platform", "prod_numeric_grade", "prod_type_id", "product_manager_id", "revenue", "sla_configuration_id", "team_manager_id", "technical_contact_id", "tid", "updated", "user_records") VALUES (OLD."async_updating", OLD."business_criticality", OLD."created", OLD."description", OLD."disable_sla_breach_notifications", OLD."enable_full_risk_acceptance", OLD."enable_product_tag_inheritance", OLD."enable_simple_risk_acceptance", OLD."external_audience", OLD."id", OLD."internet_accessible", OLD."lifecycle", OLD."name", OLD."origin", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."platform", OLD."prod_numeric_grade", OLD."prod_type_id", OLD."product_manager_id", OLD."revenue", OLD."sla_configuration_id", OLD."team_manager_id", OLD."technical_contact_id", OLD."tid", OLD."updated", OLD."user_records"); RETURN NULL;', hash='7c403d993524408760d5430e6e8b5b6fd86753db', operation='DELETE', pgid='pgtrigger_delete_delete_064dd', table='dojo_product', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product_type', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_product_typeevent" ("created", "critical_product", "description", "id", "key_product", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated") VALUES (NEW."created", NEW."critical_product", NEW."description", NEW."id", NEW."key_product", NEW."name", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."updated"); RETURN NULL;', hash='3f3d8e8a68a63bc86ff3557f8fc5f54d950e9d6d', operation='INSERT', pgid='pgtrigger_insert_insert_2d109', table='dojo_product_type', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='product_type', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."critical_product" IS DISTINCT FROM (NEW."critical_product") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."key_product" IS DISTINCT FROM (NEW."key_product") OR OLD."name" IS DISTINCT FROM (NEW."name"))', func='INSERT INTO "dojo_product_typeevent" ("created", "critical_product", "description", "id", "key_product", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated") VALUES (NEW."created", NEW."critical_product", NEW."description", NEW."id", NEW."key_product", NEW."name", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."updated"); RETURN NULL;', hash='97c18814aec2e06d73b89e3eb65e0cfbf832dda5', operation='UPDATE', pgid='pgtrigger_update_update_a0136', table='dojo_product_type', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + 
model_name='product_type', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_product_typeevent" ("created", "critical_product", "description", "id", "key_product", "name", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "updated") VALUES (OLD."created", OLD."critical_product", OLD."description", OLD."id", OLD."key_product", OLD."name", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."updated"); RETURN NULL;', hash='1b3bb470de18c3270ba2a1d4453d276b34f650dc', operation='DELETE', pgid='pgtrigger_delete_delete_66b18', table='dojo_product_type', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='risk_acceptance', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_risk_acceptanceevent" ("accepted_by", "created", "decision", "decision_details", "expiration_date", "expiration_date_handled", "expiration_date_warned", "id", "name", "owner_id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reactivate_expired", "recommendation", "recommendation_details", "restart_sla_expired", "updated") VALUES (NEW."accepted_by", NEW."created", NEW."decision", NEW."decision_details", NEW."expiration_date", NEW."expiration_date_handled", NEW."expiration_date_warned", NEW."id", NEW."name", NEW."owner_id", NEW."path", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."reactivate_expired", NEW."recommendation", NEW."recommendation_details", NEW."restart_sla_expired", NEW."updated"); RETURN NULL;', hash='5b2ded87a2593823805cc232abdcc9aecea43e09', operation='INSERT', pgid='pgtrigger_insert_insert_d29bd', table='dojo_risk_acceptance', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='risk_acceptance', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."accepted_by" IS DISTINCT FROM (NEW."accepted_by") OR OLD."decision" IS DISTINCT FROM (NEW."decision") OR OLD."decision_details" IS DISTINCT FROM (NEW."decision_details") OR OLD."expiration_date" IS DISTINCT FROM (NEW."expiration_date") OR OLD."expiration_date_handled" IS DISTINCT FROM (NEW."expiration_date_handled") OR OLD."expiration_date_warned" IS DISTINCT FROM (NEW."expiration_date_warned") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."name" IS DISTINCT FROM (NEW."name") OR OLD."owner_id" IS DISTINCT FROM (NEW."owner_id") OR OLD."path" IS DISTINCT FROM (NEW."path") OR OLD."reactivate_expired" IS DISTINCT FROM (NEW."reactivate_expired") OR OLD."recommendation" IS DISTINCT FROM (NEW."recommendation") OR OLD."recommendation_details" IS DISTINCT FROM (NEW."recommendation_details") OR OLD."restart_sla_expired" IS DISTINCT FROM (NEW."restart_sla_expired"))', func='INSERT INTO "dojo_risk_acceptanceevent" ("accepted_by", "created", "decision", "decision_details", "expiration_date", "expiration_date_handled", "expiration_date_warned", "id", "name", "owner_id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reactivate_expired", "recommendation", "recommendation_details", "restart_sla_expired", "updated") VALUES (NEW."accepted_by", NEW."created", NEW."decision", NEW."decision_details", NEW."expiration_date", NEW."expiration_date_handled", NEW."expiration_date_warned", NEW."id", NEW."name", NEW."owner_id", NEW."path", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."reactivate_expired", NEW."recommendation", NEW."recommendation_details", 
NEW."restart_sla_expired", NEW."updated"); RETURN NULL;', hash='dc733156f0b9f5e70e3f64c07afaeaf511088cc9', operation='UPDATE', pgid='pgtrigger_update_update_55e64', table='dojo_risk_acceptance', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='risk_acceptance', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_risk_acceptanceevent" ("accepted_by", "created", "decision", "decision_details", "expiration_date", "expiration_date_handled", "expiration_date_warned", "id", "name", "owner_id", "path", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "reactivate_expired", "recommendation", "recommendation_details", "restart_sla_expired", "updated") VALUES (OLD."accepted_by", OLD."created", OLD."decision", OLD."decision_details", OLD."expiration_date", OLD."expiration_date_handled", OLD."expiration_date_warned", OLD."id", OLD."name", OLD."owner_id", OLD."path", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."reactivate_expired", OLD."recommendation", OLD."recommendation_details", OLD."restart_sla_expired", OLD."updated"); RETURN NULL;', hash='6ab6fabb2607af135635fa216cd8980bbdd38d66', operation='DELETE', pgid='pgtrigger_delete_delete_7d103', table='dojo_risk_acceptance', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='test', + trigger=pgtrigger.compiler.Trigger(name='insert_insert', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_testevent" ("api_scan_configuration_id", "branch_tag", "build_id", "commit_hash", "created", "description", "engagement_id", "environment_id", "id", "lead_id", "percent_complete", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "scan_type", "target_end", "target_start", "test_type_id", "title", "updated", "version") VALUES (NEW."api_scan_configuration_id", NEW."branch_tag", NEW."build_id", NEW."commit_hash", NEW."created", NEW."description", NEW."engagement_id", NEW."environment_id", NEW."id", NEW."lead_id", NEW."percent_complete", _pgh_attach_context(), NOW(), \'insert\', NEW."id", NEW."scan_type", NEW."target_end", NEW."target_start", NEW."test_type_id", NEW."title", NEW."updated", NEW."version"); RETURN NULL;', hash='0b6ec21ca35b61b1abcc0b2f8629cb4d1cc92930', operation='INSERT', pgid='pgtrigger_insert_insert_ecfc1', table='dojo_test', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='test', + trigger=pgtrigger.compiler.Trigger(name='update_update', sql=pgtrigger.compiler.UpsertTriggerSql(condition='WHEN (OLD."api_scan_configuration_id" IS DISTINCT FROM (NEW."api_scan_configuration_id") OR OLD."branch_tag" IS DISTINCT FROM (NEW."branch_tag") OR OLD."build_id" IS DISTINCT FROM (NEW."build_id") OR OLD."commit_hash" IS DISTINCT FROM (NEW."commit_hash") OR OLD."description" IS DISTINCT FROM (NEW."description") OR OLD."engagement_id" IS DISTINCT FROM (NEW."engagement_id") OR OLD."environment_id" IS DISTINCT FROM (NEW."environment_id") OR OLD."id" IS DISTINCT FROM (NEW."id") OR OLD."lead_id" IS DISTINCT FROM (NEW."lead_id") OR OLD."percent_complete" IS DISTINCT FROM (NEW."percent_complete") OR OLD."scan_type" IS DISTINCT FROM (NEW."scan_type") OR OLD."target_end" IS DISTINCT FROM (NEW."target_end") OR OLD."target_start" IS DISTINCT FROM (NEW."target_start") OR OLD."test_type_id" IS DISTINCT FROM (NEW."test_type_id") OR OLD."title" IS DISTINCT FROM (NEW."title") OR OLD."version" IS DISTINCT FROM (NEW."version"))', func='INSERT INTO "dojo_testevent" ("api_scan_configuration_id", "branch_tag", 
"build_id", "commit_hash", "created", "description", "engagement_id", "environment_id", "id", "lead_id", "percent_complete", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "scan_type", "target_end", "target_start", "test_type_id", "title", "updated", "version") VALUES (NEW."api_scan_configuration_id", NEW."branch_tag", NEW."build_id", NEW."commit_hash", NEW."created", NEW."description", NEW."engagement_id", NEW."environment_id", NEW."id", NEW."lead_id", NEW."percent_complete", _pgh_attach_context(), NOW(), \'update\', NEW."id", NEW."scan_type", NEW."target_end", NEW."target_start", NEW."test_type_id", NEW."title", NEW."updated", NEW."version"); RETURN NULL;', hash='777c92a16d48f7e590e50cb8fb6c0d77c9afa1b6', operation='UPDATE', pgid='pgtrigger_update_update_c40f8', table='dojo_test', when='AFTER')), + ), + pgtrigger.migrations.AddTrigger( + model_name='test', + trigger=pgtrigger.compiler.Trigger(name='delete_delete', sql=pgtrigger.compiler.UpsertTriggerSql(func='INSERT INTO "dojo_testevent" ("api_scan_configuration_id", "branch_tag", "build_id", "commit_hash", "created", "description", "engagement_id", "environment_id", "id", "lead_id", "percent_complete", "pgh_context_id", "pgh_created_at", "pgh_label", "pgh_obj_id", "scan_type", "target_end", "target_start", "test_type_id", "title", "updated", "version") VALUES (OLD."api_scan_configuration_id", OLD."branch_tag", OLD."build_id", OLD."commit_hash", OLD."created", OLD."description", OLD."engagement_id", OLD."environment_id", OLD."id", OLD."lead_id", OLD."percent_complete", _pgh_attach_context(), NOW(), \'delete\', OLD."id", OLD."scan_type", OLD."target_end", OLD."target_start", OLD."test_type_id", OLD."title", OLD."updated", OLD."version"); RETURN NULL;', hash='51bce27193221308adc41e62f1faff5122bbbceb', operation='DELETE', pgid='pgtrigger_delete_delete_66d18', table='dojo_test', when='AFTER')), + ), + migrations.AddField( + model_name='cred_userevent', + name='environment', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.development_environment'), + ), + migrations.AddField( + model_name='cred_userevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='cred_userevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.cred_user'), + ), + migrations.AddField( + model_name='dojo_userevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='dojo_userevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='endpointevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='endpointevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.endpoint'), + ), + migrations.AddField( + 
model_name='endpointevent', + name='product', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product'), + ), + migrations.AddField( + model_name='engagementevent', + name='build_server', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Build server responsible for CI/CD test', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.tool_configuration', verbose_name='Build Server'), + ), + migrations.AddField( + model_name='engagementevent', + name='lead', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='engagementevent', + name='orchestration_engine', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Orchestration service responsible for CI/CD test', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.tool_configuration', verbose_name='Orchestration Engine'), + ), + migrations.AddField( + model_name='engagementevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='engagementevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.engagement'), + ), + migrations.AddField( + model_name='engagementevent', + name='preset', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Settings and notes for performing this engagement.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.engagement_presets'), + ), + migrations.AddField( + model_name='engagementevent', + name='product', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product'), + ), + migrations.AddField( + model_name='engagementevent', + name='report_type', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.report_type'), + ), + migrations.AddField( + model_name='engagementevent', + name='requester', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.contact'), + ), + migrations.AddField( + model_name='engagementevent', + name='source_code_management_server', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Source code server for CI/CD test', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.tool_configuration', verbose_name='SCM Server'), + ), + migrations.AddField( + model_name='finding_groupevent', + name='creator', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + 
migrations.AddField( + model_name='finding_groupevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='finding_groupevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.finding_group'), + ), + migrations.AddField( + model_name='finding_groupevent', + name='test', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.test'), + ), + migrations.AddField( + model_name='finding_templateevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='finding_templateevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.finding_template'), + ), + migrations.AddField( + model_name='findingevent', + name='defect_review_requested_by', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Documents who requested a defect review for this flaw.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Defect Review Requested By'), + ), + migrations.AddField( + model_name='findingevent', + name='duplicate_finding', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, editable=False, help_text='Link to the original finding if this finding is a duplicate.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.finding', verbose_name='Duplicate Finding'), + ), + migrations.AddField( + model_name='findingevent', + name='last_reviewed_by', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, help_text='Provides the person who last reviewed the flaw.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Last Reviewed By'), + ), + migrations.AddField( + model_name='findingevent', + name='mitigated_by', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, help_text='Documents who has marked this flaw as fixed.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Mitigated By'), + ), + migrations.AddField( + model_name='findingevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='findingevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.finding'), + ), + migrations.AddField( + model_name='findingevent', + name='reporter', + field=models.ForeignKey(db_constraint=False, db_index=False, default=1, editable=False, help_text='Documents who reported the flaw.', on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Reporter'), + ), + migrations.AddField( + 
model_name='findingevent', + name='review_requested_by', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Documents who requested a review for this finding.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user', verbose_name='Review Requested By'), + ), + migrations.AddField( + model_name='findingevent', + name='sonarqube_issue', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='The SonarQube issue associated with this finding.', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.sonarqube_issue', verbose_name='SonarQube issue'), + ), + migrations.AddField( + model_name='findingevent', + name='test', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, help_text='The test that is associated with this flaw.', on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.test', verbose_name='Test'), + ), + migrations.AddField( + model_name='notification_webhooksevent', + name='owner', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, help_text='Owner/receiver of notification, if empty processed as system notification', null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='notification_webhooksevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='notification_webhooksevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.notification_webhooks'), + ), + migrations.AddField( + model_name='product_typeevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='product_typeevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.product_type'), + ), + migrations.AddField( + model_name='productevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='productevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.product'), + ), + migrations.AddField( + model_name='productevent', + name='prod_type', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product_type'), + ), + migrations.AddField( + model_name='productevent', + name='product_manager', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='productevent', + name='sla_configuration', + field=models.ForeignKey(db_constraint=False, db_index=False, default=1, 
on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.sla_configuration'), + ), + migrations.AddField( + model_name='productevent', + name='team_manager', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='productevent', + name='technical_contact', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='risk_acceptanceevent', + name='owner', + field=models.ForeignKey(db_constraint=False, db_index=False, help_text='User in DefectDojo owning this acceptance. Only the owner and staff users can edit the risk acceptance.', on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='risk_acceptanceevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='risk_acceptanceevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.risk_acceptance'), + ), + migrations.AddField( + model_name='testevent', + name='api_scan_configuration', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.product_api_scan_configuration', verbose_name='API Scan Configuration'), + ), + migrations.AddField( + model_name='testevent', + name='engagement', + field=models.ForeignKey(db_constraint=False, db_index=False, editable=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.engagement'), + ), + migrations.AddField( + model_name='testevent', + name='environment', + field=models.ForeignKey(db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.development_environment'), + ), + migrations.AddField( + model_name='testevent', + name='lead', + field=models.ForeignKey(blank=True, db_constraint=False, db_index=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.dojo_user'), + ), + migrations.AddField( + model_name='testevent', + name='pgh_context', + field=models.ForeignKey(db_constraint=False, null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', to='pghistory.context'), + ), + migrations.AddField( + model_name='testevent', + name='pgh_obj', + field=models.ForeignKey(db_constraint=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='events', to='dojo.test'), + ), + migrations.AddField( + model_name='testevent', + name='test_type', + field=models.ForeignKey(db_constraint=False, db_index=False, on_delete=django.db.models.deletion.DO_NOTHING, related_name='+', related_query_name='+', to='dojo.test_type'), + ), + migrations.AddIndex( + model_name='cred_userevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_cred_u_pgh_cre_675d71_idx'), + ), + migrations.AddIndex( + 
model_name='cred_userevent', + index=models.Index(fields=['pgh_label'], name='dojo_cred_u_pgh_lab_7842ac_idx'), + ), + migrations.AddIndex( + model_name='cred_userevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_cred_u_pgh_con_9ac3a9_idx'), + ), + migrations.AddIndex( + model_name='dojo_userevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_dojo_u_pgh_cre_dd25e9_idx'), + ), + migrations.AddIndex( + model_name='dojo_userevent', + index=models.Index(fields=['pgh_label'], name='dojo_dojo_u_pgh_lab_5e3d06_idx'), + ), + migrations.AddIndex( + model_name='dojo_userevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_dojo_u_pgh_con_29c3a1_idx'), + ), + migrations.AddIndex( + model_name='endpointevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_endpoi_pgh_cre_b3a7c8_idx'), + ), + migrations.AddIndex( + model_name='endpointevent', + index=models.Index(fields=['pgh_label'], name='dojo_endpoi_pgh_lab_7d4c97_idx'), + ), + migrations.AddIndex( + model_name='endpointevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_endpoi_pgh_con_5d9640_idx'), + ), + migrations.AddIndex( + model_name='engagementevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_engage_pgh_cre_9e6148_idx'), + ), + migrations.AddIndex( + model_name='engagementevent', + index=models.Index(fields=['pgh_label'], name='dojo_engage_pgh_lab_bac3f6_idx'), + ), + migrations.AddIndex( + model_name='engagementevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_engage_pgh_con_a73738_idx'), + ), + migrations.AddIndex( + model_name='finding_groupevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_findin_pgh_cre_7acb9c_idx'), + ), + migrations.AddIndex( + model_name='finding_groupevent', + index=models.Index(fields=['pgh_label'], name='dojo_findin_pgh_lab_1a7554_idx'), + ), + migrations.AddIndex( + model_name='finding_groupevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_findin_pgh_con_d97c84_idx'), + ), + migrations.AddIndex( + model_name='finding_templateevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_findin_pgh_cre_397616_idx'), + ), + migrations.AddIndex( + model_name='finding_templateevent', + index=models.Index(fields=['pgh_label'], name='dojo_findin_pgh_lab_751cf8_idx'), + ), + migrations.AddIndex( + model_name='finding_templateevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_findin_pgh_con_9aac28_idx'), + ), + migrations.AddIndex( + model_name='findingevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_findin_pgh_cre_b4aed3_idx'), + ), + migrations.AddIndex( + model_name='findingevent', + index=models.Index(fields=['pgh_label'], name='dojo_findin_pgh_lab_3d7ed7_idx'), + ), + migrations.AddIndex( + model_name='findingevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_findin_pgh_con_807d35_idx'), + ), + migrations.AddIndex( + model_name='notification_webhooksevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_notifi_pgh_cre_14aea2_idx'), + ), + migrations.AddIndex( + model_name='notification_webhooksevent', + index=models.Index(fields=['pgh_label'], name='dojo_notifi_pgh_lab_0abf77_idx'), + ), + migrations.AddIndex( + model_name='notification_webhooksevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_notifi_pgh_con_359cd6_idx'), + ), + migrations.AddIndex( + model_name='product_typeevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_produc_pgh_cre_ff015d_idx'), + ), + migrations.AddIndex( + 
model_name='product_typeevent', + index=models.Index(fields=['pgh_label'], name='dojo_produc_pgh_lab_07c954_idx'), + ), + migrations.AddIndex( + model_name='product_typeevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_produc_pgh_con_c3c1ea_idx'), + ), + migrations.AddIndex( + model_name='productevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_produc_pgh_cre_e3146f_idx'), + ), + migrations.AddIndex( + model_name='productevent', + index=models.Index(fields=['pgh_label'], name='dojo_produc_pgh_lab_5f11db_idx'), + ), + migrations.AddIndex( + model_name='productevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_produc_pgh_con_cdffb4_idx'), + ), + migrations.AddIndex( + model_name='risk_acceptanceevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_risk_a_pgh_cre_c97aae_idx'), + ), + migrations.AddIndex( + model_name='risk_acceptanceevent', + index=models.Index(fields=['pgh_label'], name='dojo_risk_a_pgh_lab_9a6ce2_idx'), + ), + migrations.AddIndex( + model_name='risk_acceptanceevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_risk_a_pgh_con_5de681_idx'), + ), + migrations.AddIndex( + model_name='testevent', + index=models.Index(fields=['pgh_created_at'], name='dojo_testev_pgh_cre_377964_idx'), + ), + migrations.AddIndex( + model_name='testevent', + index=models.Index(fields=['pgh_label'], name='dojo_testev_pgh_lab_b15edd_idx'), + ), + migrations.AddIndex( + model_name='testevent', + index=models.Index(fields=['pgh_context_id'], name='dojo_testev_pgh_con_e18502_idx'), + ), + # DojoEvents proxy model for structured context field access + migrations.CreateModel( + name='DojoEvents', + fields=[ + ], + options={ + 'proxy': True, + 'indexes': [], + 'constraints': [], + }, + bases=('pghistory.events',), + managers=[ + ('no_objects', django.db.models.manager.Manager()), + ], + ), + ] diff --git a/dojo/db_migrations/0244_pghistory_indices.py b/dojo/db_migrations/0244_pghistory_indices.py new file mode 100644 index 00000000000..73eed4455bd --- /dev/null +++ b/dojo/db_migrations/0244_pghistory_indices.py @@ -0,0 +1,44 @@ +# Generated manually for pghistory performance indexes + +from django.db import migrations + + +class Migration(migrations.Migration): + # Mark as atomic=False to allow CONCURRENTLY operations + atomic = False + + dependencies = [ + ('dojo', '0243_pghistory_models'), + ] + + operations = [ + migrations.RunSQL( + # Forward migration - add indexes with CONCURRENTLY to avoid table locks + # Note: pghistory stores context as JSON in the 'metadata' column + sql=[ + # GIN index on the entire JSON metadata field - supports general JSON queries + # This is excellent for @>, ?, ?&, ?| operators and general JSON containment + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "pghistory_context_metadata_gin_idx" ON "pghistory_context" USING GIN ("metadata");', + + # Specific expression indexes for common filtering patterns + # These complement the GIN index for exact value lookups + + # Index on user field from JSON - most selective for exact user filtering + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "pghistory_context_user_idx" ON "pghistory_context" ((metadata->>\'user\'));', + + # Index on remote_addr field from JSON - for IP address filtering (supports icontains) + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS "pghistory_context_remote_addr_idx" ON "pghistory_context" ((metadata->>\'remote_addr\'));', + + # Index on url field from JSON - for URL filtering (helps with icontains queries) + 'CREATE INDEX CONCURRENTLY IF NOT EXISTS 
"pghistory_context_url_idx" ON "pghistory_context" ((metadata->>\'url\'));', + ], + # Reverse migration - drop indexes safely + reverse_sql=[ + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_metadata_gin_idx";', + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_user_idx";', + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_remote_addr_idx";', + 'DROP INDEX CONCURRENTLY IF EXISTS "pghistory_context_url_idx";', + ], + ), + ] + diff --git a/dojo/endpoint/signals.py b/dojo/endpoint/signals.py index 6bdfe8e7a40..50251c5a80a 100644 --- a/dojo/endpoint/signals.py +++ b/dojo/endpoint/signals.py @@ -2,6 +2,7 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete from django.dispatch import receiver @@ -10,6 +11,7 @@ from dojo.models import Endpoint from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents @receiver(post_delete, sender=Endpoint) @@ -18,14 +20,38 @@ def endpoint_post_delete(sender, instance, using, origin, **kwargs): with contextlib.suppress(sender.DoesNotExist): if instance == origin: description = _('The endpoint "%(name)s" was deleted') % {"name": str(instance)} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="endpoint"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific endpoint instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Endpoint", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="endpoint"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The endpoint "%(name)s" was deleted by %(user)s') % { - "name": str(instance), "user": le.actor} + "name": str(instance), "user": user} create_notification(event="endpoint_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": str(instance)}, description=description, diff --git a/dojo/engagement/signals.py b/dojo/engagement/signals.py index 77cd4ca6d1f..144094a3264 100644 --- a/dojo/engagement/signals.py +++ b/dojo/engagement/signals.py @@ -2,6 +2,7 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, post_save, pre_delete, pre_save from django.dispatch import receiver @@ -12,6 +13,7 @@ from dojo.models import Engagement, Product from dojo.notes.helper import delete_related_notes from dojo.notifications.helper import create_notification +from 
dojo.pghistory_models import DojoEvents @receiver(post_save, sender=Engagement) @@ -45,14 +47,38 @@ def engagement_post_delete(sender, instance, using, origin, **kwargs): with contextlib.suppress(sender.DoesNotExist, Product.DoesNotExist): if instance == origin: description = _('The engagement "%(name)s" was deleted') % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="engagement"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific engagement instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Engagement", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="engagement"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The engagement "%(name)s" was deleted by %(user)s') % { - "name": instance.name, "user": le.actor} + "name": instance.name, "user": user} create_notification(event="engagement_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/filters.py b/dojo/filters.py index 48dd5cfc824..47232dd7298 100644 --- a/dojo/filters.py +++ b/dojo/filters.py @@ -3500,6 +3500,81 @@ class Meta: } +class PgHistoryFilter(DojoFilter): + + """ + Filter for django-pghistory audit entries. 
+ + This filter works with pghistory event tables that have: + - pgh_created_at: timestamp of the event + - pgh_label: event type (insert/update/delete) + - user: user ID from context + - url: URL from context + - remote_addr: IP address from context + """ + + # Filter by event creation time (equivalent to auditlog timestamp) + pgh_created_at = DateRangeFilter(field_name="pgh_created_at", label="Timestamp") + + # Filter by event type/label + pgh_label = ChoiceFilter( + field_name="pgh_label", + label="Event Type", + choices=[ + ("", "All"), + ("insert", "Insert"), + ("update", "Update"), + ("delete", "Delete"), + ("initial_import", "Initial Import"), + ], + ) + + # Filter by user (from context) + user = ModelChoiceFilter( + field_name="user", + queryset=Dojo_User.objects.none(), + label="User", + empty_label="All Users", + ) + + # Filter by IP address (from context) + remote_addr = CharFilter( + field_name="remote_addr", + lookup_expr="icontains", + label="IP Address Contains", + ) + + # Filter by changes/diff field (JSON field containing what changed) + pgh_diff = CharFilter( + method="filter_pgh_diff_contains", + label="Changes Contains", + help_text="Search for field names or values in the changes (optimized for JSONB, but can be slow)", + ) + + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.form.fields["user"].queryset = get_authorized_users(Permissions.Product_View) + + def filter_pgh_diff_contains(self, queryset, name, value): + """ + Custom filter for pgh_diff that uses efficient JSONB operations. + Searches both keys and values in the JSONB field. + """ + if not value: + return queryset + + # Search in both keys and values using JSONB operators + return queryset.filter( + Q(pgh_diff__has_key=value) | # Search in keys: {"severity": [...]} + Q(pgh_diff__has_any_keys=[value]) | # Alternative key search + Q(pgh_diff__contains=f'"{value}"'), # Search in values: ["severity", "other"] + ) + + class Meta: + fields = ["pgh_created_at", "pgh_label", "user", "url", "remote_addr", "pgh_diff"] + exclude = [] + + class ProductTypeFilter(DojoFilter): name = CharFilter(lookup_expr="icontains") diff --git a/dojo/finding_group/signals.py b/dojo/finding_group/signals.py index 1e2d771b557..3e7ffe7c7b7 100644 --- a/dojo/finding_group/signals.py +++ b/dojo/finding_group/signals.py @@ -1,5 +1,8 @@ +import contextlib + from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete from django.dispatch import receiver @@ -8,20 +11,45 @@ from dojo.models import Finding_Group from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents @receiver(post_delete, sender=Finding_Group) def finding_group_post_delete(sender, instance, using, origin, **kwargs): if instance == origin: description = _('The finding group "%(name)s" was deleted') % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="finding_group"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific finding_group instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Finding_Group", + pgh_obj_id=instance.id, + pgh_label="delete", + 
).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="finding_group"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The finding group "%(name)s" was deleted by %(user)s') % { - "name": instance.name, "user": le.actor} + "name": instance.name, "user": user} create_notification(event="finding_group_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/fixtures/questionnaire_testdata.json b/dojo/fixtures/questionnaire_testdata.json index c95278c83ac..2e75807c2ac 100644 --- a/dojo/fixtures/questionnaire_testdata.json +++ b/dojo/fixtures/questionnaire_testdata.json @@ -1,52 +1,4 @@ [ - { - "fields": { - "model": "question", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 65 - }, - { - "fields": { - "model": "answer", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 68 - }, - { - "fields": { - "model": "textquestion", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 66 - }, - { - "fields": { - "model": "textanswer", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 69 - }, - { - "fields": { - "model": "choicequestion", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 71 - }, - { - "fields": { - "model": "choiceanswer", - "app_label": "dojo" - }, - "model": "contenttypes.contenttype", - "pk": 70 - }, { "pk": 1, "model": "auth.user", @@ -211,7 +163,7 @@ "model": "dojo.question", "pk": 14, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:57:22Z", "modified": "2015-03-30T19:57:22Z", "order": 1, @@ -223,7 +175,7 @@ "model": "dojo.question", "pk": 15, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:57:34Z", "modified": "2015-03-30T19:57:34Z", "order": 1, @@ -235,7 +187,7 @@ "model": "dojo.question", "pk": 16, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:57:55Z", "modified": "2015-03-30T19:57:55Z", "order": 1, @@ -247,7 +199,7 @@ "model": "dojo.question", "pk": 17, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T19:58:36Z", "modified": "2015-03-30T19:58:36Z", "order": 1, @@ -259,7 +211,7 @@ "model": "dojo.question", "pk": 18, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T20:00:35Z", "modified": "2015-03-30T20:00:35Z", "order": 1, @@ -271,7 +223,7 @@ "model": "dojo.question", "pk": 19, "fields": { - "polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T20:00:46Z", "modified": "2015-03-30T20:00:46Z", "order": 1, @@ -283,7 +235,7 @@ "model": "dojo.question", "pk": 20, "fields": { - 
"polymorphic_ctype": 66, + "polymorphic_ctype": ["dojo", "textquestion"], "created": "2015-03-30T20:00:58Z", "modified": "2015-03-30T20:00:58Z", "order": 1, @@ -295,7 +247,7 @@ "model": "dojo.question", "pk": 44, "fields": { - "polymorphic_ctype": 71, + "polymorphic_ctype": ["dojo", "choicequestion"], "created": "2023-03-02T17:58:59.698Z", "modified": "2023-03-02T17:58:59.737Z", "order": 1, @@ -458,7 +410,7 @@ "model": "dojo.answer", "pk": 1, "fields": { - "polymorphic_ctype": 70, + "polymorphic_ctype": ["dojo", "choiceanswer"], "created": "2023-03-02T19:07:55.430Z", "modified": "2023-03-02T19:07:55.447Z", "question": 44, @@ -469,7 +421,7 @@ "model": "dojo.answer", "pk": 2, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.816Z", "modified": "2023-03-02T19:14:07.822Z", "question": 14, @@ -480,7 +432,7 @@ "model": "dojo.answer", "pk": 3, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.829Z", "modified": "2023-03-02T19:14:07.833Z", "question": 15, @@ -491,7 +443,7 @@ "model": "dojo.answer", "pk": 4, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.838Z", "modified": "2023-03-02T19:14:07.841Z", "question": 16, @@ -502,7 +454,7 @@ "model": "dojo.answer", "pk": 5, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.845Z", "modified": "2023-03-02T19:14:07.848Z", "question": 17, @@ -513,7 +465,7 @@ "model": "dojo.answer", "pk": 6, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.853Z", "modified": "2023-03-02T19:14:07.856Z", "question": 19, @@ -524,7 +476,7 @@ "model": "dojo.answer", "pk": 7, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.861Z", "modified": "2023-03-02T19:14:07.864Z", "question": 20, @@ -535,7 +487,7 @@ "model": "dojo.answer", "pk": 8, "fields": { - "polymorphic_ctype": 69, + "polymorphic_ctype": ["dojo", "textanswer"], "created": "2023-03-02T19:14:07.868Z", "modified": "2023-03-02T19:14:07.871Z", "question": 18, diff --git a/dojo/management/commands/flush_auditlog.py b/dojo/management/commands/flush_auditlog.py new file mode 100644 index 00000000000..6e3594363c7 --- /dev/null +++ b/dojo/management/commands/flush_auditlog.py @@ -0,0 +1,25 @@ +from django.core.management.base import BaseCommand + +from dojo.auditlog import run_flush_auditlog + + +class Command(BaseCommand): + help = "Flush old audit log entries based on retention and batching settings" + + def add_arguments(self, parser): + parser.add_argument("--retention-months", type=int, default=None, help="Override retention period in months") + parser.add_argument("--batch-size", type=int, default=None, help="Override batch size") + parser.add_argument("--max-batches", type=int, default=None, help="Override max batches per run") + parser.add_argument("--dry-run", action="store_true", help="Only show how many entries would be deleted") + + def handle(self, *args, **options): + deleted_total, batches_done, reached_limit = run_flush_auditlog( + retention_period=options.get("retention_months"), + batch_size=options.get("batch_size"), + max_batches=options.get("max_batches"), + dry_run=options.get("dry_run", False), + ) + verb = "Would delete" if options.get("dry_run") else "Deleted" + style = self.style.WARNING if 
options.get("dry_run") else self.style.SUCCESS + suffix = " (reached max batches)" if reached_limit else "" + self.stdout.write(style(f"{verb} {deleted_total} audit log entries in {batches_done} batches{suffix}.")) diff --git a/dojo/management/commands/pghistory_backfill.py b/dojo/management/commands/pghistory_backfill.py new file mode 100644 index 00000000000..52367e32c1c --- /dev/null +++ b/dojo/management/commands/pghistory_backfill.py @@ -0,0 +1,265 @@ +""" +Management command to backfill existing data into django-pghistory. + +This command creates initial snapshots for all existing records in tracked models. +""" +import logging + +from django.apps import apps +from django.conf import settings +from django.core.management.base import BaseCommand +from django.utils import timezone + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Backfill existing data into django-pghistory" + + def add_arguments(self, parser): + parser.add_argument( + "--model", + type=str, + help='Specific model to backfill (e.g., "Finding", "Product")', + ) + parser.add_argument( + "--batch-size", + type=int, + default=1000, + help="Number of records to process in each batch (default: 1000)", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Show what would be done without actually creating events", + ) + + def get_excluded_fields(self, model_name): + """Get the list of excluded fields for a specific model from pghistory configuration.""" + # Define excluded fields for each model (matching auditlog.py) + excluded_fields_map = { + "Dojo_User": ["password"], + "Product": ["updated"], # This is the key change + "Cred_User": ["password"], + "Notification_Webhooks": ["header_name", "header_value"], + } + return excluded_fields_map.get(model_name, []) + + def handle(self, *args, **options): + if not settings.ENABLE_AUDITLOG or settings.AUDITLOG_TYPE != "django-pghistory": + self.stdout.write( + self.style.WARNING( + "pghistory is not enabled. Set DD_ENABLE_AUDITLOG=True and " + "DD_AUDITLOG_TYPE=django-pghistory", + ), + ) + return + + # Models that are tracked by pghistory + tracked_models = [ + "Dojo_User", "Endpoint", "Engagement", "Finding", "Finding_Group", + "Product_Type", "Product", "Test", "Risk_Acceptance", + "Finding_Template", "Cred_User", "Notification_Webhooks", + ] + + specific_model = options.get("model") + if specific_model: + if specific_model not in tracked_models: + self.stdout.write( + self.style.ERROR( + f'Model "{specific_model}" is not tracked by pghistory. 
' + f'Available models: {", ".join(tracked_models)}', + ), + ) + return + tracked_models = [specific_model] + + batch_size = options["batch_size"] + dry_run = options["dry_run"] + + if dry_run: + self.stdout.write( + self.style.WARNING("DRY RUN MODE - No events will be created"), + ) + + total_processed = 0 + self.stdout.write(f"Starting backfill for {len(tracked_models)} model(s)...") + + for model_name in tracked_models: + self.stdout.write(f"\nProcessing {model_name}...") + + try: + # Get the Django model + Model = apps.get_model("dojo", model_name) + + # Get total count + total_count = Model.objects.count() + if total_count == 0: + self.stdout.write(f" No records found for {model_name}") + continue + + self.stdout.write(f" Found {total_count:,} records") + + # Get the corresponding Event model for bulk operations + event_table_name = f"{model_name}Event" + try: + EventModel = apps.get_model("dojo", event_table_name) + except LookupError: + self.stdout.write( + self.style.ERROR( + f" Event model {event_table_name} not found. " + f"Is {model_name} tracked by pghistory?", + ), + ) + continue + + # Get IDs of records that already have initial_import events + existing_initial_import_ids = set( + EventModel.objects.filter(pgh_label="initial_import").values_list("pgh_obj_id", flat=True), + ) + + # Filter to only get records that don't have initial_import events + records_needing_backfill = Model.objects.exclude(id__in=existing_initial_import_ids) + backfill_count = records_needing_backfill.count() + existing_count = len(existing_initial_import_ids) + + # Log the breakdown + self.stdout.write(f" Records with initial_import events: {existing_count:,}") + self.stdout.write(f" Records needing initial_import events: {backfill_count:,}") + + if backfill_count == 0: + self.stdout.write( + self.style.SUCCESS(f" ✓ All {total_count:,} records already have initial_import events"), + ) + processed = total_count + continue + + if dry_run: + self.stdout.write(f" Would process {backfill_count:,} records in batches of {batch_size:,}...") + else: + self.stdout.write(f" Processing {backfill_count:,} records in batches of {batch_size:,}...") + + # Process records one by one and bulk insert every batch_size records + processed = 0 + event_records = [] + failed_records = [] + + for instance in records_needing_backfill.iterator(): + try: + # Create event record with all model fields + event_data = {} + + # Get excluded fields for this model from pghistory configuration + excluded_fields = self.get_excluded_fields(model_name) + + # Copy all fields from the instance to event_data, except excluded ones + for field in instance._meta.fields: + field_name = field.name + if field_name not in excluded_fields: + field_value = getattr(instance, field_name) + event_data[field_name] = field_value + + # Explicitly preserve created timestamp from the original instance + # Only if not excluded and exists + if hasattr(instance, "created") and instance.created and "created" not in excluded_fields: + event_data["created"] = instance.created + # Note: We don't preserve 'updated' for Product since it's excluded + + # Add pghistory-specific fields + event_data.update({ + "pgh_label": "initial_import", + "pgh_obj": instance, # ForeignKey to the original object + "pgh_context": None, # No context for backfilled events + }) + + # Set pgh_created_at to current time (this is for the event creation time) + # The created/updated fields above contain the original instance timestamps + event_data["pgh_created_at"] = timezone.now() + + 
event_records.append(EventModel(**event_data)) + + except Exception as e: + failed_records.append(instance.id) + logger.error( + f"Failed to prepare event for {model_name} ID {instance.id}: {e}", + ) + + # Bulk create when we hit batch_size records + if len(event_records) >= batch_size: + if not dry_run and event_records: + try: + attempted = len(event_records) + created_objects = EventModel.objects.bulk_create(event_records, batch_size=batch_size) + actually_created = len(created_objects) if created_objects else 0 + processed += actually_created + + if actually_created != attempted: + logger.warning( + f"bulk_create for {model_name}: attempted {attempted}, " + f"actually created {actually_created} ({attempted - actually_created} skipped)", + ) + except Exception as e: + logger.error(f"Failed to bulk create events for {model_name}: {e}") + raise + elif dry_run: + processed += len(event_records) + + event_records = [] # Reset for next batch + + # Progress update + progress = (processed / backfill_count) * 100 + self.stdout.write(f" Processed {processed:,}/{backfill_count:,} records needing backfill ({progress:.1f}%)") + + # Handle remaining records + if event_records: + if not dry_run: + try: + attempted = len(event_records) + created_objects = EventModel.objects.bulk_create(event_records, batch_size=batch_size) + actually_created = len(created_objects) if created_objects else 0 + processed += actually_created + + if actually_created != attempted: + logger.warning( + f"bulk_create final batch for {model_name}: attempted {attempted}, " + f"actually created {actually_created} ({attempted - actually_created} skipped)", + ) + except Exception as e: + logger.error(f"Failed to bulk create final batch for {model_name}: {e}") + raise + else: + processed += len(event_records) + + # Final progress update + if backfill_count > 0: + progress = (processed / backfill_count) * 100 + self.stdout.write(f" Processed {processed:,}/{backfill_count:,} records needing backfill ({progress:.1f}%)") + + total_processed += processed + + # Show completion summary + if failed_records: + self.stdout.write( + self.style.WARNING( + f" ⚠ Completed {model_name}: {processed:,} records processed, " + f"{len(failed_records)} records failed", + ), + ) + else: + self.stdout.write( + self.style.SUCCESS( + f" ✓ Completed {model_name}: {processed:,} records", + ), + ) + + except Exception as e: + self.stdout.write( + self.style.ERROR(f" ✗ Failed to process {model_name}: {e}"), + ) + logger.error(f"Error processing {model_name}: {e}") + + self.stdout.write( + self.style.SUCCESS( + f"\nBACKFILL COMPLETE: Processed {total_processed:,} records", + ), + ) diff --git a/dojo/management/commands/pghistory_clear.py b/dojo/management/commands/pghistory_clear.py new file mode 100644 index 00000000000..a2593ac25ca --- /dev/null +++ b/dojo/management/commands/pghistory_clear.py @@ -0,0 +1,206 @@ +""" +Management command to clear all pghistory Event tables. + +This command removes all historical event data from django-pghistory tables. +Use with caution as this operation is irreversible. It's meant to be used only during development/testing. 
+""" +import logging + +from django.apps import apps +from django.conf import settings +from django.core.management.base import BaseCommand +from django.db import connection, transaction + +logger = logging.getLogger(__name__) + + +class Command(BaseCommand): + help = "Clear all pghistory Event tables" + + def add_arguments(self, parser): + parser.add_argument( + "--dry-run", + action="store_true", + help="Show what would be cleared without actually clearing", + ) + parser.add_argument( + "--force", + action="store_true", + help="Skip confirmation prompt (use with caution)", + ) + parser.add_argument( + "--drop", + action="store_true", + help="Drop tables entirely instead of truncating (EXTREMELY DESTRUCTIVE)", + ) + + def handle(self, *args, **options): + if not settings.ENABLE_AUDITLOG or settings.AUDITLOG_TYPE != "django-pghistory": + self.stdout.write( + self.style.WARNING( + "pghistory is not enabled. Set DD_ENABLE_AUDITLOG=True and " + "DD_AUDITLOG_TYPE=django-pghistory", + ), + ) + return + + # All pghistory Event tables based on tracked models + event_tables = [ + "Cred_UserEvent", + "Dojo_UserEvent", + "EndpointEvent", + "EngagementEvent", + "Finding_GroupEvent", + "Finding_TemplateEvent", + "FindingEvent", + "Notification_WebhooksEvent", + "Product_TypeEvent", + "ProductEvent", + "Risk_AcceptanceEvent", + "TestEvent", + ] + + dry_run = options["dry_run"] + force = options["force"] + drop_tables = options["drop"] + + if dry_run: + self.stdout.write( + self.style.WARNING("DRY RUN MODE - No data will be cleared"), + ) + + total_records = 0 + table_counts = {} + + # First, count all records + self.stdout.write("Analyzing pghistory Event tables...") + for table_name in event_tables: + try: + EventModel = apps.get_model("dojo", table_name) + count = EventModel.objects.count() + table_counts[table_name] = count + total_records += count + + if count > 0: + self.stdout.write(f" {table_name}: {count:,} records") + else: + self.stdout.write(f" {table_name}: empty") + + except LookupError: + self.stdout.write( + self.style.WARNING(f" {table_name}: table not found (skipping)"), + ) + continue + except Exception as e: + self.stdout.write( + self.style.ERROR(f" {table_name}: error counting records - {e}"), + ) + continue + + if total_records == 0: + self.stdout.write( + self.style.SUCCESS("No pghistory records found. Nothing to clear."), + ) + return + + self.stdout.write(f"\nTotal records to clear: {total_records:,}") + + if dry_run: + operation = "drop" if drop_tables else "clear" + self.stdout.write( + self.style.SUCCESS( + f"\nDRY RUN COMPLETE: Would {operation} {total_records:,} records " + f"from {len([t for t in table_counts.values() if t > 0])} tables", + ), + ) + return + + # Confirmation prompt + if not force: + if drop_tables: + self.stdout.write( + self.style.ERROR( + f"\n🚨 EXTREMELY DESTRUCTIVE WARNING: This will DROP {len([t for t in table_counts.values() if t > 0])} " + f"pghistory Event tables entirely, deleting {total_records:,} records and the table structure! " + "You will need to recreate tables and run migrations to restore them!", + ), + ) + else: + self.stdout.write( + self.style.WARNING( + f"\n⚠️ WARNING: This will permanently delete {total_records:,} " + "pghistory records. This operation cannot be undone!", + ), + ) + + operation_type = "DROP TABLES" if drop_tables else "truncate tables" + confirm = input(f"Are you sure you want to {operation_type}? 
Type 'yes' to continue: ") + if confirm.lower() != "yes": + self.stdout.write(self.style.ERROR("Operation cancelled.")) + return + + # Clear the tables using TRUNCATE or DROP + operation_verb = "Dropping" if drop_tables else "Truncating" + self.stdout.write(f"\n{operation_verb} pghistory Event tables...") + cleared_records = 0 + cleared_tables = 0 + + for table_name in event_tables: + if table_counts.get(table_name, 0) == 0: + continue # Skip empty tables + + try: + EventModel = apps.get_model("dojo", table_name) + + # Use raw SQL TRUNCATE or DROP for better performance on large tables + with transaction.atomic(): + count = table_counts.get(table_name, 0) + if count > 0: + # Get the actual database table name + db_table = EventModel._meta.db_table + + with connection.cursor() as cursor: + if drop_tables: + # DROP TABLE - completely removes the table structure + cursor.execute(f'DROP TABLE IF EXISTS "{db_table}" CASCADE') + operation_past = "Dropped" + else: + # TRUNCATE TABLE - removes all data but keeps table structure + cursor.execute(f'TRUNCATE TABLE "{db_table}" RESTART IDENTITY CASCADE') + operation_past = "Truncated" + + cleared_records += count + cleared_tables += 1 + self.stdout.write( + self.style.SUCCESS(f" ✓ {operation_past} {table_name}: {count:,} records"), + ) + + except LookupError: + # Already handled in counting phase + continue + except Exception as e: + operation_verb_lower = "drop" if drop_tables else "truncate" + self.stdout.write( + self.style.ERROR(f" ✗ Failed to {operation_verb_lower} {table_name}: {e}"), + ) + logger.error(f"Error {operation_verb_lower}ing {table_name}: {e}") + + # Final success message + if drop_tables: + self.stdout.write( + self.style.SUCCESS( + f"\n🎉 DROP COMPLETE: Dropped {cleared_tables} tables with {cleared_records:,} records", + ), + ) + self.stdout.write( + self.style.WARNING( + "⚠️ Remember to run migrations to recreate the dropped tables!", + ), + ) + else: + self.stdout.write( + self.style.SUCCESS( + f"\n🎉 CLEARING COMPLETE: Cleared {cleared_records:,} records " + f"from {cleared_tables} tables", + ), + ) diff --git a/dojo/middleware.py b/dojo/middleware.py index a40d01c6d0f..aa954373c1c 100644 --- a/dojo/middleware.py +++ b/dojo/middleware.py @@ -5,6 +5,7 @@ from threading import local from urllib.parse import quote +import pghistory.middleware from auditlog.context import set_actor from auditlog.middleware import AuditlogMiddleware as _AuditlogMiddleware from django.conf import settings @@ -193,6 +194,26 @@ def __call__(self, request): return self.get_response(request) +class PgHistoryMiddleware(pghistory.middleware.HistoryMiddleware): + + """ + Custom pghistory middleware for DefectDojo that extends the built-in HistoryMiddleware + to add remote_addr context following the pattern from: + https://django-pghistory.readthedocs.io/en/3.8.1/context/#middleware + """ + + def get_context(self, request): + context = super().get_context(request) + + # Add remote address with proxy support + remote_addr = request.META.get("HTTP_X_FORWARDED_FOR") + # Get the first IP if there are multiple (proxy chain), or fall back to REMOTE_ADDR + remote_addr = remote_addr.split(",")[0].strip() if remote_addr else request.META.get("REMOTE_ADDR") + + context["remote_addr"] = remote_addr + return context + + class LongRunningRequestAlertMiddleware: def __init__(self, get_response): self.get_response = get_response diff --git a/dojo/models.py b/dojo/models.py index 5bdf195bedb..e3958d76172 100644 --- a/dojo/models.py +++ b/dojo/models.py @@ -14,7 +14,6 @@ import 
dateutil import hyperlink import tagulous.admin -from auditlog.registry import auditlog from dateutil.parser import parse as datetutilsparse from dateutil.relativedelta import relativedelta from django import forms @@ -4694,21 +4693,9 @@ def __str__(self): return "No Response" -if settings.ENABLE_AUDITLOG: - # Register for automatic logging to database - logger.info("enabling audit logging") - auditlog.register(Dojo_User, exclude_fields=["password"]) - auditlog.register(Endpoint) - auditlog.register(Engagement) - auditlog.register(Finding, m2m_fields={"reviewers"}) - auditlog.register(Finding_Group) - auditlog.register(Product_Type) - auditlog.register(Product) - auditlog.register(Test) - auditlog.register(Risk_Acceptance) - auditlog.register(Finding_Template) - auditlog.register(Cred_User, exclude_fields=["password"]) - auditlog.register(Notification_Webhooks, exclude_fields=["header_name", "header_value"]) +# Audit logging registration is now handled in auditlog.py and configured in apps.py +# This allows for conditional registration of either django-auditlog or django-pghistory +# The audit system is configured in DojoAppConfig.ready() to ensure all models are loaded from dojo.utils import ( # noqa: E402 # there is issue due to a circular import diff --git a/dojo/pghistory_models.py b/dojo/pghistory_models.py new file mode 100644 index 00000000000..936bd939c60 --- /dev/null +++ b/dojo/pghistory_models.py @@ -0,0 +1,31 @@ +""" +Custom pghistory models for DefectDojo. + +This module contains custom proxy models for pghistory Events +to expose context fields as structured fields. + +Note: Performance indexes for the pghistory_context table are managed +via Django migration 0244_pghistory_indices.py rather than +through model Meta classes, since the context table is managed by +the pghistory library itself. +""" +import pghistory.models +from django.db import models + + +class DojoEvents(pghistory.models.Events): + + """ + Custom Events proxy model that exposes context fields as structured fields. + + This allows querying and displaying context data like user, url, and remote_addr + as regular model fields instead of accessing nested JSON data. 
+ """ + + user = pghistory.ProxyField("pgh_context__user", models.IntegerField(null=True)) + url = pghistory.ProxyField("pgh_context__url", models.TextField(null=True)) + remote_addr = pghistory.ProxyField("pgh_context__remote_addr", models.CharField(max_length=45, null=True)) + + class Meta: + proxy = True + app_label = "dojo" diff --git a/dojo/product/signals.py b/dojo/product/signals.py index b0e9b999cc5..0ed9a62747c 100644 --- a/dojo/product/signals.py +++ b/dojo/product/signals.py @@ -2,6 +2,7 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, post_save from django.dispatch import receiver @@ -11,6 +12,8 @@ from dojo.labels import get_labels from dojo.models import Product from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents +from dojo.utils import get_current_user labels = get_labels() @@ -31,13 +34,42 @@ def product_post_delete(sender, instance, **kwargs): # Catch instances in async delete where a single object is deleted more than once with contextlib.suppress(sender.DoesNotExist): description = labels.ASSET_DELETE_WITH_NAME_SUCCESS_MESSAGE % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="product"), - object_id=instance.id, - ).order_by("-id").first(): - description = labels.ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE % {"name": instance.name, "user": le.actor} + # First try to find deletion author in pghistory events + # Look for delete events for this specific product instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Product", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="product"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + if not user: + current_user = get_current_user() + user = current_user + + # Update description with user if found + if user: + description = labels.ASSET_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE % {"name": instance.name, "user": user} + create_notification(event="product_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/product_type/signals.py b/dojo/product_type/signals.py index 3c67c24f2cd..523e7dcedc4 100644 --- a/dojo/product_type/signals.py +++ b/dojo/product_type/signals.py @@ -1,7 +1,9 @@ import contextlib from auditlog.models import LogEntry +from crum import get_current_user from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, post_save from django.dispatch import receiver @@ 
-11,6 +13,7 @@ from dojo.labels import get_labels from dojo.models import Product_Type from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents labels = get_labels() @@ -30,15 +33,45 @@ def product_type_post_save(sender, instance, created, **kwargs): def product_type_post_delete(sender, instance, **kwargs): # Catch instances in async delete where a single object is deleted more than once with contextlib.suppress(sender.DoesNotExist): - description = labels.ORG_DELETE_WITH_NAME_SUCCESS_MESSAGE % {"name": instance.name} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="product_type"), - object_id=instance.id, - ).order_by("-id").first(): - description = labels.ORG_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE % {"name": instance.name, "user": le.actor} + # First try to find deletion author in pghistory events + # Look for delete events for this specific product_type instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Product_Type", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="product_type"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Since adding pghistory as auditlog option, this signal here runs before the django-auditlog signal + # Fallback to the current user of the request (Which might be not available for ASYNC_OBJECT_DELETE scenario's) + if not user: + current_user = get_current_user() + user = current_user + + # Update description with user if found + if user: + description = labels.ORG_DELETE_WITH_NAME_WITH_USER_SUCCESS_MESSAGE % {"name": instance.name, "user": user} + create_notification(event="product_type_deleted", # template does not exists, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": instance.name}, description=description, diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index eeca99ce190..5b4713a9c43 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -15,6 +15,7 @@ from pathlib import Path import environ +import pghistory from celery.schedules import crontab from netaddr import IPNetwork, IPSet @@ -264,6 +265,10 @@ DD_TRACK_IMPORT_HISTORY=(bool, True), # Delete Auditlogs older than x month; -1 to keep all logs DD_AUDITLOG_FLUSH_RETENTION_PERIOD=(int, -1), + # Batch size for flushing audit logs per task run + DD_AUDITLOG_FLUSH_BATCH_SIZE=(int, 1000), + # Maximum number of batches to process per task run + DD_AUDITLOG_FLUSH_MAX_BATCHES=(int, 100), # Allow grouping of findings in the same test, for example to group findings per dependency # DD_FEATURE_FINDING_GROUPS feature is moved to system_settings, will be removed from settings file DD_FEATURE_FINDING_GROUPS=(bool, True), @@ -317,6 +322,8 @@ # If you run big import you may want to disable this because the way django-auditlog 
currently works, there's # a big performance hit. Especially during (re-)imports. DD_ENABLE_AUDITLOG=(bool, True), + # Audit logging system: "django-auditlog" (default) or "django-pghistory" + DD_AUDITLOG_TYPE=(str, "django-pghistory"), # Specifies whether the "first seen" date of a given report should be used over the "last seen" date DD_USE_FIRST_SEEN=(bool, False), # When set to True, use the older version of the qualys parser that is a more heavy handed in setting severity @@ -894,7 +901,6 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "polymorphic", # provides admin templates "django.contrib.admin", "django.contrib.humanize", - "auditlog", "dojo", "watson", "imagekit", @@ -909,6 +915,9 @@ def generate_url(scheme, double_slashes, user, password, host, port, path, param "tagulous", "fontawesomefree", "django_filters", + "auditlog", + "pgtrigger", + "pghistory", ) # ------------------------------------------------------------------------------ @@ -1920,8 +1929,11 @@ def saml2_attrib_map_format(din): # ------------------------------------------------------------------------------ AUDITLOG_FLUSH_RETENTION_PERIOD = env("DD_AUDITLOG_FLUSH_RETENTION_PERIOD") ENABLE_AUDITLOG = env("DD_ENABLE_AUDITLOG") +AUDITLOG_TYPE = env("DD_AUDITLOG_TYPE") AUDITLOG_TWO_STEP_MIGRATION = False AUDITLOG_USE_TEXT_CHANGES_IF_JSON_IS_NOT_PRESENT = False +AUDITLOG_FLUSH_BATCH_SIZE = env("DD_AUDITLOG_FLUSH_BATCH_SIZE") +AUDITLOG_FLUSH_MAX_BATCHES = env("DD_AUDITLOG_FLUSH_MAX_BATCHES") USE_FIRST_SEEN = env("DD_USE_FIRST_SEEN") USE_QUALYS_LEGACY_SEVERITY_PARSING = env("DD_QUALYS_LEGACY_SEVERITY_PARSING") @@ -2006,3 +2018,28 @@ def show_toolbar(request): "debug_toolbar.panels.profiling.ProfilingPanel", # 'cachalot.panels.CachalotPanel', ] + +######################################################################################################### +# Auditlog configuration # +######################################################################################################### + +if ENABLE_AUDITLOG: + middleware_list = list(MIDDLEWARE) + crum_index = middleware_list.index("crum.CurrentRequestUserMiddleware") + + if AUDITLOG_TYPE == "django-auditlog": + # Insert AuditlogMiddleware before CurrentRequestUserMiddleware + middleware_list.insert(crum_index, "dojo.middleware.AuditlogMiddleware") + elif AUDITLOG_TYPE == "django-pghistory": + # Insert pghistory HistoryMiddleware before CurrentRequestUserMiddleware + middleware_list.insert(crum_index, "dojo.middleware.PgHistoryMiddleware") + + MIDDLEWARE = middleware_list + +PGHISTORY_FOREIGN_KEY_FIELD = pghistory.ForeignKey(db_index=False) +PGHISTORY_CONTEXT_FIELD = pghistory.ContextForeignKey(db_index=True) +PGHISTORY_OBJ_FIELD = pghistory.ObjForeignKey(db_index=True) + +######################################################################################################### +# End of Auditlog configuration # +######################################################################################################### diff --git a/dojo/tasks.py b/dojo/tasks.py index ee6f2aec3bc..ad489a0c229 100644 --- a/dojo/tasks.py +++ b/dojo/tasks.py @@ -1,9 +1,7 @@ import logging -from datetime import date, timedelta +from datetime import timedelta -from auditlog.models import LogEntry from celery.utils.log import get_task_logger -from dateutil.relativedelta import relativedelta from django.apps import apps from django.conf import settings from django.core.management import call_command @@ -11,6 +9,7 @@ from django.urls import reverse from django.utils 
import timezone +from dojo.auditlog import run_flush_auditlog from dojo.celery import app from dojo.decorators import dojo_async_task from dojo.finding.helper import fix_loop_duplicates @@ -95,22 +94,7 @@ def cleanup_alerts(*args, **kwargs): @app.task(bind=True) def flush_auditlog(*args, **kwargs): - retention_period = settings.AUDITLOG_FLUSH_RETENTION_PERIOD - - if retention_period < 0: - logger.info("Flushing auditlog is disabled") - return - - logger.info("Running Cleanup Task for Logentries with %d Months retention", retention_period) - retention_date = date.today() - relativedelta(months=retention_period) - subset = LogEntry.objects.filter(timestamp__date__lt=retention_date) - event_count = subset.count() - logger.debug("Initially received %d Logentries", event_count) - if event_count > 0: - subset._raw_delete(subset.db) - logger.debug("Total number of audit log entries deleted: %s", event_count) - else: - logger.debug("No outdated Logentries found") + run_flush_auditlog() @app.task(bind=True) diff --git a/dojo/templates/dojo/action_history.html b/dojo/templates/dojo/action_history.html index be7f0353d8c..c2c5e822e22 100644 --- a/dojo/templates/dojo/action_history.html +++ b/dojo/templates/dojo/action_history.html @@ -4,50 +4,155 @@ {{ block.super }}
-
-
-

- {{ obj }} History - -

-
-
- {% include "dojo/filter_snippet.html" with form=log_entry_filter.form %} -
-
- {% if history %} -
- {% include "dojo/paging_snippet.html" with page=history %} + {% if pghistory_history %} +
+
+

+ PostgreSQL History (pghistory) + +

+
+
+ {% include "dojo/filter_snippet.html" with form=pghistory_filter.form %} +
+
+ {% include "dojo/paging_snippet.html" with page=pghistory_history %} +
+
+ + + + + + + + + + + + + + {% for h in pghistory_history %} + + + + + + + + + + + + + {% endfor %} +
TimestampLabelObjectUserURLIP AddressDataContextObject IDChanges
{{ h.pgh_created_at }}{{ h.pgh_label }}{{ h.object_str|default:"N/A" }}{{ h.user|default:"N/A" }} + {% if h.url and h.url != "N/A" %} + {{ h.url|truncatechars:50 }} + {% else %} + N/A + {% endif %} + {{ h.remote_addr|default:"N/A" }} +
+ + View + +
{{ h.pgh_data|pprint|default:"N/A" }}
+
+
+ {% if h.pgh_context %} +
+ + View + +
{{ h.pgh_context|pprint|default:"N/A" }}
+
+ {% else %} + None + {% endif %} +
{{ h.pgh_obj_id|default:"N/A" }} + {% if h.pgh_label == "initial_import" %} + Initial Import + {% elif h.pgh_diff %} +
+ {% for field, values in h.pgh_diff.items %} +
+ {{ field }}: + + {% if values.0 %} + {{ values.0|truncatechars:50 }} + {% else %} + empty + {% endif %} + + to + + {% if values.1 %} + {{ values.1|truncatechars:50 }} + {% else %} + empty + {% endif %} + +
+ {% endfor %} +
+ {% else %} + No Changes + {% endif %} +
+
+
+ {% include "dojo/paging_snippet.html" with page=pghistory_history %} +
-
- - - - - - - + {% endif %} - {% for h in history %} + {% if auditlog_history %} +
+
+

+ Audit Log History (django-auditlog) + +

+
+
+ {% include "dojo/filter_snippet.html" with form=log_entry_filter.form %} +
+
+ {% include "dojo/paging_snippet.html" with page=auditlog_history %} +
+
+
ActionActorDate/TimeChanges
- - - - + + + + - {% endfor %} -
{{ h }}{{ h.actor }}{{ h.timestamp }} - {{ h.changes|action_log_entry|linebreaks}} - ActionActorDate/TimeChanges
-
-
- {% include "dojo/paging_snippet.html" with page=history %} + {% for h in auditlog_history %} + + {{ h }} + {{ h.actor }} + {{ h.timestamp }} + + {{ h.changes|action_log_entry|linebreaks}} + + + {% endfor %} + +
+
+ {% include "dojo/paging_snippet.html" with page=auditlog_history %} +
- {% else %} -

No update history found for this object

+ {% endif %} + + {% if not pghistory_history and not auditlog_history %} +

No update history found for this object

{% endif %}
@@ -56,4 +161,17 @@

{% block postscript %} {{ block.super }} {% include "dojo/filter_js_snippet.html" %} + {% endblock %} diff --git a/dojo/test/signals.py b/dojo/test/signals.py index 82594395260..22669a2a040 100644 --- a/dojo/test/signals.py +++ b/dojo/test/signals.py @@ -2,6 +2,7 @@ from auditlog.models import LogEntry from django.conf import settings +from django.contrib.auth import get_user_model from django.contrib.contenttypes.models import ContentType from django.db.models.signals import post_delete, pre_delete, pre_save from django.dispatch import receiver @@ -12,6 +13,7 @@ from dojo.models import Engagement, Finding, Product, Test from dojo.notes.helper import delete_related_notes from dojo.notifications.helper import create_notification +from dojo.pghistory_models import DojoEvents @receiver(post_delete, sender=Test) @@ -20,14 +22,39 @@ def test_post_delete(sender, instance, using, origin, **kwargs): with contextlib.suppress(sender.DoesNotExist, Engagement.DoesNotExist, Product.DoesNotExist): if instance == origin: description = _('The test "%(name)s" was deleted') % {"name": str(instance)} + user = None + if settings.ENABLE_AUDITLOG: - if le := LogEntry.objects.filter( - action=LogEntry.Action.DELETE, - content_type=ContentType.objects.get(app_label="dojo", model="test"), - object_id=instance.id, - ).order_by("-id").first(): + # First try to find deletion author in pghistory events + # Look for delete events for this specific test instance + pghistory_delete_events = DojoEvents.objects.filter( + pgh_obj_model="dojo.Test", + pgh_obj_id=instance.id, + pgh_label="delete", + ).order_by("-pgh_created_at") + + if pghistory_delete_events.exists(): + latest_delete = pghistory_delete_events.first() + # Extract user from pghistory context + if latest_delete.user: + User = get_user_model() + with contextlib.suppress(User.DoesNotExist): + user = User.objects.get(id=latest_delete.user) + + # Fall back to django-auditlog if no user found in pghistory + if not user: + if le := LogEntry.objects.filter( + action=LogEntry.Action.DELETE, + content_type=ContentType.objects.get(app_label="dojo", model="test"), + object_id=instance.id, + ).order_by("-id").first(): + user = le.actor + + # Update description with user if found + if user: description = _('The test "%(name)s" was deleted by %(user)s') % { - "name": str(instance), "user": le.actor} + "name": str(instance), "user": user} + create_notification(event="test_deleted", # Template does not exist, it will default to "other" but this event name needs to stay because of unit testing title=_("Deletion of %(name)s") % {"name": str(instance)}, description=description, diff --git a/dojo/views.py b/dojo/views.py index afc20b5065a..ae2303e3ea4 100644 --- a/dojo/views.py +++ b/dojo/views.py @@ -3,6 +3,7 @@ from pathlib import Path from auditlog.models import LogEntry +from django.apps import apps from django.conf import settings from django.contrib import messages from django.contrib.auth.decorators import login_required @@ -18,15 +19,44 @@ user_has_permission_or_403, ) from dojo.authorization.roles_permissions import Permissions -from dojo.filters import LogEntryFilter +from dojo.filters import LogEntryFilter, PgHistoryFilter from dojo.forms import ManageFileFormSet from dojo.models import Endpoint, Engagement, FileUpload, Finding, Product, Test +from dojo.pghistory_models import DojoEvents from dojo.product_announcements import ErrorPageProductAnnouncement from dojo.utils import Product_Tab, generate_file_response, get_page_items logger = logging.getLogger(__name__) +def 
get_object_str(event): + """Get the __str__ representation of the original object from pghistory event data.""" + try: + if not hasattr(event, "pgh_obj_model") or not event.pgh_obj_model: + return "N/A" + + app_label, model_name = event.pgh_obj_model.split(".") + model_class = apps.get_model(app_label, model_name) + + if hasattr(event, "pgh_data") and event.pgh_data: + # Create a temporary instance with the event data + temp_instance = model_class(**event.pgh_data) + return str(temp_instance) + if hasattr(event, "pgh_obj_id") and event.pgh_obj_id: + return f"Object ID: {event.pgh_obj_id}" + return "N/A" # noqa: TRY300 it complains that it wants an else, but if I add an else, it complains that the elise is unnecessary + + except (ValueError, LookupError, TypeError, AttributeError): + # Fallback to name from data if available + if hasattr(event, "pgh_data") and event.pgh_data and "name" in event.pgh_data: + return event.pgh_data["name"] + + if hasattr(event, "pgh_obj_id") and event.pgh_obj_id: + return f"Object ID: {event.pgh_obj_id}" + + return "N/A" + + def custom_error_view(request, exception=None): ErrorPageProductAnnouncement(request=request) return render(request, "500.html", {}, status=500) @@ -105,10 +135,42 @@ def action_history(request, cid, oid): else: product_tab.setEngagement(object_value.engagement) - history = LogEntry.objects.filter(content_type=ct, - object_pk=obj.id).order_by("-timestamp") - log_entry_filter = LogEntryFilter(request.GET, queryset=history) - paged_history = get_page_items(request, log_entry_filter.qs, 25) + # Get audit history from both systems separately + auditlog_history = [] + pghistory_history = [] + + # Try to get django-auditlog entries + auditlog_queryset = LogEntry.objects.filter( + content_type=ct, + object_pk=obj.id, + ).order_by("-timestamp") + auditlog_history = auditlog_queryset + + # Use custom DojoEvents proxy model - provides proper diff calculation and context fields + # Filter by the specific object using tracks() method + # Note: Events is a CTE that doesn't support select_related, but includes context data + pghistory_history = DojoEvents.objects.tracks(obj).order_by("-pgh_created_at") + + # Add object string representation based on the original models __str__ method + # this value was available in the old auditlogs, so we mimic that here + # it can be useful to see the object_str that was changed, but we'll have to see how it performs + + # Apply filtering first, then process for object strings + pghistory_filter = PgHistoryFilter(request.GET, queryset=pghistory_history) + filtered_pghistory = pghistory_filter.qs + + # Process filtered events to add object string representation + processed_events = [] + for event in filtered_pghistory: + event.object_str = get_object_str(event) + processed_events.append(event) + + # Paginate the processed events + paged_pghistory_history = get_page_items(request, processed_events, 25) + + # Create filter and pagination for auditlog entries + auditlog_filter = LogEntryFilter(request.GET, queryset=auditlog_history) + paged_auditlog_history = get_page_items(request, auditlog_filter.qs, 25) if not settings.ENABLE_AUDITLOG: messages.add_message( @@ -118,10 +180,12 @@ def action_history(request, cid, oid): extra_tags="alert-danger") return render(request, "dojo/action_history.html", - {"history": paged_history, + {"auditlog_history": paged_auditlog_history, + "pghistory_history": paged_pghistory_history, "product_tab": product_tab, - "filtered": history, - "log_entry_filter": log_entry_filter, + "filtered": 
auditlog_history, + "log_entry_filter": auditlog_filter, + "pghistory_filter": pghistory_filter, "obj": obj, "test": test, "object_value": object_value, diff --git a/requirements.txt b/requirements.txt index 193350d9428..11a5db0a49d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,6 +6,7 @@ celery==5.5.3 defusedxml==0.7.1 django_celery_results==2.6.0 django-auditlog==3.2.1 +django-pghistory==3.7.0 django-dbbackup==5.0.0 django-environ==0.12.0 django-filter==25.1 diff --git a/unittests/test_adminsite.py b/unittests/test_adminsite.py index 128727fca09..55af03da0ef 100644 --- a/unittests/test_adminsite.py +++ b/unittests/test_adminsite.py @@ -12,6 +12,9 @@ def test_is_model_defined(self): if subclass._meta.proxy: continue if subclass.__module__ == "dojo.models": + # Skip pghistory Event models - they're audit trail models not meant for admin + if subclass.__name__.endswith("Event"): + continue if not ((subclass.__name__[:9] == "Tagulous_") and (subclass.__name__[-5:] == "_tags")): with self.subTest(type="base", subclass=subclass): self.assertIn(subclass, admin.site._registry.keys(), f"{subclass} is not registered in 'admin.site' in models.py") diff --git a/unittests/test_apiv2_methods_and_endpoints.py b/unittests/test_apiv2_methods_and_endpoints.py index 93b128b795c..3ca9f05cd24 100644 --- a/unittests/test_apiv2_methods_and_endpoints.py +++ b/unittests/test_apiv2_methods_and_endpoints.py @@ -52,6 +52,11 @@ def test_is_defined(self): "dojo_group_members", "product_members", "product_groups", "product_type_groups", "product_type_members", "asset_members", "asset_groups", "organization_groups", "organization_members", + # pghistory Event models (should not be exposed via API) + "dojo_userevents", "endpointevents", "engagementevents", "findingevents", + "finding_groupevents", "product_typeevents", "productevents", "testevents", + "risk_acceptanceevents", "finding_templateevents", "cred_userevents", + "notification_webhooksevents", } for reg, _, _ in sorted(self.registry): if reg in exempt_list: @@ -120,6 +125,9 @@ def test_is_defined(self): continue if subclass.__name__ == "Alerts": continue + # Skip pghistory Event models - they're audit trail models not meant for API endpoints + if subclass.__name__.endswith("Event"): + continue with self.subTest(subclass=subclass): if subclass in self.used_models: self.assertNotIn(subclass, self.no_api_models, "Thank you, you just implemented API endpoint for the model which was needed. Please remove it from exception list 'self.no_api_models'") diff --git a/unittests/test_auditlog.py b/unittests/test_auditlog.py new file mode 100644 index 00000000000..c748758cc67 --- /dev/null +++ b/unittests/test_auditlog.py @@ -0,0 +1,297 @@ +""" +Unit tests for audit configuration functionality. + +Tests the dual-audit system where both django-auditlog and django-pghistory +can coexist, allowing users to see historical data from both systems. 
+""" +from unittest.mock import MagicMock, patch + +from auditlog.models import LogEntry +from django.apps import apps +from django.test import TestCase, override_settings + +from dojo.auditlog import ( + configure_audit_system, + configure_pghistory_triggers, + disable_django_auditlog, + disable_django_pghistory, + enable_django_auditlog, + enable_django_pghistory, + register_django_pghistory_models, +) +from dojo.models import Product_Type + + +class TestAuditConfig(TestCase): + + """Test audit configuration functionality.""" + + @patch("auditlog.registry.auditlog") + def test_enable_django_auditlog(self, mock_auditlog): + """Test that enable_django_auditlog registers models.""" + # Mock the auditlog registry + mock_auditlog.register = MagicMock() + + enable_django_auditlog() + + # Verify that register was called multiple times (once for each model) + self.assertTrue(mock_auditlog.register.called) + self.assertGreater(mock_auditlog.register.call_count, 5) + + def test_disable_django_auditlog(self): + """Test that disable_django_auditlog runs without error.""" + # This should not raise an exception + disable_django_auditlog() + + @patch("dojo.auditlog.pghistory") + def test_register_django_pghistory_models(self, mock_pghistory): + """Test that register_django_pghistory_models registers all models.""" + # Mock pghistory.track + mock_pghistory.track = MagicMock() + mock_pghistory.InsertEvent = MagicMock() + mock_pghistory.UpdateEvent = MagicMock() + mock_pghistory.DeleteEvent = MagicMock() + mock_pghistory.ManualEvent = MagicMock() + + register_django_pghistory_models() + + # Verify that track was called multiple times (once for each model) + self.assertTrue(mock_pghistory.track.called) + self.assertGreater(mock_pghistory.track.call_count, 5) + + @patch("dojo.auditlog.call_command") + def test_enable_django_pghistory(self, mock_call_command): + """Test that enable_django_pghistory enables triggers only.""" + enable_django_pghistory() + + # Verify that pgtrigger enable command was called + mock_call_command.assert_called_with("pgtrigger", "enable") + + @patch("dojo.auditlog.call_command") + def test_disable_django_pghistory(self, mock_call_command): + """Test that disable_django_pghistory disables triggers.""" + disable_django_pghistory() + + # Verify that pgtrigger disable command was called + mock_call_command.assert_called_once_with("pgtrigger", "disable") + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="invalid-type") + @patch("dojo.auditlog.disable_django_auditlog") + @patch("dojo.auditlog.call_command") + def test_invalid_audit_type_warning(self, mock_call_command, mock_disable_auditlog): + """Test that invalid audit types disable both audit systems.""" + # Call the main configuration function with invalid type + configure_audit_system() + configure_pghistory_triggers() + + # Verify that auditlog is disabled for invalid type + mock_disable_auditlog.assert_called_once() + # Verify that pghistory triggers are also disabled for invalid type + mock_call_command.assert_called_with("pgtrigger", "disable") + + # This test mainly ensures no exceptions are raised + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-pghistory") + @patch("dojo.auditlog.enable_django_auditlog") + @patch("dojo.auditlog.disable_django_auditlog") + @patch("dojo.auditlog.call_command") + def test_dual_audit_system_coexistence(self, mock_call_command, mock_disable_auditlog, mock_enable_auditlog): + """Test that audit system configuration handles pghistory type correctly.""" + # Call the main 
configuration function + configure_audit_system() + configure_pghistory_triggers() + + # Verify auditlog is disabled when pghistory is the chosen type + mock_disable_auditlog.assert_called_once() + # Verify auditlog is not enabled when pghistory is chosen + mock_enable_auditlog.assert_not_called() + # Verify that pghistory triggers are enabled when pghistory is the chosen type + mock_call_command.assert_called_with("pgtrigger", "enable") + + # This demonstrates that the system correctly chooses the configured audit type + + def test_separate_history_lists_approach(self): + """Test that the dual-history approach creates separate lists correctly.""" + # This test verifies the new approach where we maintain separate history lists + # instead of mixing audit data from different systems + + # Import the view function to test the separation logic + + # This is more of a structural test to ensure the approach is sound + # The actual view testing would require more complex setup + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-pghistory") + def test_pghistory_insert_event_creation(self): + """Test that pghistory creates insert events when a Product_Type is created and auditlog does not.""" + # Configure audit system for pghistory + configure_audit_system() + configure_pghistory_triggers() + + # Get the Product_Type event model + ProductTypeEvent = apps.get_model("dojo", "Product_TypeEvent") + + # Count existing events before creating new Product_Type + initial_event_count = ProductTypeEvent.objects.count() + + # Clear any existing audit log entries for Product_Type + LogEntry.objects.filter(content_type__model="product_type").delete() + + # Create a new Product_Type + product_type = Product_Type.objects.create( + name="Test Product Type for pghistory", + description="Test product type to verify pghistory event creation", + ) + + # Verify that an insert event was created in pghistory + final_event_count = ProductTypeEvent.objects.count() + self.assertEqual(final_event_count, initial_event_count + 1, + "Expected exactly one new pghistory event after creating Product_Type") + + # Get the most recent event + latest_event = ProductTypeEvent.objects.latest("pgh_created_at") + + # Verify the event details + self.assertEqual(latest_event.pgh_obj_id, product_type.id, + "Event should reference the created Product_Type") + self.assertEqual(latest_event.name, product_type.name, + "Event should contain the Product_Type name") + self.assertEqual(latest_event.description, product_type.description, + "Event should contain the Product_Type description") + + # Verify it's an insert event (check if pgh_label indicates creation) + # The label could be 'insert' or contain insert-related information + self.assertIsNotNone(latest_event.pgh_created_at, + "Event should have a creation timestamp") + + # Verify that NO auditlog entries were created (mutual exclusivity) + audit_entries = LogEntry.objects.filter( + content_type__model="product_type", + object_id=product_type.id, + ) + self.assertEqual(audit_entries.count(), 0, + "Expected NO auditlog entries when pghistory is enabled") + + # Clean up + product_type.delete() + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-auditlog") + @patch("dojo.auditlog.enable_django_auditlog") + @patch("dojo.auditlog.call_command") + def test_configure_audit_system_auditlog_enabled(self, mock_call_command, mock_enable_auditlog): + """Test that configure_audit_system enables auditlog and configures pghistory triggers correctly.""" + 
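        # configure_audit_system() applies the django-auditlog side of AUDITLOG_TYPE at
        # runtime, while configure_pghistory_triggers() is expected to toggle the Postgres
        # triggers via call_command("pgtrigger", "enable"/"disable"); the mocked
        # enable_django_auditlog and call_command below assert exactly that for
        # AUDITLOG_TYPE="django-auditlog".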
configure_audit_system() + configure_pghistory_triggers() + + # Verify that auditlog is enabled + mock_enable_auditlog.assert_called_once() + # Verify that pghistory triggers are disabled when auditlog is the chosen type + mock_call_command.assert_called_with("pgtrigger", "disable") + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-pghistory") + @patch("dojo.auditlog.disable_django_auditlog") + @patch("dojo.auditlog.call_command") + def test_configure_audit_system_pghistory_enabled(self, mock_call_command, mock_disable_auditlog): + """Test that configure_audit_system disables auditlog and enables pghistory triggers correctly.""" + configure_audit_system() + configure_pghistory_triggers() + + # Verify that auditlog is disabled when pghistory is the chosen type + mock_disable_auditlog.assert_called_once() + # Verify that pghistory triggers are enabled when pghistory is the chosen type + mock_call_command.assert_called_with("pgtrigger", "enable") + + @override_settings(ENABLE_AUDITLOG=False) + @patch("dojo.auditlog.disable_django_auditlog") + @patch("dojo.auditlog.call_command") + def test_configure_audit_system_all_disabled(self, mock_call_command, mock_disable_auditlog): + """Test that configure_audit_system disables both auditlog and pghistory when audit is disabled.""" + configure_audit_system() + configure_pghistory_triggers() + + # Verify that auditlog is disabled when audit logging is disabled + mock_disable_auditlog.assert_called_once() + # Verify that pghistory triggers are also disabled when audit logging is disabled + mock_call_command.assert_called_with("pgtrigger", "disable") + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="unknown-type") + @patch("dojo.auditlog.disable_django_auditlog") + @patch("dojo.auditlog.call_command") + def test_configure_audit_system_unknown_type(self, mock_call_command, mock_disable_auditlog): + """Test that configure_audit_system disables both systems for unknown audit types.""" + configure_audit_system() + configure_pghistory_triggers() + + # Verify that auditlog is disabled for unknown types + mock_disable_auditlog.assert_called_once() + # Verify that pghistory triggers are also disabled for unknown types + mock_call_command.assert_called_with("pgtrigger", "disable") + + @patch("dojo.auditlog.call_command") + def test_disable_pghistory_command_failure(self, mock_call_command): + """Test that disable_django_pghistory handles command failures gracefully.""" + # Simulate command failure + mock_call_command.side_effect = Exception("Command failed") + + # This should not raise an exception + disable_django_pghistory() + + # Verify that call_command was attempted + mock_call_command.assert_called_once_with("pgtrigger", "disable") + + @patch("dojo.auditlog.call_command") + def test_enable_pghistory_command_failure(self, mock_call_command): + """Test that enable_django_pghistory handles command failures gracefully.""" + # Simulate command failure for trigger enable + mock_call_command.side_effect = Exception("Command failed") + + # This should not raise an exception + enable_django_pghistory() + + # Verify that call_command was attempted + mock_call_command.assert_called_with("pgtrigger", "enable") + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-auditlog") + def test_auditlog_insert_event_creation(self): + """Test that django-auditlog creates audit log entries when a Product_Type is created and pghistory does not.""" + # Configure audit system for auditlog + configure_audit_system() + 
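        # With AUDITLOG_TYPE="django-auditlog", configure_pghistory_triggers() should issue
        # `pgtrigger disable`, so the Product_Type created below is expected to yield one
        # auditlog LogEntry and no new ProductTypeEvent rows.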
configure_pghistory_triggers() + + # Get the Product_Type event model for pghistory check + ProductTypeEvent = apps.get_model("dojo", "Product_TypeEvent") + + # Clear any existing audit log entries for Product_Type + LogEntry.objects.filter(content_type__model="product_type").delete() + + # Count existing pghistory events + initial_pghistory_count = ProductTypeEvent.objects.count() + + # Create a new Product_Type + product_type = Product_Type.objects.create( + name="Test Product Type for Auditlog", + description="Test description for auditlog verification", + ) + + # Verify that an audit log entry was created + audit_entries = LogEntry.objects.filter( + content_type__model="product_type", + object_id=product_type.id, + action=LogEntry.Action.CREATE, + ) + + self.assertEqual(audit_entries.count(), 1, + "Expected exactly one audit log entry for Product_Type creation") + + audit_entry = audit_entries.first() + self.assertEqual(audit_entry.object_repr, str(product_type), + "Audit entry should represent the created object") + self.assertIsNotNone(audit_entry.timestamp, + "Audit entry should have a timestamp") + + # Verify that NO pghistory events were created (mutual exclusivity) + final_pghistory_count = ProductTypeEvent.objects.count() + self.assertEqual(final_pghistory_count, initial_pghistory_count, + "Expected NO new pghistory events when auditlog is enabled") + + # Clean up + product_type.delete() diff --git a/unittests/test_importers_performance.py b/unittests/test_importers_performance.py index e26cf876d4d..6e43f0e2d13 100644 --- a/unittests/test_importers_performance.py +++ b/unittests/test_importers_performance.py @@ -3,8 +3,10 @@ from crum import impersonate from django.contrib.contenttypes.models import ContentType +from django.test import override_settings from django.utils import timezone +from dojo.auditlog import configure_audit_system, configure_pghistory_triggers from dojo.decorators import dojo_async_task_counter from dojo.importers.default_importer import DefaultImporter from dojo.importers.default_reimporter import DefaultReImporter @@ -87,7 +89,7 @@ def import_reimport_performance(self, expected_num_queries1, expected_num_async_ ./run-unittest.sh --test-case unittests.test_importers_performance.TestDojoImporterPerformance 2>&1 | less Then search for `expected` to find the lines where the expected number of queries is printed. 
Or you can use `grep` to filter the output: - ./run-unittest.sh --test-case unittests.test_importers_performance.TestDojoImporterPerformance 2>&1 | grep expected + ./run-unittest.sh --test-case unittests.test_importers_performance.TestDojoImporterPerformance 2>&1 | grep expected -B 10 """ product_type, _created = Product_Type.objects.get_or_create(name="test") product, _created = Product.objects.get_or_create( @@ -171,10 +173,12 @@ def import_reimport_performance(self, expected_num_queries1, expected_num_async_ reimporter = DefaultReImporter(**reimport_options) test, _, _len_new_findings, _len_closed_findings, _, _, _ = reimporter.process_scan(scan) - # patch the we_want_async decorator to always return True so we don't depend on block_execution flag shenanigans - # @patch("dojo.decorators.we_want_async", return_value=True) - # def test_import_reimport_reimport_performance_async(self, mock): + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-auditlog") def test_import_reimport_reimport_performance_async(self): + # Ensure django-auditlog is properly configured for this test + configure_audit_system() + configure_pghistory_triggers() + self.import_reimport_performance( expected_num_queries1=593, expected_num_async_tasks1=10, @@ -184,8 +188,25 @@ def test_import_reimport_reimport_performance_async(self): expected_num_async_tasks3=20, ) - # @patch("dojo.decorators.we_want_async", return_value=False) - # def test_import_reimport_reimport_performance_no_async(self, mock): + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-pghistory") + def test_import_reimport_reimport_performance_pghistory_async(self): + """ + This test checks the performance of the importers when using django-pghistory with async enabled. + Query counts will need to be determined by running the test initially. + """ + configure_audit_system() + configure_pghistory_triggers() + + self.import_reimport_performance( + expected_num_queries1=559, + expected_num_async_tasks1=10, + expected_num_queries2=491, + expected_num_async_tasks2=22, + expected_num_queries3=284, + expected_num_async_tasks3=20, + ) + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-auditlog") def test_import_reimport_reimport_performance_no_async(self): """ This test checks the performance of the importers when they are run in sync mode. @@ -194,6 +215,9 @@ def test_import_reimport_reimport_performance_no_async(self): The impersonate context manager above does not work as expected for disabling async, so we patch the we_want_async decorator to always return False. """ + configure_audit_system() + configure_pghistory_triggers() + testuser = User.objects.get(username="admin") testuser.usercontactinfo.block_execution = True testuser.usercontactinfo.save() @@ -206,8 +230,29 @@ def test_import_reimport_reimport_performance_no_async(self): expected_num_async_tasks3=20, ) - # @patch("dojo.decorators.we_want_async", return_value=False) - # def test_import_reimport_reimport_performance_no_async_with_product_grading(self, mock): + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-pghistory") + def test_import_reimport_reimport_performance_pghistory_no_async(self): + """ + This test checks the performance of the importers when using django-pghistory with async disabled. + Query counts will need to be determined by running the test initially. 
+ """ + configure_audit_system() + configure_pghistory_triggers() + + testuser = User.objects.get(username="admin") + testuser.usercontactinfo.block_execution = True + testuser.usercontactinfo.save() + + self.import_reimport_performance( + expected_num_queries1=559, + expected_num_async_tasks1=10, + expected_num_queries2=496, + expected_num_async_tasks2=22, + expected_num_queries3=289, + expected_num_async_tasks3=20, + ) + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-auditlog") def test_import_reimport_reimport_performance_no_async_with_product_grading(self): """ This test checks the performance of the importers when they are run in sync mode. @@ -216,6 +261,9 @@ def test_import_reimport_reimport_performance_no_async_with_product_grading(self The impersonate context manager above does not work as expected for disabling async, so we patch the we_want_async decorator to always return False. """ + configure_audit_system() + configure_pghistory_triggers() + testuser = User.objects.get(username="admin") testuser.usercontactinfo.block_execution = True testuser.usercontactinfo.save() @@ -229,3 +277,26 @@ def test_import_reimport_reimport_performance_no_async_with_product_grading(self expected_num_queries3=295, expected_num_async_tasks3=21, ) + + @override_settings(ENABLE_AUDITLOG=True, AUDITLOG_TYPE="django-pghistory") + def test_import_reimport_reimport_performance_pghistory_no_async_with_product_grading(self): + """ + This test checks the performance of the importers when using django-pghistory with async disabled and product grading enabled. + Query counts will need to be determined by running the test initially. + """ + configure_audit_system() + configure_pghistory_triggers() + + testuser = User.objects.get(username="admin") + testuser.usercontactinfo.block_execution = True + testuser.usercontactinfo.save() + self.system_settings(enable_product_grade=True) + + self.import_reimport_performance( + expected_num_queries1=560, + expected_num_async_tasks1=11, + expected_num_queries2=497, + expected_num_async_tasks2=23, + expected_num_queries3=290, + expected_num_async_tasks3=21, + ) diff --git a/unittests/test_notifications.py b/unittests/test_notifications.py index b3e22747194..150f4c3205d 100644 --- a/unittests/test_notifications.py +++ b/unittests/test_notifications.py @@ -2,6 +2,7 @@ import logging from unittest.mock import Mock, patch +import pghistory from auditlog.context import set_actor from crum import impersonate from django.test import override_settings @@ -212,7 +213,7 @@ def test_product_types(self, mock): last_count = mock.call_count with self.subTest("product_type_added"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod_type = Product_Type.objects.create(name="notif prod type") self.assertEqual(mock.call_count, last_count + 4) self.assertEqual(mock.call_args_list[-1].args[0], "product_type_added") @@ -220,7 +221,7 @@ def test_product_types(self, mock): last_count = mock.call_count with self.subTest("product_type_deleted"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod_type.delete() self.assertEqual(mock.call_count, last_count + 1) self.assertEqual(mock.call_args_list[-1].args[0], "product_type_deleted") @@ -232,7 +233,7 @@ def test_products(self, mock): last_count = mock.call_count with self.subTest("product_added"): - with set_actor(self.notification_tester): + with 
set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod_type = Product_Type.objects.first() prod, _ = Product.objects.get_or_create(prod_type=prod_type, name="prod name") self.assertEqual(mock.call_count, last_count + 5) @@ -241,7 +242,7 @@ def test_products(self, mock): last_count = mock.call_count with self.subTest("product_deleted"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod.delete() self.assertEqual(mock.call_count, last_count + 2) self.assertEqual(mock.call_args_list[-1].args[0], "product_deleted") @@ -253,7 +254,7 @@ def test_engagements(self, mock): last_count = mock.call_count with self.subTest("engagement_added"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod = Product.objects.first() eng = Engagement.objects.create(product=prod, target_start=timezone.now(), target_end=timezone.now()) self.assertEqual(mock.call_count, last_count + 5) @@ -262,7 +263,7 @@ def test_engagements(self, mock): last_count = mock.call_count with self.subTest("close_engagement"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): eng.status = "Completed" eng.save() self.assertEqual(mock.call_count, last_count + 5) @@ -271,7 +272,7 @@ def test_engagements(self, mock): last_count = mock.call_count with self.subTest("reopen_engagement"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): eng.status = "In Progress" eng.save() self.assertEqual(mock.call_count, last_count + 5) @@ -282,7 +283,7 @@ def test_engagements(self, mock): eng.save() last_count = mock.call_count with self.subTest("no reopen_engagement from not started"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): eng.status = "In Progress" eng.save() self.assertEqual(mock.call_count, last_count) @@ -294,14 +295,14 @@ def test_engagements(self, mock): eng2 = Engagement.objects.create(product=prod2, name="Testing engagement", target_start=timezone.now(), target_end=timezone.now(), lead=User.objects.get(username="admin")) with self.subTest("engagement_deleted by product"): # in case of product removal, we are not notifying about removal - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod1.delete() for call in mock.call_args_list: self.assertNotEqual(call.args[0], "engagement_deleted") last_count = mock.call_count with self.subTest("engagement_deleted itself"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): eng2.delete() self.assertEqual(mock.call_count, last_count + 1) self.assertEqual(mock.call_args_list[-1].args[0], "engagement_deleted") @@ -317,14 +318,14 @@ def test_endpoints(self, mock): endpoint2, _ = Endpoint.objects.get_or_create(product=prod2, host="host2") with self.subTest("endpoint_deleted by product"): # in case of product removal, we are not notifying about removal - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod1.delete() for call in mock.call_args_list: 
self.assertNotEqual(call.args[0], "endpoint_deleted") last_count = mock.call_count with self.subTest("endpoint_deleted itself"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): endpoint2.delete() self.assertEqual(mock.call_count, last_count + 2) self.assertEqual(mock.call_args_list[-1].args[0], "endpoint_deleted") @@ -341,14 +342,14 @@ def test_tests(self, mock): test2 = Test.objects.create(engagement=eng2, target_start=timezone.now(), target_end=timezone.now(), test_type_id=Test_Type.objects.first().id) with self.subTest("test_deleted by engagement"): # in case of engagement removal, we are not notifying about removal - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): eng1.delete() for call in mock.call_args_list: self.assertNotEqual(call.args[0], "test_deleted") last_count = mock.call_count with self.subTest("test_deleted itself"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): test2.delete() self.assertEqual(mock.call_count, last_count + 1) self.assertEqual(mock.call_args_list[-1].args[0], "test_deleted") @@ -366,14 +367,14 @@ def test_finding_groups(self, mock): fg2, _ = Finding_Group.objects.get_or_create(test=test2, name="fg test", creator=User.objects.get(username="admin")) with self.subTest("test_deleted by engagement"): # in case of engagement removal, we are not notifying about removal - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): test1.delete() for call in mock.call_args_list: self.assertNotEqual(call.args[0], "finding_group_deleted") last_count = mock.call_count with self.subTest("test_deleted itself"): - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): fg2.delete() self.assertEqual(mock.call_count, last_count + 5) self.assertEqual(mock.call_args_list[-1].args[0], "finding_group_deleted") @@ -384,7 +385,7 @@ def test_finding_groups(self, mock): @override_settings(ENABLE_AUDITLOG=True) def test_auditlog_on(self, mock): prod_type = Product_Type.objects.create(name="notif prod type") - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod_type.delete() self.assertEqual(mock.call_args_list[-1].kwargs["description"], 'The product type "notif prod type" was deleted by admin') @@ -392,7 +393,7 @@ def test_auditlog_on(self, mock): @override_settings(ENABLE_AUDITLOG=False) def test_auditlog_off(self, mock): prod_type = Product_Type.objects.create(name="notif prod type") - with set_actor(self.notification_tester): + with set_actor(self.notification_tester), pghistory.context(user=self.notification_tester.id): prod_type.delete() self.assertEqual(mock.call_args_list[-1].kwargs["description"], 'The product type "notif prod type" was deleted') @@ -406,7 +407,6 @@ def setUp(self): self.client.credentials(HTTP_AUTHORIZATION="Token " + token.key) @patch("dojo.notifications.helper.NotificationManager._process_notifications") - @override_settings(ENABLE_AUDITLOG=True) def test_auditlog_on(self, mock): prod_type = Product_Type.objects.create(name="notif prod type API") self.client.delete(reverse("product_type-detail", args=(prod_type.pk,)), format="json")
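The notification tests above pair auditlog's set_actor() with pghistory.context(user=...) so that pghistory delete events carry the acting user, which is the value the updated dojo/test/signals.py reads back when it builds the deletion notification. A minimal sketch of that round trip, assuming the DojoEvents fields used in the diff (pgh_obj_model, pgh_obj_id, pgh_label and the user context value); the helper names here are illustrative only, not part of the patch:

import contextlib

import pghistory
from django.contrib.auth import get_user_model

from dojo.pghistory_models import DojoEvents


def delete_test_as(user, test):
    """Delete a Test while attaching the acting user to the pghistory context."""
    with pghistory.context(user=user.id):
        test.delete()


def find_test_deletion_actor(test_id):
    """Return the user recorded on the latest pghistory delete event for a Test, if any."""
    event = (
        DojoEvents.objects.filter(
            pgh_obj_model="dojo.Test",
            pgh_obj_id=test_id,
            pgh_label="delete",
        )
        .order_by("-pgh_created_at")
        .first()
    )
    if event and event.user:
        User = get_user_model()
        with contextlib.suppress(User.DoesNotExist):
            return User.objects.get(id=event.user)
    return None

When no user is recorded in the pghistory context, the signal handler above falls back to the most recent django-auditlog LogEntry for the same object, so notifications keep working regardless of which audit system produced the record.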