diff --git a/.generated-info b/.generated-info index 5c3c962703..2d18752019 100644 --- a/.generated-info +++ b/.generated-info @@ -1,4 +1,4 @@ { - "spec_repo_commit": "4413e63", - "generated": "2025-08-19 20:28:38.330" + "spec_repo_commit": "ca16233", + "generated": "2025-08-21 17:08:54.062" } diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 5dc52e4a2e..7be522b934 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -4983,6 +4983,8 @@ components: description: Optional prefix for blobs written to the container. example: logs/ type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' container_name: description: The name of the Azure Blob Storage container to store logs in. @@ -25037,6 +25039,8 @@ components: description: The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' client_id: description: Azure AD client ID used for authentication. example: a1b2c3d4-5678-90ab-cdef-1234567890ab @@ -26669,6 +26673,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -26747,6 +26753,8 @@ components: description: S3 bucket name. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: amazon-s3-destination @@ -26919,6 +26927,30 @@ components: role session. type: string type: object + ObservabilityPipelineBufferOptions: + description: Configuration for buffer settings on destination components. 
+ oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions' + ObservabilityPipelineBufferOptionsDiskType: + default: disk + description: Specifies the buffer type to configure. This option supports only + a disk buffer. + enum: + - disk + type: string + x-enum-varnames: + - DISK + ObservabilityPipelineBufferOptionsMemoryType: + default: memory + description: Specifies the buffer type to configure. This option supports only + a memory buffer. + enum: + - memory + type: string + x-enum-varnames: + - MEMORY ObservabilityPipelineConfig: description: Specifies the pipeline's configuration, including its sources, processors, and destinations. @@ -27232,6 +27264,8 @@ components: ObservabilityPipelineDatadogLogsDestination: description: The `datadog_logs` destination forwards logs to Datadog Log Management. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: datadog-logs-destination @@ -27407,12 +27441,25 @@ components: type: string x-enum-varnames: - DEDUPE + ObservabilityPipelineDiskBufferOptions: + description: Options for configuring a disk buffer. + properties: + max_size: + description: Maximum size of the disk buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType' + type: object ObservabilityPipelineElasticsearchDestination: description: The `elasticsearch` destination writes logs to an Elasticsearch cluster. properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to in Elasticsearch. 
example: logs-index @@ -27897,6 +27944,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' customer_id: description: The Google Chronicle customer ID. example: abcdefg123456789 @@ -27963,6 +28012,8 @@ components: description: Name of the GCS bucket. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: gcs-destination @@ -28273,6 +28324,28 @@ components: type: string x-enum-varnames: - LOGSTASH + ObservabilityPipelineMemoryBufferOptions: + description: Options for configuring a memory buffer by byte size. + properties: + max_size: + description: Maximum size of the disk buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object + ObservabilityPipelineMemoryBufferSizeOptions: + description: Options for configuring a memory buffer by queue length. + properties: + max_events: + description: Maximum events for the memory buffer. + example: 500 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object ObservabilityPipelineMetadataEntry: description: A custom metadata entry. properties: @@ -28296,6 +28369,8 @@ components: ObservabilityPipelineNewRelicDestination: description: The `new_relic` destination sends logs to the New Relic platform. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: new-relic-destination @@ -28433,6 +28508,8 @@ components: ObservabilityPipelineOpenSearchDestination: description: The `opensearch` destination writes logs to an OpenSearch cluster. 
properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -28657,9 +28734,10 @@ components: can drop or alert. properties: drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean id: @@ -28707,6 +28785,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -28714,7 +28794,6 @@ components: - type - include - name - - drop_events - limit - inputs type: object @@ -28745,7 +28824,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -29006,6 +29086,8 @@ components: description: The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: rsyslog-destination @@ -29076,6 +29158,16 @@ components: description: The `sample` processor allows probabilistic sampling of logs at a fixed rate. properties: + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. 
+ example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -29480,6 +29572,8 @@ components: ObservabilityPipelineSentinelOneDestination: description: The `sentinel_one` destination sends logs to SentinelOne. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: sentinelone-destination @@ -29836,6 +29930,8 @@ components: ' example: true type: boolean + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: @@ -29949,6 +30045,8 @@ components: ObservabilityPipelineSumoLogicDestination: description: The `sumo_logic` destination forwards logs to Sumo Logic. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' header_custom_fields: @@ -30052,6 +30150,8 @@ components: description: The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: syslog-ng-destination diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index 31bdac915a..b67cbcd9c2 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -11890,6 +11890,27 @@ datadog\_api\_client.v2.model.observability\_pipeline\_aws\_auth module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options module +----------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options\_disk\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_buffer\_options\_memory\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config module -------------------------------------------------------------------- @@ -12072,6 +12093,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_dedupe\_processor\_type m :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_disk\_buffer\_options module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_disk_buffer_options + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination module ---------------------------------------------------------------------------------------- @@ -12415,6 +12443,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_logstash\_source\_type mo :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_memory\_buffer\_options module +------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_memory_buffer_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_memory\_buffer\_size\_options module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_metadata\_entry module ----------------------------------------------------------------------------- diff --git a/src/datadog_api_client/v2/model/azure_storage_destination.py b/src/datadog_api_client/v2/model/azure_storage_destination.py index 283e2df4aa..11c78496d6 100644 --- a/src/datadog_api_client/v2/model/azure_storage_destination.py +++ b/src/datadog_api_client/v2/model/azure_storage_destination.py @@ -14,16 +14,28 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class AzureStorageDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType return { "blob_prefix": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "container_name": (str,), "id": (str,), "inputs": 
([str],), @@ -32,6 +44,7 @@ def openapi_types(_): attribute_map = { "blob_prefix": "blob_prefix", + "buffer": "buffer", "container_name": "container_name", "id": "id", "inputs": "inputs", @@ -45,6 +58,13 @@ def __init__( inputs: List[str], type: AzureStorageDestinationType, blob_prefix: Union[str, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ @@ -53,6 +73,9 @@ def __init__( :param blob_prefix: Optional prefix for blobs written to the container. :type blob_prefix: str, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param container_name: The name of the Azure Blob Storage container to store logs in. :type container_name: str @@ -67,6 +90,8 @@ def __init__( """ if blob_prefix is not unset: kwargs["blob_prefix"] = blob_prefix + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.container_name = container_name diff --git a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py index 44868adb77..1d4d005b60 100644 --- a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py +++ b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py @@ -3,24 +3,38 @@ # Copyright 2019-Present Datadog, Inc. 
from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class MicrosoftSentinelDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType return { + "buffer": (ObservabilityPipelineBufferOptions,), "client_id": (str,), "dcr_immutable_id": (str,), "id": (str,), @@ -31,6 +45,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "client_id": "client_id", "dcr_immutable_id": "dcr_immutable_id", "id": "id", @@ -49,11 +64,21 @@ def __init__( table: str, tenant_id: str, type: MicrosoftSentinelDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ The ``microsoft_sentinel`` destination forwards logs to Microsoft Sentinel. + :param buffer: Configuration for buffer settings on destination components. 
+ :type buffer: ObservabilityPipelineBufferOptions, optional + :param client_id: Azure AD client ID used for authentication. :type client_id: str @@ -75,6 +100,8 @@ def __init__( :param type: The destination type. The value should always be ``microsoft_sentinel``. :type type: MicrosoftSentinelDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.client_id = client_id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py index 639d156f8b..518ff59fc3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py @@ -17,9 +17,19 @@ from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( ObservabilityPipelineAmazonOpenSearchDestinationAuth, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( ObservabilityPipelineAmazonOpenSearchDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineAmazonOpenSearchDestination(ModelNormal): @@ -28,12 +38,14 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( ObservabilityPipelineAmazonOpenSearchDestinationAuth, ) + from 
datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( ObservabilityPipelineAmazonOpenSearchDestinationType, ) return { "auth": (ObservabilityPipelineAmazonOpenSearchDestinationAuth,), + "buffer": (ObservabilityPipelineBufferOptions,), "bulk_index": (str,), "id": (str,), "inputs": ([str],), @@ -42,6 +54,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", + "buffer": "buffer", "bulk_index": "bulk_index", "id": "id", "inputs": "inputs", @@ -54,6 +67,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineAmazonOpenSearchDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, bulk_index: Union[str, UnsetType] = unset, **kwargs, ): @@ -64,6 +84,9 @@ def __init__( The ``strategy`` field determines whether basic or AWS-based authentication is used. :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param bulk_index: The index to write logs to. :type bulk_index: str, optional @@ -76,6 +99,8 @@ def __init__( :param type: The destination type. The value should always be ``amazon_opensearch``. 
:type type: ObservabilityPipelineAmazonOpenSearchDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if bulk_index is not unset: kwargs["bulk_index"] = bulk_index super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py index 1a7af99838..99c74ad3fe 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py @@ -15,6 +15,7 @@ if TYPE_CHECKING: from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( ObservabilityPipelineAmazonS3DestinationStorageClass, ) @@ -22,12 +23,22 @@ from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_type import ( ObservabilityPipelineAmazonS3DestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineAmazonS3Destination(ModelNormal): @cached_property def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( 
ObservabilityPipelineAmazonS3DestinationStorageClass, ) @@ -39,6 +50,7 @@ def openapi_types(_): return { "auth": (ObservabilityPipelineAwsAuth,), "bucket": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "key_prefix": (str,), @@ -51,6 +63,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", "bucket": "bucket", + "buffer": "buffer", "id": "id", "inputs": "inputs", "key_prefix": "key_prefix", @@ -69,6 +82,13 @@ def __init__( storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass, type: ObservabilityPipelineAmazonS3DestinationType, auth: Union[ObservabilityPipelineAwsAuth, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, key_prefix: Union[str, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -83,6 +103,9 @@ def __init__( :param bucket: S3 bucket name. :type bucket: str + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: Unique identifier for the destination component. :type id: str @@ -106,6 +129,8 @@ def __init__( """ if auth is not unset: kwargs["auth"] = auth + if buffer is not unset: + kwargs["buffer"] = buffer if key_prefix is not unset: kwargs["key_prefix"] = key_prefix if tls is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py new file mode 100644 index 0000000000..283655773f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options.py @@ -0,0 +1,54 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelComposed, + cached_property, +) + + +class ObservabilityPipelineBufferOptions(ModelComposed): + def __init__(self, **kwargs): + """ + Configuration for buffer settings on destination components. + + :param max_size: Maximum size of the disk buffer. + :type max_size: int, optional + + :param type: Specifies the buffer type to configure. This option supports only a disk buffer. + :type type: ObservabilityPipelineBufferOptionsDiskType, optional + + :param max_events: Maximum events for the memory buffer. + :type max_events: int, optional + """ + super().__init__(kwargs) + + @cached_property + def _composed_schemas(_): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) + + return { + "oneOf": [ + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + ], + } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py 
new file mode 100644 index 0000000000..260ffa8057 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_disk_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineBufferOptionsDiskType(ModelSimple): + """ + Specifies the buffer type to configure. This option supports only a disk buffer. + + :param value: If omitted defaults to "disk". Must be one of ["disk"]. + :type value: str + """ + + allowed_values = { + "disk", + } + DISK: ClassVar["ObservabilityPipelineBufferOptionsDiskType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineBufferOptionsDiskType.DISK = ObservabilityPipelineBufferOptionsDiskType("disk") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py new file mode 100644 index 0000000000..dafdee383e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_buffer_options_memory_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineBufferOptionsMemoryType(ModelSimple): + """ + Specifies the buffer type to configure. 
This option supports only a memory buffer. + + :param value: If omitted defaults to "memory". Must be one of ["memory"]. + :type value: str + """ + + allowed_values = { + "memory", + } + MEMORY: ClassVar["ObservabilityPipelineBufferOptionsMemoryType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineBufferOptionsMemoryType.MEMORY = ObservabilityPipelineBufferOptionsMemoryType("memory") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py index 3272e5a0c8..a2503c9b9a 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py @@ -15,6 +15,9 @@ def __init__(self, **kwargs): """ A destination for the pipeline. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py index 11201ae45d..b64898da7c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py @@ -30,8 +30,8 @@ def __init__(self, **kwargs): :param field: The name of the log field that contains a JSON string. :type field: str - :param drop_events: If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. 
- :type drop_events: bool + :param drop_events: If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. + :type drop_events: bool, optional :param ignore_when_missing_partitions: If `true`, the processor skips quota checks when partition fields are missing from the logs. :type ignore_when_missing_partitions: bool, optional @@ -42,7 +42,7 @@ def __init__(self, **kwargs): :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. @@ -55,12 +55,22 @@ def __init__(self, **kwargs): :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + - `drop`: Drop the event. + - `no_action`: Let the event pass through. + - `overflow_routing`: Route to an overflow destination. + + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. :type fields: [ObservabilityPipelineFieldValue] :param metrics: Configuration for generating individual metrics. :type metrics: [ObservabilityPipelineGeneratedMetric] + :param group_by: Optional list of fields to group events by. Each group is sampled independently. + :type group_by: [str], optional + :param percentage: The percentage of logs to sample. 
:type percentage: float, optional @@ -91,9 +101,6 @@ def __init__(self, **kwargs): :param target: Path where enrichment results should be stored in the log. :type target: str - :param group_by: A list of fields used to group log events for merging. - :type group_by: [str] - :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py index b519b48735..5554a4a554 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py @@ -3,43 +3,74 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineDatadogLogsDestination(ModelNormal): @cached_property def openapi_types(_): + from 
datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "type": (ObservabilityPipelineDatadogLogsDestinationType,), } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "type": "type", } - def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatadogLogsDestinationType, **kwargs): + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineDatadogLogsDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, + **kwargs, + ): """ The ``datadog_logs`` destination forwards logs to Datadog Log Management. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -49,6 +80,8 @@ def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatad :param type: The destination type. The value should always be ``datadog_logs``. 
:type type: ObservabilityPipelineDatadogLogsDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py b/src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py new file mode 100644 index 0000000000..3950e1998a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_disk_buffer_options.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import ( + ObservabilityPipelineBufferOptionsDiskType, + ) + + +class ObservabilityPipelineDiskBufferOptions(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import ( + ObservabilityPipelineBufferOptionsDiskType, + ) + + return { + "max_size": (int,), + "type": (ObservabilityPipelineBufferOptionsDiskType,), + } + + attribute_map = { + "max_size": "max_size", + "type": "type", + } + + def __init__( + self_, + max_size: Union[int, UnsetType] = unset, + type: Union[ObservabilityPipelineBufferOptionsDiskType, UnsetType] = unset, + **kwargs, + ): + """ + Options for configuring a disk buffer. + + :param max_size: Maximum size of the disk buffer. + :type max_size: int, optional + + :param type: Specifies the buffer type to configure. This option supports only a disk buffer. 
+ :type type: ObservabilityPipelineBufferOptionsDiskType, optional + """ + if max_size is not unset: + kwargs["max_size"] = max_size + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py index 8ce413b7ef..0689f64e11 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py @@ -17,9 +17,19 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineElasticsearchDestination(ModelNormal): @@ -28,12 +38,14 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, 
) return { "api_version": (ObservabilityPipelineElasticsearchDestinationApiVersion,), + "buffer": (ObservabilityPipelineBufferOptions,), "bulk_index": (str,), "id": (str,), "inputs": ([str],), @@ -42,6 +54,7 @@ def openapi_types(_): attribute_map = { "api_version": "api_version", + "buffer": "buffer", "bulk_index": "bulk_index", "id": "id", "inputs": "inputs", @@ -54,6 +67,13 @@ def __init__( inputs: List[str], type: ObservabilityPipelineElasticsearchDestinationType, api_version: Union[ObservabilityPipelineElasticsearchDestinationApiVersion, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, bulk_index: Union[str, UnsetType] = unset, **kwargs, ): @@ -63,6 +83,9 @@ def __init__( :param api_version: The Elasticsearch API version to use. Set to ``auto`` to auto-detect. :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param bulk_index: The index to write logs to in Elasticsearch. 
:type bulk_index: str, optional @@ -77,6 +100,8 @@ def __init__( """ if api_version is not unset: kwargs["api_version"] = api_version + if buffer is not unset: + kwargs["buffer"] = buffer if bulk_index is not unset: kwargs["bulk_index"] = bulk_index super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py index cdfbc1b816..8a6a50beac 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py @@ -15,18 +15,29 @@ if TYPE_CHECKING: from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( ObservabilityPipelineGoogleChronicleDestinationEncoding, ) from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_type import ( ObservabilityPipelineGoogleChronicleDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineGoogleChronicleDestination(ModelNormal): @cached_property def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from 
datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( ObservabilityPipelineGoogleChronicleDestinationEncoding, ) @@ -36,6 +47,7 @@ def openapi_types(_): return { "auth": (ObservabilityPipelineGcpAuth,), + "buffer": (ObservabilityPipelineBufferOptions,), "customer_id": (str,), "encoding": (ObservabilityPipelineGoogleChronicleDestinationEncoding,), "id": (str,), @@ -46,6 +58,7 @@ def openapi_types(_): attribute_map = { "auth": "auth", + "buffer": "buffer", "customer_id": "customer_id", "encoding": "encoding", "id": "id", @@ -61,6 +74,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineGoogleChronicleDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, encoding: Union[ObservabilityPipelineGoogleChronicleDestinationEncoding, UnsetType] = unset, log_type: Union[str, UnsetType] = unset, **kwargs, @@ -71,6 +91,9 @@ def __init__( :param auth: GCP credentials used to authenticate with Google Cloud Storage. :type auth: ObservabilityPipelineGcpAuth + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param customer_id: The Google Chronicle customer ID. :type customer_id: str @@ -89,6 +112,8 @@ def __init__( :param type: The destination type. The value should always be ``google_chronicle``. 
:type type: ObservabilityPipelineGoogleChronicleDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if encoding is not unset: kwargs["encoding"] = encoding if log_type is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py index baf750038b..9f04c4a023 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py @@ -18,6 +18,7 @@ ObservabilityPipelineGoogleCloudStorageDestinationAcl, ) from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, @@ -25,6 +26,15 @@ from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_type import ( ObservabilityPipelineGoogleCloudStorageDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineGoogleCloudStorageDestination(ModelNormal): @@ -34,6 +44,7 @@ def openapi_types(_): ObservabilityPipelineGoogleCloudStorageDestinationAcl, ) from 
datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, @@ -46,6 +57,7 @@ def openapi_types(_): "acl": (ObservabilityPipelineGoogleCloudStorageDestinationAcl,), "auth": (ObservabilityPipelineGcpAuth,), "bucket": (str,), + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "key_prefix": (str,), @@ -58,6 +70,7 @@ def openapi_types(_): "acl": "acl", "auth": "auth", "bucket": "bucket", + "buffer": "buffer", "id": "id", "inputs": "inputs", "key_prefix": "key_prefix", @@ -75,6 +88,13 @@ def __init__( inputs: List[str], storage_class: ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, type: ObservabilityPipelineGoogleCloudStorageDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, key_prefix: Union[str, UnsetType] = unset, metadata: Union[List[ObservabilityPipelineMetadataEntry], UnsetType] = unset, **kwargs, @@ -92,6 +112,9 @@ def __init__( :param bucket: Name of the GCS bucket. :type bucket: str + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: Unique identifier for the destination component. :type id: str @@ -110,6 +133,8 @@ def __init__( :param type: The destination type. Always ``google_cloud_storage``. 
:type type: ObservabilityPipelineGoogleCloudStorageDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if key_prefix is not unset: kwargs["key_prefix"] = key_prefix if metadata is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py new file mode 100644 index 0000000000..97f17243e2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_options.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + +class ObservabilityPipelineMemoryBufferOptions(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + return { + "max_size": (int,), + "type": (ObservabilityPipelineBufferOptionsMemoryType,), + } + + attribute_map = { + "max_size": "max_size", + "type": "type", + } + + def __init__( + self_, + max_size: Union[int, UnsetType] = unset, + type: Union[ObservabilityPipelineBufferOptionsMemoryType, UnsetType] = unset, + **kwargs, + ): + """ + Options for configuring a memory buffer by byte size. + + :param max_size: Maximum size of the memory buffer. + :type max_size: int, optional + + :param type: Specifies the buffer type to configure. This option supports only a memory buffer. 
+ :type type: ObservabilityPipelineBufferOptionsMemoryType, optional + """ + if max_size is not unset: + kwargs["max_size"] = max_size + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py new file mode 100644 index 0000000000..fd622fd87e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_memory_buffer_size_options.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + +class ObservabilityPipelineMemoryBufferSizeOptions(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, + ) + + return { + "max_events": (int,), + "type": (ObservabilityPipelineBufferOptionsMemoryType,), + } + + attribute_map = { + "max_events": "max_events", + "type": "type", + } + + def __init__( + self_, + max_events: Union[int, UnsetType] = unset, + type: Union[ObservabilityPipelineBufferOptionsMemoryType, UnsetType] = unset, + **kwargs, + ): + """ + Options for configuring a memory buffer by queue length. + + :param max_events: Maximum events for the memory buffer. + :type max_events: int, optional + + :param type: Specifies the buffer type to configure. 
This option supports only a memory buffer. + :type type: ObservabilityPipelineBufferOptionsMemoryType, optional + """ + if max_events is not unset: + kwargs["max_events"] = max_events + if type is not unset: + kwargs["type"] = type + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py index ec0cb39481..e437d0714a 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py @@ -3,26 +3,39 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( ObservabilityPipelineNewRelicDestinationRegion, ) from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_type import ( ObservabilityPipelineNewRelicDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineNewRelicDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from 
datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( ObservabilityPipelineNewRelicDestinationRegion, ) @@ -31,6 +44,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "region": (ObservabilityPipelineNewRelicDestinationRegion,), @@ -38,6 +52,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "region": "region", @@ -50,11 +65,21 @@ def __init__( inputs: List[str], region: ObservabilityPipelineNewRelicDestinationRegion, type: ObservabilityPipelineNewRelicDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ The ``new_relic`` destination sends logs to the New Relic platform. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -67,6 +92,8 @@ def __init__( :param type: The destination type. The value should always be ``new_relic``. 
:type type: ObservabilityPipelineNewRelicDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py index d817a4b558..be7b5e7730 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py @@ -14,19 +14,31 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineOpenSearchDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "bulk_index": (str,), "id": (str,), "inputs": ([str],), @@ -34,6 +46,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "bulk_index": "bulk_index", "id": "id", "inputs": "inputs", @@ -45,12 +58,22 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineOpenSearchDestinationType, + 
buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, bulk_index: Union[str, UnsetType] = unset, **kwargs, ): """ The ``opensearch`` destination writes logs to an OpenSearch cluster. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param bulk_index: The index to write logs to. :type bulk_index: str, optional @@ -63,6 +86,8 @@ def __init__( :param type: The destination type. The value should always be ``opensearch``. :type type: ObservabilityPipelineOpenSearchDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if bulk_index is not unset: kwargs["bulk_index"] = bulk_index super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py index aa4be5a8ca..3b418cd88c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py @@ -55,6 +55,7 @@ def openapi_types(_): "overflow_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "overrides": ([ObservabilityPipelineQuotaProcessorOverride],), "partition_fields": ([str],), + "too_many_buckets_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "type": (ObservabilityPipelineQuotaProcessorType,), } @@ -69,29 +70,31 @@ def openapi_types(_): "overflow_action": "overflow_action", "overrides": "overrides", "partition_fields": "partition_fields", + "too_many_buckets_action": "too_many_buckets_action", "type": "type", } def __init__( self_, - drop_events: bool, id: str, include: str, inputs: List[str], limit: ObservabilityPipelineQuotaProcessorLimit, name: str, type: ObservabilityPipelineQuotaProcessorType, + 
drop_events: Union[bool, UnsetType] = unset, ignore_when_missing_partitions: Union[bool, UnsetType] = unset, overflow_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, overrides: Union[List[ObservabilityPipelineQuotaProcessorOverride], UnsetType] = unset, partition_fields: Union[List[str], UnsetType] = unset, + too_many_buckets_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, **kwargs, ): """ The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. - :param drop_events: If set to ``true`` , logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. - :type drop_events: bool + :param drop_events: If set to ``true`` , logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note** : You can set either ``drop_events`` or ``overflow_action`` , but not both. + :type drop_events: bool, optional :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str @@ -111,7 +114,7 @@ def __init__( :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: * ``drop`` : Drop the event. * ``no_action`` : Let the event pass through. @@ -124,9 +127,18 @@ def __init__( :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. 
:type partition_fields: [str], optional + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + + * ``drop`` : Drop the event. + * ``no_action`` : Let the event pass through. + * ``overflow_routing`` : Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param type: The processor type. The value should always be ``quota``. :type type: ObservabilityPipelineQuotaProcessorType """ + if drop_events is not unset: + kwargs["drop_events"] = drop_events if ignore_when_missing_partitions is not unset: kwargs["ignore_when_missing_partitions"] = ignore_when_missing_partitions if overflow_action is not unset: @@ -135,9 +147,10 @@ def __init__( kwargs["overrides"] = overrides if partition_fields is not unset: kwargs["partition_fields"] = partition_fields + if too_many_buckets_action is not unset: + kwargs["too_many_buckets_action"] = too_many_buckets_action super().__init__(kwargs) - self_.drop_events = drop_events self_.id = id self_.include = include self_.inputs = inputs diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py index f8181e4685..b104354ffc 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py @@ -14,7 +14,7 @@ class ObservabilityPipelineQuotaProcessorOverflowAction(ModelSimple): """ - The action to take when the quota is exceeded. Options: + The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. 
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py index 486442d4aa..a09ccda931 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py @@ -14,10 +14,20 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( ObservabilityPipelineRsyslogDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineRsyslogDestination(ModelNormal): @@ -29,12 +39,14 @@ class ObservabilityPipelineRsyslogDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( ObservabilityPipelineRsyslogDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "keepalive": (int,), @@ -43,6 +55,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "keepalive": "keepalive", @@ -55,6 +68,13 @@ def __init__( id: str, 
inputs: List[str], type: ObservabilityPipelineRsyslogDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, keepalive: Union[int, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -62,6 +82,9 @@ def __init__( """ The ``rsyslog`` destination forwards logs to an external ``rsyslog`` server over TCP or UDP using the syslog protocol. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -77,6 +100,8 @@ def __init__( :param type: The destination type. The value should always be ``rsyslog``. :type type: ObservabilityPipelineRsyslogDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if keepalive is not unset: kwargs["keepalive"] = keepalive if tls is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py index e6f16ce99d..9dcd675e0c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py @@ -21,6 +21,9 @@ class ObservabilityPipelineSampleProcessor(ModelNormal): validations = { + "group_by": { + "min_items": 1, + }, "rate": { "inclusive_minimum": 1, }, @@ -33,6 +36,7 @@ def openapi_types(_): ) return { + "group_by": ([str],), "id": (str,), "include": (str,), "inputs": ([str],), @@ -42,6 +46,7 @@ def openapi_types(_): } attribute_map = { + "group_by": "group_by", "id": "id", "include": "include", "inputs": "inputs", @@ -56,6 +61,7 @@ def __init__( include: str, inputs: List[str], type: ObservabilityPipelineSampleProcessorType, + group_by: Union[List[str], 
UnsetType] = unset, percentage: Union[float, UnsetType] = unset, rate: Union[int, UnsetType] = unset, **kwargs, @@ -63,6 +69,9 @@ def __init__( """ The ``sample`` processor allows probabilistic sampling of logs at a fixed rate. + :param group_by: Optional list of fields to group events by. Each group is sampled independently. + :type group_by: [str], optional + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str @@ -81,6 +90,8 @@ def __init__( :param type: The processor type. The value should always be ``sample``. :type type: ObservabilityPipelineSampleProcessorType """ + if group_by is not unset: + kwargs["group_by"] = group_by if percentage is not unset: kwargs["percentage"] = percentage if rate is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py index 924aa9b0f5..d19c296e36 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py @@ -3,26 +3,39 @@ # Copyright 2019-Present Datadog, Inc. 
from __future__ import annotations -from typing import List, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( ObservabilityPipelineSentinelOneDestinationRegion, ) from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_type import ( ObservabilityPipelineSentinelOneDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSentinelOneDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( ObservabilityPipelineSentinelOneDestinationRegion, ) @@ -31,6 +44,7 @@ def openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), "inputs": ([str],), "region": (ObservabilityPipelineSentinelOneDestinationRegion,), @@ -38,6 +52,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "region": "region", @@ -50,11 +65,21 @@ def __init__( inputs: List[str], region: ObservabilityPipelineSentinelOneDestinationRegion, type: ObservabilityPipelineSentinelOneDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + 
ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, **kwargs, ): """ The ``sentinel_one`` destination sends logs to SentinelOne. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -67,6 +92,8 @@ def __init__( :param type: The destination type. The value should always be ``sentinel_one``. :type type: ObservabilityPipelineSentinelOneDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py index e36b4aee74..49665d08f3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py @@ -14,17 +14,28 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( ObservabilityPipelineSplunkHecDestinationEncoding, ) from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_type import ( ObservabilityPipelineSplunkHecDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class 
ObservabilityPipelineSplunkHecDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( ObservabilityPipelineSplunkHecDestinationEncoding, ) @@ -34,6 +45,7 @@ def openapi_types(_): return { "auto_extract_timestamp": (bool,), + "buffer": (ObservabilityPipelineBufferOptions,), "encoding": (ObservabilityPipelineSplunkHecDestinationEncoding,), "id": (str,), "index": (str,), @@ -44,6 +56,7 @@ def openapi_types(_): attribute_map = { "auto_extract_timestamp": "auto_extract_timestamp", + "buffer": "buffer", "encoding": "encoding", "id": "id", "index": "index", @@ -58,6 +71,13 @@ def __init__( inputs: List[str], type: ObservabilityPipelineSplunkHecDestinationType, auto_extract_timestamp: Union[bool, UnsetType] = unset, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, encoding: Union[ObservabilityPipelineSplunkHecDestinationEncoding, UnsetType] = unset, index: Union[str, UnsetType] = unset, sourcetype: Union[str, UnsetType] = unset, @@ -70,6 +90,9 @@ def __init__( If ``false`` , Splunk assigns the time the event was received. :type auto_extract_timestamp: bool, optional + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param encoding: Encoding format for log events. 
:type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional @@ -90,6 +113,8 @@ def __init__( """ if auto_extract_timestamp is not unset: kwargs["auto_extract_timestamp"] = auto_extract_timestamp + if buffer is not unset: + kwargs["buffer"] = buffer if encoding is not unset: kwargs["encoding"] = encoding if index is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py index d49e3044f8..488c595631 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py @@ -14,6 +14,7 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( ObservabilityPipelineSumoLogicDestinationEncoding, ) @@ -23,11 +24,21 @@ from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_type import ( ObservabilityPipelineSumoLogicDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSumoLogicDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( ObservabilityPipelineSumoLogicDestinationEncoding, ) @@ -39,6 +50,7 @@ def 
openapi_types(_): ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "encoding": (ObservabilityPipelineSumoLogicDestinationEncoding,), "header_custom_fields": ([ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem],), "header_host_name": (str,), @@ -50,6 +62,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "encoding": "encoding", "header_custom_fields": "header_custom_fields", "header_host_name": "header_host_name", @@ -65,6 +78,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineSumoLogicDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, encoding: Union[ObservabilityPipelineSumoLogicDestinationEncoding, UnsetType] = unset, header_custom_fields: Union[ List[ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], UnsetType @@ -77,6 +97,9 @@ def __init__( """ The ``sumo_logic`` destination forwards logs to Sumo Logic. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param encoding: The output encoding format. :type encoding: ObservabilityPipelineSumoLogicDestinationEncoding, optional @@ -101,6 +124,8 @@ def __init__( :param type: The destination type. The value should always be ``sumo_logic``. 
:type type: ObservabilityPipelineSumoLogicDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if encoding is not unset: kwargs["encoding"] = encoding if header_custom_fields is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py index 4984e69b5e..eb2a8c309d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py @@ -14,10 +14,20 @@ if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( ObservabilityPipelineSyslogNgDestinationType, ) + from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, + ) class ObservabilityPipelineSyslogNgDestination(ModelNormal): @@ -29,12 +39,14 @@ class ObservabilityPipelineSyslogNgDestination(ModelNormal): @cached_property def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( ObservabilityPipelineSyslogNgDestinationType, ) return { + "buffer": (ObservabilityPipelineBufferOptions,), "id": (str,), 
"inputs": ([str],), "keepalive": (int,), @@ -43,6 +55,7 @@ def openapi_types(_): } attribute_map = { + "buffer": "buffer", "id": "id", "inputs": "inputs", "keepalive": "keepalive", @@ -55,6 +68,13 @@ def __init__( id: str, inputs: List[str], type: ObservabilityPipelineSyslogNgDestinationType, + buffer: Union[ + ObservabilityPipelineBufferOptions, + ObservabilityPipelineDiskBufferOptions, + ObservabilityPipelineMemoryBufferOptions, + ObservabilityPipelineMemoryBufferSizeOptions, + UnsetType, + ] = unset, keepalive: Union[int, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, @@ -62,6 +82,9 @@ def __init__( """ The ``syslog_ng`` destination forwards logs to an external ``syslog-ng`` server over TCP or UDP using the syslog protocol. + :param buffer: Configuration for buffer settings on destination components. + :type buffer: ObservabilityPipelineBufferOptions, optional + :param id: The unique identifier for this component. :type id: str @@ -77,6 +100,8 @@ def __init__( :param type: The destination type. The value should always be ``syslog_ng``. 
:type type: ObservabilityPipelineSyslogNgDestinationType """ + if buffer is not unset: + kwargs["buffer"] = buffer if keepalive is not unset: kwargs["keepalive"] = keepalive if tls is not unset: diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index 76b18d03ca..536a4f674d 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -2185,6 +2185,13 @@ ObservabilityPipelineAmazonSecurityLakeDestinationType, ) from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth +from datadog_api_client.v2.model.observability_pipeline_buffer_options import ObservabilityPipelineBufferOptions +from datadog_api_client.v2.model.observability_pipeline_buffer_options_disk_type import ( + ObservabilityPipelineBufferOptionsDiskType, +) +from datadog_api_client.v2.model.observability_pipeline_buffer_options_memory_type import ( + ObservabilityPipelineBufferOptionsMemoryType, +) from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, @@ -2249,6 +2256,9 @@ from datadog_api_client.v2.model.observability_pipeline_dedupe_processor_type import ( ObservabilityPipelineDedupeProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_disk_buffer_options import ( + ObservabilityPipelineDiskBufferOptions, +) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( ObservabilityPipelineElasticsearchDestination, ) @@ -2375,6 +2385,12 @@ from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( ObservabilityPipelineLogstashSourceType, ) +from datadog_api_client.v2.model.observability_pipeline_memory_buffer_options import ( + ObservabilityPipelineMemoryBufferOptions, +) +from 
datadog_api_client.v2.model.observability_pipeline_memory_buffer_size_options import ( + ObservabilityPipelineMemoryBufferSizeOptions, +) from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( @@ -5639,6 +5655,9 @@ "ObservabilityPipelineAmazonSecurityLakeDestination", "ObservabilityPipelineAmazonSecurityLakeDestinationType", "ObservabilityPipelineAwsAuth", + "ObservabilityPipelineBufferOptions", + "ObservabilityPipelineBufferOptionsDiskType", + "ObservabilityPipelineBufferOptionsMemoryType", "ObservabilityPipelineConfig", "ObservabilityPipelineConfigDestinationItem", "ObservabilityPipelineConfigProcessorItem", @@ -5665,6 +5684,7 @@ "ObservabilityPipelineDedupeProcessor", "ObservabilityPipelineDedupeProcessorMode", "ObservabilityPipelineDedupeProcessorType", + "ObservabilityPipelineDiskBufferOptions", "ObservabilityPipelineElasticsearchDestination", "ObservabilityPipelineElasticsearchDestinationApiVersion", "ObservabilityPipelineElasticsearchDestinationType", @@ -5715,6 +5735,8 @@ "ObservabilityPipelineKafkaSourceType", "ObservabilityPipelineLogstashSource", "ObservabilityPipelineLogstashSourceType", + "ObservabilityPipelineMemoryBufferOptions", + "ObservabilityPipelineMemoryBufferSizeOptions", "ObservabilityPipelineMetadataEntry", "ObservabilityPipelineMetricValue", "ObservabilityPipelineNewRelicDestination",