diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index d2192b7484..d24c2bf5f1 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6677,8 +6677,11 @@ components: x-enum-varnames: - AZURE_SCAN_OPTIONS AzureStorageDestination: - description: The `azure_storage` destination forwards logs to an Azure Blob + description: 'The `azure_storage` destination forwards logs to an Azure Blob Storage container. + + + **Supported pipeline types:** logs' properties: blob_prefix: description: Optional prefix for blobs written to the container. @@ -6709,6 +6712,8 @@ components: - inputs - container_name type: object + x-pipeline-types: + - logs AzureStorageDestinationType: default: azure_storage description: The destination type. The value should always be `azure_storage`. @@ -33331,8 +33336,11 @@ components: - query type: object MicrosoftSentinelDestination: - description: The `microsoft_sentinel` destination forwards logs to Microsoft + description: 'The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + + + **Supported pipeline types:** logs' properties: client_id: description: Azure AD client ID used for authentication. @@ -33373,6 +33381,8 @@ components: - dcr_immutable_id - table type: object + x-pipeline-types: + - logs MicrosoftSentinelDestinationType: default: microsoft_sentinel description: The destination type. The value should always be `microsoft_sentinel`. @@ -35077,8 +35087,11 @@ components: - data type: object ObservabilityPipelineAddEnvVarsProcessor: - description: The `add_env_vars` processor adds environment variable values to - log events. + description: 'The `add_env_vars` processor adds environment variable values + to log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35110,6 +35123,8 @@ components: - variables - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddEnvVarsProcessorType: default: add_env_vars description: The processor type. The value should always be `add_env_vars`. @@ -35135,7 +35150,10 @@ components: - name type: object ObservabilityPipelineAddFieldsProcessor: - description: The `add_fields` processor adds static key-value fields to logs. + description: 'The `add_fields` processor adds static key-value fields to logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35169,6 +35187,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddFieldsProcessorType: default: add_fields description: The processor type. The value should always be `add_fields`. @@ -35178,8 +35198,53 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: 'The `add_hostname` processor adds the hostname to log events. + + + **Supported pipeline types:** logs' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). 
+ example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: - description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35197,6 +35262,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonDataFirehoseSourceType: default: amazon_data_firehose description: The source type. The value should always be `amazon_data_firehose`. @@ -35207,7 +35274,10 @@ components: x-enum-varnames: - AMAZON_DATA_FIREHOSE ObservabilityPipelineAmazonOpenSearchDestination: - description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' @@ -35235,6 +35305,8 @@ components: - inputs - auth type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonOpenSearchDestinationAuth: description: 'Authentication settings for the Amazon OpenSearch destination. @@ -35278,8 +35350,11 @@ components: x-enum-varnames: - AMAZON_OPENSEARCH ObservabilityPipelineAmazonS3Destination: - description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35320,6 +35395,8 @@ components: - region - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3DestinationStorageClass: description: S3 storage class. enum: @@ -35356,7 +35433,10 @@ components: ObservabilityPipelineAmazonS3Source: description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. - It supports AWS authentication and TLS encryption.' + It supports AWS authentication and TLS encryption. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35379,6 +35459,8 @@ components: - type - region type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3SourceType: default: amazon_s3 description: The source type. Always `amazon_s3`. @@ -35389,8 +35471,11 @@ components: x-enum-varnames: - AMAZON_S3 ObservabilityPipelineAmazonSecurityLakeDestination: - description: The `amazon_security_lake` destination sends your logs to Amazon + description: 'The `amazon_security_lake` destination sends your logs to Amazon Security Lake. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35430,6 +35515,8 @@ components: - region - custom_source_name type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonSecurityLakeDestinationType: default: amazon_security_lake description: The destination type. Always `amazon_security_lake`. @@ -35455,6 +35542,42 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: 'The `cloud_prem` destination sends logs to Datadog CloudPrem. + + + **Supported pipeline types:** logs' + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - logs + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35473,6 +35596,8 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array + pipeline_type: + $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType' processors: description: A list of processor groups that transform or enrich log data. example: @@ -35509,25 +35634,40 @@ components: ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. 
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/AzureStorageDestination' - - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineSocketDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination' + ObservabilityPipelineConfigPipelineType: + default: logs + description: The type of data being ingested. Defaults to `logs` if not specified. + enum: + - logs + - metrics + example: logs + type: string + x-enum-varnames: + - LOGS + - METRICS ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35601,45 +35741,53 @@ components: description: A processor for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' - - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' - - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: - description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike + description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + + + **Supported pipeline types:** logs' properties: compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' @@ -35667,6 +35815,8 @@ components: - inputs - encoding type: object + x-pipeline-types: + - logs ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression: description: Compression configuration for log events. properties: @@ -35710,9 +35860,12 @@ components: x-enum-varnames: - CROWDSTRIKE_NEXT_GEN_SIEM ObservabilityPipelineCustomProcessor: - description: The `custom_processor` processor transforms events using [Vector + description: 'The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -35746,6 +35899,8 @@ components: - remaps - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineCustomProcessorRemap: description: Defines a single VRL remap rule with its own filtering and transformation logic. @@ -35821,7 +35976,11 @@ components: - config type: object ObservabilityPipelineDatadogAgentSource: - description: The `datadog_agent` source collects logs from the Datadog Agent. + description: 'The `datadog_agent` source collects logs/metrics from the Datadog + Agent. 
+ + + **Supported pipeline types:** logs, metrics' properties: id: description: The unique identifier for this component. Used to reference @@ -35837,6 +35996,9 @@ components: - id - type type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineDatadogAgentSourceType: default: datadog_agent description: The source type. The value should always be `datadog_agent`. @@ -35847,7 +36009,10 @@ components: x-enum-varnames: - DATADOG_AGENT ObservabilityPipelineDatadogLogsDestination: - description: The `datadog_logs` destination forwards logs to Datadog Log Management. + description: 'The `datadog_logs` destination forwards logs to Datadog Log Management. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -35868,6 +36033,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogLogsDestinationType: default: datadog_logs description: The destination type. The value should always be `datadog_logs`. @@ -35877,9 +36044,48 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogMetricsDestination: + description: 'The `datadog_metrics` destination forwards metrics to Datadog. + + + **Supported pipeline types:** metrics' + properties: + id: + description: The unique identifier for this component. + example: datadog-metrics-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - metric-tags-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineDatadogMetricsDestinationType: + default: datadog_metrics + description: The destination type. The value should always be `datadog_metrics`. + enum: + - datadog_metrics + example: datadog_metrics + type: string + x-enum-varnames: + - DATADOG_METRICS ObservabilityPipelineDatadogTagsProcessor: - description: The `datadog_tags` processor includes or excludes specific Datadog + description: 'The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + + + **Supported pipeline types:** logs' properties: action: $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' @@ -35922,6 +36128,8 @@ components: - keys - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogTagsProcessorAction: description: The action to take on tags with matching keys. enum: @@ -35964,7 +36172,10 @@ components: - DECODE_JSON - DECODE_SYSLOG ObservabilityPipelineDedupeProcessor: - description: The `dedupe` processor removes duplicate fields in log events. + description: 'The `dedupe` processor removes duplicate fields in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36001,6 +36212,8 @@ components: - mode - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDedupeProcessorMode: description: The deduplication mode to apply to the fields. enum: @@ -36021,8 +36234,11 @@ components: x-enum-varnames: - DEDUPE ObservabilityPipelineElasticsearchDestination: - description: The `elasticsearch` destination writes logs to an Elasticsearch + description: 'The `elasticsearch` destination writes logs to an Elasticsearch cluster. 
+ + + **Supported pipeline types:** logs' properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' @@ -36030,6 +36246,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36049,6 +36267,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineElasticsearchDestinationApiVersion: description: The Elasticsearch API version to use. Set to `auto` to auto-detect. enum: @@ -36063,6 +36283,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36202,8 +36439,12 @@ components: - path type: object ObservabilityPipelineEnrichmentTableProcessor: - description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + description: 'The `enrichment_table` processor enriches logs using a static + CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36224,6 +36465,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36237,6 +36480,8 @@ components: - target - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineEnrichmentTableProcessorType: default: enrichment_table description: The processor type. The value should always be `enrichment_table`. @@ -36246,6 +36491,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. 
properties: @@ -36262,9 +36529,12 @@ components: - value type: object ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + description: 'The `filter` processor allows conditional processing of logs/metrics + based on a Datadog search query. Logs/metrics that match the `include` query + are passed through; others are discarded. + + + **Supported pipeline types:** logs, metrics' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36279,9 +36549,9 @@ components: example: filter-processor type: string include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. + description: A Datadog search query used to determine which logs/metrics + should pass through the filter. Logs/metrics that match this query continue + to downstream components; others are dropped. example: service:my-service type: string type: @@ -36292,6 +36562,9 @@ components: - include - enabled type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineFilterProcessorType: default: filter description: The processor type. The value should always be `filter`. @@ -36302,7 +36575,10 @@ components: x-enum-varnames: - FILTER ObservabilityPipelineFluentBitSource: - description: The `fluent_bit` source ingests logs from Fluent Bit. + description: 'The `fluent_bit` source ingests logs from Fluent Bit. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36318,6 +36594,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentBitSourceType: default: fluent_bit description: The source type. The value should always be `fluent_bit`. @@ -36328,7 +36606,10 @@ components: x-enum-varnames: - FLUENT_BIT ObservabilityPipelineFluentdSource: - description: The `fluentd` source ingests logs from a Fluentd-compatible service. + description: 'The `fluentd` source ingests logs from a Fluentd-compatible service. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36344,6 +36625,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentdSourceType: default: fluentd description: The source type. The value should always be `fluentd. @@ -36368,7 +36651,10 @@ components: from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by - log fields.' + log fields. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -36398,6 +36684,8 @@ components: - type - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineGenerateMetricsProcessorType: default: generate_datadog_metrics description: The processor type. Always `generate_datadog_metrics`. @@ -36492,7 +36780,10 @@ components: - GAUGE - DISTRIBUTION ObservabilityPipelineGoogleChronicleDestination: - description: The `google_chronicle` destination sends logs to Google Chronicle. + description: 'The `google_chronicle` destination sends logs to Google Chronicle. 
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36526,6 +36817,8 @@ components: - inputs - customer_id type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleChronicleDestinationEncoding: description: The encoding format for the logs sent to Chronicle. enum: @@ -36549,7 +36842,10 @@ components: description: 'The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. - It requires a bucket name, GCP authentication, and metadata fields.' + It requires a bucket name, GCP authentication, and metadata fields. + + + **Supported pipeline types:** logs' properties: acl: $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' @@ -36591,6 +36887,8 @@ components: - bucket - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleCloudStorageDestinationAcl: description: Access control list setting for objects written to the bucket. enum: @@ -36633,8 +36931,11 @@ components: x-enum-varnames: - GOOGLE_CLOUD_STORAGE ObservabilityPipelineGooglePubSubDestination: - description: The `google_pubsub` destination publishes logs to a Google Cloud + description: 'The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36672,6 +36973,8 @@ components: - project - topic type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubDestinationEncoding: description: Encoding format for log events. enum: @@ -36692,8 +36995,11 @@ components: x-enum-varnames: - GOOGLE_PUBSUB ObservabilityPipelineGooglePubSubSource: - description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36724,6 +37030,8 @@ components: - project - subscription type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubSourceType: default: google_pubsub description: The source type. The value should always be `google_pubsub`. @@ -36733,9 +37041,94 @@ components: type: string x-enum-varnames: - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientDestination: + description: 'The `http_client` destination sends data to an HTTP endpoint. + + + **Supported pipeline types:** logs, metrics' + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy' + compression: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding' + id: + description: The unique identifier for this component. + example: http-client-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - filter-processor + items: + type: string + type: array + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType' + required: + - id + - type + - inputs + - encoding + type: object + x-pipeline-types: + - logs + - metrics + ObservabilityPipelineHttpClientDestinationAuthStrategy: + description: HTTP authentication strategy. 
+ enum: + - none + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - NONE + - BASIC + - BEARER + ObservabilityPipelineHttpClientDestinationCompression: + description: Compression configuration for HTTP requests. + properties: + algorithm: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + required: + - algorithm + type: object + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm: + description: Compression algorithm. + enum: + - gzip + example: gzip + type: string + x-enum-varnames: + - GZIP + ObservabilityPipelineHttpClientDestinationEncoding: + description: Encoding format for log events. + enum: + - json + example: json + type: string + x-enum-varnames: + - JSON + ObservabilityPipelineHttpClientDestinationType: + default: http_client + description: The destination type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT ObservabilityPipelineHttpClientSource: - description: The `http_client` source scrapes logs from HTTP endpoints at regular + description: 'The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' @@ -36766,14 +37159,18 @@ components: - type - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpClientSourceAuthStrategy: description: Optional authentication strategy for HTTP requests. enum: + - none - basic - bearer example: basic type: string x-enum-varnames: + - NONE - BASIC - BEARER ObservabilityPipelineHttpClientSourceType: @@ -36786,8 +37183,11 @@ components: x-enum-varnames: - HTTP_CLIENT ObservabilityPipelineHttpServerSource: - description: The `http_server` source collects logs over HTTP POST from external + description: 'The `http_server` source collects logs over HTTP POST from external services. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' @@ -36807,6 +37207,8 @@ components: - auth_strategy - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpServerSourceAuthStrategy: description: HTTP authentication method. enum: @@ -36826,8 +37228,161 @@ components: type: string x-enum-varnames: - HTTP_SERVER + ObservabilityPipelineKafkaDestination: + description: 'The `kafka` destination sends logs to Apache Kafka topics. + + + **Supported pipeline types:** logs' + properties: + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers + type: string + id: + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id + type: string + librdkafka_options: + description: Optional list of advanced Kafka producer configuration options, + defined as key-value pairs. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' + required: + - id + - type + - inputs + - topic + - encoding + type: object + x-pipeline-types: + - logs + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' + type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + description: 'The `kafka` source ingests data from Apache Kafka topics. + + + **Supported pipeline types:** logs' properties: group_id: description: Consumer group ID used by the Kafka client. @@ -36843,10 +37398,10 @@ components: description: Optional list of advanced Kafka client configuration options, defined as key-value pairs. 
items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' topics: @@ -36866,30 +37421,8 @@ components: - group_id - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. - properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' - type: string - required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' - type: object + x-pipeline-types: + - logs ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -36900,7 +37433,10 @@ components: x-enum-varnames: - KAFKA ObservabilityPipelineLogstashSource: - description: The `logstash` source ingests logs from a Logstash forwarder. + description: 'The `logstash` source ingests logs from a Logstash forwarder. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -36916,6 +37452,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineLogstashSourceType: default: logstash description: The source type. The value should always be `logstash`. @@ -36940,13 +37478,106 @@ components: - name - value type: object + ObservabilityPipelineMetricTagsProcessor: + description: 'The `metric_tags` processor filters metrics based on their tags + using Datadog tag key patterns. + + + **Supported pipeline types:** metrics' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: metric-tags-processor + type: string + include: + description: A Datadog search query used to determine which metrics this + processor targets. + example: '*' + type: string + rules: + description: A list of rules for filtering metric tags. + items: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule' + maxItems: 100 + minItems: 1 + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType' + required: + - id + - type + - include + - rules + - enabled + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineMetricTagsProcessorRule: + description: Defines a rule for filtering metric tags based on key patterns. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction' + keys: + description: A list of tag keys to include or exclude. 
+ example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode' + required: + - mode + - action + - keys + type: object + ObservabilityPipelineMetricTagsProcessorRuleAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineMetricTagsProcessorRuleMode: + description: The processing mode for tag filtering. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineMetricTagsProcessorType: + default: metric_tags + description: The processor type. The value should always be `metric_tags`. + enum: + - metric_tags + example: metric_tags + type: string + x-enum-varnames: + - METRIC_TAGS ObservabilityPipelineMetricValue: description: Specifies how the value of the generated metric is computed. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineNewRelicDestination: - description: The `new_relic` destination sends logs to the New Relic platform. + description: 'The `new_relic` destination sends logs to the New Relic platform. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -36970,6 +37601,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineNewRelicDestinationRegion: description: The New Relic region. enum: @@ -36990,8 +37623,11 @@ components: x-enum-varnames: - NEW_RELIC ObservabilityPipelineOcsfMapperProcessor: - description: The `ocsf_mapper` processor transforms logs into the OCSF schema + description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37023,6 +37659,8 @@ components: - mappings - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineOcsfMapperProcessorMapping: description: Defines how specific events are transformed to OCSF using a mapping configuration. @@ -37082,7 +37720,10 @@ components: - OKTA_SYSTEM_LOG_AUTHENTICATION - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC ObservabilityPipelineOpenSearchDestination: - description: The `opensearch` destination writes logs to an OpenSearch cluster. + description: 'The `opensearch` destination writes logs to an OpenSearch cluster. + + + **Supported pipeline types:** logs' properties: bulk_index: description: The index to write logs to. @@ -37107,6 +37748,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineOpenSearchDestinationType: default: opensearch description: The destination type. The value should always be `opensearch`. @@ -37116,9 +37759,56 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: 'The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + + + **Supported pipeline types:** logs' + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). 
+ example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + x-pipeline-types: + - logs + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: - description: The `parse_grok` processor extracts structured fields from unstructured + description: 'The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + + + **Supported pipeline types:** logs' properties: disable_library_rules: default: false @@ -37157,6 +37847,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseGrokProcessorRule: description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields @@ -37236,9 +37928,12 @@ components: x-enum-varnames: - PARSE_GROK ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field + description: 'The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37270,6 +37965,8 @@ components: - field - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseJSONProcessorType: default: parse_json description: The processor type. The value should always be `parse_json`. @@ -37279,28 +37976,92 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: 'The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + + + **Supported pipeline types:** logs' + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). 
+ example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. + minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match + description: 'The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean enabled: @@ -37344,6 +38105,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37354,6 +38117,8 @@ components: - limit - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineQuotaProcessorLimit: description: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. @@ -37381,7 +38146,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37425,8 +38191,11 @@ components: x-enum-varnames: - QUOTA ObservabilityPipelineReduceProcessor: - description: The `reduce` processor aggregates and merges logs based on matching + description: 'The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37467,6 +38236,8 @@ components: - merge_strategies - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineReduceProcessorMergeStrategy: description: Defines how a specific field should be merged across grouped events. properties: @@ -37520,7 +38291,10 @@ components: x-enum-varnames: - REDUCE ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + description: 'The `remove_fields` processor deletes specified fields from logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37556,6 +38330,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRemoveFieldsProcessorType: default: remove_fields description: The processor type. The value should always be `remove_fields`. @@ -37566,7 +38342,10 @@ components: x-enum-varnames: - REMOVE_FIELDS ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + description: 'The `rename_fields` processor changes field names. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37601,6 +38380,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRenameFieldsProcessorField: description: Defines how to rename a field in log events. properties: @@ -37632,8 +38413,11 @@ components: x-enum-varnames: - RENAME_FIELDS ObservabilityPipelineRsyslogDestination: - description: The `rsyslog` destination forwards logs to an external `rsyslog` + description: 'The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37662,6 +38446,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogDestinationType: default: rsyslog description: The destination type. The value should always be `rsyslog`. @@ -37672,8 +38458,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineRsyslogSource: - description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` - server using the syslog protocol. + description: 'The `rsyslog` source listens for logs over TCP or UDP from an + `rsyslog` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -37692,6 +38481,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogSourceType: default: rsyslog description: The source type. The value should always be `rsyslog`. @@ -37702,8 +38493,11 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineSampleProcessor: - description: The `sample` processor allows probabilistic sampling of logs at + description: 'The `sample` processor allows probabilistic sampling of logs at a fixed rate. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37711,6 +38505,16 @@ components: description: Whether this processor is enabled. 
example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -37727,20 +38531,17 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSampleProcessorType: default: sample description: The processor type. The value should always be `sample`. @@ -37751,8 +38552,11 @@ components: x-enum-varnames: - SAMPLE ObservabilityPipelineSensitiveDataScannerProcessor: - description: The `sensitive_data_scanner` processor detects and optionally redacts - sensitive data in log events. + description: 'The `sensitive_data_scanner` processor detects and optionally + redacts sensitive data in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -37786,6 +38590,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSensitiveDataScannerProcessorAction: description: Defines what action to take when sensitive data is matched. oneOf: @@ -37907,6 +38713,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: "Custom regex for internal API\u202Fkeys" + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -37962,6 +38773,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38103,7 +38919,10 @@ components: x-enum-varnames: - SENSITIVE_DATA_SCANNER ObservabilityPipelineSentinelOneDestination: - description: The `sentinel_one` destination sends logs to SentinelOne. + description: 'The `sentinel_one` destination sends logs to SentinelOne. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38127,6 +38946,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineSentinelOneDestinationRegion: description: The SentinelOne region to send logs to. enum: @@ -38151,8 +38972,11 @@ components: x-enum-varnames: - SENTINEL_ONE ObservabilityPipelineSocketDestination: - description: The `socket` destination sends logs over TCP or UDP to a remote + description: 'The `socket` destination sends logs over TCP or UDP to a remote server. 
+ + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' @@ -38185,6 +39009,8 @@ components: - framing - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketDestinationEncoding: description: Encoding format for log events. enum: @@ -38279,7 +39105,10 @@ components: x-enum-varnames: - SOCKET ObservabilityPipelineSocketSource: - description: The `socket` source ingests logs over TCP or UDP. + description: 'The `socket` source ingests logs over TCP or UDP. + + + **Supported pipeline types:** logs' properties: framing: $ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming' @@ -38302,6 +39131,8 @@ components: - mode - framing type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketSourceFraming: description: Framing method configuration for the socket source. oneOf: @@ -38442,9 +39273,79 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: 'The `split_array` processor splits array fields into separate + events based on configured rules. + + + **Supported pipeline types:** logs' + properties: + arrays: + description: A list of array split configurations. + items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Whether this processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For split_array, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: - description: The `splunk_hec` destination forwards logs to Splunk using the + description: 'The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + + + **Supported pipeline types:** logs' properties: auto_extract_timestamp: description: 'If `true`, Splunk tries to extract timestamps from incoming @@ -38484,6 +39385,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecDestinationEncoding: description: Encoding format for log events. 
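The new `split_array` processor schema above maps onto generated Python models (the corresponding model modules are listed in the Sphinx index later in this diff). A minimal construction sketch, assuming the generated classes mirror the schema as usual:

from datadog_api_client.v2.model.observability_pipeline_split_array_processor import (
    ObservabilityPipelineSplitArrayProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import (
    ObservabilityPipelineSplitArrayProcessorArrayConfig,
)
from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import (
    ObservabilityPipelineSplitArrayProcessorType,
)

processor = ObservabilityPipelineSplitArrayProcessor(
    id="split-array-processor",
    type=ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY,
    include="*",  # per the schema, split_array typically targets all logs
    arrays=[
        ObservabilityPipelineSplitArrayProcessorArrayConfig(
            field="tags",  # path to the array field to split into separate events
            include="*",
        )
    ],
    enabled=True,
)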
enum: @@ -38504,8 +39407,11 @@ components: x-enum-varnames: - SPLUNK_HEC ObservabilityPipelineSplunkHecSource: - description: The `splunk_hec` source implements the Splunk HTTP Event Collector + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38521,6 +39427,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecSourceType: default: splunk_hec description: The source type. Always `splunk_hec`. @@ -38534,7 +39442,10 @@ components: description: 'The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. - TLS is supported for secure transmission.' + TLS is supported for secure transmission. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38550,6 +39461,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkTcpSourceType: default: splunk_tcp description: The source type. Always `splunk_tcp`. @@ -38560,7 +39473,10 @@ components: x-enum-varnames: - SPLUNK_TCP ObservabilityPipelineSumoLogicDestination: - description: The `sumo_logic` destination forwards logs to Sumo Logic. + description: 'The `sumo_logic` destination forwards logs to Sumo Logic. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' @@ -38601,6 +39517,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicDestinationEncoding: description: The output encoding format. enum: @@ -38638,7 +39556,10 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSumoLogicSource: - description: The `sumo_logic` source receives logs from Sumo Logic collectors. + description: 'The `sumo_logic` source receives logs from Sumo Logic collectors. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. Used to reference @@ -38652,6 +39573,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicSourceType: default: sumo_logic description: The source type. The value should always be `sumo_logic`. @@ -38662,8 +39585,11 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSyslogNgDestination: - description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38692,6 +39618,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgDestinationType: default: syslog_ng description: The destination type. The value should always be `syslog_ng`. @@ -38702,8 +39630,11 @@ components: x-enum-varnames: - SYSLOG_NG ObservabilityPipelineSyslogNgSource: - description: The `syslog_ng` source listens for logs over TCP or UDP from a + description: 'The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. 
Used to reference @@ -38722,6 +39653,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgSourceType: default: syslog_ng description: The source type. The value should always be `syslog_ng`. @@ -38742,8 +39675,11 @@ components: - TCP - UDP ObservabilityPipelineThrottleProcessor: - description: The `throttle` processor limits the number of events that pass + description: 'The `throttle` processor limits the number of events that pass through over a given time window. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' @@ -38789,6 +39725,8 @@ components: - window - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineThrottleProcessorType: default: throttle description: The processor type. The value should always be `throttle`. @@ -75879,6 +76817,103 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. + operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' 
+ operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' /api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -79313,103 +80348,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. - operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' 
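The Observability Pipelines paths move from `/api/v2/remote_config/products/obs_pipelines/...` to `/api/v2/obs-pipelines/...` (the old path items are removed below, and the client's `endpoint_path` values are updated in `observability_pipelines_api.py` later in this diff). Because the endpoints remain in Preview, they stay gated behind unstable-operation flags; a minimal usage sketch against the relocated list endpoint:

from datadog_api_client import ApiClient, Configuration
from datadog_api_client.v2.api.observability_pipelines_api import ObservabilityPipelinesApi

configuration = Configuration()
# Preview operations must be opted into explicitly; the flag keys match the
# "v2.list_pipelines" entries added to configuration.py in this diff.
configuration.unstable_operations["v2.list_pipelines"] = True

with ApiClient(configuration) as api_client:
    api = ObservabilityPipelinesApi(api_client)
    # GET /api/v2/obs-pipelines/pipelines
    pipelines = api.list_pipelines()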
- operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: description: Delete a pipeline. diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index 2e859a4f0f..a5a413ddc6 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -15369,6 +15369,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_add\_fields\_processor\_t :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_add\_hostname\_processor\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_data\_firehose\_source module -------------------------------------------------------------------------------------------- @@ -15467,6 +15481,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_aws\_auth module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_cloud\_prem\_destination module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_cloud\_prem\_destination\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config module -------------------------------------------------------------------- @@ -15481,6 +15509,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_config\_destination\_item :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_config\_pipeline\_type module +------------------------------------------------------------------------------------ + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_config_pipeline_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config\_processor\_group module -------------------------------------------------------------------------------------- @@ -15600,6 +15635,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_logs\_destinatio :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_metrics\_destination module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_metrics\_destination\_type module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_tags\_processor module -------------------------------------------------------------------------------------- @@ -15663,6 +15712,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destinatio :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_data\_stream module +------------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_type module ---------------------------------------------------------------------------------------------- @@ -15740,6 +15796,13 @@ datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_proces :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_reference\_table module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_field\_value module -------------------------------------------------------------------------- @@ -15936,6 +15999,48 @@ datadog\_api\_client.v2.model.observability\_pipeline\_google\_pub\_sub\_source\ :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_auth\_strategy module +------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_compression module +---------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_compression\_algorithm module +--------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_encoding module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_destination\_type module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_source module ---------------------------------------------------------------------------------- @@ -15978,24 +16083,59 @@ datadog\_api\_client.v2.model.observability\_pipeline\_http\_server\_source\_typ :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module ---------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination module +-------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_librdkafka\_option module ------------------------------------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_compression module +--------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_sasl module ---------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_encoding module +------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_destination\_type module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_librdkafka\_option module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_sasl module +------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_sasl + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_sasl\_mechanism module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module +--------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source :members: :show-inheritance: @@ -16027,6 +16167,41 @@ datadog\_api\_client.v2.model.observability\_pipeline\_metadata\_entry module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_rule module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_rule\_action module +--------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_rule\_mode module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_tags\_processor\_type module +------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_metric\_value module --------------------------------------------------------------------------- @@ -16104,6 +16279,20 @@ datadog\_api\_client.v2.model.observability\_pipeline\_open\_search\_destination :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_opentelemetry\_source module +----------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_opentelemetry_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_opentelemetry\_source\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor module ------------------------------------------------------------------------------------ @@ -16153,10 +16342,17 @@ datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor\_t :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_pipeline\_kafka\_source\_sasl\_mechanism module ------------------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor module +----------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_xml\_processor\_type module +----------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type :members: :show-inheritance: @@ -16720,6 +16916,27 @@ datadog\_api\_client.v2.model.observability\_pipeline\_spec\_data module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_array\_config module +---------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_split\_array\_processor\_type module +------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_split_array_processor_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination module -------------------------------------------------------------------------------------- diff --git a/src/datadog_api_client/configuration.py b/src/datadog_api_client/configuration.py index 5f29613298..cb031ef99a 100644 --- a/src/datadog_api_client/configuration.py +++ b/src/datadog_api_client/configuration.py @@ -359,6 +359,12 @@ def __init__( "v2.update_monitor_user_template": False, "v2.validate_existing_monitor_user_template": False, "v2.validate_monitor_user_template": False, + "v2.create_pipeline": False, + "v2.delete_pipeline": False, + "v2.get_pipeline": False, + "v2.list_pipelines": False, + "v2.update_pipeline": False, + "v2.validate_pipeline": False, "v2.list_role_templates": False, "v2.create_connection": False, "v2.delete_connection": False, @@ -370,12 +376,6 @@ def __init__( "v2.query_event_filtered_users": False, "v2.query_users": False, "v2.update_connection": False, - "v2.create_pipeline": False, - "v2.delete_pipeline": False, - "v2.get_pipeline": False, - "v2.list_pipelines": False, - "v2.update_pipeline": False, - "v2.validate_pipeline": False, "v2.create_scorecard_outcomes_batch": False, "v2.create_scorecard_rule": False, "v2.delete_scorecard_rule": False, diff --git a/src/datadog_api_client/v2/api/observability_pipelines_api.py b/src/datadog_api_client/v2/api/observability_pipelines_api.py index 2c14f8497a..12332dfd33 100644 --- a/src/datadog_api_client/v2/api/observability_pipelines_api.py +++ b/src/datadog_api_client/v2/api/observability_pipelines_api.py @@ -31,7 +31,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ObservabilityPipeline,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "endpoint_path": "/api/v2/obs-pipelines/pipelines", "operation_id": "create_pipeline", "http_method": "POST", "version": "v2", @@ -97,7 +97,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ListPipelinesResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "endpoint_path": "/api/v2/obs-pipelines/pipelines", "operation_id": "list_pipelines", "http_method": "GET", "version": "v2", @@ -150,7 +150,7 @@ def __init__(self, api_client=None): settings={ "response_type": (ValidationResponse,), "auth": ["apiKeyAuth", "appKeyAuth"], - "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "endpoint_path": "/api/v2/obs-pipelines/pipelines/validate", "operation_id": "validate_pipeline", "http_method": "POST", "version": "v2", diff --git a/src/datadog_api_client/v2/model/azure_storage_destination.py b/src/datadog_api_client/v2/model/azure_storage_destination.py index 283e2df4aa..09c1195596 100644 --- a/src/datadog_api_client/v2/model/azure_storage_destination.py +++ b/src/datadog_api_client/v2/model/azure_storage_destination.py @@ -50,6 +50,8 @@ def __init__( """ The ``azure_storage`` destination forwards logs to an Azure Blob Storage container. + **Supported pipeline types:** logs + :param blob_prefix: Optional prefix for blobs written to the container. 
:type blob_prefix: str, optional diff --git a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py index 44868adb77..9b70faf693 100644 --- a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py +++ b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py @@ -54,6 +54,8 @@ def __init__( """ The ``microsoft_sentinel`` destination forwards logs to Microsoft Sentinel. + **Supported pipeline types:** logs + :param client_id: Azure AD client ID used for authentication. :type client_id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py index afef01d8e9..ff31018b01 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py @@ -63,6 +63,8 @@ def __init__( """ The ``add_env_vars`` processor adds environment variable values to log events. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py index 47e7dd254c..64f63e7ba8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_fields_processor.py @@ -59,6 +59,8 @@ def __init__( """ The ``add_fields`` processor adds static key-value fields to logs. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py new file mode 100644 index 0000000000..93c65862ac --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor.py @@ -0,0 +1,81 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + +class ObservabilityPipelineAddHostnameProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, + ) + + return { + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineAddHostnameProcessorType,), + } + + attribute_map = { + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineAddHostnameProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``add_hostname`` processor adds the hostname to log events. + + **Supported pipeline types:** logs + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param type: The processor type. The value should always be ``add_hostname``. + :type type: ObservabilityPipelineAddHostnameProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py new file mode 100644 index 0000000000..3f0c2dd4ff --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_hostname_processor_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAddHostnameProcessorType(ModelSimple): + """ + The processor type. The value should always be `add_hostname`. + + :param value: If omitted defaults to "add_hostname". Must be one of ["add_hostname"]. 
+ :type value: str + """ + + allowed_values = { + "add_hostname", + } + ADD_HOSTNAME: ClassVar["ObservabilityPipelineAddHostnameProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME = ObservabilityPipelineAddHostnameProcessorType( + "add_hostname" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py index cdb35acf00..29b1d8dc9d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py @@ -55,6 +55,8 @@ def __init__( """ The ``amazon_data_firehose`` source ingests logs from AWS Data Firehose. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). :type auth: ObservabilityPipelineAwsAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py index 639d156f8b..003a449c9c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py @@ -60,6 +60,8 @@ def __init__( """ The ``amazon_opensearch`` destination writes logs to Amazon OpenSearch. + **Supported pipeline types:** logs + :param auth: Authentication settings for the Amazon OpenSearch destination. The ``strategy`` field determines whether basic or AWS-based authentication is used. :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py index 1a7af99838..0018d410eb 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py @@ -76,6 +76,8 @@ def __init__( """ The ``amazon_s3`` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). :type auth: ObservabilityPipelineAwsAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py index fe4730a921..5d81b16d63 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py @@ -59,6 +59,8 @@ def __init__( The ``amazon_s3`` source ingests logs from an Amazon S3 bucket. It supports AWS authentication and TLS encryption. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). 
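The two new modules above fully define the `add_hostname` processor model, so constructing one follows directly from the signature shown:

from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import (
    ObservabilityPipelineAddHostnameProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import (
    ObservabilityPipelineAddHostnameProcessorType,
)

processor = ObservabilityPipelineAddHostnameProcessor(
    id="add-hostname-processor",
    type=ObservabilityPipelineAddHostnameProcessorType.ADD_HOSTNAME,
    include="service:my-service",  # Datadog search query selecting target logs
    enabled=True,
    display_name="Add hostname",  # optional
)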
:type auth: ObservabilityPipelineAwsAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py index 6757c5f25c..2f286a9755 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_security_lake_destination.py @@ -67,6 +67,8 @@ def __init__( """ The ``amazon_security_lake`` destination sends your logs to Amazon Security Lake. + **Supported pipeline types:** logs + :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). :type auth: ObservabilityPipelineAwsAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py new file mode 100644 index 0000000000..90a08c3a2d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, + ) + + +class ObservabilityPipelineCloudPremDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineCloudPremDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineCloudPremDestinationType, **kwargs): + """ + The ``cloud_prem`` destination sends logs to Datadog CloudPrem. + + **Supported pipeline types:** logs + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``cloud_prem``. + :type type: ObservabilityPipelineCloudPremDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py new file mode 100644 index 0000000000..88110c24ef --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_cloud_prem_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineCloudPremDestinationType(ModelSimple): + """ + The destination type. The value should always be `cloud_prem`. + + :param value: If omitted defaults to "cloud_prem". Must be one of ["cloud_prem"]. + :type value: str + """ + + allowed_values = { + "cloud_prem", + } + CLOUD_PREM: ClassVar["ObservabilityPipelineCloudPremDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM = ObservabilityPipelineCloudPremDestinationType("cloud_prem") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config.py b/src/datadog_api_client/v2/model/observability_pipeline_config.py index 48cf842a5d..473b414ac9 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config.py @@ -17,99 +17,117 @@ from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, ) + from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import ( + ObservabilityPipelineConfigPipelineType, + ) from datadog_api_client.v2.model.observability_pipeline_config_processor_group import ( ObservabilityPipelineConfigProcessorGroup, ) from datadog_api_client.v2.model.observability_pipeline_config_source_item import ( ObservabilityPipelineConfigSourceItem, ) - from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( - ObservabilityPipelineDatadogLogsDestination, + from datadog_api_client.v2.model.observability_pipeline_http_client_destination import ( + ObservabilityPipelineHttpClientDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( ObservabilityPipelineAmazonS3Destination, ) - from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( - ObservabilityPipelineGoogleCloudStorageDestination, + from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( + ObservabilityPipelineAmazonSecurityLakeDestination, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( - ObservabilityPipelineSplunkHecDestination, + from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( - ObservabilityPipelineSumoLogicDestination, + from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( + ObservabilityPipelineDatadogLogsDestination, ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( ObservabilityPipelineElasticsearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( - ObservabilityPipelineRsyslogDestination, - ) - 
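Likewise, the new `cloud_prem` destination model above can be constructed directly; `inputs` wires it to upstream components by their IDs:

from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import (
    ObservabilityPipelineCloudPremDestination,
)
from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import (
    ObservabilityPipelineCloudPremDestinationType,
)

destination = ObservabilityPipelineCloudPremDestination(
    id="cloud-prem-destination",
    inputs=["add-hostname-processor"],  # output of the processor above feeds this destination
    type=ObservabilityPipelineCloudPremDestinationType.CLOUD_PREM,
)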
from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( - ObservabilityPipelineSyslogNgDestination, - ) - from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination - from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( ObservabilityPipelineGoogleChronicleDestination, ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( + ObservabilityPipelineGooglePubSubDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ( + ObservabilityPipelineKafkaDestination, + ) + from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( ObservabilityPipelineNewRelicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( - ObservabilityPipelineSentinelOneDestination, - ) from datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( ObservabilityPipelineOpenSearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( - ObservabilityPipelineAmazonOpenSearchDestination, + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( + ObservabilityPipelineSentinelOneDestination, ) from datadog_api_client.v2.model.observability_pipeline_socket_destination import ( ObservabilityPipelineSocketDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( - ObservabilityPipelineAmazonSecurityLakeDestination, + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, ) - from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( - ObservabilityPipelineGooglePubSubDestination, + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import ( + ObservabilityPipelineDatadogMetricsDestination, ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( - ObservabilityPipelineSplunkTcpSource, - ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( - ObservabilityPipelineSplunkHecSource, + from 
datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ObservabilityPipelineAmazonS3Source - from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ( ObservabilityPipelineFluentBitSource, ) - from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( - ObservabilityPipelineHttpServerSource, - ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( - ObservabilityPipelineSumoLogicSource, - ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ObservabilityPipelineSyslogNgSource - from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( - ObservabilityPipelineAmazonDataFirehoseSource, - ) + from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( ObservabilityPipelineGooglePubSubSource, ) from datadog_api_client.v2.model.observability_pipeline_http_client_source import ( ObservabilityPipelineHttpClientSource, ) + from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( + ObservabilityPipelineHttpServerSource, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource from datadog_api_client.v2.model.observability_pipeline_socket_source import ObservabilityPipelineSocketSource + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( + ObservabilityPipelineSplunkHecSource, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( + ObservabilityPipelineSplunkTcpSource, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( + ObservabilityPipelineSumoLogicSource, + ) + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ObservabilityPipelineSyslogNgSource + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, + ) class ObservabilityPipelineConfig(ModelNormal): @@ -118,6 +136,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, ) + from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import ( + ObservabilityPipelineConfigPipelineType, + ) from datadog_api_client.v2.model.observability_pipeline_config_processor_group import ( ObservabilityPipelineConfigProcessorGroup, ) @@ -127,12 +148,14 @@ def openapi_types(_): return { "destinations": ([ObservabilityPipelineConfigDestinationItem],), + "pipeline_type": (ObservabilityPipelineConfigPipelineType,), "processors": ([ObservabilityPipelineConfigProcessorGroup],), "sources": ([ObservabilityPipelineConfigSourceItem],), } attribute_map = { "destinations": 
"destinations", + "pipeline_type": "pipeline_type", "processors": "processors", "sources": "sources", } @@ -142,48 +165,54 @@ def __init__( destinations: List[ Union[ ObservabilityPipelineConfigDestinationItem, - ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineHttpClientDestination, + ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination, - ObservabilityPipelineGoogleCloudStorageDestination, - ObservabilityPipelineSplunkHecDestination, - ObservabilityPipelineSumoLogicDestination, - ObservabilityPipelineElasticsearchDestination, - ObservabilityPipelineRsyslogDestination, - ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination, - MicrosoftSentinelDestination, + ObservabilityPipelineCloudPremDestination, + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineGoogleChronicleDestination, + ObservabilityPipelineGoogleCloudStorageDestination, + ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineKafkaDestination, + MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination, - ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, - ObservabilityPipelineAmazonOpenSearchDestination, + ObservabilityPipelineRsyslogDestination, + ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination, - ObservabilityPipelineAmazonSecurityLakeDestination, - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineSplunkHecDestination, + ObservabilityPipelineSumoLogicDestination, + ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineDatadogMetricsDestination, ] ], sources: List[ Union[ ObservabilityPipelineConfigSourceItem, - ObservabilityPipelineKafkaSource, ObservabilityPipelineDatadogAgentSource, - ObservabilityPipelineSplunkTcpSource, - ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, - ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - ObservabilityPipelineHttpServerSource, - ObservabilityPipelineSumoLogicSource, - ObservabilityPipelineRsyslogSource, - ObservabilityPipelineSyslogNgSource, - ObservabilityPipelineAmazonDataFirehoseSource, + ObservabilityPipelineFluentdSource, ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + ObservabilityPipelineHttpServerSource, + ObservabilityPipelineKafkaSource, ObservabilityPipelineLogstashSource, + ObservabilityPipelineRsyslogSource, ObservabilityPipelineSocketSource, + ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineSplunkTcpSource, + ObservabilityPipelineSumoLogicSource, + ObservabilityPipelineSyslogNgSource, + ObservabilityPipelineOpentelemetrySource, ] ], + pipeline_type: Union[ObservabilityPipelineConfigPipelineType, UnsetType] = unset, processors: Union[List[ObservabilityPipelineConfigProcessorGroup], UnsetType] = unset, **kwargs, ): @@ -193,12 +222,17 @@ def __init__( :param destinations: A list of destination components where processed logs are sent. :type destinations: [ObservabilityPipelineConfigDestinationItem] + :param pipeline_type: The type of data being ingested. Defaults to ``logs`` if not specified. 
+ :type pipeline_type: ObservabilityPipelineConfigPipelineType, optional + :param processors: A list of processor groups that transform or enrich log data. :type processors: [ObservabilityPipelineConfigProcessorGroup], optional :param sources: A list of configured data sources for the pipeline. :type sources: [ObservabilityPipelineConfigSourceItem] """ + if pipeline_type is not unset: + kwargs["pipeline_type"] = pipeline_type if processors is not unset: kwargs["processors"] = processors super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py index df018bec4e..7f4f8e2384 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py @@ -15,18 +15,33 @@ def __init__(self, **kwargs): """ A destination for the pipeline. + :param auth_strategy: HTTP authentication strategy. + :type auth_strategy: ObservabilityPipelineHttpClientDestinationAuthStrategy, optional + + :param compression: Compression configuration for HTTP requests. + :type compression: ObservabilityPipelineHttpClientDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineHttpClientDestinationEncoding + :param id: The unique identifier for this component. :type id: str - :param inputs: A list of component IDs whose output is used as the `input` for this component. + :param inputs: A list of component IDs whose output is used as the input for this component. :type inputs: [str] - :param type: The destination type. The value should always be `datadog_logs`. - :type type: ObservabilityPipelineDatadogLogsDestinationType + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional - :param auth: AWS authentication credentials used for accessing AWS services such as S3. - If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). - :type auth: ObservabilityPipelineAwsAuth, optional + :param type: The destination type. The value should always be `http_client`. + :type type: ObservabilityPipelineHttpClientDestinationType + + :param auth: Authentication settings for the Amazon OpenSearch destination. + The `strategy` field determines whether basic or AWS-based authentication is used. + :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth + + :param bulk_index: The index to write logs to. + :type bulk_index: str, optional :param bucket: S3 bucket name. :type bucket: str @@ -40,8 +55,26 @@ def __init__(self, **kwargs): :param storage_class: S3 storage class. :type storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass - :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. - :type tls: ObservabilityPipelineTls, optional + :param custom_source_name: Custom source name for the logs in Security Lake. + :type custom_source_name: str + + :param blob_prefix: Optional prefix for blobs written to the container. + :type blob_prefix: str, optional + + :param container_name: The name of the Azure Blob Storage container to store logs in. + :type container_name: str + + :param api_version: The Elasticsearch API version to use. Set to `auto` to auto-detect. 
+ :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + + :param data_stream: Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + :type data_stream: ObservabilityPipelineElasticsearchDestinationDataStream, optional + + :param customer_id: The Google Chronicle customer ID. + :type customer_id: str + + :param log_type: The log type metadata associated with the Chronicle destination. + :type log_type: str, optional :param acl: Access control list setting for objects written to the bucket. :type acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl, optional @@ -49,45 +82,35 @@ def __init__(self, **kwargs): :param metadata: Custom metadata to attach to each object uploaded to the GCS bucket. :type metadata: [ObservabilityPipelineMetadataEntry], optional - :param auto_extract_timestamp: If `true`, Splunk tries to extract timestamps from incoming log events. - If `false`, Splunk assigns the time the event was received. - :type auto_extract_timestamp: bool, optional - - :param encoding: Encoding format for log events. - :type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional - - :param index: Optional name of the Splunk index where logs are written. - :type index: str, optional - - :param sourcetype: The Splunk sourcetype to assign to log events. - :type sourcetype: str, optional + :param project: The GCP project ID that owns the Pub/Sub topic. + :type project: str - :param header_custom_fields: A list of custom headers to include in the request to Sumo Logic. - :type header_custom_fields: [ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], optional + :param topic: The Pub/Sub topic name to publish logs to. + :type topic: str - :param header_host_name: Optional override for the host name header. - :type header_host_name: str, optional + :param headers_key: The field name to use for Kafka message headers. + :type headers_key: str, optional - :param header_source_category: Optional override for the source category header. - :type header_source_category: str, optional + :param key_field: The field name to use as the Kafka message key. + :type key_field: str, optional - :param header_source_name: Optional override for the source name header. - :type header_source_name: str, optional + :param librdkafka_options: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional - :param api_version: The Elasticsearch API version to use. Set to `auto` to auto-detect. - :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + :param message_timeout_ms: Maximum time in milliseconds to wait for message delivery confirmation. + :type message_timeout_ms: int, optional - :param bulk_index: The index to write logs to in Elasticsearch. - :type bulk_index: str, optional + :param rate_limit_duration_secs: Duration in seconds for the rate limit window. + :type rate_limit_duration_secs: int, optional - :param keepalive: Optional socket keepalive duration in milliseconds. - :type keepalive: int, optional + :param rate_limit_num: Maximum number of messages allowed per rate limit duration. + :type rate_limit_num: int, optional - :param blob_prefix: Optional prefix for blobs written to the container. - :type blob_prefix: str, optional + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. 
+ :type sasl: ObservabilityPipelineKafkaSasl, optional - :param container_name: The name of the Azure Blob Storage container to store logs in. - :type container_name: str + :param socket_timeout_ms: Socket timeout in milliseconds for network requests. + :type socket_timeout_ms: int, optional :param client_id: Azure AD client ID used for authentication. :type client_id: str @@ -101,11 +124,8 @@ def __init__(self, **kwargs): :param tenant_id: Azure AD tenant ID. :type tenant_id: str - :param customer_id: The Google Chronicle customer ID. - :type customer_id: str - - :param log_type: The log type metadata associated with the Chronicle destination. - :type log_type: str, optional + :param keepalive: Optional socket keepalive duration in milliseconds. + :type keepalive: int, optional :param framing: Framing method configuration. :type framing: ObservabilityPipelineSocketDestinationFraming @@ -113,17 +133,27 @@ def __init__(self, **kwargs): :param mode: Protocol used to send logs. :type mode: ObservabilityPipelineSocketDestinationMode - :param custom_source_name: Custom source name for the logs in Security Lake. - :type custom_source_name: str + :param auto_extract_timestamp: If `true`, Splunk tries to extract timestamps from incoming log events. + If `false`, Splunk assigns the time the event was received. + :type auto_extract_timestamp: bool, optional - :param compression: Compression configuration for log events. - :type compression: ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, optional + :param index: Optional name of the Splunk index where logs are written. + :type index: str, optional - :param project: The GCP project ID that owns the Pub/Sub topic. - :type project: str + :param sourcetype: The Splunk sourcetype to assign to log events. + :type sourcetype: str, optional - :param topic: The Pub/Sub topic name to publish logs to. - :type topic: str + :param header_custom_fields: A list of custom headers to include in the request to Sumo Logic. + :type header_custom_fields: [ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], optional + + :param header_host_name: Optional override for the host name header. + :type header_host_name: str, optional + + :param header_source_category: Optional override for the source category header. + :type header_source_category: str, optional + + :param header_source_name: Optional override for the source name header. 
+ :type header_source_name: str, optional """ super().__init__(kwargs) @@ -136,80 +166,96 @@ def _composed_schemas(_): # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading - from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( - ObservabilityPipelineDatadogLogsDestination, + from datadog_api_client.v2.model.observability_pipeline_http_client_destination import ( + ObservabilityPipelineHttpClientDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( ObservabilityPipelineAmazonS3Destination, ) - from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( - ObservabilityPipelineGoogleCloudStorageDestination, + from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( + ObservabilityPipelineAmazonSecurityLakeDestination, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( - ObservabilityPipelineSplunkHecDestination, + from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination + from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( - ObservabilityPipelineSumoLogicDestination, + from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( + ObservabilityPipelineDatadogLogsDestination, ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( ObservabilityPipelineElasticsearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( - ObservabilityPipelineRsyslogDestination, - ) - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( - ObservabilityPipelineSyslogNgDestination, - ) - from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination - from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( ObservabilityPipelineGoogleChronicleDestination, ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( + ObservabilityPipelineGooglePubSubDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ( + ObservabilityPipelineKafkaDestination, + ) + from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( ObservabilityPipelineNewRelicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( - ObservabilityPipelineSentinelOneDestination, - ) from 
datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( ObservabilityPipelineOpenSearchDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( - ObservabilityPipelineAmazonOpenSearchDestination, + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( + ObservabilityPipelineSentinelOneDestination, ) from datadog_api_client.v2.model.observability_pipeline_socket_destination import ( ObservabilityPipelineSocketDestination, ) - from datadog_api_client.v2.model.observability_pipeline_amazon_security_lake_destination import ( - ObservabilityPipelineAmazonSecurityLakeDestination, + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, ) - from datadog_api_client.v2.model.observability_pipeline_crowd_strike_next_gen_siem_destination import ( - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, ) - from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_destination import ( - ObservabilityPipelineGooglePubSubDestination, + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import ( + ObservabilityPipelineDatadogMetricsDestination, ) return { "oneOf": [ - ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineHttpClientDestination, + ObservabilityPipelineAmazonOpenSearchDestination, ObservabilityPipelineAmazonS3Destination, - ObservabilityPipelineGoogleCloudStorageDestination, - ObservabilityPipelineSplunkHecDestination, - ObservabilityPipelineSumoLogicDestination, - ObservabilityPipelineElasticsearchDestination, - ObservabilityPipelineRsyslogDestination, - ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineAmazonSecurityLakeDestination, AzureStorageDestination, - MicrosoftSentinelDestination, + ObservabilityPipelineCloudPremDestination, + ObservabilityPipelineCrowdStrikeNextGenSiemDestination, + ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineElasticsearchDestination, ObservabilityPipelineGoogleChronicleDestination, + ObservabilityPipelineGoogleCloudStorageDestination, + ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineKafkaDestination, + MicrosoftSentinelDestination, ObservabilityPipelineNewRelicDestination, - ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineOpenSearchDestination, - ObservabilityPipelineAmazonOpenSearchDestination, + ObservabilityPipelineRsyslogDestination, + ObservabilityPipelineSentinelOneDestination, ObservabilityPipelineSocketDestination, - ObservabilityPipelineAmazonSecurityLakeDestination, - ObservabilityPipelineCrowdStrikeNextGenSiemDestination, - ObservabilityPipelineGooglePubSubDestination, + ObservabilityPipelineSplunkHecDestination, + ObservabilityPipelineSumoLogicDestination, + ObservabilityPipelineSyslogNgDestination, + ObservabilityPipelineDatadogMetricsDestination, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_pipeline_type.py 
b/src/datadog_api_client/v2/model/observability_pipeline_config_pipeline_type.py new file mode 100644 index 0000000000..bdf1f81a18 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_pipeline_type.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineConfigPipelineType(ModelSimple): + """ + The type of data being ingested. Defaults to `logs` if not specified. + + :param value: If omitted defaults to "logs". Must be one of ["logs", "metrics"]. + :type value: str + """ + + allowed_values = { + "logs", + "metrics", + } + LOGS: ClassVar["ObservabilityPipelineConfigPipelineType"] + METRICS: ClassVar["ObservabilityPipelineConfigPipelineType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineConfigPipelineType.LOGS = ObservabilityPipelineConfigPipelineType("logs") +ObservabilityPipelineConfigPipelineType.METRICS = ObservabilityPipelineConfigPipelineType("metrics") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py index 6579870f58..62bba716b5 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_group.py @@ -18,46 +18,58 @@ ObservabilityPipelineConfigProcessorItem, ) from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor - from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( - ObservabilityPipelineParseJSONProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( ObservabilityPipelineAddFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( - ObservabilityPipelineRemoveFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( - ObservabilityPipelineRenameFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, ) from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( ObservabilityPipelineGenerateMetricsProcessor, ) - from 
datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( - ObservabilityPipelineSensitiveDataScannerProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( + ObservabilityPipelineParseJSONProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, + from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor + from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( + ObservabilityPipelineRemoveFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor - from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, + from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( + ObservabilityPipelineRenameFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ObservabilityPipelineCustomProcessor - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import ( + ObservabilityPipelineMetricTagsProcessor, ) @@ -96,23 +108,27 @@ def __init__( Union[ ObservabilityPipelineConfigProcessorItem, ObservabilityPipelineFilterProcessor, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, 
ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineMetricTagsProcessor, ] ], display_name: Union[str, UnsetType] = unset, diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py index 8becc43c0d..b57610336b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py @@ -24,16 +24,79 @@ def __init__(self, **kwargs): :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). :type id: str - :param include: A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + :param include: A Datadog search query used to determine which logs or metrics should pass through the filter. Events that match this query continue to downstream components; others are dropped. :type include: str :param type: The processor type. The value should always be `filter`. :type type: ObservabilityPipelineFilterProcessorType + :param variables: A list of environment variable mappings to apply to log fields. + :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] + + :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. + :type fields: [ObservabilityPipelineFieldValue] + + :param remaps: Array of VRL remap rules. + :type remaps: [ObservabilityPipelineCustomProcessorRemap] + + :param action: The action to take on tags with matching keys. + :type action: ObservabilityPipelineDatadogTagsProcessorAction + + :param keys: A list of tag keys. + :type keys: [str] + + :param mode: The processing mode. + :type mode: ObservabilityPipelineDatadogTagsProcessorMode + + :param file: Defines a static enrichment table loaded from a CSV file. + :type file: ObservabilityPipelineEnrichmentTableFile, optional + + :param geoip: Uses a GeoIP database to enrich logs based on an IP field. + :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional + + :param reference_table: Uses a Datadog reference table to enrich logs. + :type reference_table: ObservabilityPipelineEnrichmentTableReferenceTable, optional + + :param target: Path where enrichment results should be stored in the log. + :type target: str + + :param metrics: Configuration for generating individual metrics. + :type metrics: [ObservabilityPipelineGeneratedMetric], optional + + :param mappings: A list of mapping rules to convert events to the OCSF format. 
+ :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] + + :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. + :type disable_library_rules: bool, optional + + :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + :type rules: [ObservabilityPipelineParseGrokProcessorRule] + :param field: The name of the log field that contains a JSON string. :type field: str - :param drop_events: If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param drop_events: If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. :type drop_events: bool, optional :param ignore_when_missing_partitions: If `true`, the processor skips quota checks when partition fields are missing from the logs. @@ -45,7 +108,7 @@ def __init__(self, **kwargs): :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. @@ -57,41 +120,11 @@ def __init__(self, **kwargs): :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional - :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. - :type fields: [ObservabilityPipelineFieldValue] - - :param metrics: Configuration for generating individual metrics. - :type metrics: [ObservabilityPipelineGeneratedMetric], optional - - :param percentage: The percentage of logs to sample. - :type percentage: float, optional - - :param rate: Number of events to sample (1 in N). - :type rate: int, optional - - :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. - :type disable_library_rules: bool, optional - - :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. 
- :type rules: [ObservabilityPipelineParseGrokProcessorRule] - - :param mappings: A list of mapping rules to convert events to the OCSF format. - :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] - - :param variables: A list of environment variable mappings to apply to log fields. - :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] - - :param mode: The deduplication mode to apply to the fields. - :type mode: ObservabilityPipelineDedupeProcessorMode - - :param file: Defines a static enrichment table loaded from a CSV file. - :type file: ObservabilityPipelineEnrichmentTableFile, optional - - :param geoip: Uses a GeoIP database to enrich logs based on an IP field. - :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional - - :param target: Path where enrichment results should be stored in the log. - :type target: str + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + - `drop`: Drop the event. + - `no_action`: Let the event pass through. + - `overflow_routing`: Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional :param group_by: A list of fields used to group log events for merging. :type group_by: [str] @@ -99,20 +132,17 @@ def __init__(self, **kwargs): :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] + :param percentage: The percentage of logs to sample. + :type percentage: float + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + :param threshold: The number of events allowed in a given time window. Events sent after the threshold has been reached are dropped. :type threshold: int :param window: The time window in seconds over which the threshold applies. :type window: float - - :param remaps: Array of VRL remap rules. - :type remaps: [ObservabilityPipelineCustomProcessorRemap] - - :param action: The action to take on tags with matching keys. - :type action: ObservabilityPipelineDatadogTagsProcessorAction - - :param keys: A list of tag keys. 
- :type keys: [str] """ super().__init__(kwargs) @@ -128,14 +158,47 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_filter_processor import ( ObservabilityPipelineFilterProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( + ObservabilityPipelineAddFieldsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( + ObservabilityPipelineCustomProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( + ObservabilityPipelineDatadogTagsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( + ObservabilityPipelineDedupeProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( ObservabilityPipelineParseJSONProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ( ObservabilityPipelineQuotaProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( - ObservabilityPipelineAddFieldsProcessor, + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( + ObservabilityPipelineReduceProcessor, ) from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( ObservabilityPipelineRemoveFieldsProcessor, @@ -143,62 +206,45 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( ObservabilityPipelineRenameFieldsProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( - ObservabilityPipelineGenerateMetricsProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sample_processor import ( ObservabilityPipelineSampleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( - ObservabilityPipelineParseGrokProcessor, - ) from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( ObservabilityPipelineSensitiveDataScannerProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( - ObservabilityPipelineOcsfMapperProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( - ObservabilityPipelineAddEnvVarsProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( - ObservabilityPipelineDedupeProcessor, - ) - from 
datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( - ObservabilityPipelineEnrichmentTableProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( - ObservabilityPipelineReduceProcessor, + from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, ) from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( ObservabilityPipelineThrottleProcessor, ) - from datadog_api_client.v2.model.observability_pipeline_custom_processor import ( - ObservabilityPipelineCustomProcessor, - ) - from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( - ObservabilityPipelineDatadogTagsProcessor, + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import ( + ObservabilityPipelineMetricTagsProcessor, ) return { "oneOf": [ ObservabilityPipelineFilterProcessor, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineAddHostnameProcessor, + ObservabilityPipelineCustomProcessor, + ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineParseXMLProcessor, ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineReduceProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, - ObservabilityPipelineGenerateMetricsProcessor, ObservabilityPipelineSampleProcessor, - ObservabilityPipelineParseGrokProcessor, ObservabilityPipelineSensitiveDataScannerProcessor, - ObservabilityPipelineOcsfMapperProcessor, - ObservabilityPipelineAddEnvVarsProcessor, - ObservabilityPipelineDedupeProcessor, - ObservabilityPipelineEnrichmentTableProcessor, - ObservabilityPipelineReduceProcessor, + ObservabilityPipelineSplitArrayProcessor, ObservabilityPipelineThrottleProcessor, - ObservabilityPipelineCustomProcessor, - ObservabilityPipelineDatadogTagsProcessor, + ObservabilityPipelineMetricTagsProcessor, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py index 04e30172c1..0a284062b6 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py @@ -15,26 +15,14 @@ def __init__(self, **kwargs): """ A data source for the pipeline. - :param group_id: Consumer group ID used by the Kafka client. - :type group_id: str - :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). :type id: str - :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. - :type librdkafka_options: [ObservabilityPipelineKafkaSourceLibrdkafkaOption], optional - - :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. - :type sasl: ObservabilityPipelineKafkaSourceSasl, optional - :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. 
:type tls: ObservabilityPipelineTls, optional - :param topics: A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified. - :type topics: [str] - - :param type: The source type. The value should always be `kafka`. - :type type: ObservabilityPipelineKafkaSourceType + :param type: The source type. The value should always be `datadog_agent`. + :type type: ObservabilityPipelineDatadogAgentSourceType :param auth: AWS authentication credentials used for accessing AWS services such as S3. If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). @@ -43,29 +31,47 @@ def __init__(self, **kwargs): :param region: AWS region where the S3 bucket resides. :type region: str - :param auth_strategy: HTTP authentication method. - :type auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy - :param decoding: The decoding format used to interpret incoming logs. :type decoding: ObservabilityPipelineDecoding - :param mode: Protocol used by the syslog source to receive messages. - :type mode: ObservabilityPipelineSyslogSourceMode - :param project: The GCP project ID that owns the Pub/Sub subscription. :type project: str :param subscription: The Pub/Sub subscription name from which messages are consumed. :type subscription: str + :param auth_strategy: Optional authentication strategy for HTTP requests. + :type auth_strategy: ObservabilityPipelineHttpClientSourceAuthStrategy, optional + :param scrape_interval_secs: The interval (in seconds) between HTTP scrape requests. :type scrape_interval_secs: int, optional :param scrape_timeout_secs: The timeout (in seconds) for each scrape request. :type scrape_timeout_secs: int, optional + :param group_id: Consumer group ID used by the Kafka client. + :type group_id: str + + :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional + + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional + + :param topics: A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified. + :type topics: [str] + + :param mode: Protocol used by the syslog source to receive messages. + :type mode: ObservabilityPipelineSyslogSourceMode + :param framing: Framing method configuration for the socket source. :type framing: ObservabilityPipelineSocketSourceFraming + + :param grpc_address_key: Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type grpc_address_key: str, optional + + :param http_address_key: Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). 
+ :type http_address_key: str, optional """ super().__init__(kwargs) @@ -78,64 +84,68 @@ def _composed_schemas(_): # code would be run when this module is imported, and these composed # classes don't exist yet because their module has not finished # loading - from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, ) - from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( - ObservabilityPipelineSplunkTcpSource, - ) - from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( - ObservabilityPipelineSplunkHecSource, + from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, ) from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ( ObservabilityPipelineAmazonS3Source, ) - from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ( ObservabilityPipelineFluentBitSource, ) - from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( - ObservabilityPipelineHttpServerSource, - ) - from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( - ObservabilityPipelineSumoLogicSource, - ) - from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource - from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ( - ObservabilityPipelineSyslogNgSource, - ) - from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( - ObservabilityPipelineAmazonDataFirehoseSource, - ) + from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( ObservabilityPipelineGooglePubSubSource, ) from datadog_api_client.v2.model.observability_pipeline_http_client_source import ( ObservabilityPipelineHttpClientSource, ) + from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( + ObservabilityPipelineHttpServerSource, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_logstash_source import ( ObservabilityPipelineLogstashSource, ) + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource from datadog_api_client.v2.model.observability_pipeline_socket_source import ObservabilityPipelineSocketSource + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( + ObservabilityPipelineSplunkHecSource, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( + ObservabilityPipelineSplunkTcpSource, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( + ObservabilityPipelineSumoLogicSource, + ) + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ( + ObservabilityPipelineSyslogNgSource, + ) + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, + ) return { "oneOf": [ - 
ObservabilityPipelineKafkaSource, ObservabilityPipelineDatadogAgentSource, - ObservabilityPipelineSplunkTcpSource, - ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineAmazonDataFirehoseSource, ObservabilityPipelineAmazonS3Source, - ObservabilityPipelineFluentdSource, ObservabilityPipelineFluentBitSource, - ObservabilityPipelineHttpServerSource, - ObservabilityPipelineSumoLogicSource, - ObservabilityPipelineRsyslogSource, - ObservabilityPipelineSyslogNgSource, - ObservabilityPipelineAmazonDataFirehoseSource, + ObservabilityPipelineFluentdSource, ObservabilityPipelineGooglePubSubSource, ObservabilityPipelineHttpClientSource, + ObservabilityPipelineHttpServerSource, + ObservabilityPipelineKafkaSource, ObservabilityPipelineLogstashSource, + ObservabilityPipelineRsyslogSource, ObservabilityPipelineSocketSource, + ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineSplunkTcpSource, + ObservabilityPipelineSumoLogicSource, + ObservabilityPipelineSyslogNgSource, + ObservabilityPipelineOpentelemetrySource, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py index d1dbc9facf..a84a24c745 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_crowd_strike_next_gen_siem_destination.py @@ -71,6 +71,8 @@ def __init__( """ The ``crowdstrike_next_gen_siem`` destination forwards logs to CrowdStrike Next Gen SIEM. + **Supported pipeline types:** logs + :param compression: Compression configuration for log events. :type compression: ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py index 3304418aaf..237c88b0bf 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_custom_processor.py @@ -68,6 +68,8 @@ def __init__( """ The ``custom_processor`` processor transforms events using `Vector Remap Language (VRL) `_ scripts with advanced filtering capabilities. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py index 3614768ec1..565c998f82 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py @@ -48,7 +48,9 @@ def __init__( **kwargs, ): """ - The ``datadog_agent`` source collects logs from the Datadog Agent. + The ``datadog_agent`` source collects logs and metrics from the Datadog Agent. + + **Supported pipeline types:** logs, metrics :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). 
:type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py index b519b48735..453881dfc4 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_logs_destination.py @@ -40,6 +40,8 @@ def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatad """ The ``datadog_logs`` destination forwards logs to Datadog Log Management. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination.py new file mode 100644 index 0000000000..4c42f53f56 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import ( + ObservabilityPipelineDatadogMetricsDestinationType, + ) + + +class ObservabilityPipelineDatadogMetricsDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import ( + ObservabilityPipelineDatadogMetricsDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineDatadogMetricsDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__(self_, id: str, inputs: List[str], type: ObservabilityPipelineDatadogMetricsDestinationType, **kwargs): + """ + The ``datadog_metrics`` destination forwards metrics to Datadog. + + **Supported pipeline types:** metrics + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the input for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``datadog_metrics``. + :type type: ObservabilityPipelineDatadogMetricsDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination_type.py new file mode 100644 index 0000000000..b0897c7d8b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_metrics_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
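To see how the pieces introduced in this changeset fit together, here is a minimal usage sketch (illustrative, not part of the diff) that builds a metrics pipeline config from the new `pipeline_type` field and `datadog_metrics` destination. The `ObservabilityPipelineDatadogAgentSourceType` module path and its `DATADOG_AGENT` varname are assumed from the generator's naming conventions; every other name appears in this changeset.

```python
# Illustrative sketch only; constructor signatures as shown in this diff.
from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig
from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import (
    ObservabilityPipelineConfigPipelineType,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import (
    ObservabilityPipelineDatadogAgentSource,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source_type import (  # path assumed
    ObservabilityPipelineDatadogAgentSourceType,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import (
    ObservabilityPipelineDatadogMetricsDestination,
)
from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import (
    ObservabilityPipelineDatadogMetricsDestinationType,
)

config = ObservabilityPipelineConfig(
    sources=[
        # The datadog_agent source supports both pipeline types.
        ObservabilityPipelineDatadogAgentSource(
            id="datadog-agent-source",
            type=ObservabilityPipelineDatadogAgentSourceType.DATADOG_AGENT,  # varname assumed
        )
    ],
    destinations=[
        # The new metrics-only destination added by this changeset.
        ObservabilityPipelineDatadogMetricsDestination(
            id="datadog-metrics-destination",
            inputs=["datadog-agent-source"],
            type=ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS,
        )
    ],
    # Omitting pipeline_type preserves the previous behavior: it defaults to "logs".
    pipeline_type=ObservabilityPipelineConfigPipelineType.METRICS,
)
```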
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineDatadogMetricsDestinationType(ModelSimple): + """ + The destination type. The value should always be `datadog_metrics`. + + :param value: If omitted defaults to "datadog_metrics". Must be one of ["datadog_metrics"]. + :type value: str + """ + + allowed_values = { + "datadog_metrics", + } + DATADOG_METRICS: ClassVar["ObservabilityPipelineDatadogMetricsDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineDatadogMetricsDestinationType.DATADOG_METRICS = ObservabilityPipelineDatadogMetricsDestinationType( + "datadog_metrics" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py index c96b59c4a6..3a9e2130e6 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_tags_processor.py @@ -75,6 +75,8 @@ def __init__( """ The ``datadog_tags`` processor includes or excludes specific Datadog tags in your logs. + **Supported pipeline types:** logs + :param action: The action to take on tags with matching keys. :type action: ObservabilityPipelineDatadogTagsProcessorAction diff --git a/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py index 9c4e3999c9..01b401de6b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py @@ -66,6 +66,8 @@ def __init__( """ The ``dedupe`` processor removes duplicate fields in log events. + **Supported pipeline types:** logs + :param display_name: The display name for a component. 
:type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py index 8ce413b7ef..33b38b74b1 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination.py @@ -17,6 +17,9 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, + ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -28,6 +31,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, + ) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ -35,6 +41,7 @@ def openapi_types(_): return { "api_version": (ObservabilityPipelineElasticsearchDestinationApiVersion,), "bulk_index": (str,), + "data_stream": (ObservabilityPipelineElasticsearchDestinationDataStream,), "id": (str,), "inputs": ([str],), "type": (ObservabilityPipelineElasticsearchDestinationType,), @@ -43,6 +50,7 @@ def openapi_types(_): attribute_map = { "api_version": "api_version", "bulk_index": "bulk_index", + "data_stream": "data_stream", "id": "id", "inputs": "inputs", "type": "type", @@ -55,17 +63,23 @@ def __init__( type: ObservabilityPipelineElasticsearchDestinationType, api_version: Union[ObservabilityPipelineElasticsearchDestinationApiVersion, UnsetType] = unset, bulk_index: Union[str, UnsetType] = unset, + data_stream: Union[ObservabilityPipelineElasticsearchDestinationDataStream, UnsetType] = unset, **kwargs, ): """ The ``elasticsearch`` destination writes logs to an Elasticsearch cluster. + **Supported pipeline types:** logs + :param api_version: The Elasticsearch API version to use. Set to ``auto`` to auto-detect. :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional :param bulk_index: The index to write logs to in Elasticsearch. :type bulk_index: str, optional + :param data_stream: Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + :type data_stream: ObservabilityPipelineElasticsearchDestinationDataStream, optional + :param id: The unique identifier for this component. 
:type id: str @@ -79,6 +93,8 @@ def __init__( kwargs["api_version"] = api_version if bulk_index is not unset: kwargs["bulk_index"] = bulk_index + if data_stream is not unset: + kwargs["data_stream"] = data_stream super().__init__(kwargs) self_.id = id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py new file mode 100644 index 0000000000..8046b936b9 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_data_stream.py @@ -0,0 +1,56 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineElasticsearchDestinationDataStream(ModelNormal): + @cached_property + def openapi_types(_): + return { + "dataset": (str,), + "dtype": (str,), + "namespace": (str,), + } + + attribute_map = { + "dataset": "dataset", + "dtype": "dtype", + "namespace": "namespace", + } + + def __init__( + self_, + dataset: Union[str, UnsetType] = unset, + dtype: Union[str, UnsetType] = unset, + namespace: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + + :param dataset: The data stream dataset for your logs. This groups logs by their source or application. + :type dataset: str, optional + + :param dtype: The data stream type for your logs. This determines how logs are categorized within the data stream. + :type dtype: str, optional + + :param namespace: The data stream namespace for your logs. This separates logs into different environments or domains. 
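For the new data_stream option on the Elasticsearch destination, a minimal usage sketch. Component IDs and stream values are hypothetical, and the ELASTICSEARCH type constant is assumed from the generator's usual naming; it is not part of this hunk:

from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import (
    ObservabilityPipelineElasticsearchDestination,
)
from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import (
    ObservabilityPipelineElasticsearchDestinationDataStream,
)
from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import (
    ObservabilityPipelineElasticsearchDestinationType,
)

destination = ObservabilityPipelineElasticsearchDestination(
    id="elasticsearch-destination",   # hypothetical component ID
    inputs=["filter-processor"],      # hypothetical upstream component
    type=ObservabilityPipelineElasticsearchDestinationType.ELASTICSEARCH,
    # data_stream replaces a fixed bulk_index; the three fields mirror
    # Elasticsearch's conventional {type}-{dataset}-{namespace} naming.
    data_stream=ObservabilityPipelineElasticsearchDestinationDataStream(
        dtype="logs",
        dataset="myapp",
        namespace="production",
    ),
)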
+ :type namespace: str, optional + """ + if dataset is not unset: + kwargs["dataset"] = dataset + if dtype is not unset: + kwargs["dtype"] = dtype + if namespace is not unset: + kwargs["namespace"] = namespace + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py index 3483c95ae4..f9ac33c1c5 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_processor.py @@ -20,6 +20,9 @@ from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( ObservabilityPipelineEnrichmentTableGeoIp, ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, + ) from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) @@ -34,6 +37,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( ObservabilityPipelineEnrichmentTableGeoIp, ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, + ) from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) @@ -45,6 +51,7 @@ def openapi_types(_): "geoip": (ObservabilityPipelineEnrichmentTableGeoIp,), "id": (str,), "include": (str,), + "reference_table": (ObservabilityPipelineEnrichmentTableReferenceTable,), "target": (str,), "type": (ObservabilityPipelineEnrichmentTableProcessorType,), } @@ -56,6 +63,7 @@ def openapi_types(_): "geoip": "geoip", "id": "id", "include": "include", + "reference_table": "reference_table", "target": "target", "type": "type", } @@ -70,10 +78,13 @@ def __init__( display_name: Union[str, UnsetType] = unset, file: Union[ObservabilityPipelineEnrichmentTableFile, UnsetType] = unset, geoip: Union[ObservabilityPipelineEnrichmentTableGeoIp, UnsetType] = unset, + reference_table: Union[ObservabilityPipelineEnrichmentTableReferenceTable, UnsetType] = unset, **kwargs, ): """ - The ``enrichment_table`` processor enriches logs using a static CSV file or GeoIP database. + The ``enrichment_table`` processor enriches logs using a static CSV file, GeoIP database, or reference table. Exactly one of ``file`` , ``geoip`` , or ``reference_table`` must be configured. + + **Supported pipeline types:** logs :param display_name: The display name for a component. :type display_name: str, optional @@ -93,6 +104,9 @@ def __init__( :param include: A Datadog search query used to determine which logs this processor targets. :type include: str + :param reference_table: Uses a Datadog reference table to enrich logs. + :type reference_table: ObservabilityPipelineEnrichmentTableReferenceTable, optional + :param target: Path where enrichment results should be stored in the log. 
:type target: str @@ -105,6 +119,8 @@ def __init__( kwargs["file"] = file if geoip is not unset: kwargs["geoip"] = geoip + if reference_table is not unset: + kwargs["reference_table"] = reference_table super().__init__(kwargs) self_.enabled = enabled diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py new file mode 100644 index 0000000000..9788ea4973 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_reference_table.py @@ -0,0 +1,49 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineEnrichmentTableReferenceTable(ModelNormal): + @cached_property + def openapi_types(_): + return { + "columns": ([str],), + "key_field": (str,), + "table_id": (str,), + } + + attribute_map = { + "columns": "columns", + "key_field": "key_field", + "table_id": "table_id", + } + + def __init__(self_, key_field: str, table_id: str, columns: Union[List[str], UnsetType] = unset, **kwargs): + """ + Uses a Datadog reference table to enrich logs. + + :param columns: List of column names to include from the reference table. If not provided, all columns are included. + :type columns: [str], optional + + :param key_field: Path to the field in the log event to match against the reference table. + :type key_field: str + + :param table_id: The unique identifier of the reference table. + :type table_id: str + """ + if columns is not unset: + kwargs["columns"] = columns + super().__init__(kwargs) + + self_.key_field = key_field + self_.table_id = table_id diff --git a/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py index 1098a4c6a6..e20d3023c3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_filter_processor.py @@ -52,7 +52,9 @@ def __init__( **kwargs, ): """ - The ``filter`` processor allows conditional processing of logs based on a Datadog search query. Logs that match the ``include`` query are passed through; others are discarded. + The ``filter`` processor allows conditional processing of logs/metrics based on a Datadog search query. Logs/metrics that match the ``include`` query are passed through; others are discarded. + + **Supported pipeline types:** logs, metrics :param display_name: The display name for a component. :type display_name: str, optional @@ -63,7 +65,7 @@ def __init__( :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str - :param include: A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + :param include: A Datadog search query used to determine which logs/metrics should pass through the filter. Logs/metrics that match this query continue to downstream components; others are dropped. 
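The new reference_table branch of the enrichment processor can be exercised as below; a sketch with hypothetical IDs and queries (the ENRICHMENT_TABLE type constant follows the generator's naming convention and is assumed here):

from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import (
    ObservabilityPipelineEnrichmentTableProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import (
    ObservabilityPipelineEnrichmentTableProcessorType,
)
from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import (
    ObservabilityPipelineEnrichmentTableReferenceTable,
)

processor = ObservabilityPipelineEnrichmentTableProcessor(
    id="enrich-from-reference-table",  # hypothetical component ID
    enabled=True,
    include="service:checkout",        # hypothetical Datadog search query
    target="enriched",                 # path where results land in the log
    type=ObservabilityPipelineEnrichmentTableProcessorType.ENRICHMENT_TABLE,
    # Exactly one of file, geoip, or reference_table may be configured.
    reference_table=ObservabilityPipelineEnrichmentTableReferenceTable(
        table_id="acme-service-owners",  # hypothetical reference table ID
        key_field="service",             # log field matched against the table
        columns=["team", "escalation"],  # omit to include all columns
    ),
)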
:type include: str :param type: The processor type. The value should always be ``filter``. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py b/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py index ba39b1d678..62195c5a5e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_fluent_bit_source.py @@ -50,6 +50,8 @@ def __init__( """ The ``fluent_bit`` source ingests logs from Fluent Bit. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py b/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py index 2e64961b5f..063c2f5262 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source.py @@ -50,6 +50,8 @@ def __init__( """ The ``fluentd`` source ingests logs from a Fluentd-compatible service. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py index 97aa67e007..195aa223e8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py @@ -62,6 +62,8 @@ def __init__( The ``generate_datadog_metrics`` processor creates custom metrics from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by log fields. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py index 964a09e959..44f294ec03 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py @@ -68,6 +68,8 @@ def __init__( """ The ``google_chronicle`` destination sends logs to Google Chronicle. + **Supported pipeline types:** logs + :param auth: GCP credentials used to authenticate with Google Cloud Storage. :type auth: ObservabilityPipelineGcpAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py index a2e8b0c337..8c5e4f1527 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py @@ -83,6 +83,8 @@ def __init__( The ``google_cloud_storage`` destination stores logs in a Google Cloud Storage (GCS) bucket. It requires a bucket name, GCP authentication, and metadata fields. 
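Since the filter processor now applies to both logs and metrics pipelines, the same component definition works in either; only the include query differs. A sketch assuming the processor's remaining required fields (enabled, and the FILTER type constant) match the pattern of the other processors in this diff:

from datadog_api_client.v2.model.observability_pipeline_filter_processor import (
    ObservabilityPipelineFilterProcessor,
)
from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import (
    ObservabilityPipelineFilterProcessorType,
)

keep_errors = ObservabilityPipelineFilterProcessor(
    id="filter-processor",
    enabled=True,
    # Hypothetical query: matching events pass downstream, others are dropped.
    # In a metrics pipeline the same mechanism drops non-matching metrics.
    include="status:error",
    type=ObservabilityPipelineFilterProcessorType.FILTER,
)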
+ **Supported pipeline types:** logs + :param acl: Access control list setting for objects written to the bucket. :type acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py index 5fc600f35e..847bdd482f 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_destination.py @@ -73,6 +73,8 @@ def __init__( """ The ``google_pubsub`` destination publishes logs to a Google Cloud Pub/Sub topic. + **Supported pipeline types:** logs + :param auth: GCP credentials used to authenticate with Google Cloud Storage. :type auth: ObservabilityPipelineGcpAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py index 6135892ad0..f2571ed23e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source.py @@ -66,6 +66,8 @@ def __init__( """ The ``google_pubsub`` source ingests logs from a Google Cloud Pub/Sub subscription. + **Supported pipeline types:** logs + :param auth: GCP credentials used to authenticate with Google Cloud Storage. :type auth: ObservabilityPipelineGcpAuth, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination.py new file mode 100644 index 0000000000..c26d7bf7ce --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination.py @@ -0,0 +1,117 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import ( + ObservabilityPipelineHttpClientDestinationAuthStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import ( + ObservabilityPipelineHttpClientDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import ( + ObservabilityPipelineHttpClientDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import ( + ObservabilityPipelineHttpClientDestinationType, + ) + + +class ObservabilityPipelineHttpClientDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import ( + ObservabilityPipelineHttpClientDestinationAuthStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import ( + ObservabilityPipelineHttpClientDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import ( + ObservabilityPipelineHttpClientDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import ( + ObservabilityPipelineHttpClientDestinationType, + ) + + return { + "auth_strategy": (ObservabilityPipelineHttpClientDestinationAuthStrategy,), + "compression": (ObservabilityPipelineHttpClientDestinationCompression,), + "encoding": (ObservabilityPipelineHttpClientDestinationEncoding,), + "id": (str,), + "inputs": ([str],), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineHttpClientDestinationType,), + } + + attribute_map = { + "auth_strategy": "auth_strategy", + "compression": "compression", + "encoding": "encoding", + "id": "id", + "inputs": "inputs", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + encoding: ObservabilityPipelineHttpClientDestinationEncoding, + id: str, + inputs: List[str], + type: ObservabilityPipelineHttpClientDestinationType, + auth_strategy: Union[ObservabilityPipelineHttpClientDestinationAuthStrategy, UnsetType] = unset, + compression: Union[ObservabilityPipelineHttpClientDestinationCompression, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``http_client`` destination sends data to an HTTP endpoint. + + **Supported pipeline types:** logs, metrics + + :param auth_strategy: HTTP authentication strategy. + :type auth_strategy: ObservabilityPipelineHttpClientDestinationAuthStrategy, optional + + :param compression: Compression configuration for HTTP requests. + :type compression: ObservabilityPipelineHttpClientDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineHttpClientDestinationEncoding + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the input for this component. 
+ :type inputs: [str] + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The destination type. The value should always be ``http_client``. + :type type: ObservabilityPipelineHttpClientDestinationType + """ + if auth_strategy is not unset: + kwargs["auth_strategy"] = auth_strategy + if compression is not unset: + kwargs["compression"] = compression + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.encoding = encoding + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_auth_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_auth_strategy.py new file mode 100644 index 0000000000..ffb1523cca --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_auth_strategy.py @@ -0,0 +1,47 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationAuthStrategy(ModelSimple): + """ + HTTP authentication strategy. + + :param value: Must be one of ["none", "basic", "bearer"]. + :type value: str + """ + + allowed_values = { + "none", + "basic", + "bearer", + } + NONE: ClassVar["ObservabilityPipelineHttpClientDestinationAuthStrategy"] + BASIC: ClassVar["ObservabilityPipelineHttpClientDestinationAuthStrategy"] + BEARER: ClassVar["ObservabilityPipelineHttpClientDestinationAuthStrategy"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationAuthStrategy.NONE = ObservabilityPipelineHttpClientDestinationAuthStrategy( + "none" +) +ObservabilityPipelineHttpClientDestinationAuthStrategy.BASIC = ObservabilityPipelineHttpClientDestinationAuthStrategy( + "basic" +) +ObservabilityPipelineHttpClientDestinationAuthStrategy.BEARER = ObservabilityPipelineHttpClientDestinationAuthStrategy( + "bearer" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression.py new file mode 100644 index 0000000000..be42af4291 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, + ) + + +class ObservabilityPipelineHttpClientDestinationCompression(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, + ) + + return { + "algorithm": (ObservabilityPipelineHttpClientDestinationCompressionAlgorithm,), + } + + attribute_map = { + "algorithm": "algorithm", + } + + def __init__(self_, algorithm: ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, **kwargs): + """ + Compression configuration for HTTP requests. + + :param algorithm: Compression algorithm. + :type algorithm: ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + """ + super().__init__(kwargs) + + self_.algorithm = algorithm diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression_algorithm.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression_algorithm.py new file mode 100644 index 0000000000..0adcb8b2f3 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_compression_algorithm.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm(ModelSimple): + """ + Compression algorithm. + + :param value: If omitted defaults to "gzip". Must be one of ["gzip"]. + :type value: str + """ + + allowed_values = { + "gzip", + } + GZIP: ClassVar["ObservabilityPipelineHttpClientDestinationCompressionAlgorithm"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.GZIP = ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm("gzip") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_encoding.py new file mode 100644 index 0000000000..573d3f2798 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_encoding.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationEncoding(ModelSimple): + """ + Encoding format for log events. + + :param value: If omitted defaults to "json". Must be one of ["json"]. 
+ :type value: str + """ + + allowed_values = { + "json", + } + JSON: ClassVar["ObservabilityPipelineHttpClientDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationEncoding.JSON = ObservabilityPipelineHttpClientDestinationEncoding("json") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_type.py new file mode 100644 index 0000000000..3bbe09a7c1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientDestinationType(ModelSimple): + """ + The destination type. The value should always be `http_client`. + + :param value: If omitted defaults to "http_client". Must be one of ["http_client"]. + :type value: str + """ + + allowed_values = { + "http_client", + } + HTTP_CLIENT: ClassVar["ObservabilityPipelineHttpClientDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT = ObservabilityPipelineHttpClientDestinationType( + "http_client" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py index 5d6b534568..278e70fcb0 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source.py @@ -70,6 +70,8 @@ def __init__( """ The ``http_client`` source scrapes logs from HTTP endpoints at regular intervals. + **Supported pipeline types:** logs + :param auth_strategy: Optional authentication strategy for HTTP requests. :type auth_strategy: ObservabilityPipelineHttpClientSourceAuthStrategy, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py index d2cd326f0a..51e4c20f6d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py @@ -16,14 +16,16 @@ class ObservabilityPipelineHttpClientSourceAuthStrategy(ModelSimple): """ Optional authentication strategy for HTTP requests. - :param value: Must be one of ["basic", "bearer"]. + :param value: Must be one of ["none", "basic", "bearer"]. 
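Putting the new http_client destination pieces together, a minimal sketch using only values defined in this diff; component IDs are hypothetical, and note that the endpoint URL itself is not a field of this model:

from datadog_api_client.v2.model.observability_pipeline_http_client_destination import (
    ObservabilityPipelineHttpClientDestination,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import (
    ObservabilityPipelineHttpClientDestinationAuthStrategy,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import (
    ObservabilityPipelineHttpClientDestinationCompression,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import (
    ObservabilityPipelineHttpClientDestinationCompressionAlgorithm,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import (
    ObservabilityPipelineHttpClientDestinationEncoding,
)
from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import (
    ObservabilityPipelineHttpClientDestinationType,
)

destination = ObservabilityPipelineHttpClientDestination(
    id="http-client-destination",
    inputs=["filter-processor"],  # hypothetical upstream component
    encoding=ObservabilityPipelineHttpClientDestinationEncoding.JSON,
    type=ObservabilityPipelineHttpClientDestinationType.HTTP_CLIENT,
    auth_strategy=ObservabilityPipelineHttpClientDestinationAuthStrategy.BASIC,
    # Compression is a nested model wrapping the algorithm enum (gzip only).
    compression=ObservabilityPipelineHttpClientDestinationCompression(
        algorithm=ObservabilityPipelineHttpClientDestinationCompressionAlgorithm.GZIP,
    ),
)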
:type value: str """ allowed_values = { + "none", "basic", "bearer", } + NONE: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"] BASIC: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"] BEARER: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"] @@ -34,5 +36,6 @@ def openapi_types(_): } +ObservabilityPipelineHttpClientSourceAuthStrategy.NONE = ObservabilityPipelineHttpClientSourceAuthStrategy("none") ObservabilityPipelineHttpClientSourceAuthStrategy.BASIC = ObservabilityPipelineHttpClientSourceAuthStrategy("basic") ObservabilityPipelineHttpClientSourceAuthStrategy.BEARER = ObservabilityPipelineHttpClientSourceAuthStrategy("bearer") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py b/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py index 260387f57b..c1cc317bc8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_server_source.py @@ -64,6 +64,8 @@ def __init__( """ The ``http_server`` source collects logs over HTTP POST from external services. + **Supported pipeline types:** logs + :param auth_strategy: HTTP authentication method. :type auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py new file mode 100644 index 0000000000..2436d1cfde --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination.py @@ -0,0 +1,198 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
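The addition of "none" to the source auth strategy makes the unauthenticated case explicit and serializable, rather than relying on omitting the optional field; for example:

from datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy import (
    ObservabilityPipelineHttpClientSourceAuthStrategy,
)

# An explicit "no authentication" choice, now a first-class enum member.
auth = ObservabilityPipelineHttpClientSourceAuthStrategy.NONE
assert auth.value == "none"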
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, + ) + + +class ObservabilityPipelineKafkaDestination(ModelNormal): + validations = { + "message_timeout_ms": { + "inclusive_minimum": 1, + }, + "rate_limit_duration_secs": { + "inclusive_minimum": 1, + }, + "rate_limit_num": { + "inclusive_minimum": 1, + }, + "socket_timeout_ms": { + "inclusive_maximum": 300000, + "inclusive_minimum": 10, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, + ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, + ) + + return { + "compression": (ObservabilityPipelineKafkaDestinationCompression,), + "encoding": (ObservabilityPipelineKafkaDestinationEncoding,), + "headers_key": (str,), + "id": (str,), + "inputs": ([str],), + "key_field": (str,), + "librdkafka_options": ([ObservabilityPipelineKafkaLibrdkafkaOption],), + "message_timeout_ms": (int,), + "rate_limit_duration_secs": (int,), + "rate_limit_num": (int,), + "sasl": (ObservabilityPipelineKafkaSasl,), + "socket_timeout_ms": (int,), + "tls": (ObservabilityPipelineTls,), + "topic": (str,), + "type": (ObservabilityPipelineKafkaDestinationType,), + } + + attribute_map = { + "compression": "compression", + "encoding": "encoding", + "headers_key": "headers_key", + "id": "id", + "inputs": "inputs", + "key_field": "key_field", + "librdkafka_options": "librdkafka_options", + "message_timeout_ms": "message_timeout_ms", + "rate_limit_duration_secs": "rate_limit_duration_secs", + "rate_limit_num": "rate_limit_num", + "sasl": "sasl", + "socket_timeout_ms": "socket_timeout_ms", + "tls": "tls", + "topic": "topic", + "type": "type", + } + + def __init__( + self_, + encoding: ObservabilityPipelineKafkaDestinationEncoding, + id: str, + inputs: List[str], + topic: str, + type: ObservabilityPipelineKafkaDestinationType, + compression: Union[ObservabilityPipelineKafkaDestinationCompression, UnsetType] = unset, + headers_key: Union[str, 
UnsetType] = unset, + key_field: Union[str, UnsetType] = unset, + librdkafka_options: Union[List[ObservabilityPipelineKafkaLibrdkafkaOption], UnsetType] = unset, + message_timeout_ms: Union[int, UnsetType] = unset, + rate_limit_duration_secs: Union[int, UnsetType] = unset, + rate_limit_num: Union[int, UnsetType] = unset, + sasl: Union[ObservabilityPipelineKafkaSasl, UnsetType] = unset, + socket_timeout_ms: Union[int, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``kafka`` destination sends logs to Apache Kafka topics. + + **Supported pipeline types:** logs + + :param compression: Compression codec for Kafka messages. + :type compression: ObservabilityPipelineKafkaDestinationCompression, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineKafkaDestinationEncoding + + :param headers_key: The field name to use for Kafka message headers. + :type headers_key: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param key_field: The field name to use as the Kafka message key. + :type key_field: str, optional + + :param librdkafka_options: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional + + :param message_timeout_ms: Maximum time in milliseconds to wait for message delivery confirmation. + :type message_timeout_ms: int, optional + + :param rate_limit_duration_secs: Duration in seconds for the rate limit window. + :type rate_limit_duration_secs: int, optional + + :param rate_limit_num: Maximum number of messages allowed per rate limit duration. + :type rate_limit_num: int, optional + + :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. + :type sasl: ObservabilityPipelineKafkaSasl, optional + + :param socket_timeout_ms: Socket timeout in milliseconds for network requests. + :type socket_timeout_ms: int, optional + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param topic: The Kafka topic name to publish logs to. + :type topic: str + + :param type: The destination type. The value should always be ``kafka``. 
+ :type type: ObservabilityPipelineKafkaDestinationType + """ + if compression is not unset: + kwargs["compression"] = compression + if headers_key is not unset: + kwargs["headers_key"] = headers_key + if key_field is not unset: + kwargs["key_field"] = key_field + if librdkafka_options is not unset: + kwargs["librdkafka_options"] = librdkafka_options + if message_timeout_ms is not unset: + kwargs["message_timeout_ms"] = message_timeout_ms + if rate_limit_duration_secs is not unset: + kwargs["rate_limit_duration_secs"] = rate_limit_duration_secs + if rate_limit_num is not unset: + kwargs["rate_limit_num"] = rate_limit_num + if sasl is not unset: + kwargs["sasl"] = sasl + if socket_timeout_ms is not unset: + kwargs["socket_timeout_ms"] = socket_timeout_ms + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.encoding = encoding + self_.id = id + self_.inputs = inputs + self_.topic = topic + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py new file mode 100644 index 0000000000..3cd49d2960 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_compression.py @@ -0,0 +1,47 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationCompression(ModelSimple): + """ + Compression codec for Kafka messages. + + :param value: Must be one of ["none", "gzip", "snappy", "lz4", "zstd"]. + :type value: str + """ + + allowed_values = { + "none", + "gzip", + "snappy", + "lz4", + "zstd", + } + NONE: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + GZIP: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + SNAPPY: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + LZ4: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + ZSTD: ClassVar["ObservabilityPipelineKafkaDestinationCompression"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationCompression.NONE = ObservabilityPipelineKafkaDestinationCompression("none") +ObservabilityPipelineKafkaDestinationCompression.GZIP = ObservabilityPipelineKafkaDestinationCompression("gzip") +ObservabilityPipelineKafkaDestinationCompression.SNAPPY = ObservabilityPipelineKafkaDestinationCompression("snappy") +ObservabilityPipelineKafkaDestinationCompression.LZ4 = ObservabilityPipelineKafkaDestinationCompression("lz4") +ObservabilityPipelineKafkaDestinationCompression.ZSTD = ObservabilityPipelineKafkaDestinationCompression("zstd") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py new file mode 100644 index 0000000000..99db79c36e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_encoding.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationEncoding(ModelSimple): + """ + Encoding format for log events. + + :param value: Must be one of ["json", "raw_message"]. + :type value: str + """ + + allowed_values = { + "json", + "raw_message", + } + JSON: ClassVar["ObservabilityPipelineKafkaDestinationEncoding"] + RAW_MESSAGE: ClassVar["ObservabilityPipelineKafkaDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationEncoding.JSON = ObservabilityPipelineKafkaDestinationEncoding("json") +ObservabilityPipelineKafkaDestinationEncoding.RAW_MESSAGE = ObservabilityPipelineKafkaDestinationEncoding("raw_message") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py new file mode 100644 index 0000000000..e2e290b169 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineKafkaDestinationType(ModelSimple): + """ + The destination type. The value should always be `kafka`. + + :param value: If omitted defaults to "kafka". Must be one of ["kafka"]. + :type value: str + """ + + allowed_values = { + "kafka", + } + KAFKA: ClassVar["ObservabilityPipelineKafkaDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineKafkaDestinationType.KAFKA = ObservabilityPipelineKafkaDestinationType("kafka") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py similarity index 84% rename from src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py index 4099a196cd..a7e29aa09b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_librdkafka_option.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_librdkafka_option.py @@ -10,7 +10,7 @@ ) -class ObservabilityPipelineKafkaSourceLibrdkafkaOption(ModelNormal): +class ObservabilityPipelineKafkaLibrdkafkaOption(ModelNormal): @cached_property def openapi_types(_): return { @@ -25,7 +25,7 @@ def openapi_types(_): def __init__(self_, name: str, value: str, **kwargs): """ - Represents a key-value pair used to configure low-level ``librdkafka`` client options for Kafka sources, such as timeouts, buffer sizes, and security settings. + Represents a key-value pair used to configure low-level ``librdkafka`` client options for Kafka source and destination, such as timeouts, buffer sizes, and security settings. :param name: The name of the ``librdkafka`` configuration option to set. 
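A usage sketch for the new kafka destination, staying within the fields and validation bounds defined above; IDs, topic, and timeout values are hypothetical:

from datadog_api_client.v2.model.observability_pipeline_kafka_destination import (
    ObservabilityPipelineKafkaDestination,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import (
    ObservabilityPipelineKafkaDestinationCompression,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import (
    ObservabilityPipelineKafkaDestinationEncoding,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import (
    ObservabilityPipelineKafkaDestinationType,
)

destination = ObservabilityPipelineKafkaDestination(
    id="kafka-destination",
    inputs=["filter-processor"],   # hypothetical upstream component
    topic="observability-logs",    # hypothetical Kafka topic
    encoding=ObservabilityPipelineKafkaDestinationEncoding.JSON,
    type=ObservabilityPipelineKafkaDestinationType.KAFKA,
    compression=ObservabilityPipelineKafkaDestinationCompression.ZSTD,
    key_field="service",           # optional field used as the message key
    # Client-side validation: message_timeout_ms >= 1,
    # socket_timeout_ms between 10 and 300000.
    message_timeout_ms=30000,
    socket_timeout_ms=60000,
)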
:type name: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py similarity index 59% rename from src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py index 88f6e0aaab..e6d42bc1c8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source_sasl.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl.py @@ -14,34 +14,32 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, ) -class ObservabilityPipelineKafkaSourceSasl(ModelNormal): +class ObservabilityPipelineKafkaSasl(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, ) return { - "mechanism": (ObservabilityPipelinePipelineKafkaSourceSaslMechanism,), + "mechanism": (ObservabilityPipelineKafkaSaslMechanism,), } attribute_map = { "mechanism": "mechanism", } - def __init__( - self_, mechanism: Union[ObservabilityPipelinePipelineKafkaSourceSaslMechanism, UnsetType] = unset, **kwargs - ): + def __init__(self_, mechanism: Union[ObservabilityPipelineKafkaSaslMechanism, UnsetType] = unset, **kwargs): """ Specifies the SASL mechanism for authenticating with a Kafka cluster. :param mechanism: SASL mechanism used for Kafka authentication. - :type mechanism: ObservabilityPipelinePipelineKafkaSourceSaslMechanism, optional + :type mechanism: ObservabilityPipelineKafkaSaslMechanism, optional """ if mechanism is not unset: kwargs["mechanism"] = mechanism diff --git a/src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py similarity index 50% rename from src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py rename to src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py index db414b0a6f..2f261fd1aa 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_pipeline_kafka_source_sasl_mechanism.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_sasl_mechanism.py @@ -12,7 +12,7 @@ from typing import ClassVar -class ObservabilityPipelinePipelineKafkaSourceSaslMechanism(ModelSimple): +class ObservabilityPipelineKafkaSaslMechanism(ModelSimple): """ SASL mechanism used for Kafka authentication. 
@@ -25,9 +25,9 @@ class ObservabilityPipelinePipelineKafkaSourceSaslMechanism(ModelSimple): "SCRAM-SHA-256", "SCRAM-SHA-512", } - PLAIN: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] - SCRAMNOT_SHANOT_256: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] - SCRAMNOT_SHANOT_512: ClassVar["ObservabilityPipelinePipelineKafkaSourceSaslMechanism"] + PLAIN: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] + SCRAMNOT_SHANOT_256: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] + SCRAMNOT_SHANOT_512: ClassVar["ObservabilityPipelineKafkaSaslMechanism"] @cached_property def openapi_types(_): @@ -36,12 +36,6 @@ def openapi_types(_): } -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.PLAIN = ObservabilityPipelinePipelineKafkaSourceSaslMechanism( - "PLAIN" -) -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.SCRAMNOT_SHANOT_256 = ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-256") -) -ObservabilityPipelinePipelineKafkaSourceSaslMechanism.SCRAMNOT_SHANOT_512 = ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism("SCRAM-SHA-512") -) +ObservabilityPipelineKafkaSaslMechanism.PLAIN = ObservabilityPipelineKafkaSaslMechanism("PLAIN") +ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_256 = ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-256") +ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_512 = ObservabilityPipelineKafkaSaslMechanism("SCRAM-SHA-512") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py b/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py index 3ba6cfe651..aa48342438 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_kafka_source.py @@ -14,12 +14,10 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, - ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ( - ObservabilityPipelineKafkaSourceSasl, + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ( ObservabilityPipelineKafkaSourceType, @@ -29,12 +27,10 @@ class ObservabilityPipelineKafkaSource(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, - ) - from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ( - ObservabilityPipelineKafkaSourceSasl, + from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, ) + from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ( ObservabilityPipelineKafkaSourceType, @@ -43,8 +39,8 @@ def openapi_types(_): return { "group_id": (str,), "id": (str,), - "librdkafka_options": 
([ObservabilityPipelineKafkaSourceLibrdkafkaOption],), - "sasl": (ObservabilityPipelineKafkaSourceSasl,), + "librdkafka_options": ([ObservabilityPipelineKafkaLibrdkafkaOption],), + "sasl": (ObservabilityPipelineKafkaSasl,), "tls": (ObservabilityPipelineTls,), "topics": ([str],), "type": (ObservabilityPipelineKafkaSourceType,), @@ -66,14 +62,16 @@ def __init__( id: str, topics: List[str], type: ObservabilityPipelineKafkaSourceType, - librdkafka_options: Union[List[ObservabilityPipelineKafkaSourceLibrdkafkaOption], UnsetType] = unset, - sasl: Union[ObservabilityPipelineKafkaSourceSasl, UnsetType] = unset, + librdkafka_options: Union[List[ObservabilityPipelineKafkaLibrdkafkaOption], UnsetType] = unset, + sasl: Union[ObservabilityPipelineKafkaSasl, UnsetType] = unset, tls: Union[ObservabilityPipelineTls, UnsetType] = unset, **kwargs, ): """ The ``kafka`` source ingests data from Apache Kafka topics. + **Supported pipeline types:** logs + :param group_id: Consumer group ID used by the Kafka client. :type group_id: str @@ -81,10 +79,10 @@ def __init__( :type id: str :param librdkafka_options: Optional list of advanced Kafka client configuration options, defined as key-value pairs. - :type librdkafka_options: [ObservabilityPipelineKafkaSourceLibrdkafkaOption], optional + :type librdkafka_options: [ObservabilityPipelineKafkaLibrdkafkaOption], optional :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. - :type sasl: ObservabilityPipelineKafkaSourceSasl, optional + :type sasl: ObservabilityPipelineKafkaSasl, optional :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py b/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py index 1971cae326..51eb2876f8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_logstash_source.py @@ -50,6 +50,8 @@ def __init__( """ The ``logstash`` source ingests logs from a Logstash forwarder. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor.py new file mode 100644 index 0000000000..a48ca9ed04 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor.py @@ -0,0 +1,101 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
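After the rename, the shared SASL models plug into the kafka source exactly as they do into the destination; a sketch with hypothetical connection details (the KAFKA source type constant is assumed from the generator's naming):

from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl
from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import (
    ObservabilityPipelineKafkaSaslMechanism,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_source import (
    ObservabilityPipelineKafkaSource,
)
from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import (
    ObservabilityPipelineKafkaSourceType,
)

source = ObservabilityPipelineKafkaSource(
    id="kafka-source",
    group_id="consumer-group-0",    # hypothetical consumer group ID
    topics=["observability-logs"],  # hypothetical topic list
    type=ObservabilityPipelineKafkaSourceType.KAFKA,
    # The SASL model is now shared between the Kafka source and destination.
    sasl=ObservabilityPipelineKafkaSasl(
        mechanism=ObservabilityPipelineKafkaSaslMechanism.SCRAMNOT_SHANOT_512,
    ),
)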
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import ( + ObservabilityPipelineMetricTagsProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import ( + ObservabilityPipelineMetricTagsProcessorType, + ) + + +class ObservabilityPipelineMetricTagsProcessor(ModelNormal): + validations = { + "rules": { + "max_items": 100, + "min_items": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import ( + ObservabilityPipelineMetricTagsProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import ( + ObservabilityPipelineMetricTagsProcessorType, + ) + + return { + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "rules": ([ObservabilityPipelineMetricTagsProcessorRule],), + "type": (ObservabilityPipelineMetricTagsProcessorType,), + } + + attribute_map = { + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "rules": "rules", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + id: str, + include: str, + rules: List[ObservabilityPipelineMetricTagsProcessorRule], + type: ObservabilityPipelineMetricTagsProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``metric_tags`` processor filters metrics based on their tags using Datadog tag key patterns. + + **Supported pipeline types:** metrics + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which metrics this processor targets. + :type include: str + + :param rules: A list of rules for filtering metric tags. + :type rules: [ObservabilityPipelineMetricTagsProcessorRule] + + :param type: The processor type. The value should always be ``metric_tags``. + :type type: ObservabilityPipelineMetricTagsProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.enabled = enabled + self_.id = id + self_.include = include + self_.rules = rules + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule.py new file mode 100644 index 0000000000..f0fb218038 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule.py @@ -0,0 +1,68 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import ( + ObservabilityPipelineMetricTagsProcessorRuleAction, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import ( + ObservabilityPipelineMetricTagsProcessorRuleMode, + ) + + +class ObservabilityPipelineMetricTagsProcessorRule(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import ( + ObservabilityPipelineMetricTagsProcessorRuleAction, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import ( + ObservabilityPipelineMetricTagsProcessorRuleMode, + ) + + return { + "action": (ObservabilityPipelineMetricTagsProcessorRuleAction,), + "keys": ([str],), + "mode": (ObservabilityPipelineMetricTagsProcessorRuleMode,), + } + + attribute_map = { + "action": "action", + "keys": "keys", + "mode": "mode", + } + + def __init__( + self_, + action: ObservabilityPipelineMetricTagsProcessorRuleAction, + keys: List[str], + mode: ObservabilityPipelineMetricTagsProcessorRuleMode, + **kwargs, + ): + """ + Defines a rule for filtering metric tags based on key patterns. + + :param action: The action to take on tags with matching keys. + :type action: ObservabilityPipelineMetricTagsProcessorRuleAction + + :param keys: A list of tag keys to include or exclude. + :type keys: [str] + + :param mode: The processing mode for tag filtering. + :type mode: ObservabilityPipelineMetricTagsProcessorRuleMode + """ + super().__init__(kwargs) + + self_.action = action + self_.keys = keys + self_.mode = mode diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_action.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_action.py new file mode 100644 index 0000000000..2f2e45809a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_action.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineMetricTagsProcessorRuleAction(ModelSimple): + """ + The action to take on tags with matching keys. + + :param value: Must be one of ["include", "exclude"]. 
+ :type value: str + """ + + allowed_values = { + "include", + "exclude", + } + INCLUDE: ClassVar["ObservabilityPipelineMetricTagsProcessorRuleAction"] + EXCLUDE: ClassVar["ObservabilityPipelineMetricTagsProcessorRuleAction"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineMetricTagsProcessorRuleAction.INCLUDE = ObservabilityPipelineMetricTagsProcessorRuleAction( + "include" +) +ObservabilityPipelineMetricTagsProcessorRuleAction.EXCLUDE = ObservabilityPipelineMetricTagsProcessorRuleAction( + "exclude" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_mode.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_mode.py new file mode 100644 index 0000000000..6add98b0c0 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_rule_mode.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineMetricTagsProcessorRuleMode(ModelSimple): + """ + The processing mode for tag filtering. + + :param value: If omitted defaults to "filter". Must be one of ["filter"]. + :type value: str + """ + + allowed_values = { + "filter", + } + FILTER: ClassVar["ObservabilityPipelineMetricTagsProcessorRuleMode"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineMetricTagsProcessorRuleMode.FILTER = ObservabilityPipelineMetricTagsProcessorRuleMode("filter") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_type.py new file mode 100644 index 0000000000..ad85ed7e23 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_metric_tags_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineMetricTagsProcessorType(ModelSimple): + """ + The processor type. The value should always be `metric_tags`. + + :param value: If omitted defaults to "metric_tags". Must be one of ["metric_tags"]. 
+ :type value: str + """ + + allowed_values = { + "metric_tags", + } + METRIC_TAGS: ClassVar["ObservabilityPipelineMetricTagsProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineMetricTagsProcessorType.METRIC_TAGS = ObservabilityPipelineMetricTagsProcessorType("metric_tags") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py index ec0cb39481..e8cf62c104 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py @@ -55,6 +55,8 @@ def __init__( """ The ``new_relic`` destination sends logs to the New Relic platform. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py index 45ce411dc2..6f95d50319 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py @@ -63,6 +63,8 @@ def __init__( """ The ``ocsf_mapper`` processor transforms logs into the OCSF schema using a predefined mapping configuration. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py index d817a4b558..d2753ebdb7 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py @@ -51,6 +51,8 @@ def __init__( """ The ``opensearch`` destination writes logs to an OpenSearch cluster. + **Supported pipeline types:** logs + :param bulk_index: The index to write logs to. :type bulk_index: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py new file mode 100644 index 0000000000..ab1de7a879 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source.py @@ -0,0 +1,85 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
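+# Illustrative sketch (not generated code): constructing the new
+# `opentelemetry` source. The environment-variable names are hypothetical;
+# the constructor arguments follow the definition below.
+#
+#   from datadog_api_client.v2.models import (
+#       ObservabilityPipelineOpentelemetrySource,
+#       ObservabilityPipelineOpentelemetrySourceType,
+#   )
+#
+#   source = ObservabilityPipelineOpentelemetrySource(
+#       id="opentelemetry-source",
+#       type=ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY,
+#       # Env vars holding the OTLP listener addresses (alphanumeric
+#       # characters and underscores only, per the field docs).
+#       grpc_address_key="OTLP_GRPC_ADDRESS",
+#       http_address_key="OTLP_HTTP_ADDRESS",
+#   )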
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, + ) + + +class ObservabilityPipelineOpentelemetrySource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, + ) + + return { + "grpc_address_key": (str,), + "http_address_key": (str,), + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineOpentelemetrySourceType,), + } + + attribute_map = { + "grpc_address_key": "grpc_address_key", + "http_address_key": "http_address_key", + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineOpentelemetrySourceType, + grpc_address_key: Union[str, UnsetType] = unset, + http_address_key: Union[str, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``opentelemetry`` source receives telemetry data using the OpenTelemetry Protocol (OTLP) over gRPC and HTTP. + + **Supported pipeline types:** logs + + :param grpc_address_key: Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type grpc_address_key: str, optional + + :param http_address_key: Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + :type http_address_key: str, optional + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``opentelemetry``. + :type type: ObservabilityPipelineOpentelemetrySourceType + """ + if grpc_address_key is not unset: + kwargs["grpc_address_key"] = grpc_address_key + if http_address_key is not unset: + kwargs["http_address_key"] = http_address_key + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py new file mode 100644 index 0000000000..ec476d0dba --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_opentelemetry_source_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineOpentelemetrySourceType(ModelSimple): + """ + The source type. The value should always be `opentelemetry`. + + :param value: If omitted defaults to "opentelemetry". Must be one of ["opentelemetry"]. + :type value: str + """ + + allowed_values = { + "opentelemetry", + } + OPENTELEMETRY: ClassVar["ObservabilityPipelineOpentelemetrySourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineOpentelemetrySourceType.OPENTELEMETRY = ObservabilityPipelineOpentelemetrySourceType( + "opentelemetry" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py index 32e6da60d0..f361e1b7e5 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py @@ -66,6 +66,8 @@ def __init__( """ The ``parse_grok`` processor extracts structured fields from unstructured log messages using Grok patterns. + **Supported pipeline types:** logs + :param disable_library_rules: If set to ``true`` , disables the default Grok rules provided by Datadog. :type disable_library_rules: bool, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py index 134700560c..a5e1c90cf8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_json_processor.py @@ -57,6 +57,8 @@ def __init__( """ The ``parse_json`` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py new file mode 100644 index 0000000000..f6a1026ce5 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor.py @@ -0,0 +1,150 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
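+# Illustrative sketch (not generated code): a `parse_xml` processor that
+# parses an XML string out of a hypothetical `message` field. Arguments
+# follow the definition below.
+#
+#   from datadog_api_client.v2.models import (
+#       ObservabilityPipelineParseXMLProcessor,
+#       ObservabilityPipelineParseXMLProcessorType,
+#   )
+#
+#   processor = ObservabilityPipelineParseXMLProcessor(
+#       id="parse-xml-processor",
+#       include="service:my-xml-service",
+#       field="message",
+#       enabled=True,
+#       parse_number=True,  # coerce numeric strings to numbers
+#       text_key="value",   # key for element text content (min length 1)
+#       type=ObservabilityPipelineParseXMLProcessorType.PARSE_XML,
+#   )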
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + +class ObservabilityPipelineParseXMLProcessor(ModelNormal): + validations = { + "text_key": { + "min_length": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, + ) + + return { + "always_use_text_key": (bool,), + "attr_prefix": (str,), + "display_name": (str,), + "enabled": (bool,), + "field": (str,), + "id": (str,), + "include": (str,), + "include_attr": (bool,), + "parse_bool": (bool,), + "parse_null": (bool,), + "parse_number": (bool,), + "text_key": (str,), + "type": (ObservabilityPipelineParseXMLProcessorType,), + } + + attribute_map = { + "always_use_text_key": "always_use_text_key", + "attr_prefix": "attr_prefix", + "display_name": "display_name", + "enabled": "enabled", + "field": "field", + "id": "id", + "include": "include", + "include_attr": "include_attr", + "parse_bool": "parse_bool", + "parse_null": "parse_null", + "parse_number": "parse_number", + "text_key": "text_key", + "type": "type", + } + + def __init__( + self_, + enabled: bool, + field: str, + id: str, + include: str, + type: ObservabilityPipelineParseXMLProcessorType, + always_use_text_key: Union[bool, UnsetType] = unset, + attr_prefix: Union[str, UnsetType] = unset, + display_name: Union[str, UnsetType] = unset, + include_attr: Union[bool, UnsetType] = unset, + parse_bool: Union[bool, UnsetType] = unset, + parse_null: Union[bool, UnsetType] = unset, + parse_number: Union[bool, UnsetType] = unset, + text_key: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``parse_xml`` processor parses XML from a specified field and extracts it into the event. + + **Supported pipeline types:** logs + + :param always_use_text_key: Whether to always use a text key for element content. + :type always_use_text_key: bool, optional + + :param attr_prefix: The prefix to use for XML attributes in the parsed output. + :type attr_prefix: str, optional + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param field: The name of the log field that contains an XML string. + :type field: str + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param include_attr: Whether to include XML attributes in the parsed output. + :type include_attr: bool, optional + + :param parse_bool: Whether to parse boolean values from strings. + :type parse_bool: bool, optional + + :param parse_null: Whether to parse null values. + :type parse_null: bool, optional + + :param parse_number: Whether to parse numeric values from strings. + :type parse_number: bool, optional + + :param text_key: The key name to use for text content within XML elements. Must be at least 1 character if specified. + :type text_key: str, optional + + :param type: The processor type. 
The value should always be ``parse_xml``. + :type type: ObservabilityPipelineParseXMLProcessorType + """ + if always_use_text_key is not unset: + kwargs["always_use_text_key"] = always_use_text_key + if attr_prefix is not unset: + kwargs["attr_prefix"] = attr_prefix + if display_name is not unset: + kwargs["display_name"] = display_name + if include_attr is not unset: + kwargs["include_attr"] = include_attr + if parse_bool is not unset: + kwargs["parse_bool"] = parse_bool + if parse_null is not unset: + kwargs["parse_null"] = parse_null + if parse_number is not unset: + kwargs["parse_number"] = parse_number + if text_key is not unset: + kwargs["text_key"] = text_key + super().__init__(kwargs) + + self_.enabled = enabled + self_.field = field + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py new file mode 100644 index 0000000000..5e8f0a8285 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_xml_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineParseXMLProcessorType(ModelSimple): + """ + The processor type. The value should always be `parse_xml`. + + :param value: If omitted defaults to "parse_xml". Must be one of ["parse_xml"]. + :type value: str + """ + + allowed_values = { + "parse_xml", + } + PARSE_XML: ClassVar["ObservabilityPipelineParseXMLProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineParseXMLProcessorType.PARSE_XML = ObservabilityPipelineParseXMLProcessorType("parse_xml") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py index a122a03915..87f5c04a7b 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py @@ -56,6 +56,7 @@ def openapi_types(_): "overflow_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "overrides": ([ObservabilityPipelineQuotaProcessorOverride],), "partition_fields": ([str],), + "too_many_buckets_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "type": (ObservabilityPipelineQuotaProcessorType,), } @@ -71,6 +72,7 @@ def openapi_types(_): "overflow_action": "overflow_action", "overrides": "overrides", "partition_fields": "partition_fields", + "too_many_buckets_action": "too_many_buckets_action", "type": "type", } @@ -88,15 +90,18 @@ def __init__( overflow_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, overrides: Union[List[ObservabilityPipelineQuotaProcessorOverride], UnsetType] = unset, partition_fields: Union[List[str], UnsetType] = unset, + too_many_buckets_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, **kwargs, ): """ - The Quota Processor measures logging traffic for logs that match a specified filter. 
When the configured daily quota is met, the processor can drop or alert. + The ``quota`` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + **Supported pipeline types:** logs :param display_name: The display name for a component. :type display_name: str, optional - :param drop_events: If set to ``true`` , logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + :param drop_events: If set to ``true`` , logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note** : You can set either ``drop_events`` or ``overflow_action`` , but not both. :type drop_events: bool, optional :param enabled: Whether this processor is enabled. @@ -117,7 +122,7 @@ def __init__( :param name: Name of the quota. :type name: str - :param overflow_action: The action to take when the quota is exceeded. Options: + :param overflow_action: The action to take when the quota or bucket limit is exceeded. Options: * ``drop`` : Drop the event. * ``no_action`` : Let the event pass through. @@ -130,6 +135,13 @@ def __init__( :param partition_fields: A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. :type partition_fields: [str], optional + :param too_many_buckets_action: The action to take when the quota or bucket limit is exceeded. Options: + + * ``drop`` : Drop the event. + * ``no_action`` : Let the event pass through. + * ``overflow_routing`` : Route to an overflow destination. + :type too_many_buckets_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param type: The processor type. The value should always be ``quota``. :type type: ObservabilityPipelineQuotaProcessorType """ @@ -145,6 +157,8 @@ def __init__( kwargs["overrides"] = overrides if partition_fields is not unset: kwargs["partition_fields"] = partition_fields + if too_many_buckets_action is not unset: + kwargs["too_many_buckets_action"] = too_many_buckets_action super().__init__(kwargs) self_.enabled = enabled diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py index 1341d5654d..3bf3e7727d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py @@ -14,7 +14,7 @@ class ObservabilityPipelineQuotaProcessorOverflowAction(ModelSimple): """ - The action to take when the quota is exceeded. Options: + The action to take when the quota or bucket limit is exceeded. Options: - `drop`: Drop the event. - `no_action`: Let the event pass through. - `overflow_routing`: Route to an overflow destination. 
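As the two hunks above show, `overflow_action` and the new `too_many_buckets_action` are backed by the same `ObservabilityPipelineQuotaProcessorOverflowAction` enum: the former applies when the daily quota is exceeded, the latter when the bucket limit is exceeded (presumably the buckets tracked per `partition_fields` combination). A minimal sketch of selecting a value for each; the pairing is hypothetical, and the member names assume the generator's usual `ClassVar` pattern for `ModelSimple` enums:

    from datadog_api_client.v2.models import (
        ObservabilityPipelineQuotaProcessorOverflowAction,
    )

    # Route quota overflow to an overflow destination, but drop events
    # outright once the bucket limit is exceeded.
    overflow_action = ObservabilityPipelineQuotaProcessorOverflowAction.OVERFLOW_ROUTING
    too_many_buckets_action = ObservabilityPipelineQuotaProcessorOverflowAction.DROP
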
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py index 86cf0c092a..bb3f832f3e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py @@ -66,6 +66,8 @@ def __init__( """ The ``reduce`` processor aggregates and merges logs based on matching keys and merge strategies. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py index d439328fca..3d165d78f5 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_remove_fields_processor.py @@ -57,6 +57,8 @@ def __init__( """ The ``remove_fields`` processor deletes specified fields from logs. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py index c32d90a194..eedd34a56e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rename_fields_processor.py @@ -63,6 +63,8 @@ def __init__( """ The ``rename_fields`` processor changes field names. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py index 486442d4aa..ead5f47857 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py @@ -62,6 +62,8 @@ def __init__( """ The ``rsyslog`` destination forwards logs to an external ``rsyslog`` server over TCP or UDP using the syslog protocol. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py index 90c9a6dc6b..c197260efb 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py @@ -59,6 +59,8 @@ def __init__( """ The ``rsyslog`` source listens for logs over TCP or UDP from an ``rsyslog`` server using the syslog protocol. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). 
:type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py index b301a81a73..97f548a49c 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py @@ -3,7 +3,7 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import Union, TYPE_CHECKING +from typing import List, Union, TYPE_CHECKING from datadog_api_client.model_utils import ( ModelNormal, @@ -21,8 +21,8 @@ class ObservabilityPipelineSampleProcessor(ModelNormal): validations = { - "rate": { - "inclusive_minimum": 1, + "group_by": { + "min_items": 1, }, } @@ -35,20 +35,20 @@ def openapi_types(_): return { "display_name": (str,), "enabled": (bool,), + "group_by": ([str],), "id": (str,), "include": (str,), "percentage": (float,), - "rate": (int,), "type": (ObservabilityPipelineSampleProcessorType,), } attribute_map = { "display_name": "display_name", "enabled": "enabled", + "group_by": "group_by", "id": "id", "include": "include", "percentage": "percentage", - "rate": "rate", "type": "type", } @@ -57,21 +57,26 @@ def __init__( enabled: bool, id: str, include: str, + percentage: float, type: ObservabilityPipelineSampleProcessorType, display_name: Union[str, UnsetType] = unset, - percentage: Union[float, UnsetType] = unset, - rate: Union[int, UnsetType] = unset, + group_by: Union[List[str], UnsetType] = unset, **kwargs, ): """ The ``sample`` processor allows probabilistic sampling of logs at a fixed rate. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional :param enabled: Whether this processor is enabled. :type enabled: bool + :param group_by: Optional list of fields to group events by. Each group is sampled independently. + :type group_by: [str], optional + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). :type id: str @@ -79,23 +84,19 @@ def __init__( :type include: str :param percentage: The percentage of logs to sample. - :type percentage: float, optional - - :param rate: Number of events to sample (1 in N). - :type rate: int, optional + :type percentage: float :param type: The processor type. The value should always be ``sample``. :type type: ObservabilityPipelineSampleProcessorType """ if display_name is not unset: kwargs["display_name"] = display_name - if percentage is not unset: - kwargs["percentage"] = percentage - if rate is not unset: - kwargs["rate"] = rate + if group_by is not unset: + kwargs["group_by"] = group_by super().__init__(kwargs) self_.enabled = enabled self_.id = id self_.include = include + self_.percentage = percentage self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py index 750d0619d3..fb61fade83 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py @@ -63,6 +63,8 @@ def __init__( """ The ``sensitive_data_scanner`` processor detects and optionally redacts sensitive data in log events. 
+ **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py index da99100de6..6b7a504c70 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py @@ -3,10 +3,13 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations +from typing import Union from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) @@ -14,20 +17,27 @@ class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions(Mod @cached_property def openapi_types(_): return { + "description": (str,), "rule": (str,), } attribute_map = { + "description": "description", "rule": "rule", } - def __init__(self_, rule: str, **kwargs): + def __init__(self_, rule: str, description: Union[str, UnsetType] = unset, **kwargs): """ Options for defining a custom regex pattern. + :param description: Human-readable description providing context about a sensitive data scanner rule + :type description: str, optional + :param rule: A regular expression used to detect sensitive values. Must be a valid regex. :type rule: str """ + if description is not unset: + kwargs["description"] = description super().__init__(kwargs) self_.rule = rule diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py index 1389dec5a1..91ac818cdf 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py @@ -17,25 +17,38 @@ class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions(Mo @cached_property def openapi_types(_): return { + "description": (str,), "id": (str,), "use_recommended_keywords": (bool,), } attribute_map = { + "description": "description", "id": "id", "use_recommended_keywords": "use_recommended_keywords", } - def __init__(self_, id: str, use_recommended_keywords: Union[bool, UnsetType] = unset, **kwargs): + def __init__( + self_, + id: str, + description: Union[str, UnsetType] = unset, + use_recommended_keywords: Union[bool, UnsetType] = unset, + **kwargs, + ): """ Options for selecting a predefined library pattern and enabling keyword support. + :param description: Human-readable description providing context about a sensitive data scanner rule + :type description: str, optional + :param id: Identifier for a predefined pattern from the sensitive data scanner pattern library. :type id: str :param use_recommended_keywords: Whether to augment the pattern with recommended keywords (optional). 
:type use_recommended_keywords: bool, optional """ + if description is not unset: + kwargs["description"] = description if use_recommended_keywords is not unset: kwargs["use_recommended_keywords"] = use_recommended_keywords super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py index 924aa9b0f5..85fca46e4d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py @@ -55,6 +55,8 @@ def __init__( """ The ``sentinel_one`` destination sends logs to SentinelOne. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py index 8ff0d447f2..c631f27cc2 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_socket_destination.py @@ -94,6 +94,8 @@ def __init__( """ The ``socket`` destination sends logs over TCP or UDP to a remote server. + **Supported pipeline types:** logs + :param encoding: Encoding format for log events. :type encoding: ObservabilityPipelineSocketDestinationEncoding diff --git a/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py b/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py index 23d1794735..d95c5fa558 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_socket_source.py @@ -90,6 +90,8 @@ def __init__( """ The ``socket`` source ingests logs over TCP or UDP. + **Supported pipeline types:** logs + :param framing: Framing method configuration for the socket source. :type framing: ObservabilityPipelineSocketSourceFraming diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py new file mode 100644 index 0000000000..7acd2e359d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor.py @@ -0,0 +1,101 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
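+# Illustrative sketch (not generated code): a `split_array` processor that
+# fans each element of a hypothetical `records` array out into its own
+# event. Arguments follow the definitions in this diff (between 1 and 15
+# array configs are allowed).
+#
+#   from datadog_api_client.v2.models import (
+#       ObservabilityPipelineSplitArrayProcessor,
+#       ObservabilityPipelineSplitArrayProcessorArrayConfig,
+#       ObservabilityPipelineSplitArrayProcessorType,
+#   )
+#
+#   processor = ObservabilityPipelineSplitArrayProcessor(
+#       id="split-array-processor",
+#       include="*",  # the field docs note `*` is typical for split_array
+#       enabled=True,
+#       arrays=[
+#           ObservabilityPipelineSplitArrayProcessorArrayConfig(
+#               field="records",
+#               include="*",
+#           )
+#       ],
+#       type=ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY,
+#   )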
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + +class ObservabilityPipelineSplitArrayProcessor(ModelNormal): + validations = { + "arrays": { + "max_items": 15, + "min_items": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, + ) + from datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, + ) + + return { + "arrays": ([ObservabilityPipelineSplitArrayProcessorArrayConfig],), + "display_name": (str,), + "enabled": (bool,), + "id": (str,), + "include": (str,), + "type": (ObservabilityPipelineSplitArrayProcessorType,), + } + + attribute_map = { + "arrays": "arrays", + "display_name": "display_name", + "enabled": "enabled", + "id": "id", + "include": "include", + "type": "type", + } + + def __init__( + self_, + arrays: List[ObservabilityPipelineSplitArrayProcessorArrayConfig], + enabled: bool, + id: str, + include: str, + type: ObservabilityPipelineSplitArrayProcessorType, + display_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``split_array`` processor splits array fields into separate events based on configured rules. + + **Supported pipeline types:** logs + + :param arrays: A list of array split configurations. + :type arrays: [ObservabilityPipelineSplitArrayProcessorArrayConfig] + + :param display_name: The display name for a component. + :type display_name: str, optional + + :param enabled: Whether this processor is enabled. + :type enabled: bool + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be ``*``. + :type include: str + + :param type: The processor type. The value should always be ``split_array``. + :type type: ObservabilityPipelineSplitArrayProcessorType + """ + if display_name is not unset: + kwargs["display_name"] = display_name + super().__init__(kwargs) + + self_.arrays = arrays + self_.enabled = enabled + self_.id = id + self_.include = include + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py new file mode 100644 index 0000000000..ac7133ee2c --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_array_config.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineSplitArrayProcessorArrayConfig(ModelNormal): + @cached_property + def openapi_types(_): + return { + "field": (str,), + "include": (str,), + } + + attribute_map = { + "field": "field", + "include": "include", + } + + def __init__(self_, field: str, include: str, **kwargs): + """ + Configuration for a single array split operation. + + :param field: The path to the array field to split. + :type field: str + + :param include: A Datadog search query used to determine which logs this array split operation targets. + :type include: str + """ + super().__init__(kwargs) + + self_.field = field + self_.include = include diff --git a/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py new file mode 100644 index 0000000000..c6b6a7e9a1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_split_array_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplitArrayProcessorType(ModelSimple): + """ + The processor type. The value should always be `split_array`. + + :param value: If omitted defaults to "split_array". Must be one of ["split_array"]. + :type value: str + """ + + allowed_values = { + "split_array", + } + SPLIT_ARRAY: ClassVar["ObservabilityPipelineSplitArrayProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplitArrayProcessorType.SPLIT_ARRAY = ObservabilityPipelineSplitArrayProcessorType("split_array") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py index e36b4aee74..f2e4f9fc57 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py @@ -66,6 +66,8 @@ def __init__( """ The ``splunk_hec`` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + **Supported pipeline types:** logs + :param auto_extract_timestamp: If ``true`` , Splunk tries to extract timestamps from incoming log events. If ``false`` , Splunk assigns the time the event was received. :type auto_extract_timestamp: bool, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py index bf8e2f976f..1773dfefeb 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py @@ -50,6 +50,8 @@ def __init__( """ The ``splunk_hec`` source implements the Splunk HTTP Event Collector (HEC) API. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). 
:type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py index 5cd32dc425..9e2f538c45 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py @@ -51,6 +51,8 @@ def __init__( The ``splunk_tcp`` source receives logs from a Splunk Universal Forwarder over TCP. TLS is supported for secure transmission. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py index d49e3044f8..bd8b1f6a65 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py @@ -77,6 +77,8 @@ def __init__( """ The ``sumo_logic`` destination forwards logs to Sumo Logic. + **Supported pipeline types:** logs + :param encoding: The output encoding format. :type encoding: ObservabilityPipelineSumoLogicDestinationEncoding, optional diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py index c02e14c8c1..1338234aa6 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py @@ -38,6 +38,8 @@ def __init__(self_, id: str, type: ObservabilityPipelineSumoLogicSourceType, **k """ The ``sumo_logic`` source receives logs from Sumo Logic collectors. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py index 4984e69b5e..f9cc83262e 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py @@ -62,6 +62,8 @@ def __init__( """ The ``syslog_ng`` destination forwards logs to an external ``syslog-ng`` server over TCP or UDP using the syslog protocol. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. :type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py index 5f3e91d9a1..778d5d6ed3 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py @@ -59,6 +59,8 @@ def __init__( """ The ``syslog_ng`` source listens for logs over TCP or UDP from a ``syslog-ng`` server using the syslog protocol. + **Supported pipeline types:** logs + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). 
:type id: str diff --git a/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py index 3240833d02..00cf8ff296 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py @@ -63,6 +63,8 @@ def __init__( """ The ``throttle`` processor limits the number of events that pass through over a given time window. + **Supported pipeline types:** logs + :param display_name: The display name for a component. :type display_name: str, optional diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index 39ee6ab446..52d9a0a760 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -2886,6 +2886,12 @@ from datadog_api_client.v2.model.observability_pipeline_add_fields_processor_type import ( ObservabilityPipelineAddFieldsProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor import ( + ObservabilityPipelineAddHostnameProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_add_hostname_processor_type import ( + ObservabilityPipelineAddHostnameProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( ObservabilityPipelineAmazonDataFirehoseSource, ) @@ -2924,10 +2930,19 @@ ObservabilityPipelineAmazonSecurityLakeDestinationType, ) from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth +from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination import ( + ObservabilityPipelineCloudPremDestination, +) +from datadog_api_client.v2.model.observability_pipeline_cloud_prem_destination_type import ( + ObservabilityPipelineCloudPremDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, ) +from datadog_api_client.v2.model.observability_pipeline_config_pipeline_type import ( + ObservabilityPipelineConfigPipelineType, +) from datadog_api_client.v2.model.observability_pipeline_config_processor_group import ( ObservabilityPipelineConfigProcessorGroup, ) @@ -2971,6 +2986,12 @@ from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) +from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination import ( + ObservabilityPipelineDatadogMetricsDestination, +) +from datadog_api_client.v2.model.observability_pipeline_datadog_metrics_destination_type import ( + ObservabilityPipelineDatadogMetricsDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_datadog_tags_processor import ( ObservabilityPipelineDatadogTagsProcessor, ) @@ -2997,6 +3018,9 @@ from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( ObservabilityPipelineElasticsearchDestinationApiVersion, ) +from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_data_stream import ( + ObservabilityPipelineElasticsearchDestinationDataStream, +) from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( ObservabilityPipelineElasticsearchDestinationType, ) @@ 
-3030,6 +3054,9 @@ from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( ObservabilityPipelineEnrichmentTableProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_reference_table import ( + ObservabilityPipelineEnrichmentTableReferenceTable, +) from datadog_api_client.v2.model.observability_pipeline_field_value import ObservabilityPipelineFieldValue from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import ( @@ -3102,6 +3129,24 @@ from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type import ( ObservabilityPipelineGooglePubSubSourceType, ) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination import ( + ObservabilityPipelineHttpClientDestination, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_auth_strategy import ( + ObservabilityPipelineHttpClientDestinationAuthStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression import ( + ObservabilityPipelineHttpClientDestinationCompression, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_compression_algorithm import ( + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_encoding import ( + ObservabilityPipelineHttpClientDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_destination_type import ( + ObservabilityPipelineHttpClientDestinationType, +) from datadog_api_client.v2.model.observability_pipeline_http_client_source import ObservabilityPipelineHttpClientSource from datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy import ( ObservabilityPipelineHttpClientSourceAuthStrategy, @@ -3116,17 +3161,45 @@ from datadog_api_client.v2.model.observability_pipeline_http_server_source_type import ( ObservabilityPipelineHttpServerSourceType, ) -from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource -from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( - ObservabilityPipelineKafkaSourceLibrdkafkaOption, +from datadog_api_client.v2.model.observability_pipeline_kafka_destination import ObservabilityPipelineKafkaDestination +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_compression import ( + ObservabilityPipelineKafkaDestinationCompression, ) -from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ObservabilityPipelineKafkaSourceSasl +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_encoding import ( + ObservabilityPipelineKafkaDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_destination_type import ( + ObservabilityPipelineKafkaDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_librdkafka_option import ( + ObservabilityPipelineKafkaLibrdkafkaOption, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_sasl import ObservabilityPipelineKafkaSasl +from datadog_api_client.v2.model.observability_pipeline_kafka_sasl_mechanism import ( + ObservabilityPipelineKafkaSaslMechanism, +) +from datadog_api_client.v2.model.observability_pipeline_kafka_source 
import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ObservabilityPipelineKafkaSourceType from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( ObservabilityPipelineLogstashSourceType, ) from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor import ( + ObservabilityPipelineMetricTagsProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule import ( + ObservabilityPipelineMetricTagsProcessorRule, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_action import ( + ObservabilityPipelineMetricTagsProcessorRuleAction, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_rule_mode import ( + ObservabilityPipelineMetricTagsProcessorRuleMode, +) +from datadog_api_client.v2.model.observability_pipeline_metric_tags_processor_type import ( + ObservabilityPipelineMetricTagsProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( ObservabilityPipelineNewRelicDestination, @@ -3158,6 +3231,12 @@ from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( ObservabilityPipelineOpenSearchDestinationType, ) +from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source import ( + ObservabilityPipelineOpentelemetrySource, +) +from datadog_api_client.v2.model.observability_pipeline_opentelemetry_source_type import ( + ObservabilityPipelineOpentelemetrySourceType, +) from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( ObservabilityPipelineParseGrokProcessor, ) @@ -3179,8 +3258,11 @@ from datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type import ( ObservabilityPipelineParseJSONProcessorType, ) -from datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism import ( - ObservabilityPipelinePipelineKafkaSourceSaslMechanism, +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor import ( + ObservabilityPipelineParseXMLProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_parse_xml_processor_type import ( + ObservabilityPipelineParseXMLProcessorType, ) from datadog_api_client.v2.model.observability_pipeline_quota_processor import ObservabilityPipelineQuotaProcessor from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import ( @@ -3402,6 +3484,15 @@ from datadog_api_client.v2.model.observability_pipeline_socket_source_type import ObservabilityPipelineSocketSourceType from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData +from datadog_api_client.v2.model.observability_pipeline_split_array_processor import ( + ObservabilityPipelineSplitArrayProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_split_array_processor_array_config import ( + ObservabilityPipelineSplitArrayProcessorArrayConfig, +) +from 
datadog_api_client.v2.model.observability_pipeline_split_array_processor_type import ( + ObservabilityPipelineSplitArrayProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( ObservabilityPipelineSplunkHecDestination, ) @@ -7366,6 +7457,8 @@ "ObservabilityPipelineAddEnvVarsProcessorVariable", "ObservabilityPipelineAddFieldsProcessor", "ObservabilityPipelineAddFieldsProcessorType", + "ObservabilityPipelineAddHostnameProcessor", + "ObservabilityPipelineAddHostnameProcessorType", "ObservabilityPipelineAmazonDataFirehoseSource", "ObservabilityPipelineAmazonDataFirehoseSourceType", "ObservabilityPipelineAmazonOpenSearchDestination", @@ -7380,8 +7473,11 @@ "ObservabilityPipelineAmazonSecurityLakeDestination", "ObservabilityPipelineAmazonSecurityLakeDestinationType", "ObservabilityPipelineAwsAuth", + "ObservabilityPipelineCloudPremDestination", + "ObservabilityPipelineCloudPremDestinationType", "ObservabilityPipelineConfig", "ObservabilityPipelineConfigDestinationItem", + "ObservabilityPipelineConfigPipelineType", "ObservabilityPipelineConfigProcessorGroup", "ObservabilityPipelineConfigProcessorItem", "ObservabilityPipelineConfigSourceItem", @@ -7399,6 +7495,8 @@ "ObservabilityPipelineDatadogAgentSourceType", "ObservabilityPipelineDatadogLogsDestination", "ObservabilityPipelineDatadogLogsDestinationType", + "ObservabilityPipelineDatadogMetricsDestination", + "ObservabilityPipelineDatadogMetricsDestinationType", "ObservabilityPipelineDatadogTagsProcessor", "ObservabilityPipelineDatadogTagsProcessorAction", "ObservabilityPipelineDatadogTagsProcessorMode", @@ -7409,6 +7507,7 @@ "ObservabilityPipelineDedupeProcessorType", "ObservabilityPipelineElasticsearchDestination", "ObservabilityPipelineElasticsearchDestinationApiVersion", + "ObservabilityPipelineElasticsearchDestinationDataStream", "ObservabilityPipelineElasticsearchDestinationType", "ObservabilityPipelineEnrichmentTableFile", "ObservabilityPipelineEnrichmentTableFileEncoding", @@ -7420,6 +7519,7 @@ "ObservabilityPipelineEnrichmentTableGeoIp", "ObservabilityPipelineEnrichmentTableProcessor", "ObservabilityPipelineEnrichmentTableProcessorType", + "ObservabilityPipelineEnrichmentTableReferenceTable", "ObservabilityPipelineFieldValue", "ObservabilityPipelineFilterProcessor", "ObservabilityPipelineFilterProcessorType", @@ -7448,19 +7548,35 @@ "ObservabilityPipelineGooglePubSubDestinationType", "ObservabilityPipelineGooglePubSubSource", "ObservabilityPipelineGooglePubSubSourceType", + "ObservabilityPipelineHttpClientDestination", + "ObservabilityPipelineHttpClientDestinationAuthStrategy", + "ObservabilityPipelineHttpClientDestinationCompression", + "ObservabilityPipelineHttpClientDestinationCompressionAlgorithm", + "ObservabilityPipelineHttpClientDestinationEncoding", + "ObservabilityPipelineHttpClientDestinationType", "ObservabilityPipelineHttpClientSource", "ObservabilityPipelineHttpClientSourceAuthStrategy", "ObservabilityPipelineHttpClientSourceType", "ObservabilityPipelineHttpServerSource", "ObservabilityPipelineHttpServerSourceAuthStrategy", "ObservabilityPipelineHttpServerSourceType", + "ObservabilityPipelineKafkaDestination", + "ObservabilityPipelineKafkaDestinationCompression", + "ObservabilityPipelineKafkaDestinationEncoding", + "ObservabilityPipelineKafkaDestinationType", + "ObservabilityPipelineKafkaLibrdkafkaOption", + "ObservabilityPipelineKafkaSasl", + "ObservabilityPipelineKafkaSaslMechanism", "ObservabilityPipelineKafkaSource", - "ObservabilityPipelineKafkaSourceLibrdkafkaOption", - 
"ObservabilityPipelineKafkaSourceSasl", "ObservabilityPipelineKafkaSourceType", "ObservabilityPipelineLogstashSource", "ObservabilityPipelineLogstashSourceType", "ObservabilityPipelineMetadataEntry", + "ObservabilityPipelineMetricTagsProcessor", + "ObservabilityPipelineMetricTagsProcessorRule", + "ObservabilityPipelineMetricTagsProcessorRuleAction", + "ObservabilityPipelineMetricTagsProcessorRuleMode", + "ObservabilityPipelineMetricTagsProcessorType", "ObservabilityPipelineMetricValue", "ObservabilityPipelineNewRelicDestination", "ObservabilityPipelineNewRelicDestinationRegion", @@ -7472,6 +7588,8 @@ "ObservabilityPipelineOcsfMappingLibrary", "ObservabilityPipelineOpenSearchDestination", "ObservabilityPipelineOpenSearchDestinationType", + "ObservabilityPipelineOpentelemetrySource", + "ObservabilityPipelineOpentelemetrySourceType", "ObservabilityPipelineParseGrokProcessor", "ObservabilityPipelineParseGrokProcessorRule", "ObservabilityPipelineParseGrokProcessorRuleMatchRule", @@ -7479,7 +7597,8 @@ "ObservabilityPipelineParseGrokProcessorType", "ObservabilityPipelineParseJSONProcessor", "ObservabilityPipelineParseJSONProcessorType", - "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", + "ObservabilityPipelineParseXMLProcessor", + "ObservabilityPipelineParseXMLProcessorType", "ObservabilityPipelineQuotaProcessor", "ObservabilityPipelineQuotaProcessorLimit", "ObservabilityPipelineQuotaProcessorLimitEnforceType", @@ -7560,6 +7679,9 @@ "ObservabilityPipelineSocketSourceType", "ObservabilityPipelineSpec", "ObservabilityPipelineSpecData", + "ObservabilityPipelineSplitArrayProcessor", + "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "ObservabilityPipelineSplitArrayProcessorType", "ObservabilityPipelineSplunkHecDestination", "ObservabilityPipelineSplunkHecDestinationEncoding", "ObservabilityPipelineSplunkHecDestinationType", diff --git a/tests/v2/features/given.json b/tests/v2/features/given.json index aa08181879..f34bdccdee 100644 --- a/tests/v2/features/given.json +++ b/tests/v2/features/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 @@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n 
\"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/tests/v2/features/observability_pipelines.feature b/tests/v2/features/observability_pipelines.feature index c43fa8b3b7..a9b17ec7fd 100644 --- a/tests/v2/features/observability_pipelines.feature +++ b/tests/v2/features/observability_pipelines.feature @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": 
"datadog_logs"}], "pipeline_type": "logs", "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict diff --git a/tests/v2/features/undo.json b/tests/v2/features/undo.json index eb43f26dbe..25c59473da 100644 --- a/tests/v2/features/undo.json +++ b/tests/v2/features/undo.json @@ -2838,6 +2838,31 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3443,31 +3468,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - "type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": {