diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 52c13bf6e581..072441e139b0 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -6677,8 +6677,11 @@ components: x-enum-varnames: - AZURE_SCAN_OPTIONS AzureStorageDestination: - description: The `azure_storage` destination forwards logs to an Azure Blob + description: 'The `azure_storage` destination forwards logs to an Azure Blob Storage container. + + + **Supported pipeline types:** logs' properties: blob_prefix: description: Optional prefix for blobs written to the container. @@ -6709,6 +6712,8 @@ components: - inputs - container_name type: object + x-pipeline-types: + - logs AzureStorageDestinationType: default: azure_storage description: The destination type. The value should always be `azure_storage`. @@ -33628,8 +33633,11 @@ components: - query type: object MicrosoftSentinelDestination: - description: The `microsoft_sentinel` destination forwards logs to Microsoft + description: 'The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. + + + **Supported pipeline types:** logs' properties: client_id: description: Azure AD client ID used for authentication. @@ -33670,6 +33678,8 @@ components: - dcr_immutable_id - table type: object + x-pipeline-types: + - logs MicrosoftSentinelDestinationType: default: microsoft_sentinel description: The destination type. The value should always be `microsoft_sentinel`. @@ -35374,13 +35384,16 @@ components: - data type: object ObservabilityPipelineAddEnvVarsProcessor: - description: The `add_env_vars` processor adds environment variable values to - log events. + description: 'The `add_env_vars` processor adds environment variable values + to log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -35407,6 +35420,8 @@ components: - variables - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddEnvVarsProcessorType: default: add_env_vars description: The processor type. The value should always be `add_env_vars`. @@ -35432,12 +35447,15 @@ components: - name type: object ObservabilityPipelineAddFieldsProcessor: - description: The `add_fields` processor adds static key-value fields to logs. + description: 'The `add_fields` processor adds static key-value fields to logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -35447,8 +35465,8 @@ components: $ref: '#/components/schemas/ObservabilityPipelineFieldValue' type: array id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: add-fields-processor type: string @@ -35466,6 +35484,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineAddFieldsProcessorType: default: add_fields description: The processor type. 
The value should always be `add_fields`. @@ -35475,15 +35495,60 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAddHostnameProcessor: + description: 'The `add_hostname` processor adds the hostname to log events. + + + **Supported pipeline types:** logs' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: add-hostname-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessorType' + required: + - id + - type + - include + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineAddHostnameProcessorType: + default: add_hostname + description: The processor type. The value should always be `add_hostname`. + enum: + - add_hostname + example: add_hostname + type: string + x-enum-varnames: + - ADD_HOSTNAME ObservabilityPipelineAmazonDataFirehoseSource: - description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + description: 'The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: amazon-firehose-source type: string tls: @@ -35494,6 +35559,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonDataFirehoseSourceType: default: amazon_data_firehose description: The source type. The value should always be `amazon_data_firehose`. @@ -35504,7 +35571,10 @@ components: x-enum-varnames: - AMAZON_DATA_FIREHOSE ObservabilityPipelineAmazonOpenSearchDestination: - description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + description: 'The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' @@ -35532,6 +35602,8 @@ components: - inputs - auth type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonOpenSearchDestinationAuth: description: 'Authentication settings for the Amazon OpenSearch destination. @@ -35575,8 +35647,11 @@ components: x-enum-varnames: - AMAZON_OPENSEARCH ObservabilityPipelineAmazonS3Destination: - description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + description: 'The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. 
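A minimal sketch of how two of the schemas above combine, shown as the relevant config fragments; the container name, blob prefix, and search query are illustrative values, not part of the schema.

    # processors entry (inside a processor group)
    - id: add-hostname-processor
      type: add_hostname
      include: service:my-service
      enabled: true
    # destinations entry consuming it
    - id: azure-storage-destination
      type: azure_storage
      inputs:
        - add-hostname-processor
      container_name: my-logs-container   # required; illustrative name
      blob_prefix: app-logs/              # optional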
+ + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35617,6 +35692,8 @@ components: - region - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3DestinationStorageClass: description: S3 storage class. enum: @@ -35653,14 +35730,17 @@ components: ObservabilityPipelineAmazonS3Source: description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. - It supports AWS authentication and TLS encryption.' + It supports AWS authentication and TLS encryption. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: aws-s3-source type: string region: @@ -35676,6 +35756,8 @@ components: - type - region type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonS3SourceType: default: amazon_s3 description: The source type. Always `amazon_s3`. @@ -35686,8 +35768,11 @@ components: x-enum-varnames: - AMAZON_S3 ObservabilityPipelineAmazonSecurityLakeDestination: - description: The `amazon_security_lake` destination sends your logs to Amazon + description: 'The `amazon_security_lake` destination sends your logs to Amazon Security Lake. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' @@ -35727,6 +35812,8 @@ components: - region - custom_source_name type: object + x-pipeline-types: + - logs ObservabilityPipelineAmazonSecurityLakeDestinationType: default: amazon_security_lake description: The destination type. Always `amazon_security_lake`. @@ -35752,6 +35839,42 @@ components: role session. type: string type: object + ObservabilityPipelineCloudPremDestination: + description: 'The `cloud_prem` destination sends logs to Datadog CloudPrem. + + + **Supported pipeline types:** logs' + properties: + id: + description: The unique identifier for this component. + example: cloud-prem-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - logs + ObservabilityPipelineCloudPremDestinationType: + default: cloud_prem + description: The destination type. The value should always be `cloud_prem`. + enum: + - cloud_prem + example: cloud_prem + type: string + x-enum-varnames: + - CLOUD_PREM ObservabilityPipelineComponentDisplayName: description: The display name for a component. example: my component @@ -35765,12 +35888,14 @@ components: example: - id: datadog-logs-destination inputs: - - filter-processor + - my-processor-group type: datadog_logs items: $ref: '#/components/schemas/ObservabilityPipelineConfigDestinationItem' type: array - processors: + pipeline_type: + $ref: '#/components/schemas/ObservabilityPipelineConfigPipelineType' + processor_groups: description: A list of processor groups that transform or enrich log data. 
example: - enabled: true @@ -35791,6 +35916,17 @@ components: items: $ref: '#/components/schemas/ObservabilityPipelineConfigProcessorGroup' type: array + processors: + deprecated: true + description: 'A list of processor groups that transform or enrich log data. + + + **Deprecated:** This field is deprecated, you should now use the processor_groups + field.' + example: [] + items: + $ref: '#/components/schemas/ObservabilityPipelineConfigProcessorGroup' + type: array sources: description: A list of configured data sources for the pipeline. example: @@ -35806,25 +35942,40 @@ components: ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' - - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - $ref: '#/components/schemas/AzureStorageDestination' - - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCloudPremDestination' + - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' - - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' - $ref: '#/components/schemas/ObservabilityPipelineSocketDestination' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonSecurityLakeDestination' - - $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestination' - - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestination' + ObservabilityPipelineConfigPipelineType: + default: logs + description: The type of data being ingested. 
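A sketch of how the new `pipeline_type` and `processor_groups` fields compose, mirroring the example above; the IDs and queries are placeholders.

    pipeline_type: logs            # optional; defaults to logs
    processor_groups:              # supersedes the deprecated processors field
      - id: pipeline-processor-group
        include: service:my-service
        enabled: true
        processors:
          - id: filter-processor
            type: filter
            include: status:error
            enabled: true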
Defaults to `logs` if not specified. + enum: + - logs + - metrics + example: logs + type: string + x-enum-varnames: + - LOGS + - METRICS ObservabilityPipelineConfigProcessorGroup: description: A group of processors. example: @@ -35898,45 +36049,53 @@ components: description: A processor for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddHostnameProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessor' - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessor' - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineCustomProcessor' - - $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. 
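An illustrative group chaining two of the processor types listed above; the `parse_json` and `remove_fields` fields are defined later in this diff, and the removed field names are placeholders.

    processors:
      - id: parse-json-processor
        type: parse_json
        include: service:my-service
        field: message               # log field containing embedded JSON
        enabled: true
      - id: remove-fields-processor
        type: remove_fields
        include: service:my-service
        fields:                      # placeholder field names
          - temp_field
          - debug_field
        enabled: true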
oneOf: - - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' - - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' - - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' - - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' - - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' - - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' - - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' - $ref: '#/components/schemas/ObservabilityPipelineSocketSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySource' ObservabilityPipelineCrowdStrikeNextGenSiemDestination: - description: The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike + description: 'The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + + + **Supported pipeline types:** logs' properties: compression: $ref: '#/components/schemas/ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression' @@ -35964,6 +36123,8 @@ components: - inputs - encoding type: object + x-pipeline-types: + - logs ObservabilityPipelineCrowdStrikeNextGenSiemDestinationCompression: description: Compression configuration for log events. properties: @@ -36007,14 +36168,17 @@ components: x-enum-varnames: - CROWDSTRIKE_NEXT_GEN_SIEM ObservabilityPipelineCustomProcessor: - description: The `custom_processor` processor transforms events using [Vector + description: 'The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -36043,6 +36207,8 @@ components: - remaps - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineCustomProcessorRemap: description: Defines a single VRL remap rule with its own filtering and transformation logic. 
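A minimal `sources` sketch using two entries from the list above; the `amazon_s3` fields appear earlier in this diff, and the region value is an assumption for illustration.

    sources:
      - id: datadog-agent-source
        type: datadog_agent
      - id: aws-s3-source
        type: amazon_s3
        region: us-east-1            # illustrative region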
@@ -36118,12 +36284,16 @@ components: - config type: object ObservabilityPipelineDatadogAgentSource: - description: The `datadog_agent` source collects logs from the Datadog Agent. + description: 'The `datadog_agent` source collects logs/metrics from the Datadog + Agent. + + + **Supported pipeline types:** logs, metrics' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: datadog-agent-source type: string tls: @@ -36134,6 +36304,9 @@ components: - id - type type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineDatadogAgentSourceType: default: datadog_agent description: The source type. The value should always be `datadog_agent`. @@ -36144,7 +36317,10 @@ components: x-enum-varnames: - DATADOG_AGENT ObservabilityPipelineDatadogLogsDestination: - description: The `datadog_logs` destination forwards logs to Datadog Log Management. + description: 'The `datadog_logs` destination forwards logs to Datadog Log Management. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -36165,6 +36341,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogLogsDestinationType: default: datadog_logs description: The destination type. The value should always be `datadog_logs`. @@ -36174,21 +36352,60 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDatadogMetricsDestination: + description: 'The `datadog_metrics` destination forwards metrics to Datadog. + + + **Supported pipeline types:** metrics' + properties: + id: + description: The unique identifier for this component. + example: datadog-metrics-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - metric-tags-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineDatadogMetricsDestinationType' + required: + - id + - type + - inputs + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineDatadogMetricsDestinationType: + default: datadog_metrics + description: The destination type. The value should always be `datadog_metrics`. + enum: + - datadog_metrics + example: datadog_metrics + type: string + x-enum-varnames: + - DATADOG_METRICS ObservabilityPipelineDatadogTagsProcessor: - description: The `datadog_tags` processor includes or excludes specific Datadog + description: 'The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + + + **Supported pipeline types:** logs' properties: action: $ref: '#/components/schemas/ObservabilityPipelineDatadogTagsProcessorAction' display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. 
Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: datadog-tags-processor type: string @@ -36219,6 +36436,8 @@ components: - keys - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDatadogTagsProcessorAction: description: The action to take on tags with matching keys. enum: @@ -36261,12 +36480,15 @@ components: - DECODE_JSON - DECODE_SYSLOG ObservabilityPipelineDedupeProcessor: - description: The `dedupe` processor removes duplicate fields in log events. + description: 'The `dedupe` processor removes duplicate fields in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -36298,6 +36520,8 @@ components: - mode - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineDedupeProcessorMode: description: The deduplication mode to apply to the fields. enum: @@ -36318,8 +36542,11 @@ components: x-enum-varnames: - DEDUPE ObservabilityPipelineElasticsearchDestination: - description: The `elasticsearch` destination writes logs to an Elasticsearch + description: 'The `elasticsearch` destination writes logs to an Elasticsearch cluster. + + + **Supported pipeline types:** logs' properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' @@ -36327,6 +36554,8 @@ components: description: The index to write logs to in Elasticsearch. example: logs-index type: string + data_stream: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationDataStream' id: description: The unique identifier for this component. example: elasticsearch-destination @@ -36346,6 +36575,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineElasticsearchDestinationApiVersion: description: The Elasticsearch API version to use. Set to `auto` to auto-detect. enum: @@ -36360,6 +36591,23 @@ components: - V6 - V7 - V8 + ObservabilityPipelineElasticsearchDestinationDataStream: + description: Configuration options for writing to Elasticsearch Data Streams + instead of a fixed index. + properties: + dataset: + description: The data stream dataset for your logs. This groups logs by + their source or application. + type: string + dtype: + description: The data stream type for your logs. This determines how logs + are categorized within the data stream. + type: string + namespace: + description: The data stream namespace for your logs. This separates logs + into different environments or domains. + type: string + type: object ObservabilityPipelineElasticsearchDestinationType: default: elasticsearch description: The destination type. The value should always be `elasticsearch`. @@ -36499,13 +36747,17 @@ components: - path type: object ObservabilityPipelineEnrichmentTableProcessor: - description: The `enrichment_table` processor enriches logs using a static CSV - file or GeoIP database. + description: 'The `enrichment_table` processor enriches logs using a static + CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, + or `reference_table` must be configured. 
+ + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean file: @@ -36521,6 +36773,8 @@ components: targets. example: source:my-source type: string + reference_table: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableReferenceTable' target: description: Path where enrichment results should be stored in the log. example: enriched.geoip @@ -36534,6 +36788,8 @@ components: - target - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineEnrichmentTableProcessorType: default: enrichment_table description: The processor type. The value should always be `enrichment_table`. @@ -36543,6 +36799,28 @@ components: type: string x-enum-varnames: - ENRICHMENT_TABLE + ObservabilityPipelineEnrichmentTableReferenceTable: + description: Uses a Datadog reference table to enrich logs. + properties: + columns: + description: List of column names to include from the reference table. If + not provided, all columns are included. + items: + type: string + type: array + key_field: + description: Path to the field in the log event to match against the reference + table. + example: log.user.id + type: string + table_id: + description: The unique identifier of the reference table. + example: 550e8400-e29b-41d4-a716-446655440000 + type: string + required: + - key_field + - table_id + type: object ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: @@ -36559,26 +36837,29 @@ components: - value type: object ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + description: 'The `filter` processor allows conditional processing of logs/metrics + based on a Datadog search query. Logs/metrics that match the `include` query + are passed through; others are discarded. + + + **Supported pipeline types:** logs, metrics' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: filter-processor type: string include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. + description: A Datadog search query used to determine which logs/metrics + should pass through the filter. Logs/metrics that match this query continue + to downstream components; others are dropped. example: service:my-service type: string type: @@ -36589,6 +36870,9 @@ components: - include - enabled type: object + x-pipeline-types: + - logs + - metrics ObservabilityPipelineFilterProcessorType: default: filter description: The processor type. The value should always be `filter`. 
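A sketch of the new `reference_table` variant (exactly one of `file`, `geoip`, or `reference_table` may be set); `table_id` and `key_field` reuse the schema examples, while the processor ID, target path, and column names are placeholders.

    - id: enrichment-table-processor
      type: enrichment_table
      include: source:my-source
      target: enriched.user
      reference_table:
        table_id: 550e8400-e29b-41d4-a716-446655440000
        key_field: log.user.id
        columns:                     # optional; omit to include all columns
          - name
          - team
      enabled: true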
@@ -36599,11 +36883,14 @@ components: x-enum-varnames: - FILTER ObservabilityPipelineFluentBitSource: - description: The `fluent_bit` source ingests logs from Fluent Bit. + description: 'The `fluent_bit` source ingests logs from Fluent Bit. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: fluent-source type: string @@ -36615,6 +36902,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentBitSourceType: default: fluent_bit description: The source type. The value should always be `fluent_bit`. @@ -36625,11 +36914,14 @@ components: x-enum-varnames: - FLUENT_BIT ObservabilityPipelineFluentdSource: - description: The `fluentd` source ingests logs from a Fluentd-compatible service. + description: 'The `fluentd` source ingests logs from a Fluentd-compatible service. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: fluent-source type: string @@ -36641,6 +36933,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineFluentdSourceType: default: fluentd description: The source type. The value should always be `fluentd. @@ -36665,12 +36959,15 @@ components: from logs and sends them to Datadog. Metrics can be counters, gauges, or distributions and optionally grouped by - log fields.' + log fields. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -36695,6 +36992,8 @@ components: - type - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineGenerateMetricsProcessorType: default: generate_datadog_metrics description: The processor type. Always `generate_datadog_metrics`. @@ -36789,7 +37088,10 @@ components: - GAUGE - DISTRIBUTION ObservabilityPipelineGoogleChronicleDestination: - description: The `google_chronicle` destination sends logs to Google Chronicle. + description: 'The `google_chronicle` destination sends logs to Google Chronicle. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36823,6 +37125,8 @@ components: - inputs - customer_id type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleChronicleDestinationEncoding: description: The encoding format for the logs sent to Chronicle. enum: @@ -36846,7 +37150,10 @@ components: description: 'The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. - It requires a bucket name, GCP authentication, and metadata fields.' + It requires a bucket name, GCP authentication, and metadata fields. 
+ + + **Supported pipeline types:** logs' properties: acl: $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' @@ -36888,6 +37195,8 @@ components: - bucket - storage_class type: object + x-pipeline-types: + - logs ObservabilityPipelineGoogleCloudStorageDestinationAcl: description: Access control list setting for objects written to the bucket. enum: @@ -36930,8 +37239,11 @@ components: x-enum-varnames: - GOOGLE_CLOUD_STORAGE ObservabilityPipelineGooglePubSubDestination: - description: The `google_pubsub` destination publishes logs to a Google Cloud + description: 'The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' @@ -36969,6 +37281,8 @@ components: - project - topic type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubDestinationEncoding: description: Encoding format for log events. enum: @@ -36989,17 +37303,20 @@ components: x-enum-varnames: - GOOGLE_PUBSUB ObservabilityPipelineGooglePubSubSource: - description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + description: 'The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + + + **Supported pipeline types:** logs' properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' decoding: $ref: '#/components/schemas/ObservabilityPipelineDecoding' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: google-pubsub-source type: string project: @@ -37021,6 +37338,8 @@ components: - project - subscription type: object + x-pipeline-types: + - logs ObservabilityPipelineGooglePubSubSourceType: default: google_pubsub description: The source type. The value should always be `google_pubsub`. @@ -37030,18 +37349,103 @@ components: type: string x-enum-varnames: - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientDestination: + description: 'The `http_client` destination sends data to an HTTP endpoint. + + + **Supported pipeline types:** logs, metrics' + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationAuthStrategy' + compression: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationEncoding' + id: + description: The unique identifier for this component. + example: http-client-destination + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this component. + example: + - filter-processor + items: + type: string + type: array + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationType' + required: + - id + - type + - inputs + - encoding + type: object + x-pipeline-types: + - logs + - metrics + ObservabilityPipelineHttpClientDestinationAuthStrategy: + description: HTTP authentication strategy. 
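A sketch of the new `http_client` destination; the enum values come from the schemas that follow, and the IDs are placeholders.

    - id: http-client-destination
      type: http_client
      inputs:
        - filter-processor
      encoding: json            # required; json is the only encoding
      auth_strategy: basic      # optional; none | basic | bearer
      compression:
        algorithm: gzip         # optional; gzip is the only algorithm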
+ enum: + - none + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - NONE + - BASIC + - BEARER + ObservabilityPipelineHttpClientDestinationCompression: + description: Compression configuration for HTTP requests. + properties: + algorithm: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + required: + - algorithm + type: object + ObservabilityPipelineHttpClientDestinationCompressionAlgorithm: + description: Compression algorithm. + enum: + - gzip + example: gzip + type: string + x-enum-varnames: + - GZIP + ObservabilityPipelineHttpClientDestinationEncoding: + description: Encoding format for log events. + enum: + - json + example: json + type: string + x-enum-varnames: + - JSON + ObservabilityPipelineHttpClientDestinationType: + default: http_client + description: The destination type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT ObservabilityPipelineHttpClientSource: - description: The `http_client` source scrapes logs from HTTP endpoints at regular + description: 'The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' decoding: $ref: '#/components/schemas/ObservabilityPipelineDecoding' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: http-client-source type: string scrape_interval_secs: @@ -37063,14 +37467,18 @@ components: - type - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpClientSourceAuthStrategy: description: Optional authentication strategy for HTTP requests. enum: + - none - basic - bearer example: basic type: string x-enum-varnames: + - NONE - BASIC - BEARER ObservabilityPipelineHttpClientSourceType: @@ -37083,8 +37491,11 @@ components: x-enum-varnames: - HTTP_CLIENT ObservabilityPipelineHttpServerSource: - description: The `http_server` source collects logs over HTTP POST from external + description: 'The `http_server` source collects logs over HTTP POST from external services. + + + **Supported pipeline types:** logs' properties: auth_strategy: $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' @@ -37104,6 +37515,8 @@ components: - auth_strategy - decoding type: object + x-pipeline-types: + - logs ObservabilityPipelineHttpServerSourceAuthStrategy: description: HTTP authentication method. enum: @@ -37123,50 +37536,125 @@ components: type: string x-enum-varnames: - HTTP_SERVER - ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + ObservabilityPipelineKafkaDestination: + description: 'The `kafka` destination sends logs to Apache Kafka topics. + + + **Supported pipeline types:** logs' properties: - group_id: - description: Consumer group ID used by the Kafka client. 
- example: consumer-group-0 + compression: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationCompression' + encoding: + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationEncoding' + headers_key: + description: The field name to use for Kafka message headers. + example: headers type: string id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). - example: kafka-source + description: The unique identifier for this component. + example: kafka-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + key_field: + description: The field name to use as the Kafka message key. + example: message_id type: string librdkafka_options: - description: Optional list of advanced Kafka client configuration options, + description: Optional list of advanced Kafka producer configuration options, defined as key-value pairs. items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' type: array + message_timeout_ms: + description: Maximum time in milliseconds to wait for message delivery confirmation. + example: 300000 + format: int64 + minimum: 1 + type: integer + rate_limit_duration_secs: + description: Duration in seconds for the rate limit window. + example: 1 + format: int64 + minimum: 1 + type: integer + rate_limit_num: + description: Maximum number of messages allowed per rate limit duration. + example: 1000 + format: int64 + minimum: 1 + type: integer sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + socket_timeout_ms: + description: Socket timeout in milliseconds for network requests. + example: 60000 + format: int64 + maximum: 300000 + minimum: 10 + type: integer tls: $ref: '#/components/schemas/ObservabilityPipelineTls' - topics: - description: A list of Kafka topic names to subscribe to. The source ingests - messages from each topic specified. - example: - - topic1 - - topic2 - items: - type: string - type: array + topic: + description: The Kafka topic name to publish logs to. + example: logs-topic + type: string type: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + $ref: '#/components/schemas/ObservabilityPipelineKafkaDestinationType' required: - id - type - - group_id - - topics + - inputs + - topic + - encoding type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: + x-pipeline-types: + - logs + ObservabilityPipelineKafkaDestinationCompression: + description: Compression codec for Kafka messages. + enum: + - none + - gzip + - snappy + - lz4 + - zstd + example: gzip + type: string + x-enum-varnames: + - NONE + - GZIP + - SNAPPY + - LZ4 + - ZSTD + ObservabilityPipelineKafkaDestinationEncoding: + description: Encoding format for log events. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineKafkaDestinationType: + default: kafka + description: The destination type. The value should always be `kafka`. 
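A sketch of the new `kafka` destination built from the schema examples above; the topic, key field, and SASL mechanism are illustrative choices.

    - id: kafka-destination
      type: kafka
      inputs:
        - filter-processor
      topic: logs-topic
      encoding: json               # json | raw_message
      compression: gzip            # optional; none | gzip | snappy | lz4 | zstd
      key_field: message_id        # optional Kafka message key
      message_timeout_ms: 300000   # optional delivery timeout
      sasl:
        mechanism: SCRAM-SHA-256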
+ enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineKafkaLibrdkafkaOption: description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. + client options for Kafka source and destination, such as timeouts, buffer + sizes, and security settings. properties: name: description: The name of the `librdkafka` configuration option to set. @@ -37181,12 +37669,68 @@ components: - name - value type: object - ObservabilityPipelineKafkaSourceSasl: + ObservabilityPipelineKafkaSasl: description: Specifies the SASL mechanism for authenticating with a Kafka cluster. properties: mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + $ref: '#/components/schemas/ObservabilityPipelineKafkaSaslMechanism' type: object + ObservabilityPipelineKafkaSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 + ObservabilityPipelineKafkaSource: + description: 'The `kafka` source ingests data from Apache Kafka topics. + + + **Supported pipeline types:** logs' + properties: + group_id: + description: Consumer group ID used by the Kafka client. + example: consumer-group-0 + type: string + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: kafka-source + type: string + librdkafka_options: + description: Optional list of advanced Kafka client configuration options, + defined as key-value pairs. + items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaLibrdkafkaOption' + type: array + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSasl' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topics: + description: A list of Kafka topic names to subscribe to. The source ingests + messages from each topic specified. + example: + - topic1 + - topic2 + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + required: + - id + - type + - group_id + - topics + type: object + x-pipeline-types: + - logs ObservabilityPipelineKafkaSourceType: default: kafka description: The source type. The value should always be `kafka`. @@ -37197,12 +37741,15 @@ components: x-enum-varnames: - KAFKA ObservabilityPipelineLogstashSource: - description: The `logstash` source ingests logs from a Logstash forwarder. + description: 'The `logstash` source ingests logs from a Logstash forwarder. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: logstash-source type: string tls: @@ -37213,6 +37760,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineLogstashSourceType: default: logstash description: The source type. The value should always be `logstash`. 
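The `kafka` source keeps its earlier shape but now shares the `librdkafka` option and SASL schemas with the destination; a sketch using the schema's own example values:

    - id: kafka-source
      type: kafka
      group_id: consumer-group-0
      topics:
        - topic1
        - topic2
      sasl:
        mechanism: PLAIN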
@@ -37237,13 +37786,112 @@ components: - name - value type: object + ObservabilityPipelineMetricTagsProcessor: + description: 'The `metric_tags` processor filters metrics based on their tags + using Datadog tag key patterns. + + + **Supported pipeline types:** metrics' + properties: + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: metric-tags-processor + type: string + include: + description: A Datadog search query that determines which metrics the processor + targets. + example: '*' + type: string + rules: + description: A list of rules for filtering metric tags. + items: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRule' + maxItems: 100 + minItems: 1 + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorType' + required: + - id + - type + - include + - rules + - enabled + type: object + x-pipeline-types: + - metrics + ObservabilityPipelineMetricTagsProcessorRule: + description: Defines a rule for filtering metric tags based on key patterns. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleAction' + include: + description: A Datadog search query used to determine which metrics this + rule targets. + example: '*' + type: string + keys: + description: A list of tag keys to include or exclude. + example: + - env + - service + - version + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineMetricTagsProcessorRuleMode' + required: + - include + - mode + - action + - keys + type: object + ObservabilityPipelineMetricTagsProcessorRuleAction: + description: The action to take on tags with matching keys. + enum: + - include + - exclude + example: include + type: string + x-enum-varnames: + - INCLUDE + - EXCLUDE + ObservabilityPipelineMetricTagsProcessorRuleMode: + description: The processing mode for tag filtering. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineMetricTagsProcessorType: + default: metric_tags + description: The processor type. The value should always be `metric_tags`. + enum: + - metric_tags + example: metric_tags + type: string + x-enum-varnames: + - METRIC_TAGS ObservabilityPipelineMetricValue: description: Specifies how the value of the generated metric is computed. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' ObservabilityPipelineNewRelicDestination: - description: The `new_relic` destination sends logs to the New Relic platform. + description: 'The `new_relic` destination sends logs to the New Relic platform. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37267,6 +37915,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineNewRelicDestinationRegion: description: The New Relic region. 
enum: @@ -37287,13 +37937,16 @@ components: x-enum-varnames: - NEW_RELIC ObservabilityPipelineOcsfMapperProcessor: - description: The `ocsf_mapper` processor transforms logs into the OCSF schema + description: 'The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -37320,6 +37973,8 @@ components: - mappings - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineOcsfMapperProcessorMapping: description: Defines how specific events are transformed to OCSF using a mapping configuration. @@ -37379,7 +38034,10 @@ components: - OKTA_SYSTEM_LOG_AUTHENTICATION - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC ObservabilityPipelineOpenSearchDestination: - description: The `opensearch` destination writes logs to an OpenSearch cluster. + description: 'The `opensearch` destination writes logs to an OpenSearch cluster. + + + **Supported pipeline types:** logs' properties: bulk_index: description: The index to write logs to. @@ -37404,6 +38062,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineOpenSearchDestinationType: default: opensearch description: The destination type. The value should always be `opensearch`. @@ -37413,9 +38073,56 @@ components: type: string x-enum-varnames: - OPENSEARCH + ObservabilityPipelineOpentelemetrySource: + description: 'The `opentelemetry` source receives telemetry data using the OpenTelemetry + Protocol (OTLP) over gRPC and HTTP. + + + **Supported pipeline types:** logs' + properties: + grpc_address_key: + description: Environment variable name containing the gRPC server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_GRPC_ADDRESS + type: string + http_address_key: + description: Environment variable name containing the HTTP server address + for receiving OTLP data. Must be a valid environment variable name (alphanumeric + characters and underscores only). + example: OTEL_HTTP_ADDRESS + type: string + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: opentelemetry-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineOpentelemetrySourceType' + required: + - id + - type + type: object + x-pipeline-types: + - logs + ObservabilityPipelineOpentelemetrySourceType: + default: opentelemetry + description: The source type. The value should always be `opentelemetry`. + enum: + - opentelemetry + example: opentelemetry + type: string + x-enum-varnames: + - OPENTELEMETRY ObservabilityPipelineParseGrokProcessor: - description: The `parse_grok` processor extracts structured fields from unstructured + description: 'The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. 
+ + + **Supported pipeline types:** logs' properties: disable_library_rules: default: false @@ -37426,7 +38133,7 @@ components: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: @@ -37454,6 +38161,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseGrokProcessorRule: description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule defines how to extract structured fields @@ -37533,14 +38242,17 @@ components: x-enum-varnames: - PARSE_GROK ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field + description: 'The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean field: @@ -37567,6 +38279,8 @@ components: - field - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineParseJSONProcessorType: default: parse_json description: The processor type. The value should always be `parse_json`. @@ -37576,37 +38290,101 @@ components: type: string x-enum-varnames: - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + ObservabilityPipelineParseXMLProcessor: + description: 'The `parse_xml` processor parses XML from a specified field and + extracts it into the event. + + + **Supported pipeline types:** logs' + properties: + always_use_text_key: + description: Whether to always use a text key for element content. + type: boolean + attr_prefix: + description: The prefix to use for XML attributes in the parsed output. + type: string + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + field: + description: The name of the log field that contains an XML string. + example: message + type: string + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: parse-xml-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + include_attr: + description: Whether to include XML attributes in the parsed output. + type: boolean + parse_bool: + description: Whether to parse boolean values from strings. + type: boolean + parse_null: + description: Whether to parse null values. + type: boolean + parse_number: + description: Whether to parse numeric values from strings. + type: boolean + text_key: + description: The key name to use for text content within XML elements. Must + be at least 1 character if specified. 
+ minLength: 1 + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineParseXMLProcessorType' + required: + - id + - type + - include + - field + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineParseXMLProcessorType: + default: parse_xml + description: The processor type. The value should always be `parse_xml`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - parse_xml + example: parse_xml type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 + - PARSE_XML ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match + description: 'The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: quota-processor type: string @@ -37641,6 +38419,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -37651,6 +38431,8 @@ components: - limit - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineQuotaProcessorLimit: description: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. @@ -37678,7 +38460,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -37722,13 +38505,16 @@ components: x-enum-varnames: - QUOTA ObservabilityPipelineReduceProcessor: - description: The `reduce` processor aggregates and merges logs based on matching + description: 'The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. 
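A hedged sketch of the new `parse_xml` processor completed above: the required fields are `id`, `type`, `include`, `field`, and `enabled`; the parsing toggles and `text_key` are optional, and the values chosen here (`value` as the text key, numeric and boolean coercion enabled) are illustrative, not defaults.

```json
{
  "id": "parse-xml-processor",
  "type": "parse_xml",
  "include": "service:my-service",
  "field": "message",
  "parse_number": true,
  "parse_bool": true,
  "text_key": "value",
  "enabled": true
}
```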
example: true type: boolean group_by: @@ -37764,6 +38550,8 @@ components: - merge_strategies - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineReduceProcessorMergeStrategy: description: Defines how a specific field should be merged across grouped events. properties: @@ -37817,12 +38605,15 @@ components: x-enum-varnames: - REDUCE ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + description: 'The `remove_fields` processor deletes specified fields from logs. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -37834,9 +38625,9 @@ components: type: string type: array id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: remove-fields-processor type: string include: @@ -37853,6 +38644,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRemoveFieldsProcessorType: default: remove_fields description: The processor type. The value should always be `remove_fields`. @@ -37863,12 +38656,15 @@ components: x-enum-varnames: - REMOVE_FIELDS ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + description: 'The `rename_fields` processor changes field names. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean fields: @@ -37898,6 +38694,8 @@ components: - fields - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineRenameFieldsProcessorField: description: Defines how to rename a field in log events. properties: @@ -37929,8 +38727,11 @@ components: x-enum-varnames: - RENAME_FIELDS ObservabilityPipelineRsyslogDestination: - description: The `rsyslog` destination forwards logs to an external `rsyslog` + description: 'The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -37959,6 +38760,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogDestinationType: default: rsyslog description: The destination type. The value should always be `rsyslog`. @@ -37969,13 +38772,16 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineRsyslogSource: - description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` - server using the syslog protocol. + description: 'The `rsyslog` source listens for logs over TCP or UDP from an + `rsyslog` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. 
Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: rsyslog-source type: string mode: @@ -37989,6 +38795,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineRsyslogSourceType: default: rsyslog description: The source type. The value should always be `rsyslog`. @@ -37999,18 +38807,31 @@ components: x-enum-varnames: - RSYSLOG ObservabilityPipelineSampleProcessor: - description: The `sample` processor allows probabilistic sampling of logs at + description: 'The `sample` processor allows probabilistic sampling of logs at a fixed rate. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. + example: + - service + - host + items: + type: string + minItems: 1 + type: array id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` to downstream components). example: sample-processor type: string @@ -38024,20 +38845,17 @@ components: example: 10.0 format: double type: number - rate: - description: Number of events to sample (1 in N). - example: 10 - format: int64 - minimum: 1 - type: integer type: $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' required: - id - type - include + - percentage - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSampleProcessorType: default: sample description: The processor type. The value should always be `sample`. @@ -38048,19 +38866,22 @@ components: x-enum-varnames: - SAMPLE ObservabilityPipelineSensitiveDataScannerProcessor: - description: The `sensitive_data_scanner` processor detects and optionally redacts - sensitive data in log events. + description: 'The `sensitive_data_scanner` processor detects and optionally + redacts sensitive data in log events. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: sensitive-scanner type: string include: @@ -38083,6 +38904,8 @@ components: - rules - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineSensitiveDataScannerProcessorAction: description: Defines what action to take when sensitive data is matched. 
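The `sample` processor schema above replaces the removed integer `rate` (1-in-N) with a now-required `percentage`, and adds an optional `group_by`. A conforming entry, using the schema's own example values:

```json
{
  "id": "sample-processor",
  "type": "sample",
  "include": "service:my-service",
  "percentage": 10.0,
  "group_by": ["service", "host"],
  "enabled": true
}
```

With `group_by` set, each distinct (`service`, `host`) combination is sampled independently at the configured percentage.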
oneOf: @@ -38204,6 +39027,11 @@ components: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: description: Options for defining a custom regex pattern. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule. + example: Custom regex for internal API keys + type: string rule: description: A regular expression used to detect sensitive values. Must be a valid regex. @@ -38259,6 +39087,11 @@ components: description: Options for selecting a predefined library pattern and enabling keyword support. properties: + description: + description: Human-readable description providing context about a sensitive + data scanner rule. + example: Credit card pattern + type: string id: description: Identifier for a predefined pattern from the sensitive data scanner pattern library. @@ -38400,7 +39233,10 @@ components: x-enum-varnames: - SENSITIVE_DATA_SCANNER ObservabilityPipelineSentinelOneDestination: - description: The `sentinel_one` destination sends logs to SentinelOne. + description: 'The `sentinel_one` destination sends logs to SentinelOne. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38424,6 +39260,8 @@ components: - inputs - region type: object + x-pipeline-types: + - logs ObservabilityPipelineSentinelOneDestinationRegion: description: The SentinelOne region to send logs to. enum: @@ -38448,8 +39286,11 @@ components: x-enum-varnames: - SENTINEL_ONE ObservabilityPipelineSocketDestination: - description: The `socket` destination sends logs over TCP or UDP to a remote + description: 'The `socket` destination sends logs over TCP or UDP to a remote server. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSocketDestinationEncoding' @@ -38482,6 +39323,8 @@ components: - framing - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketDestinationEncoding: description: Encoding format for log events. enum: @@ -38576,14 +39419,17 @@ components: x-enum-varnames: - SOCKET ObservabilityPipelineSocketSource: - description: The `socket` source ingests logs over TCP or UDP. + description: 'The `socket` source ingests logs over TCP or UDP. + + + **Supported pipeline types:** logs' properties: framing: $ref: '#/components/schemas/ObservabilityPipelineSocketSourceFraming' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: socket-source type: string mode: @@ -38599,6 +39445,8 @@ components: - mode - framing type: object + x-pipeline-types: + - logs ObservabilityPipelineSocketSourceFraming: description: Framing method configuration for the socket source. oneOf: @@ -38739,9 +39587,79 @@ components: - type - attributes type: object + ObservabilityPipelineSplitArrayProcessor: + description: 'The `split_array` processor splits array fields into separate + events based on configured rules. + + + **Supported pipeline types:** logs' + properties: + arrays: + description: A list of array split configurations. 
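The new optional `description` on the sensitive-data-scanner pattern options above is free-form context for humans reading the rule. A custom pattern options object might look like the following; the regex itself is a hypothetical example, not a library pattern:

```json
{
  "description": "Custom regex for internal API keys",
  "rule": "\\bAKIA[0-9A-Z]{16}\\b"
}
```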
+ items: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorArrayConfig' + maxItems: 15 + minItems: 1 + type: array + display_name: + $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' + enabled: + description: Indicates whether the processor is enabled. + example: true + type: boolean + id: + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). + example: split-array-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. For `split_array`, this should typically be `*`. + example: '*' + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineSplitArrayProcessorType' + required: + - id + - type + - include + - arrays + - enabled + type: object + x-pipeline-types: + - logs + ObservabilityPipelineSplitArrayProcessorArrayConfig: + description: Configuration for a single array split operation. + properties: + field: + description: The path to the array field to split. + example: tags + type: string + include: + description: A Datadog search query used to determine which logs this array + split operation targets. + example: '*' + type: string + required: + - include + - field + type: object + ObservabilityPipelineSplitArrayProcessorType: + default: split_array + description: The processor type. The value should always be `split_array`. + enum: + - split_array + example: split_array + type: string + x-enum-varnames: + - SPLIT_ARRAY ObservabilityPipelineSplunkHecDestination: - description: The `splunk_hec` destination forwards logs to Splunk using the + description: 'The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + + + **Supported pipeline types:** logs' properties: auto_extract_timestamp: description: 'If `true`, Splunk tries to extract timestamps from incoming @@ -38753,9 +39671,9 @@ components: encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: splunk-hec-destination type: string index: @@ -38781,6 +39699,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecDestinationEncoding: description: Encoding format for log events. enum: @@ -38801,13 +39721,16 @@ components: x-enum-varnames: - SPLUNK_HEC ObservabilityPipelineSplunkHecSource: - description: The `splunk_hec` source implements the Splunk HTTP Event Collector + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). 
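Pulling the `split_array` schema above together: the processor takes between 1 and 15 array configs, each naming the `field` to split and carrying its own `include` query. A minimal conforming entry, using the schema's example values:

```json
{
  "id": "split-array-processor",
  "type": "split_array",
  "include": "*",
  "arrays": [
    { "field": "tags", "include": "*" }
  ],
  "enabled": true
}
```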
example: splunk-hec-source type: string tls: @@ -38818,6 +39741,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkHecSourceType: default: splunk_hec description: The source type. Always `splunk_hec`. @@ -38831,12 +39756,15 @@ components: description: 'The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. - TLS is supported for secure transmission.' + TLS is supported for secure transmission. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: splunk-tcp-source type: string tls: @@ -38847,6 +39775,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSplunkTcpSourceType: default: splunk_tcp description: The source type. Always `splunk_tcp`. @@ -38857,7 +39787,10 @@ components: x-enum-varnames: - SPLUNK_TCP ObservabilityPipelineSumoLogicDestination: - description: The `sumo_logic` destination forwards logs to Sumo Logic. + description: 'The `sumo_logic` destination forwards logs to Sumo Logic. + + + **Supported pipeline types:** logs' properties: encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' @@ -38898,6 +39831,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicDestinationEncoding: description: The output encoding format. enum: @@ -38935,12 +39870,15 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSumoLogicSource: - description: The `sumo_logic` source receives logs from Sumo Logic collectors. + description: 'The `sumo_logic` source receives logs from Sumo Logic collectors. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: sumo-logic-source type: string type: @@ -38949,6 +39887,8 @@ components: - id - type type: object + x-pipeline-types: + - logs ObservabilityPipelineSumoLogicSourceType: default: sumo_logic description: The source type. The value should always be `sumo_logic`. @@ -38959,8 +39899,11 @@ components: x-enum-varnames: - SUMO_LOGIC ObservabilityPipelineSyslogNgDestination: - description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + description: 'The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: description: The unique identifier for this component. @@ -38989,6 +39932,8 @@ components: - type - inputs type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgDestinationType: default: syslog_ng description: The destination type. The value should always be `syslog_ng`. 
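A sketch of a `splunk_tcp` source conforming to the schema above: only `id` and `type` are required, and the optional `tls` block follows the `ObservabilityPipelineTls` shape seen in the recorded cassettes below (the certificate paths here are illustrative).

```json
{
  "id": "splunk-tcp-source",
  "type": "splunk_tcp",
  "tls": {
    "crt_file": "/etc/ssl/certs/splunk.crt",
    "key_file": "/etc/ssl/private/splunk.key"
  }
}
```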
@@ -38999,13 +39944,16 @@ components: x-enum-varnames: - SYSLOG_NG ObservabilityPipelineSyslogNgSource: - description: The `syslog_ng` source listens for logs over TCP or UDP from a + description: 'The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + + + **Supported pipeline types:** logs' properties: id: - description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (e.g., as input to downstream - components). + description: The unique identifier for this component. Used in other parts + of the pipeline to reference this component (for example, as the `input` + to downstream components). example: syslog-ng-source type: string mode: @@ -39019,6 +39967,8 @@ components: - type - mode type: object + x-pipeline-types: + - logs ObservabilityPipelineSyslogNgSourceType: default: syslog_ng description: The source type. The value should always be `syslog_ng`. @@ -39039,13 +39989,16 @@ components: - TCP - UDP ObservabilityPipelineThrottleProcessor: - description: The `throttle` processor limits the number of events that pass + description: 'The `throttle` processor limits the number of events that pass through over a given time window. + + + **Supported pipeline types:** logs' properties: display_name: $ref: '#/components/schemas/ObservabilityPipelineComponentDisplayName' enabled: - description: Whether this processor is enabled. + description: Indicates whether the processor is enabled. example: true type: boolean group_by: @@ -39086,6 +40039,8 @@ components: - window - enabled type: object + x-pipeline-types: + - logs ObservabilityPipelineThrottleProcessorType: default: throttle description: The processor type. The value should always be `throttle`. @@ -76499,6 +77454,222 @@ paths: summary: Get all aggregated DNS traffic tags: - Cloud Network Monitoring + /api/v2/obs-pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + post: + description: Create a new pipeline. 
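One more component sketch before the path definitions: a `syslog_ng` source requires `id`, `type`, and `mode`. Assuming the same lowercase mode values used by the analogous `rsyslog` source (`tcp`/`udp`), a minimal entry looks like:

```json
{
  "id": "syslog-ng-source",
  "type": "syslog_ng",
  "mode": "tcp"
}
```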
+ operationId: CreatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '201': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any.' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Validate an observability pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + /api/v2/obs-pipelines/pipelines/{pipeline_id}: + delete: + description: Delete a pipeline. + operationId: DeletePipeline + parameters: + - description: The ID of the pipeline to delete. + in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '204': + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '404': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Not Found + '409': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Conflict + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Delete a pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_delete + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + get: + description: Get a specific pipeline by its ID. + operationId: GetPipeline + parameters: + - description: The ID of the pipeline to retrieve. 
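The recorded cassettes further down in this diff give the exact request shape for `CreatePipeline` and `ValidatePipeline` at the new `/api/v2/obs-pipelines/pipelines` paths. Abbreviated from those recordings, a request body looks like the following; note the top-level `processor_groups` key, which replaces the old `processors` key in requests:

```json
{
  "data": {
    "type": "pipelines",
    "attributes": {
      "name": "Main Observability Pipeline",
      "config": {
        "sources": [
          { "id": "datadog-agent-source", "type": "datadog_agent" }
        ],
        "processor_groups": [
          {
            "id": "my-processor-group",
            "include": "service:my-service",
            "inputs": ["datadog-agent-source"],
            "enabled": true,
            "processors": [
              { "id": "filter-processor", "type": "filter", "include": "status:error", "enabled": true }
            ]
          }
        ],
        "destinations": [
          { "id": "datadog-logs-destination", "type": "datadog_logs", "inputs": ["my-processor-group"] }
        ]
      }
    }
  }
}
```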
+ in: path + name: pipeline_id + required: true + schema: + type: string + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '403': + content: + application/json: + schema: + $ref: '#/components/schemas/APIErrorResponse' + description: Forbidden + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Get a specific pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' + put: + description: Update a pipeline. + operationId: UpdatePipeline + parameters: + - description: The ID of the pipeline to update. + in: path + name: pipeline_id + required: true + schema: + type: string + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + required: true + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipeline' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '404': + $ref: '#/components/responses/NotFoundResponse' + '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Update a pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) + to request access.' /api/v2/on-call/escalation-policies: post: description: Create a new On-Call escalation policy @@ -80157,222 +81328,6 @@ paths: tags: - CSM Threats x-codegen-request-body-name: body - /api/v2/remote_config/products/obs_pipelines/pipelines: - get: - description: Retrieve a list of pipelines. - operationId: ListPipelines - parameters: - - $ref: '#/components/parameters/PageSize' - - $ref: '#/components/parameters/PageNumber' - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ListPipelinesResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: List pipelines - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - post: - description: Create a new pipeline. 
- operationId: CreatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '201': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/validate: - post: - description: 'Validates a pipeline configuration without creating or updating - any resources. - - Returns a list of validation errors, if any.' - operationId: ValidatePipeline - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipelineSpec' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ValidationResponse' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Validate an observability pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: - delete: - description: Delete a pipeline. - operationId: DeletePipeline - parameters: - - description: The ID of the pipeline to delete. - in: path - name: pipeline_id - required: true - schema: - type: string - responses: - '204': - description: OK - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden - '404': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Not Found - '409': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Delete a pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_delete - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - get: - description: Get a specific pipeline by its ID. - operationId: GetPipeline - parameters: - - description: The ID of the pipeline to retrieve. 
- in: path - name: pipeline_id - required: true - schema: - type: string - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Get a specific pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_read - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' - put: - description: Update a pipeline. - operationId: UpdatePipeline - parameters: - - description: The ID of the pipeline to update. - in: path - name: pipeline_id - required: true - schema: - type: string - requestBody: - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - required: true - responses: - '200': - content: - application/json: - schema: - $ref: '#/components/schemas/ObservabilityPipeline' - description: OK - '400': - $ref: '#/components/responses/BadRequestResponse' - '403': - $ref: '#/components/responses/NotAuthorizedResponse' - '404': - $ref: '#/components/responses/NotFoundResponse' - '409': - $ref: '#/components/responses/ConflictResponse' - '429': - $ref: '#/components/responses/TooManyRequestsResponse' - summary: Update a pipeline - tags: - - Observability Pipelines - x-permission: - operator: OR - permissions: - - observability_pipelines_deploy - x-unstable: '**Note**: This endpoint is in Preview. Fill out this [form](https://www.datadoghq.com/product-preview/observability-pipelines-api-and-terraform-support/) - to request access.' /api/v2/restriction_policy/{resource_id}: delete: description: Deletes the restriction policy associated with a specified resource. 
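To summarize the path migration above: the Observability Pipelines endpoints move from `/api/v2/remote_config/products/obs_pipelines/pipelines` to `/api/v2/obs-pipelines/pipelines`, with unchanged operation IDs, permissions, and Preview status. For `UpdatePipeline`, the PUT body is the full `ObservabilityPipeline` resource rather than just the spec; a sketch based on the recorded responses (the `id` is illustrative, and an empty `processor_groups` list is valid per the list-response recordings):

```json
{
  "data": {
    "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6",
    "type": "pipelines",
    "attributes": {
      "name": "Main Observability Pipeline",
      "config": {
        "sources": [
          { "id": "datadog-agent-source", "type": "datadog_agent" }
        ],
        "processor_groups": [],
        "destinations": [
          { "id": "datadog-logs-destination", "type": "datadog_logs", "inputs": ["datadog-agent-source"] }
        ]
      }
    }
  }
}
```

As the re-recorded cassettes below show, responses currently return the config under both `processor_groups` and the legacy `processors` key; clients should read `processor_groups` going forward.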
diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen index 73133f1c75e9..69f96e35ddbf 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:15.575Z \ No newline at end of file +2026-01-09T15:42:36.842Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml index 868bbb7746b9..e8386cc0e65b 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-Bad-Request-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:15 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:36 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: Accept: @@ -11,7 +11,7 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen index 20165353d5c2..c09316ab61d2 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:16.062Z \ No newline at end of file +2026-01-09T15:42:37.370Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml index 67d4c2f969aa..9a4eff364953 100644 --- a/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Create-a-new-pipeline-returns-OK-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:16 GMT +- 
recorded_at: Fri, 09 Jan 2026 15:42:37 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: Accept: @@ -11,12 +11,12 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: '{"data":{"id":"bd8d693c-dc2c-11f0-bf69-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} + string: '{"data":{"id":"d33cceac-ed71-11f0-bd8c-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' headers: @@ -25,14 +25,14 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:16 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:37 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bd8d693c-dc2c-11f0-bf69-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d33cceac-ed71-11f0-bd8c-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen index 85deda9192b8..e7cb42324d99 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen +++ 
b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.165Z \ No newline at end of file +2026-01-09T15:42:38.750Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml index 5d29be3f71d4..eb8b6b1052fc 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-Not-Found-response.yml @@ -1,12 +1,12 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:38 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen index 201ee9bda87c..f1c85866024a 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:17.716Z \ No newline at end of file +2026-01-09T15:42:39.305Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml index 9f60b32d4ad6..bd60617f2dc7 100644 --- a/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Delete-a-pipeline-returns-OK-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:39 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -13,12 +13,14 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: '{"data":{"id":"be89fea4-dc2c-11f0-bdea-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + 
string: '{"data":{"id":"d46478de-ed71-11f0-99c8-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,14 +31,14 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:39 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d46478de-ed71-11f0-99c8-da7ad0900002 response: body: encoding: UTF-8 @@ -47,14 +49,14 @@ http_interactions: status: code: 204 message: No Content -- recorded_at: Thu, 18 Dec 2025 16:15:17 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:39 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be89fea4-dc2c-11f0-bdea-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d46478de-ed71-11f0-99c8-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen index 2da6d0e5c1dc..6b8e4b640650 100644 --- a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:20.018Z \ No newline at end of file +2026-01-09T15:42:41.635Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml index 2f3f18afc38f..f2053d8a7930 100644 --- a/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Get-a-specific-pipeline-returns-OK-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:20 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:41 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor 
Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -13,12 +13,14 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"d5da36ae-ed71-11f0-bd8e-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,19 +31,21 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:20 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:41 GMT request: body: null headers: Accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d5da36ae-ed71-11f0-bd8e-da7ad0900002 response: body: encoding: UTF-8 - string: '{"data":{"id":"bfe664a4-dc2c-11f0-bdec-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"d5da36ae-ed71-11f0-bd8e-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -52,14 +56,14 @@ 
http_interactions: status: code: 200 message: OK -- recorded_at: Thu, 18 Dec 2025 16:15:20 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:41 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bfe664a4-dc2c-11f0-bdec-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d5da36ae-ed71-11f0-bd8e-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen index a1ae2640d7db..12cd810b8fa7 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.038Z \ No newline at end of file +2026-01-09T15:42:44.442Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml index 597549ed76f8..8a8414ce9a0c 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-Bad-Request-response.yml @@ -1,12 +1,12 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:44 GMT request: body: null headers: Accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines?page%5Bsize%5D=0 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines?page%5Bsize%5D=0 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen index fb745af4ce5f..f91e733f3eb6 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:22.507Z \ No newline at end of file +2026-01-09T15:42:45.631Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml index 7e35e7aa83a4..36267c49dc25 100644 --- a/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/List-pipelines-returns-OK-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:45 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter 
Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -13,12 +13,14 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: '{"data":{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"d831c96c-ed71-11f0-99ca-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,21 +31,80 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:45 GMT request: body: null headers: Accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: '{"data":[{"id":"c162e83e-dc2c-11f0-bf6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":[{"id":"4bf478ba-dc68-11f0-87e9-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"a78e416a-de66-11f0-a039-da7ad0900002","type":"pipelines","attributes":{"name":"http-server-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"auth_strategy":"plain","decoding":"json","id":"http-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/http.crt","key_file":"/etc/ssl/private/http.key"},"type":"http_server"}]}}},{"id":"a84fd58c-de66-11f0-a03b-da7ad0900002","type":"pipelines","attributes":{"name":"amazon_s3-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["s3-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"auth":{"assume_role":"arn:aws:iam::123456789012:role/test-role","external_id":"external-test-id","session_name":"session-test"},"id":"s3-source-1","region":"us-east-1","tls":{"ca_file":"/etc/ssl/certs/s3.ca","crt_file":"/etc/ssl/certs/s3.crt","key_file":"/etc/ssl/private/s3.key"},"type":"amazon_s3"}]}}},{"id":"a42e22e0-df49-11f0-81d5-da7ad0900002","type":"pipelines","attributes":{"name":"dedupe + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["dedupe-group-2"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"dedupe-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":["log.message","log.tags"],"id":"dedupe-match","include":"*","mode":"match","type":"dedupe"}]},{"enabled":true,"id":"dedupe-group-2","include":"*","inputs":["dedupe-group-1"],"processors":[{"enabled":true,"fields":["log.source","log.context"],"id":"dedupe-ignore","include":"*","mode":"ignore","type":"dedupe"}]}],"processors":[{"enabled":true,"id":"dedupe-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":["log.message","log.tags"],"id":"dedupe-match","include":"*","mode":"match","type":"dedupe"}]},{"enabled":true,"id":"dedupe-group-2","include":"*","inputs":["dedupe-group-1"],"processors":[{"enabled":true,"fields":["log.source","log.context"],"id":"dedupe-ignore","include":"*","mode":"ignore","type":"dedupe"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"2cd3c342-e0c2-11f0-9d34-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-group-1"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"add-fields-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":[{"name":"custom.field","value":"hello-world"},{"name":"env","value":"prod"}],"id":"add-fields-1","include":"*","type":"add_fields"}]}],"processors":[{"enabled":true,"id":"add-fields-group-1","include":"*","inputs":["source-1"],"processors":[{"enabled":true,"fields":[{"name":"custom.field","value":"hello-world"},{"name":"env","value":"prod"}],"id":"add-fields-1","include":"*","type":"add_f
ields"}]}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"20f4849c-e579-11f0-af79-da7ad0900002","type":"pipelines","attributes":{"name":"fluent-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["fluent-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"id":"fluent-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/fluent.crt","key_file":"/etc/ssl/private/fluent.key"},"type":"fluentd"}]}}},{"id":"15621afe-e669-11f0-bec3-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"dfbeb25a-e6c1-11f0-9bc1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"923fbdb6-e771-11f0-9388-da7ad0900002","type":"pipelines","attributes":{"name":"http-client + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["http-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"auth_strategy":"basic","decoding":"json","id":"http-source-1","scrape_interval_secs":60,"scrape_timeout_secs":10,"tls":{"crt_file":"/path/to/http.crt"},"type":"http_client"}]}}},{"id":"a7b600ce-e771-11f0-939c-da7ad0900002","type":"pipelines","attributes":{"name":"newrelic + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["source-1"],"region":"us","type":"new_relic"}],"processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"306bab4c-e904-11f0-aa8a-da7ad0900002","type":"pipelines","attributes":{"name":"splunk-hec-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["splunk-hec-source-1"],"type":"datadog_logs"}],"processor_groups":[],"processors":[],"sources":[{"id":"splunk-hec-source-1","tls":{"ca_file":"/etc/ssl/certs/ca.crt","crt_file":"/etc/ssl/certs/splunk.crt","key_file":"/etc/ssl/private/splunk.key"},"type":"splunk_hec"}]}}},{"id":"51faefca-e922-11f0-a260-da7ad0900002","type":"pipelines","attributes":{"name":"Main + 
Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"8d025dea-ea96-11f0-8a79-da7ad0900002","type":"pipelines","attributes":{"name":"crowdstrike-next-gen-siem-destination-pipeline-basic","config":{"destinations":[{"encoding":"raw_message","id":"crowdstrike-dest-basic-1","inputs":["source-1"],"type":"crowdstrike_next_gen_siem"}],"processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"ed4d493e-eabf-11f0-852d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"29a560ae-ec7a-11f0-a7f4-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"65fb8a3e-ec92-11f0-8664-da7ad0900002","type":"pipelines","attributes":{"name":"amazon + s3 
pipeline","config":{"destinations":[{"auth":{"assume_role":"arn:aws:iam::123456789012:role/example-role","external_id":"external-id-123","session_name":"s3-session"},"bucket":"my-logs-bucket","id":"s3-dest-1","inputs":["source-1"],"key_prefix":"logs/","region":"us-east-1","storage_class":"STANDARD","type":"amazon_s3"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"31a1d8e0-ec9e-11f0-baa1-da7ad0900002","type":"pipelines","attributes":{"name":"rsyslog-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["rsyslog-source-1"],"type":"datadog_logs"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"rsyslog-source-1","mode":"tcp","tls":{"crt_file":"/etc/certs/rsyslog.crt"},"type":"rsyslog"}]}}},{"id":"328f42d8-ec9e-11f0-baa3-da7ad0900002","type":"pipelines","attributes":{"name":"syslogng-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["syslogng-source-1"],"type":"datadog_logs"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"syslogng-source-1","mode":"udp","tls":{"crt_file":"/etc/certs/syslogng.crt"},"type":"syslog_ng"}]}}},{"id":"335c38ba-ec9e-11f0-baa5-da7ad0900002","type":"pipelines","attributes":{"name":"rsyslog-dest-pipeline","config":{"destinations":[{"id":"rsyslog-destination-1","inputs":["source-1"],"keepalive":60000,"tls":{"crt_file":"/etc/certs/rsyslog.crt"},"type":"rsyslog"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"b669fdac-eca6-11f0-9a8c-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"bfb53c8e-ecb8-11f0-b85b-da7ad0900002","type":"pipelines","attributes":{"name":"syslogng-source-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["syslogng-source-1"],"type":"datadog_logs"}],"pipeline_type":"logs","processor_groups":[],"processors":[],"sources":[{"id":"syslogng-source-1","mode":"udp","tls":{"crt_file":"/etc/certs/syslogng.crt"},"type":"syslog_ng"}]}}},{"id":"51cf915c-ed4d-11f0-a300-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"52b0c636-ed4d-11f0-b3ff-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"5314eb34-ed4d-11f0-b401-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"53a48b72-ed4d-11f0-b403-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"5400fc90-ed4d-11f0-a302-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"54ab9b78-ed4d-11f0-a304-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"f352d0f0-ed54-11f0-92c4-da7ad0900002","type":"pipelines","attributes":{"name":"gcs-destination-pipeline","config":{"destinations":[{"acl":"project-private","auth":{"credentials_file":"/var/secrets/gcp-creds.json"},"bucket":"my-gcs-bucket","id":"gcs-destination-1","inputs":["source-1"],"key_prefix":"logs/","metadata":[{"name":"environment","value":"production"},{"name":"team","value":"platform"}],"storage_class":"NEARLINE","type":"google_cloud_storage"}],"processor_groups":[],"processors":[],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"d831c96c-ed71-11f0-99ca-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My - Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":1}} + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":29}} ' headers: @@ -52,14 +113,14 @@ http_interactions: status: code: 200 message: OK -- recorded_at: Thu, 18 Dec 2025 16:15:22 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:45 GMT request: body: 
null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c162e83e-dc2c-11f0-bf6b-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/d831c96c-ed71-11f0-99ca-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen index 92a0e9377f04..dd2c3e2c78c5 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:24.455Z \ No newline at end of file +2026-01-09T15:42:48.887Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml index ac916bfc6dd7..eb342c57df77 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Bad-Request-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:24 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:48 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -13,12 +13,14 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: '{"data":{"id":"c28a5ad0-dc2c-11f0-bdee-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"da187384-ed71-11f0-bd90-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My 
Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,11 +31,11 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:24 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:48 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"unknown-processor","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"id":"3fa85f64-5717-4562-b3fc-2c963f66afa6","type":"pipelines"}}' headers: Accept: @@ -41,7 +43,7 @@ http_interactions: Content-Type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/da187384-ed71-11f0-bd90-da7ad0900002 response: body: encoding: UTF-8 @@ -58,14 +60,14 @@ http_interactions: status: code: 400 message: Bad Request -- recorded_at: Thu, 18 Dec 2025 16:15:24 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:48 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c28a5ad0-dc2c-11f0-bdee-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/da187384-ed71-11f0-bd90-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen index b083fd34ae0b..9290a5103893 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.411Z \ No newline at end of file +2026-01-09T15:42:51.159Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml index e7c88d0aad7f..c42e7d5ac13b 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-Not-Found-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:51 GMT request: body: encoding: UTF-8 - string: 
'{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"id":"3fa85f64-5717-4562-b3fc-2c963f66afa6","type":"pipelines"}}' headers: Accept: @@ -11,7 +11,7 @@ http_interactions: Content-Type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/3fa85f64-5717-4562-b3fc-2c963f66afa6 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen index b9cbd5fcac4f..f264c1022b64 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:26.929Z \ No newline at end of file +2026-01-09T15:42:51.716Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml index 3f765505ace2..e236777412cc 100644 --- a/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Update-a-pipeline-returns-OK-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:51 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' @@ -13,12 +13,14 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines response: body: encoding: UTF-8 - string: 
'{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Main - Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processors":[{"display_name":"My + string: '{"data":{"id":"dbc8f316-ed71-11f0-99cc-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["processor-group-0"],"type":"datadog_logs"}],"processor_groups":[{"display_name":"My + Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My + Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"display_name":"My Processor Group","enabled":true,"id":"processor-group-0","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"display_name":"My Filter Processor","enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} @@ -29,11 +31,11 @@ http_interactions: status: code: 201 message: Created -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:51 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Updated + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Updated Pipeline Name"},"id":"3fa85f64-5717-4562-b3fc-2c963f66afa6","type":"pipelines"}}' headers: Accept: @@ -41,12 +43,12 @@ http_interactions: Content-Type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/dbc8f316-ed71-11f0-99cc-da7ad0900002 response: body: encoding: UTF-8 - string: '{"data":{"id":"c40401e0-dc2c-11f0-bf6d-da7ad0900002","type":"pipelines","attributes":{"name":"Updated - Pipeline Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} + string: '{"data":{"id":"dbc8f316-ed71-11f0-99cc-da7ad0900002","type":"pipelines","attributes":{"name":"Updated + Pipeline 
Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' headers: @@ -55,14 +57,14 @@ http_interactions: status: code: 200 message: OK -- recorded_at: Thu, 18 Dec 2025 16:15:26 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:51 GMT request: body: null headers: Accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c40401e0-dc2c-11f0-bf6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/dbc8f316-ed71-11f0-99cc-da7ad0900002 response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen index 745856e49a6d..6c17934ae249 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.179Z \ No newline at end of file +2026-01-09T15:42:54.207Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml index 4db7a093b5c7..449325bced07 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-Bad-Request-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:29 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:54 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: Accept: @@ -11,7 +11,7 @@ http_interactions: Content-Type: - application/json method: POST - uri: 
https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/validate + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/validate response: body: encoding: UTF-8 diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen index ecfcc54162aa..25983fc2c35e 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.frozen @@ -1 +1 @@ -2025-12-18T16:15:29.647Z \ No newline at end of file +2026-01-09T15:42:54.714Z \ No newline at end of file diff --git a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml index 28bb324a6496..1a0914d5f244 100644 --- a/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml +++ b/cassettes/features/v2/observability_pipelines/Validate-an-observability-pipeline-returns-OK-response.yml @@ -1,9 +1,9 @@ http_interactions: -- recorded_at: Thu, 18 Dec 2025 16:15:29 GMT +- recorded_at: Fri, 09 Jan 2026 15:42:54 GMT request: body: encoding: UTF-8 - string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processors":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + string: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["my-processor-group"],"type":"datadog_logs"}],"processor_groups":[{"enabled":true,"id":"my-processor-group","include":"service:my-service","inputs":["datadog-agent-source"],"processors":[{"enabled":true,"id":"filter-processor","include":"status:error","type":"filter"}]}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main Observability Pipeline"},"type":"pipelines"}}' headers: Accept: @@ -11,7 +11,7 @@ http_interactions: Content-Type: - application/json method: POST - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/validate + uri: https://api.datadoghq.com/api/v2/obs-pipelines/pipelines/validate response: body: encoding: UTF-8 diff --git a/examples/v2/observability-pipelines/CreatePipeline.rb b/examples/v2/observability-pipelines/CreatePipeline.rb index 26427950224e..fd052c1ac10f 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.rb +++ b/examples/v2/observability-pipelines/CreatePipeline.rb @@ -19,7 +19,7 @@ type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS, }), ], - processors: [ + processor_groups: [ DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({ enabled: true, id: "my-processor-group", diff --git a/examples/v2/observability-pipelines/UpdatePipeline.rb b/examples/v2/observability-pipelines/UpdatePipeline.rb index dc510dc3dde1..d4591d88914b 100644 --- a/examples/v2/observability-pipelines/UpdatePipeline.rb +++ b/examples/v2/observability-pipelines/UpdatePipeline.rb @@ -22,7 +22,7 
@@ type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS, }), ], - processors: [ + processor_groups: [ DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({ enabled: true, id: "my-processor-group", diff --git a/examples/v2/observability-pipelines/ValidatePipeline.rb b/examples/v2/observability-pipelines/ValidatePipeline.rb index ab3f267c04a2..01ff8243ed1b 100644 --- a/examples/v2/observability-pipelines/ValidatePipeline.rb +++ b/examples/v2/observability-pipelines/ValidatePipeline.rb @@ -19,7 +19,7 @@ type: DatadogAPIClient::V2::ObservabilityPipelineDatadogLogsDestinationType::DATADOG_LOGS, }), ], - processors: [ + processor_groups: [ DatadogAPIClient::V2::ObservabilityPipelineConfigProcessorGroup.new({ enabled: true, id: "my-processor-group", diff --git a/features/scenarios_model_mapping.rb b/features/scenarios_model_mapping.rb index 50840f4f7593..dece58574f17 100644 --- a/features/scenarios_model_mapping.rb +++ b/features/scenarios_model_mapping.rb @@ -2633,6 +2633,26 @@ "tags" => "String", "limit" => "Integer", }, + "v2.ListPipelines" => { + "page_size" => "Integer", + "page_number" => "Integer", + }, + "v2.CreatePipeline" => { + "body" => "ObservabilityPipelineSpec", + }, + "v2.ValidatePipeline" => { + "body" => "ObservabilityPipelineSpec", + }, + "v2.DeletePipeline" => { + "pipeline_id" => "String", + }, + "v2.GetPipeline" => { + "pipeline_id" => "String", + }, + "v2.UpdatePipeline" => { + "pipeline_id" => "String", + "body" => "ObservabilityPipeline", + }, "v2.CreateOnCallEscalationPolicy" => { "include" => "String", "body" => "EscalationPolicyCreateRequest", @@ -2977,26 +2997,6 @@ "agent_rule_id" => "String", "body" => "CloudWorkloadSecurityAgentRuleUpdateRequest", }, - "v2.ListPipelines" => { - "page_size" => "Integer", - "page_number" => "Integer", - }, - "v2.CreatePipeline" => { - "body" => "ObservabilityPipelineSpec", - }, - "v2.ValidatePipeline" => { - "body" => "ObservabilityPipelineSpec", - }, - "v2.DeletePipeline" => { - "pipeline_id" => "String", - }, - "v2.GetPipeline" => { - "pipeline_id" => "String", - }, - "v2.UpdatePipeline" => { - "pipeline_id" => "String", - "body" => "ObservabilityPipeline", - }, "v2.DeleteRestrictionPolicy" => { "resource_id" => "String", }, diff --git a/features/v2/given.json b/features/v2/given.json index aa08181879c7..3a01a060c5bd 100644 --- a/features/v2/given.json +++ b/features/v2/given.json @@ -727,6 +727,18 @@ "tag": "Monitors", "operationId": "CreateMonitorUserTemplate" }, + { + "parameters": [ + { + "name": "body", + "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processor_groups\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" + } + ], + "step": "there is a valid \"pipeline\" in the system", + "key": "pipeline", + "tag": "Observability Pipelines", + "operationId": "CreatePipeline" + }, { "parameters": [ { @@ -879,18 +891,6 
@@ "tag": "CSM Threats", "operationId": "CreateCSMThreatsAgentPolicy" }, - { - "parameters": [ - { - "name": "body", - "value": "{\n \"data\":{\n \"attributes\":{\n \"config\":{\n \"destinations\":[\n {\n \"id\":\"datadog-logs-destination\",\n \"inputs\":[\n \"processor-group-0\"\n ],\n \"type\":\"datadog_logs\"\n }\n ],\n \"processors\":[\n {\n \"id\":\"processor-group-0\",\n \"include\":\"service:my-service\",\n \"display_name\": \"My Processor Group\",\n \"inputs\":[\n \"datadog-agent-source\"\n ],\n \"enabled\": true,\n \"processors\": [\n {\n \"id\": \"filter-processor\",\n \"type\": \"filter\",\n \"include\": \"status:error\",\n \"display_name\": \"My Filter Processor\",\n \"enabled\": true\n }\n ]\n }\n ],\n \"sources\":[\n {\n \"id\":\"datadog-agent-source\",\n \"type\":\"datadog_agent\"\n }\n ]\n },\n \"name\":\"Main Observability Pipeline\"\n },\n \"type\":\"pipelines\"\n }\n}" - } - ], - "step": "there is a valid \"pipeline\" in the system", - "key": "pipeline", - "tag": "Observability Pipelines", - "operationId": "CreatePipeline" - }, { "parameters": [ { diff --git a/features/v2/observability_pipelines.feature b/features/v2/observability_pipelines.feature index c43fa8b3b767..37f1ec8c7472 100644 --- a/features/v2/observability_pipelines.feature +++ b/features/v2/observability_pipelines.feature @@ -12,7 +12,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Bad Request" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -20,7 +20,7 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "Conflict" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": 
{"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "pipeline_type": "logs", "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "processors": [], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -28,14 +28,14 @@ Feature: Observability Pipelines Scenario: Create a new pipeline returns "OK" response Given operation "CreatePipeline" enabled And new "CreatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 201 OK And the response "data" has field "id" And the response "data.type" is equal to "pipelines" And the response "data.attributes.name" is equal to "Main Observability Pipeline" And the response "data.attributes.config.sources" has length 1 - And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.processor_groups" has length 1 And the response "data.attributes.config.destinations" has length 1 @generated @skip @team:DataDog/observability-pipelines @@ -75,7 +75,7 @@ Feature: Observability Pipelines And the response "data.type" is equal to "pipelines" And the response "data.attributes.name" is equal to "Main Observability Pipeline" And the response "data.attributes.config.sources" has length 1 - And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.processor_groups" has length 1 And the response "data.attributes.config.destinations" has length 1 @team:DataDog/observability-pipelines @@ -97,7 +97,7 @@ Feature: Observability Pipelines And the response "data[0].type" is equal to "pipelines" And the response "data[0].attributes.name" is equal to "Main Observability Pipeline" And the response "data[0].attributes.config.sources" has length 1 - And the response "data[0].attributes.config.processors" has length 1 + And the response "data[0].attributes.config.processor_groups" has length 1 And the response "data[0].attributes.config.destinations" has length 1 @team:DataDog/observability-pipelines @@ -106,7 
+106,7 @@ Feature: Observability Pipelines And new "UpdatePipeline" request And there is a valid "pipeline" in the system And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "unknown-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request @@ -115,7 +115,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "REPLACE.ME" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "pipeline_type": "logs", "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}, {"enabled": true, "field": "message", "id": "json-processor", "include": "*", "type": "parse_json"}]}], "processors": [], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 409 Conflict @@ -124,7 +124,7 @@ Feature: Observability Pipelines Given operation "UpdatePipeline" enabled And new "UpdatePipeline" request And request contains "pipeline_id" parameter with value "3fa85f64-5717-4562-b3fc-2c963f66afa6" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": 
"my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 404 Not Found @@ -134,14 +134,14 @@ Feature: Observability Pipelines And there is a valid "pipeline" in the system And new "UpdatePipeline" request And request contains "pipeline_id" parameter from "pipeline.data.id" - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "updated-datadog-logs-destination-id", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Updated Pipeline Name"}, "id": "3fa85f64-5717-4562-b3fc-2c963f66afa6", "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "data" has field "id" And the response "data.type" is equal to "pipelines" And the response "data.attributes.name" is equal to "Updated Pipeline Name" And the response "data.attributes.config.sources" has length 1 - And the response "data.attributes.config.processors" has length 1 + And the response "data.attributes.config.processor_groups" has length 1 And the response "data.attributes.config.destinations" has length 1 And the response "data.attributes.config.destinations[0].id" is equal to "updated-datadog-logs-destination-id" @@ -149,7 +149,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "Bad Request" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], 
"processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 400 Bad Request And the response "errors[0].title" is equal to "Field 'include' is required" @@ -161,7 +161,7 @@ Feature: Observability Pipelines Scenario: Validate an observability pipeline returns "OK" response Given operation "ValidatePipeline" enabled And new "ValidatePipeline" request - And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processors": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["my-processor-group"], "type": "datadog_logs"}], "processor_groups": [{"enabled": true, "id": "my-processor-group", "include": "service:my-service", "inputs": ["datadog-agent-source"], "processors": [{"enabled": true, "id": "filter-processor", "include": "status:error", "type": "filter"}]}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} When the request is sent Then the response status is 200 OK And the response "errors" has length 0 diff --git a/features/v2/undo.json b/features/v2/undo.json index b54378e93ba6..1b8a3c101a6c 100644 --- a/features/v2/undo.json +++ b/features/v2/undo.json @@ -2838,6 +2838,49 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "CreatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "operationId": "DeletePipeline", + "parameters": [ + { + "name": "pipeline_id", + "source": "data.id" + } + ], + "type": "unsafe" + } + }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "DeletePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, + "GetPipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, + "UpdatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "idempotent" + } + }, "CreateOnCallEscalationPolicy": { "tag": "On-Call", "undo": { @@ -3449,49 +3492,6 @@ "type": "idempotent" } }, - "ListPipelines": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "CreatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "operationId": "DeletePipeline", - "parameters": [ - { - "name": "pipeline_id", - "source": "data.id" - } - ], - 
"type": "unsafe" - } - }, - "ValidatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "DeletePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "idempotent" - } - }, - "GetPipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "safe" - } - }, - "UpdatePipeline": { - "tag": "Observability Pipelines", - "undo": { - "type": "idempotent" - } - }, "DeleteRestrictionPolicy": { "tag": "Restriction Policies", "undo": { diff --git a/lib/datadog_api_client/configuration.rb b/lib/datadog_api_client/configuration.rb index 3257b422c9b1..cf26b3990676 100644 --- a/lib/datadog_api_client/configuration.rb +++ b/lib/datadog_api_client/configuration.rb @@ -301,6 +301,12 @@ def initialize "v2.update_monitor_user_template": false, "v2.validate_existing_monitor_user_template": false, "v2.validate_monitor_user_template": false, + "v2.create_pipeline": false, + "v2.delete_pipeline": false, + "v2.get_pipeline": false, + "v2.list_pipelines": false, + "v2.update_pipeline": false, + "v2.validate_pipeline": false, "v2.list_role_templates": false, "v2.create_connection": false, "v2.delete_connection": false, @@ -312,12 +318,6 @@ def initialize "v2.query_event_filtered_users": false, "v2.query_users": false, "v2.update_connection": false, - "v2.create_pipeline": false, - "v2.delete_pipeline": false, - "v2.get_pipeline": false, - "v2.list_pipelines": false, - "v2.update_pipeline": false, - "v2.validate_pipeline": false, "v2.create_scorecard_outcomes_batch": false, "v2.create_scorecard_rule": false, "v2.delete_scorecard_rule": false, diff --git a/lib/datadog_api_client/inflector.rb b/lib/datadog_api_client/inflector.rb index 345f1ec65e1a..90b191311e09 100644 --- a/lib/datadog_api_client/inflector.rb +++ b/lib/datadog_api_client/inflector.rb @@ -3156,6 +3156,8 @@ def overrides "v2.observability_pipeline_add_env_vars_processor_variable" => "ObservabilityPipelineAddEnvVarsProcessorVariable", "v2.observability_pipeline_add_fields_processor" => "ObservabilityPipelineAddFieldsProcessor", "v2.observability_pipeline_add_fields_processor_type" => "ObservabilityPipelineAddFieldsProcessorType", + "v2.observability_pipeline_add_hostname_processor" => "ObservabilityPipelineAddHostnameProcessor", + "v2.observability_pipeline_add_hostname_processor_type" => "ObservabilityPipelineAddHostnameProcessorType", "v2.observability_pipeline_amazon_data_firehose_source" => "ObservabilityPipelineAmazonDataFirehoseSource", "v2.observability_pipeline_amazon_data_firehose_source_type" => "ObservabilityPipelineAmazonDataFirehoseSourceType", "v2.observability_pipeline_amazon_open_search_destination" => "ObservabilityPipelineAmazonOpenSearchDestination", @@ -3170,8 +3172,11 @@ def overrides "v2.observability_pipeline_amazon_security_lake_destination" => "ObservabilityPipelineAmazonSecurityLakeDestination", "v2.observability_pipeline_amazon_security_lake_destination_type" => "ObservabilityPipelineAmazonSecurityLakeDestinationType", "v2.observability_pipeline_aws_auth" => "ObservabilityPipelineAwsAuth", + "v2.observability_pipeline_cloud_prem_destination" => "ObservabilityPipelineCloudPremDestination", + "v2.observability_pipeline_cloud_prem_destination_type" => "ObservabilityPipelineCloudPremDestinationType", "v2.observability_pipeline_config" => "ObservabilityPipelineConfig", "v2.observability_pipeline_config_destination_item" => "ObservabilityPipelineConfigDestinationItem", + "v2.observability_pipeline_config_pipeline_type" => "ObservabilityPipelineConfigPipelineType", 
"v2.observability_pipeline_config_processor_group" => "ObservabilityPipelineConfigProcessorGroup", "v2.observability_pipeline_config_processor_item" => "ObservabilityPipelineConfigProcessorItem", "v2.observability_pipeline_config_source_item" => "ObservabilityPipelineConfigSourceItem", @@ -3189,6 +3194,8 @@ def overrides "v2.observability_pipeline_datadog_agent_source_type" => "ObservabilityPipelineDatadogAgentSourceType", "v2.observability_pipeline_datadog_logs_destination" => "ObservabilityPipelineDatadogLogsDestination", "v2.observability_pipeline_datadog_logs_destination_type" => "ObservabilityPipelineDatadogLogsDestinationType", + "v2.observability_pipeline_datadog_metrics_destination" => "ObservabilityPipelineDatadogMetricsDestination", + "v2.observability_pipeline_datadog_metrics_destination_type" => "ObservabilityPipelineDatadogMetricsDestinationType", "v2.observability_pipeline_datadog_tags_processor" => "ObservabilityPipelineDatadogTagsProcessor", "v2.observability_pipeline_datadog_tags_processor_action" => "ObservabilityPipelineDatadogTagsProcessorAction", "v2.observability_pipeline_datadog_tags_processor_mode" => "ObservabilityPipelineDatadogTagsProcessorMode", @@ -3199,6 +3206,7 @@ def overrides "v2.observability_pipeline_dedupe_processor_type" => "ObservabilityPipelineDedupeProcessorType", "v2.observability_pipeline_elasticsearch_destination" => "ObservabilityPipelineElasticsearchDestination", "v2.observability_pipeline_elasticsearch_destination_api_version" => "ObservabilityPipelineElasticsearchDestinationApiVersion", + "v2.observability_pipeline_elasticsearch_destination_data_stream" => "ObservabilityPipelineElasticsearchDestinationDataStream", "v2.observability_pipeline_elasticsearch_destination_type" => "ObservabilityPipelineElasticsearchDestinationType", "v2.observability_pipeline_enrichment_table_file" => "ObservabilityPipelineEnrichmentTableFile", "v2.observability_pipeline_enrichment_table_file_encoding" => "ObservabilityPipelineEnrichmentTableFileEncoding", @@ -3210,6 +3218,7 @@ def overrides "v2.observability_pipeline_enrichment_table_geo_ip" => "ObservabilityPipelineEnrichmentTableGeoIp", "v2.observability_pipeline_enrichment_table_processor" => "ObservabilityPipelineEnrichmentTableProcessor", "v2.observability_pipeline_enrichment_table_processor_type" => "ObservabilityPipelineEnrichmentTableProcessorType", + "v2.observability_pipeline_enrichment_table_reference_table" => "ObservabilityPipelineEnrichmentTableReferenceTable", "v2.observability_pipeline_field_value" => "ObservabilityPipelineFieldValue", "v2.observability_pipeline_filter_processor" => "ObservabilityPipelineFilterProcessor", "v2.observability_pipeline_filter_processor_type" => "ObservabilityPipelineFilterProcessorType", @@ -3238,19 +3247,35 @@ def overrides "v2.observability_pipeline_google_pub_sub_destination_type" => "ObservabilityPipelineGooglePubSubDestinationType", "v2.observability_pipeline_google_pub_sub_source" => "ObservabilityPipelineGooglePubSubSource", "v2.observability_pipeline_google_pub_sub_source_type" => "ObservabilityPipelineGooglePubSubSourceType", + "v2.observability_pipeline_http_client_destination" => "ObservabilityPipelineHttpClientDestination", + "v2.observability_pipeline_http_client_destination_auth_strategy" => "ObservabilityPipelineHttpClientDestinationAuthStrategy", + "v2.observability_pipeline_http_client_destination_compression" => "ObservabilityPipelineHttpClientDestinationCompression", + "v2.observability_pipeline_http_client_destination_compression_algorithm" => 
"ObservabilityPipelineHttpClientDestinationCompressionAlgorithm", + "v2.observability_pipeline_http_client_destination_encoding" => "ObservabilityPipelineHttpClientDestinationEncoding", + "v2.observability_pipeline_http_client_destination_type" => "ObservabilityPipelineHttpClientDestinationType", "v2.observability_pipeline_http_client_source" => "ObservabilityPipelineHttpClientSource", "v2.observability_pipeline_http_client_source_auth_strategy" => "ObservabilityPipelineHttpClientSourceAuthStrategy", "v2.observability_pipeline_http_client_source_type" => "ObservabilityPipelineHttpClientSourceType", "v2.observability_pipeline_http_server_source" => "ObservabilityPipelineHttpServerSource", "v2.observability_pipeline_http_server_source_auth_strategy" => "ObservabilityPipelineHttpServerSourceAuthStrategy", "v2.observability_pipeline_http_server_source_type" => "ObservabilityPipelineHttpServerSourceType", + "v2.observability_pipeline_kafka_destination" => "ObservabilityPipelineKafkaDestination", + "v2.observability_pipeline_kafka_destination_compression" => "ObservabilityPipelineKafkaDestinationCompression", + "v2.observability_pipeline_kafka_destination_encoding" => "ObservabilityPipelineKafkaDestinationEncoding", + "v2.observability_pipeline_kafka_destination_type" => "ObservabilityPipelineKafkaDestinationType", + "v2.observability_pipeline_kafka_librdkafka_option" => "ObservabilityPipelineKafkaLibrdkafkaOption", + "v2.observability_pipeline_kafka_sasl" => "ObservabilityPipelineKafkaSasl", + "v2.observability_pipeline_kafka_sasl_mechanism" => "ObservabilityPipelineKafkaSaslMechanism", "v2.observability_pipeline_kafka_source" => "ObservabilityPipelineKafkaSource", - "v2.observability_pipeline_kafka_source_librdkafka_option" => "ObservabilityPipelineKafkaSourceLibrdkafkaOption", - "v2.observability_pipeline_kafka_source_sasl" => "ObservabilityPipelineKafkaSourceSasl", "v2.observability_pipeline_kafka_source_type" => "ObservabilityPipelineKafkaSourceType", "v2.observability_pipeline_logstash_source" => "ObservabilityPipelineLogstashSource", "v2.observability_pipeline_logstash_source_type" => "ObservabilityPipelineLogstashSourceType", "v2.observability_pipeline_metadata_entry" => "ObservabilityPipelineMetadataEntry", + "v2.observability_pipeline_metric_tags_processor" => "ObservabilityPipelineMetricTagsProcessor", + "v2.observability_pipeline_metric_tags_processor_rule" => "ObservabilityPipelineMetricTagsProcessorRule", + "v2.observability_pipeline_metric_tags_processor_rule_action" => "ObservabilityPipelineMetricTagsProcessorRuleAction", + "v2.observability_pipeline_metric_tags_processor_rule_mode" => "ObservabilityPipelineMetricTagsProcessorRuleMode", + "v2.observability_pipeline_metric_tags_processor_type" => "ObservabilityPipelineMetricTagsProcessorType", "v2.observability_pipeline_metric_value" => "ObservabilityPipelineMetricValue", "v2.observability_pipeline_new_relic_destination" => "ObservabilityPipelineNewRelicDestination", "v2.observability_pipeline_new_relic_destination_region" => "ObservabilityPipelineNewRelicDestinationRegion", @@ -3262,6 +3287,8 @@ def overrides "v2.observability_pipeline_ocsf_mapping_library" => "ObservabilityPipelineOcsfMappingLibrary", "v2.observability_pipeline_open_search_destination" => "ObservabilityPipelineOpenSearchDestination", "v2.observability_pipeline_open_search_destination_type" => "ObservabilityPipelineOpenSearchDestinationType", + "v2.observability_pipeline_opentelemetry_source" => "ObservabilityPipelineOpentelemetrySource", + 
"v2.observability_pipeline_opentelemetry_source_type" => "ObservabilityPipelineOpentelemetrySourceType", "v2.observability_pipeline_parse_grok_processor" => "ObservabilityPipelineParseGrokProcessor", "v2.observability_pipeline_parse_grok_processor_rule" => "ObservabilityPipelineParseGrokProcessorRule", "v2.observability_pipeline_parse_grok_processor_rule_match_rule" => "ObservabilityPipelineParseGrokProcessorRuleMatchRule", @@ -3269,7 +3296,8 @@ def overrides "v2.observability_pipeline_parse_grok_processor_type" => "ObservabilityPipelineParseGrokProcessorType", "v2.observability_pipeline_parse_json_processor" => "ObservabilityPipelineParseJSONProcessor", "v2.observability_pipeline_parse_json_processor_type" => "ObservabilityPipelineParseJSONProcessorType", - "v2.observability_pipeline_pipeline_kafka_source_sasl_mechanism" => "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", + "v2.observability_pipeline_parse_xml_processor" => "ObservabilityPipelineParseXMLProcessor", + "v2.observability_pipeline_parse_xml_processor_type" => "ObservabilityPipelineParseXMLProcessorType", "v2.observability_pipeline_quota_processor" => "ObservabilityPipelineQuotaProcessor", "v2.observability_pipeline_quota_processor_limit" => "ObservabilityPipelineQuotaProcessorLimit", "v2.observability_pipeline_quota_processor_limit_enforce_type" => "ObservabilityPipelineQuotaProcessorLimitEnforceType", @@ -3350,6 +3378,9 @@ def overrides "v2.observability_pipeline_socket_source_type" => "ObservabilityPipelineSocketSourceType", "v2.observability_pipeline_spec" => "ObservabilityPipelineSpec", "v2.observability_pipeline_spec_data" => "ObservabilityPipelineSpecData", + "v2.observability_pipeline_split_array_processor" => "ObservabilityPipelineSplitArrayProcessor", + "v2.observability_pipeline_split_array_processor_array_config" => "ObservabilityPipelineSplitArrayProcessorArrayConfig", + "v2.observability_pipeline_split_array_processor_type" => "ObservabilityPipelineSplitArrayProcessorType", "v2.observability_pipeline_splunk_hec_destination" => "ObservabilityPipelineSplunkHecDestination", "v2.observability_pipeline_splunk_hec_destination_encoding" => "ObservabilityPipelineSplunkHecDestinationEncoding", "v2.observability_pipeline_splunk_hec_destination_type" => "ObservabilityPipelineSplunkHecDestinationType", diff --git a/lib/datadog_api_client/v2/api/observability_pipelines_api.rb b/lib/datadog_api_client/v2/api/observability_pipelines_api.rb index 2bc926176292..8f51a63e5293 100644 --- a/lib/datadog_api_client/v2/api/observability_pipelines_api.rb +++ b/lib/datadog_api_client/v2/api/observability_pipelines_api.rb @@ -54,7 +54,7 @@ def create_pipeline_with_http_info(body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.create_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines' + local_var_path = '/api/v2/obs-pipelines/pipelines' # query parameters query_params = opts[:query_params] || {} @@ -127,7 +127,7 @@ def delete_pipeline_with_http_info(pipeline_id, opts = {}) fail ArgumentError, "Missing the required parameter 'pipeline_id' when calling ObservabilityPipelinesAPI.delete_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}'.sub('{pipeline_id}', CGI.escape(pipeline_id.to_s).gsub('%2F', '/')) + local_var_path = '/api/v2/obs-pipelines/pipelines/{pipeline_id}'.sub('{pipeline_id}', CGI.escape(pipeline_id.to_s).gsub('%2F', '/')) # query parameters 
query_params = opts[:query_params] || {} @@ -198,7 +198,7 @@ def get_pipeline_with_http_info(pipeline_id, opts = {}) fail ArgumentError, "Missing the required parameter 'pipeline_id' when calling ObservabilityPipelinesAPI.get_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}'.sub('{pipeline_id}', CGI.escape(pipeline_id.to_s).gsub('%2F', '/')) + local_var_path = '/api/v2/obs-pipelines/pipelines/{pipeline_id}'.sub('{pipeline_id}', CGI.escape(pipeline_id.to_s).gsub('%2F', '/')) # query parameters query_params = opts[:query_params] || {} @@ -266,7 +266,7 @@ def list_pipelines_with_http_info(opts = {}) @api_client.config.logger.debug 'Calling API: ObservabilityPipelinesAPI.list_pipelines ...' end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines' + local_var_path = '/api/v2/obs-pipelines/pipelines' # query parameters query_params = opts[:query_params] || {} @@ -344,7 +344,7 @@ def update_pipeline_with_http_info(pipeline_id, body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.update_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}'.sub('{pipeline_id}', CGI.escape(pipeline_id.to_s).gsub('%2F', '/')) + local_var_path = '/api/v2/obs-pipelines/pipelines/{pipeline_id}'.sub('{pipeline_id}', CGI.escape(pipeline_id.to_s).gsub('%2F', '/')) # query parameters query_params = opts[:query_params] || {} @@ -418,7 +418,7 @@ def validate_pipeline_with_http_info(body, opts = {}) fail ArgumentError, "Missing the required parameter 'body' when calling ObservabilityPipelinesAPI.validate_pipeline" end # resource path - local_var_path = '/api/v2/remote_config/products/obs_pipelines/pipelines/validate' + local_var_path = '/api/v2/obs-pipelines/pipelines/validate' # query parameters query_params = opts[:query_params] || {} diff --git a/lib/datadog_api_client/v2/models/azure_storage_destination.rb b/lib/datadog_api_client/v2/models/azure_storage_destination.rb index ef29c2cade85..b17e732af9cb 100644 --- a/lib/datadog_api_client/v2/models/azure_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/azure_storage_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `azure_storage` destination forwards logs to an Azure Blob Storage container. + # + # **Supported pipeline types:** logs class AzureStorageDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb index 03266b733402..4948de659f07 100644 --- a/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb +++ b/lib/datadog_api_client/v2/models/microsoft_sentinel_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. 
+ # + # **Supported pipeline types:** logs class MicrosoftSentinelDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb index 03fce60c6e93..4b08a7c72457 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_env_vars_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `add_env_vars` processor adds environment variable values to log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAddEnvVarsProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # The unique identifier for this component. Used to reference this processor in the pipeline. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb index ee063e97ea95..f833ad9430c5 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_fields_processor.rb @@ -18,19 +18,21 @@ module DatadogAPIClient::V2 # The `add_fields` processor adds static key-value fields to logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAddFieldsProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # A list of static fields (key-value pairs) that is added to each log event processed by this component. attr_reader :fields - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # A Datadog search query used to determine which logs this processor targets. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb new file mode 100644 index 000000000000..530d847691bc --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor.rb @@ -0,0 +1,198 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `add_hostname` processor adds the hostname to log events. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineAddHostnameProcessor + include BaseGenericModel + + # The display name for a component. 
+ attr_accessor :display_name + + # Indicates whether the processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # The processor type. The value should always be `add_hostname`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'type' => :'ObservabilityPipelineAddHostnameProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [display_name, enabled, id, include, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb new file mode 100644 index 000000000000..5eca2a92ffb4 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_add_hostname_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `add_hostname`. + class ObservabilityPipelineAddHostnameProcessorType + include BaseEnumModel + + ADD_HOSTNAME = "add_hostname".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb index bd174706a449..51b01b03197c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_data_firehose_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonDataFirehoseSource include BaseGenericModel @@ -25,7 +27,7 @@ class ObservabilityPipelineAmazonDataFirehoseSource # If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). attr_accessor :auth - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. 
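For reference, a minimal sketch of building the new `add_hostname` processor model defined above. Class, attribute, and enum names come from the generated code in this diff; the component ID and search query are illustrative placeholders, not values from the source.

    # Sketch: the add_hostname processor. Per the model above, id, include,
    # enabled, and type are required.
    processor = DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessor.new({
      id: "add-hostname-processor",
      include: "service:my-service",
      enabled: true,
      type: DatadogAPIClient::V2::ObservabilityPipelineAddHostnameProcessorType::ADD_HOSTNAME,
    })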
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb index ab8721363e09..44f1c6db5b6c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_open_search_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonOpenSearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb index 6e74bafcd33b..aec7f0bbcbfe 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_s3` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonS3Destination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb index a764ab83045b..679d1d09a0e6 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_s3_source.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `amazon_s3` source ingests logs from an Amazon S3 bucket. # It supports AWS authentication and TLS encryption. + # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonS3Source include BaseGenericModel @@ -26,7 +28,7 @@ class ObservabilityPipelineAmazonS3Source # If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). attr_accessor :auth - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # AWS region where the S3 bucket resides. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb index 6904637a9723..6bc480ae9973 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_amazon_security_lake_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `amazon_security_lake` destination sends your logs to Amazon Security Lake. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineAmazonSecurityLakeDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb new file mode 100644 index 000000000000..fcc35829a771 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination.rb @@ -0,0 +1,169 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `cloud_prem` destination sends logs to Datadog CloudPrem. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineCloudPremDestination + include BaseGenericModel + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The destination type. The value should always be `cloud_prem`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'inputs' => :'inputs', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'inputs' => :'Array', + :'type' => :'ObservabilityPipelineCloudPremDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineCloudPremDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? 
+ fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + inputs == o.inputs && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [id, inputs, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb new file mode 100644 index 000000000000..8f235942f08e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_cloud_prem_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `cloud_prem`. + class ObservabilityPipelineCloudPremDestinationType + include BaseEnumModel + + CLOUD_PREM = "cloud_prem".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb index 04005274e028..0fae61d9128b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config.rb @@ -24,7 +24,15 @@ class ObservabilityPipelineConfig # A list of destination components where processed logs are sent. attr_reader :destinations + # The type of data being ingested. Defaults to `logs` if not specified. + attr_accessor :pipeline_type + + # A list of processor groups that transform or enrich log data. + attr_accessor :processor_groups + # A list of processor groups that transform or enrich log data. + # + # **Deprecated:** This field is deprecated, you should now use the processor_groups field. attr_accessor :processors # A list of configured data sources for the pipeline. 
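The `ObservabilityPipelineConfig` changes above introduce `pipeline_type` and `processor_groups`, keeping the top-level `processors` list only for backward compatibility. As a reference point, a request body in the shape exercised by the updated feature scenarios (same component IDs as those scenarios; `pipeline_type` defaults to `logs` when omitted) looks roughly like:

    # Sketch: pipeline request body using processor_groups in place of the
    # deprecated top-level processors key. Shape mirrors the feature scenarios.
    body = {
      data: {
        type: "pipelines",
        attributes: {
          name: "Main Observability Pipeline",
          config: {
            pipeline_type: "logs",
            sources: [{ id: "datadog-agent-source", type: "datadog_agent" }],
            processor_groups: [{
              id: "my-processor-group",
              include: "service:my-service",
              inputs: ["datadog-agent-source"],
              enabled: true,
              processors: [{
                id: "filter-processor",
                include: "status:error",
                enabled: true,
                type: "filter"
              }]
            }],
            destinations: [{
              id: "datadog-logs-destination",
              inputs: ["my-processor-group"],
              type: "datadog_logs"
            }]
          }
        }
      }
    }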
@@ -37,6 +45,8 @@ class ObservabilityPipelineConfig def self.attribute_map { :'destinations' => :'destinations', + :'pipeline_type' => :'pipeline_type', + :'processor_groups' => :'processor_groups', :'processors' => :'processors', :'sources' => :'sources' } @@ -47,6 +57,8 @@ def self.attribute_map def self.openapi_types { :'destinations' => :'Array', + :'pipeline_type' => :'ObservabilityPipelineConfigPipelineType', + :'processor_groups' => :'Array', :'processors' => :'Array', :'sources' => :'Array' } @@ -76,6 +88,16 @@ def initialize(attributes = {}) end end + if attributes.key?(:'pipeline_type') + self.pipeline_type = attributes[:'pipeline_type'] + end + + if attributes.key?(:'processor_groups') + if (value = attributes[:'processor_groups']).is_a?(Array) + self.processor_groups = value + end + end + if attributes.key?(:'processors') if (value = attributes[:'processors']).is_a?(Array) self.processors = value @@ -145,6 +167,8 @@ def ==(o) return true if self.equal?(o) self.class == o.class && destinations == o.destinations && + pipeline_type == o.pipeline_type && + processor_groups == o.processor_groups && processors == o.processors && sources == o.sources && additional_properties == o.additional_properties @@ -154,7 +178,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [destinations, processors, sources, additional_properties].hash + [destinations, pipeline_type, processor_groups, processors, sources, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb index 00386a1aa190..43df429f92fb 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_destination_item.rb @@ -26,25 +26,29 @@ class << self # List of class defined in oneOf (OpenAPI v3) def openapi_one_of [ - :'ObservabilityPipelineDatadogLogsDestination', + :'ObservabilityPipelineHttpClientDestination', + :'ObservabilityPipelineAmazonOpenSearchDestination', :'ObservabilityPipelineAmazonS3Destination', - :'ObservabilityPipelineGoogleCloudStorageDestination', - :'ObservabilityPipelineSplunkHecDestination', - :'ObservabilityPipelineSumoLogicDestination', - :'ObservabilityPipelineElasticsearchDestination', - :'ObservabilityPipelineRsyslogDestination', - :'ObservabilityPipelineSyslogNgDestination', + :'ObservabilityPipelineAmazonSecurityLakeDestination', :'AzureStorageDestination', - :'MicrosoftSentinelDestination', + :'ObservabilityPipelineCloudPremDestination', + :'ObservabilityPipelineCrowdStrikeNextGenSiemDestination', + :'ObservabilityPipelineDatadogLogsDestination', + :'ObservabilityPipelineElasticsearchDestination', :'ObservabilityPipelineGoogleChronicleDestination', + :'ObservabilityPipelineGoogleCloudStorageDestination', + :'ObservabilityPipelineGooglePubSubDestination', + :'ObservabilityPipelineKafkaDestination', + :'MicrosoftSentinelDestination', :'ObservabilityPipelineNewRelicDestination', - :'ObservabilityPipelineSentinelOneDestination', :'ObservabilityPipelineOpenSearchDestination', - :'ObservabilityPipelineAmazonOpenSearchDestination', + :'ObservabilityPipelineRsyslogDestination', + :'ObservabilityPipelineSentinelOneDestination', :'ObservabilityPipelineSocketDestination', - :'ObservabilityPipelineAmazonSecurityLakeDestination', - :'ObservabilityPipelineCrowdStrikeNextGenSiemDestination', - 
:'ObservabilityPipelineGooglePubSubDestination' + :'ObservabilityPipelineSplunkHecDestination', + :'ObservabilityPipelineSumoLogicDestination', + :'ObservabilityPipelineSyslogNgDestination', + :'ObservabilityPipelineDatadogMetricsDestination' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb new file mode 100644 index 000000000000..ef043dc88c52 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_pipeline_type.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The type of data being ingested. Defaults to `logs` if not specified. + class ObservabilityPipelineConfigPipelineType + include BaseEnumModel + + LOGS = "logs".freeze + METRICS = "metrics".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb index 334f6e14d8a9..9c6c290c3c53 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_processor_item.rb @@ -27,23 +27,27 @@ class << self def openapi_one_of [ :'ObservabilityPipelineFilterProcessor', + :'ObservabilityPipelineAddEnvVarsProcessor', + :'ObservabilityPipelineAddFieldsProcessor', + :'ObservabilityPipelineAddHostnameProcessor', + :'ObservabilityPipelineCustomProcessor', + :'ObservabilityPipelineDatadogTagsProcessor', + :'ObservabilityPipelineDedupeProcessor', + :'ObservabilityPipelineEnrichmentTableProcessor', + :'ObservabilityPipelineGenerateMetricsProcessor', + :'ObservabilityPipelineOcsfMapperProcessor', + :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineParseJSONProcessor', + :'ObservabilityPipelineParseXMLProcessor', :'ObservabilityPipelineQuotaProcessor', - :'ObservabilityPipelineAddFieldsProcessor', + :'ObservabilityPipelineReduceProcessor', :'ObservabilityPipelineRemoveFieldsProcessor', :'ObservabilityPipelineRenameFieldsProcessor', - :'ObservabilityPipelineGenerateMetricsProcessor', :'ObservabilityPipelineSampleProcessor', - :'ObservabilityPipelineParseGrokProcessor', :'ObservabilityPipelineSensitiveDataScannerProcessor', - :'ObservabilityPipelineOcsfMapperProcessor', - :'ObservabilityPipelineAddEnvVarsProcessor', - :'ObservabilityPipelineDedupeProcessor', - :'ObservabilityPipelineEnrichmentTableProcessor', - :'ObservabilityPipelineReduceProcessor', + :'ObservabilityPipelineSplitArrayProcessor', :'ObservabilityPipelineThrottleProcessor', - :'ObservabilityPipelineCustomProcessor', - :'ObservabilityPipelineDatadogTagsProcessor' + :'ObservabilityPipelineMetricTagsProcessor' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb index 6c5e58312de6..74579da90735 100644 --- 
a/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_config_source_item.rb @@ -26,22 +26,23 @@ class << self # List of class defined in oneOf (OpenAPI v3) def openapi_one_of [ - :'ObservabilityPipelineKafkaSource', :'ObservabilityPipelineDatadogAgentSource', - :'ObservabilityPipelineSplunkTcpSource', - :'ObservabilityPipelineSplunkHecSource', + :'ObservabilityPipelineAmazonDataFirehoseSource', :'ObservabilityPipelineAmazonS3Source', - :'ObservabilityPipelineFluentdSource', :'ObservabilityPipelineFluentBitSource', - :'ObservabilityPipelineHttpServerSource', - :'ObservabilityPipelineSumoLogicSource', - :'ObservabilityPipelineRsyslogSource', - :'ObservabilityPipelineSyslogNgSource', - :'ObservabilityPipelineAmazonDataFirehoseSource', + :'ObservabilityPipelineFluentdSource', :'ObservabilityPipelineGooglePubSubSource', :'ObservabilityPipelineHttpClientSource', + :'ObservabilityPipelineHttpServerSource', + :'ObservabilityPipelineKafkaSource', :'ObservabilityPipelineLogstashSource', - :'ObservabilityPipelineSocketSource' + :'ObservabilityPipelineRsyslogSource', + :'ObservabilityPipelineSocketSource', + :'ObservabilityPipelineSplunkHecSource', + :'ObservabilityPipelineSplunkTcpSource', + :'ObservabilityPipelineSumoLogicSource', + :'ObservabilityPipelineSyslogNgSource', + :'ObservabilityPipelineOpentelemetrySource' ] end # Builds the object diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb index 827d8cc8c390..3a24137f6c73 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_crowd_strike_next_gen_siem_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `crowdstrike_next_gen_siem` destination forwards logs to CrowdStrike Next Gen SIEM. + # + # **Supported pipeline types:** logs class ObservabilityPipelineCrowdStrikeNextGenSiemDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb index 5b554a17df55..d7bdb82daaa8 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_custom_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `custom_processor` processor transforms events using [Vector Remap Language (VRL)](https://vector.dev/docs/reference/vrl/) scripts with advanced filtering capabilities. + # + # **Supported pipeline types:** logs class ObservabilityPipelineCustomProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # The unique identifier for this processor. 
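Because these operations stay behind unstable-operation flags (see the configuration.rb hunk above) and now target the relocated `/api/v2/obs-pipelines` paths, a call site needs an explicit opt-in first. A minimal sketch, assuming the client's usual unstable-operations toggle applies and that `body` is an `ObservabilityPipelineSpec` built from the request hash shown earlier:

    # Sketch: opt in to the unstable pipeline operations, then validate a
    # config against the relocated /api/v2/obs-pipelines/pipelines/validate path.
    require "datadog_api_client"

    DatadogAPIClient.configure do |config|
      config.unstable_operations["v2.validate_pipeline".to_sym] = true
    end

    api = DatadogAPIClient::V2::ObservabilityPipelinesAPI.new
    result = api.validate_pipeline(body) # body: ObservabilityPipelineSpec (assumed)
    p result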
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb index 27b724373297..373c2592e31e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_agent_source.rb @@ -17,11 +17,13 @@ require 'time' module DatadogAPIClient::V2 - # The `datadog_agent` source collects logs from the Datadog Agent. + # The `datadog_agent` source collects logs/metrics from the Datadog Agent. + # + # **Supported pipeline types:** logs, metrics class ObservabilityPipelineDatadogAgentSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb index 3b08d1fd9c22..ea784246329d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_logs_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `datadog_logs` destination forwards logs to Datadog Log Management. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDatadogLogsDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb new file mode 100644 index 000000000000..e94fadc3fa3c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination.rb @@ -0,0 +1,169 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `datadog_metrics` destination forwards metrics to Datadog. + # + # **Supported pipeline types:** metrics + class ObservabilityPipelineDatadogMetricsDestination + include BaseGenericModel + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the input for this component. + attr_reader :inputs + + # The destination type. The value should always be `datadog_metrics`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'id' => :'id', + :'inputs' => :'inputs', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'id' => :'String', + :'inputs' => :'Array', + :'type' => :'ObservabilityPipelineDatadogMetricsDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + id == o.id && + inputs == o.inputs && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [id, inputs, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb new file mode 100644 index 000000000000..c32b6952246a --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_metrics_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `datadog_metrics`. + class ObservabilityPipelineDatadogMetricsDestinationType + include BaseEnumModel + + DATADOG_METRICS = "datadog_metrics".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb index 5eeb0091f9d0..306a8309a752 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_datadog_tags_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `datadog_tags` processor includes or excludes specific Datadog tags in your logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDatadogTagsProcessor include BaseGenericModel @@ -27,10 +29,10 @@ class ObservabilityPipelineDatadogTagsProcessor # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # A Datadog search query used to determine which logs this processor targets. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb index 4420f11b474b..5cc292b7a8ec 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_dedupe_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `dedupe` processor removes duplicate fields in log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineDedupeProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # A list of log field paths to check for duplicates. 
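The new `datadog_metrics` destination and its type enum above follow the client's usual construction pattern: pass a hash of attributes, and the generated writers reject nil values for required fields. A minimal usage sketch, assuming the published `datadog_api_client` gem and illustrative component IDs:

    require 'datadog_api_client'

    # `id`, `inputs`, and `type` are required; the custom writers raise
    # ArgumentError if any of them is assigned nil.
    destination = DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestination.new(
      id: 'datadog-metrics-destination',   # illustrative component ID
      inputs: ['filter-processor'],        # illustrative upstream component IDs
      type: DatadogAPIClient::V2::ObservabilityPipelineDatadogMetricsDestinationType::DATADOG_METRICS
    )

    destination.valid? #=> true once all three required fields are set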
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb index f49df443a047..d3e3475ce72d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `elasticsearch` destination writes logs to an Elasticsearch cluster. + # + # **Supported pipeline types:** logs class ObservabilityPipelineElasticsearchDestination include BaseGenericModel @@ -27,6 +29,9 @@ class ObservabilityPipelineElasticsearchDestination # The index to write logs to in Elasticsearch. attr_accessor :bulk_index + # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + attr_accessor :data_stream + # The unique identifier for this component. attr_reader :id @@ -44,6 +49,7 @@ def self.attribute_map { :'api_version' => :'api_version', :'bulk_index' => :'bulk_index', + :'data_stream' => :'data_stream', :'id' => :'id', :'inputs' => :'inputs', :'type' => :'type' @@ -56,6 +62,7 @@ def self.openapi_types { :'api_version' => :'ObservabilityPipelineElasticsearchDestinationApiVersion', :'bulk_index' => :'String', + :'data_stream' => :'ObservabilityPipelineElasticsearchDestinationDataStream', :'id' => :'String', :'inputs' => :'Array', :'type' => :'ObservabilityPipelineElasticsearchDestinationType' @@ -88,6 +95,10 @@ def initialize(attributes = {}) self.bulk_index = attributes[:'bulk_index'] end + if attributes.key?(:'data_stream') + self.data_stream = attributes[:'data_stream'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -171,6 +182,7 @@ def ==(o) self.class == o.class && api_version == o.api_version && bulk_index == o.bulk_index && + data_stream == o.data_stream && id == o.id && inputs == o.inputs && type == o.type && @@ -181,7 +193,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [api_version, bulk_index, id, inputs, type, additional_properties].hash + [api_version, bulk_index, data_stream, id, inputs, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb new file mode 100644 index 000000000000..8bf597f0b45d --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_elasticsearch_destination_data_stream.rb @@ -0,0 +1,125 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration options for writing to Elasticsearch Data Streams instead of a fixed index. + class ObservabilityPipelineElasticsearchDestinationDataStream + include BaseGenericModel + + # The data stream dataset for your logs. This groups logs by their source or application. + attr_accessor :dataset + + # The data stream type for your logs. 
This determines how logs are categorized within the data stream. + attr_accessor :dtype + + # The data stream namespace for your logs. This separates logs into different environments or domains. + attr_accessor :namespace + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'dataset' => :'dataset', + :'dtype' => :'dtype', + :'namespace' => :'namespace' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'dataset' => :'String', + :'dtype' => :'String', + :'namespace' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineElasticsearchDestinationDataStream` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'dataset') + self.dataset = attributes[:'dataset'] + end + + if attributes.key?(:'dtype') + self.dtype = attributes[:'dtype'] + end + + if attributes.key?(:'namespace') + self.namespace = attributes[:'namespace'] + end + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + dataset == o.dataset && + dtype == o.dtype && + namespace == o.namespace && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [dataset, dtype, namespace, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb index ea73dec4ede1..de5f7bc222be 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_processor.rb @@ -17,14 +17,16 @@ require 'time' module DatadogAPIClient::V2 - # The `enrichment_table` processor enriches logs using a static CSV file or GeoIP database. + # The `enrichment_table` processor enriches logs using a static CSV file, GeoIP database, or reference table. Exactly one of `file`, `geoip`, or `reference_table` must be configured. 
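# A minimal sketch of the new `reference_table` variant, assuming
# `require 'datadog_api_client'` and hypothetical table and field values
# (the processor type constant name is assumed from the generator's usual
# convention):
#
#   processor = DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableProcessor.new(
#     id: 'enrich-with-reference-table',
#     include: 'service:payments',    # hypothetical target query
#     target: 'enrichment',           # where results are stored in the log
#     enabled: true,
#     reference_table: DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableReferenceTable.new(
#       key_field: 'usr.id',                              # hypothetical field path
#       table_id: '00000000-0000-0000-0000-000000000000'  # hypothetical table ID
#     ),
#     type: DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableProcessorType::ENRICHMENT_TABLE
#   )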
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineEnrichmentTableProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # Defines a static enrichment table loaded from a CSV file. @@ -39,6 +41,9 @@ class ObservabilityPipelineEnrichmentTableProcessor # A Datadog search query used to determine which logs this processor targets. attr_reader :include + # Uses a Datadog reference table to enrich logs. + attr_accessor :reference_table + # Path where enrichment results should be stored in the log. attr_reader :target @@ -57,6 +62,7 @@ def self.attribute_map :'geoip' => :'geoip', :'id' => :'id', :'include' => :'include', + :'reference_table' => :'reference_table', :'target' => :'target', :'type' => :'type' } @@ -72,6 +78,7 @@ def self.openapi_types :'geoip' => :'ObservabilityPipelineEnrichmentTableGeoIp', :'id' => :'String', :'include' => :'String', + :'reference_table' => :'ObservabilityPipelineEnrichmentTableReferenceTable', :'target' => :'String', :'type' => :'ObservabilityPipelineEnrichmentTableProcessorType' } @@ -119,6 +126,10 @@ def initialize(attributes = {}) self.include = attributes[:'include'] end + if attributes.key?(:'reference_table') + self.reference_table = attributes[:'reference_table'] + end + if attributes.key?(:'target') self.target = attributes[:'target'] end @@ -222,6 +233,7 @@ def ==(o) geoip == o.geoip && id == o.id && include == o.include && + reference_table == o.reference_table && target == o.target && type == o.type && additional_properties == o.additional_properties @@ -231,7 +243,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, enabled, file, geoip, id, include, target, type, additional_properties].hash + [display_name, enabled, file, geoip, id, include, reference_table, target, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb new file mode 100644 index 000000000000..2248ad6775d1 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_enrichment_table_reference_table.rb @@ -0,0 +1,156 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Uses a Datadog reference table to enrich logs. + class ObservabilityPipelineEnrichmentTableReferenceTable + include BaseGenericModel + + # List of column names to include from the reference table. If not provided, all columns are included. + attr_accessor :columns + + # Path to the field in the log event to match against the reference table. + attr_reader :key_field + + # The unique identifier of the reference table. + attr_reader :table_id + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
+ # @!visibility private + def self.attribute_map + { + :'columns' => :'columns', + :'key_field' => :'key_field', + :'table_id' => :'table_id' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'columns' => :'Array', + :'key_field' => :'String', + :'table_id' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineEnrichmentTableReferenceTable` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'columns') + if (value = attributes[:'columns']).is_a?(Array) + self.columns = value + end + end + + if attributes.key?(:'key_field') + self.key_field = attributes[:'key_field'] + end + + if attributes.key?(:'table_id') + self.table_id = attributes[:'table_id'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @key_field.nil? + return false if @table_id.nil? + true + end + + # Custom attribute writer method with validation + # @param key_field [Object] Object to be assigned + # @!visibility private + def key_field=(key_field) + if key_field.nil? + fail ArgumentError, 'invalid value for "key_field", key_field cannot be nil.' + end + @key_field = key_field + end + + # Custom attribute writer method with validation + # @param table_id [Object] Object to be assigned + # @!visibility private + def table_id=(table_id) + if table_id.nil? + fail ArgumentError, 'invalid value for "table_id", table_id cannot be nil.' + end + @table_id = table_id + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + columns == o.columns && + key_field == o.key_field && + table_id == o.table_id && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. 
+ # @return [Integer] Hash code + # @!visibility private + def hash + [columns, key_field, table_id, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb index c7339f0900e3..863f432e1b23 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_filter_processor.rb @@ -17,20 +17,22 @@ require 'time' module DatadogAPIClient::V2 - # The `filter` processor allows conditional processing of logs based on a Datadog search query. Logs that match the `include` query are passed through; others are discarded. + # The `filter` processor allows conditional processing of logs/metrics based on a Datadog search query. Logs/metrics that match the `include` query are passed through; others are discarded. + # + # **Supported pipeline types:** logs, metrics class ObservabilityPipelineFilterProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id - # A Datadog search query used to determine which logs should pass through the filter. Logs that match this query continue to downstream components; others are dropped. + # A Datadog search query used to determine which logs/metrics should pass through the filter. Logs/metrics that match this query continue to downstream components; others are dropped. attr_reader :include # The processor type. The value should always be `filter`. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb index bbcc1a105056..5026870295d5 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluent_bit_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `fluent_bit` source ingests logs from Fluent Bit. + # + # **Supported pipeline types:** logs class ObservabilityPipelineFluentBitSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb index 1ab9185baaa1..dd38ba8d0ab9 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_fluentd_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `fluentd` source ingests logs from a Fluentd-compatible service. 
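# A minimal construction sketch, assuming the source type constant follows the
# generator's naming convention (`tls` is optional and omitted here):
#
#   source = DatadogAPIClient::V2::ObservabilityPipelineFluentdSource.new(
#     id: 'fluentd-source',   # illustrative component ID
#     type: DatadogAPIClient::V2::ObservabilityPipelineFluentdSourceType::FLUENTD
#   )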
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineFluentdSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb index 55d8cf77969f..6365695fa405 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_generate_metrics_processor.rb @@ -19,13 +19,15 @@ module DatadogAPIClient::V2 # The `generate_datadog_metrics` processor creates custom metrics from logs and sends them to Datadog. # Metrics can be counters, gauges, or distributions and optionally grouped by log fields. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGenerateMetricsProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # The unique identifier for this component. Used to reference this component in other parts of the pipeline. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb index 73b6a5d47df9..93eed57d6361 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_chronicle_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_chronicle` destination sends logs to Google Chronicle. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGoogleChronicleDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb index ae95f0729d98..e1572e3804a4 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_cloud_storage_destination.rb @@ -19,6 +19,8 @@ module DatadogAPIClient::V2 # The `google_cloud_storage` destination stores logs in a Google Cloud Storage (GCS) bucket. # It requires a bucket name, GCP authentication, and metadata fields. 
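# A rough sketch only: the `bucket` attribute name below is an assumption
# inferred from the description above, so consult the full model for the exact
# surface before relying on it:
#
#   gcs = DatadogAPIClient::V2::ObservabilityPipelineGoogleCloudStorageDestination.new(
#     id: 'gcs-archive-destination',    # illustrative component ID
#     inputs: ['filter-processor'],
#     bucket: 'my-log-archive-bucket'   # assumed attribute name
#   )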
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineGoogleCloudStorageDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb index fe396691e124..f85845c5aead 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_pubsub` destination publishes logs to a Google Cloud Pub/Sub topic. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGooglePubSubDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb index 73fff4e50425..313d862c9833 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_google_pub_sub_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub subscription. + # + # **Supported pipeline types:** logs class ObservabilityPipelineGooglePubSubSource include BaseGenericModel @@ -27,7 +29,7 @@ class ObservabilityPipelineGooglePubSubSource # The decoding format used to interpret incoming logs. attr_reader :decoding - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # The GCP project ID that owns the Pub/Sub subscription. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb new file mode 100644 index 000000000000..b113b66cef97 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination.rb @@ -0,0 +1,220 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `http_client` destination sends data to an HTTP endpoint. + # + # **Supported pipeline types:** logs, metrics + class ObservabilityPipelineHttpClientDestination + include BaseGenericModel + + # HTTP authentication strategy. + attr_accessor :auth_strategy + + # Compression configuration for HTTP requests. + attr_accessor :compression + + # Encoding format for log events. + attr_reader :encoding + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the input for this component. + attr_reader :inputs + + # Configuration for enabling TLS encryption between the pipeline component and external services. 
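# A minimal construction sketch for this destination; the optional auth,
# compression, and TLS settings are omitted, and the component IDs are
# illustrative:
#
#   http_dest = DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestination.new(
#     id: 'http-client-destination',
#     inputs: ['add-fields-processor'],
#     encoding: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationEncoding::JSON,
#     type: DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationType::HTTP_CLIENT
#   )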
+ attr_accessor :tls + + # The destination type. The value should always be `http_client`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'auth_strategy' => :'auth_strategy', + :'compression' => :'compression', + :'encoding' => :'encoding', + :'id' => :'id', + :'inputs' => :'inputs', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'auth_strategy' => :'ObservabilityPipelineHttpClientDestinationAuthStrategy', + :'compression' => :'ObservabilityPipelineHttpClientDestinationCompression', + :'encoding' => :'ObservabilityPipelineHttpClientDestinationEncoding', + :'id' => :'String', + :'inputs' => :'Array', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineHttpClientDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'auth_strategy') + self.auth_strategy = attributes[:'auth_strategy'] + end + + if attributes.key?(:'compression') + self.compression = attributes[:'compression'] + end + + if attributes.key?(:'encoding') + self.encoding = attributes[:'encoding'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @encoding.nil? + return false if @id.nil? + return false if @inputs.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param encoding [Object] Object to be assigned + # @!visibility private + def encoding=(encoding) + if encoding.nil? + fail ArgumentError, 'invalid value for "encoding", encoding cannot be nil.' + end + @encoding = encoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' 
+ end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + auth_strategy == o.auth_strategy && + compression == o.compression && + encoding == o.encoding && + id == o.id && + inputs == o.inputs && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [auth_strategy, compression, encoding, id, inputs, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb new file mode 100644 index 000000000000..dcb34218cfdb --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_auth_strategy.rb @@ -0,0 +1,28 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # HTTP authentication strategy. + class ObservabilityPipelineHttpClientDestinationAuthStrategy + include BaseEnumModel + + NONE = "none".freeze + BASIC = "basic".freeze + BEARER = "bearer".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb new file mode 100644 index 000000000000..705e0d42f63e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression.rb @@ -0,0 +1,123 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression configuration for HTTP requests. + class ObservabilityPipelineHttpClientDestinationCompression + include BaseGenericModel + + # Compression algorithm. 
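# (Only one codec is currently defined for this destination; the algorithm enum
# added further down in this diff contains just `gzip`.)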
+ attr_reader :algorithm + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'algorithm' => :'algorithm' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'algorithm' => :'ObservabilityPipelineHttpClientDestinationCompressionAlgorithm' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineHttpClientDestinationCompression` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'algorithm') + self.algorithm = attributes[:'algorithm'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @algorithm.nil? + true + end + + # Custom attribute writer method with validation + # @param algorithm [Object] Object to be assigned + # @!visibility private + def algorithm=(algorithm) + if algorithm.nil? + fail ArgumentError, 'invalid value for "algorithm", algorithm cannot be nil.' + end + @algorithm = algorithm + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + algorithm == o.algorithm && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [algorithm, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb new file mode 100644 index 000000000000..747413a6726c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_compression_algorithm.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression algorithm. + class ObservabilityPipelineHttpClientDestinationCompressionAlgorithm + include BaseEnumModel + + GZIP = "gzip".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb new file mode 100644 index 000000000000..fd8b88c0335b --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_encoding.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Encoding format for log events. + class ObservabilityPipelineHttpClientDestinationEncoding + include BaseEnumModel + + JSON = "json".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb new file mode 100644 index 000000000000..dd91c9255eae --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `http_client`. + class ObservabilityPipelineHttpClientDestinationType + include BaseEnumModel + + HTTP_CLIENT = "http_client".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb index 7a11460ae036..2d9d526579a2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `http_client` source scrapes logs from HTTP endpoints at regular intervals. + # + # **Supported pipeline types:** logs class ObservabilityPipelineHttpClientSource include BaseGenericModel @@ -27,7 +29,7 @@ class ObservabilityPipelineHttpClientSource # The decoding format used to interpret incoming logs. attr_reader :decoding - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. 
Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # The interval (in seconds) between HTTP scrape requests. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb index 4909eb48e5bf..0c50637d8f1e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_client_source_auth_strategy.rb @@ -21,6 +21,7 @@ module DatadogAPIClient::V2 class ObservabilityPipelineHttpClientSourceAuthStrategy include BaseEnumModel + NONE = "none".freeze BASIC = "basic".freeze BEARER = "bearer".freeze end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb index 80c08b3a58d7..271315644f12 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_http_server_source.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `http_server` source collects logs over HTTP POST from external services. + # + # **Supported pipeline types:** logs class ObservabilityPipelineHttpServerSource include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb new file mode 100644 index 000000000000..5ed8aea58afb --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination.rb @@ -0,0 +1,361 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `kafka` destination sends logs to Apache Kafka topics. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineKafkaDestination + include BaseGenericModel + + # Compression codec for Kafka messages. + attr_accessor :compression + + # Encoding format for log events. + attr_reader :encoding + + # The field name to use for Kafka message headers. + attr_accessor :headers_key + + # The unique identifier for this component. + attr_reader :id + + # A list of component IDs whose output is used as the `input` for this component. + attr_reader :inputs + + # The field name to use as the Kafka message key. + attr_accessor :key_field + + # Optional list of advanced Kafka producer configuration options, defined as key-value pairs. + attr_accessor :librdkafka_options + + # Maximum time in milliseconds to wait for message delivery confirmation. + attr_reader :message_timeout_ms + + # Duration in seconds for the rate limit window. + attr_reader :rate_limit_duration_secs + + # Maximum number of messages allowed per rate limit duration. + attr_reader :rate_limit_num + + # Specifies the SASL mechanism for authenticating with a Kafka cluster. 
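# (For SASL/PLAIN, for instance, this field would hold an
# ObservabilityPipelineKafkaSasl whose mechanism is
# ObservabilityPipelineKafkaSaslMechanism::PLAIN, using the renamed classes
# near the end of this diff.)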
+ attr_accessor :sasl + + # Socket timeout in milliseconds for network requests. + attr_reader :socket_timeout_ms + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The Kafka topic name to publish logs to. + attr_reader :topic + + # The destination type. The value should always be `kafka`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'compression' => :'compression', + :'encoding' => :'encoding', + :'headers_key' => :'headers_key', + :'id' => :'id', + :'inputs' => :'inputs', + :'key_field' => :'key_field', + :'librdkafka_options' => :'librdkafka_options', + :'message_timeout_ms' => :'message_timeout_ms', + :'rate_limit_duration_secs' => :'rate_limit_duration_secs', + :'rate_limit_num' => :'rate_limit_num', + :'sasl' => :'sasl', + :'socket_timeout_ms' => :'socket_timeout_ms', + :'tls' => :'tls', + :'topic' => :'topic', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'compression' => :'ObservabilityPipelineKafkaDestinationCompression', + :'encoding' => :'ObservabilityPipelineKafkaDestinationEncoding', + :'headers_key' => :'String', + :'id' => :'String', + :'inputs' => :'Array', + :'key_field' => :'String', + :'librdkafka_options' => :'Array', + :'message_timeout_ms' => :'Integer', + :'rate_limit_duration_secs' => :'Integer', + :'rate_limit_num' => :'Integer', + :'sasl' => :'ObservabilityPipelineKafkaSasl', + :'socket_timeout_ms' => :'Integer', + :'tls' => :'ObservabilityPipelineTls', + :'topic' => :'String', + :'type' => :'ObservabilityPipelineKafkaDestinationType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaDestination` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'compression') + self.compression = attributes[:'compression'] + end + + if attributes.key?(:'encoding') + self.encoding = attributes[:'encoding'] + end + + if attributes.key?(:'headers_key') + self.headers_key = attributes[:'headers_key'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'inputs') + if (value = attributes[:'inputs']).is_a?(Array) + self.inputs = value + end + end + + if attributes.key?(:'key_field') + self.key_field = attributes[:'key_field'] + end + + if attributes.key?(:'librdkafka_options') + if (value = attributes[:'librdkafka_options']).is_a?(Array) + self.librdkafka_options = value + end + end + + if attributes.key?(:'message_timeout_ms') + self.message_timeout_ms = attributes[:'message_timeout_ms'] + end + + if attributes.key?(:'rate_limit_duration_secs') + self.rate_limit_duration_secs = attributes[:'rate_limit_duration_secs'] + end + + if attributes.key?(:'rate_limit_num') + self.rate_limit_num = attributes[:'rate_limit_num'] + end + + if attributes.key?(:'sasl') + self.sasl = attributes[:'sasl'] + end + + 
if attributes.key?(:'socket_timeout_ms') + self.socket_timeout_ms = attributes[:'socket_timeout_ms'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'topic') + self.topic = attributes[:'topic'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @encoding.nil? + return false if @id.nil? + return false if @inputs.nil? + return false if !@message_timeout_ms.nil? && @message_timeout_ms < 1 + return false if !@rate_limit_duration_secs.nil? && @rate_limit_duration_secs < 1 + return false if !@rate_limit_num.nil? && @rate_limit_num < 1 + return false if !@socket_timeout_ms.nil? && @socket_timeout_ms > 300000 + return false if !@socket_timeout_ms.nil? && @socket_timeout_ms < 10 + return false if @topic.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param encoding [Object] Object to be assigned + # @!visibility private + def encoding=(encoding) + if encoding.nil? + fail ArgumentError, 'invalid value for "encoding", encoding cannot be nil.' + end + @encoding = encoding + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param inputs [Object] Object to be assigned + # @!visibility private + def inputs=(inputs) + if inputs.nil? + fail ArgumentError, 'invalid value for "inputs", inputs cannot be nil.' + end + @inputs = inputs + end + + # Custom attribute writer method with validation + # @param message_timeout_ms [Object] Object to be assigned + # @!visibility private + def message_timeout_ms=(message_timeout_ms) + if !message_timeout_ms.nil? && message_timeout_ms < 1 + fail ArgumentError, 'invalid value for "message_timeout_ms", must be greater than or equal to 1.' + end + @message_timeout_ms = message_timeout_ms + end + + # Custom attribute writer method with validation + # @param rate_limit_duration_secs [Object] Object to be assigned + # @!visibility private + def rate_limit_duration_secs=(rate_limit_duration_secs) + if !rate_limit_duration_secs.nil? && rate_limit_duration_secs < 1 + fail ArgumentError, 'invalid value for "rate_limit_duration_secs", must be greater than or equal to 1.' + end + @rate_limit_duration_secs = rate_limit_duration_secs + end + + # Custom attribute writer method with validation + # @param rate_limit_num [Object] Object to be assigned + # @!visibility private + def rate_limit_num=(rate_limit_num) + if !rate_limit_num.nil? && rate_limit_num < 1 + fail ArgumentError, 'invalid value for "rate_limit_num", must be greater than or equal to 1.' + end + @rate_limit_num = rate_limit_num + end + + # Custom attribute writer method with validation + # @param socket_timeout_ms [Object] Object to be assigned + # @!visibility private + def socket_timeout_ms=(socket_timeout_ms) + if !socket_timeout_ms.nil? && socket_timeout_ms > 300000 + fail ArgumentError, 'invalid value for "socket_timeout_ms", must be smaller than or equal to 300000.' + end + if !socket_timeout_ms.nil? && socket_timeout_ms < 10 + fail ArgumentError, 'invalid value for "socket_timeout_ms", must be greater than or equal to 10.' 
+ end + @socket_timeout_ms = socket_timeout_ms + end + + # Custom attribute writer method with validation + # @param topic [Object] Object to be assigned + # @!visibility private + def topic=(topic) + if topic.nil? + fail ArgumentError, 'invalid value for "topic", topic cannot be nil.' + end + @topic = topic + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + compression == o.compression && + encoding == o.encoding && + headers_key == o.headers_key && + id == o.id && + inputs == o.inputs && + key_field == o.key_field && + librdkafka_options == o.librdkafka_options && + message_timeout_ms == o.message_timeout_ms && + rate_limit_duration_secs == o.rate_limit_duration_secs && + rate_limit_num == o.rate_limit_num && + sasl == o.sasl && + socket_timeout_ms == o.socket_timeout_ms && + tls == o.tls && + topic == o.topic && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [compression, encoding, headers_key, id, inputs, key_field, librdkafka_options, message_timeout_ms, rate_limit_duration_secs, rate_limit_num, sasl, socket_timeout_ms, tls, topic, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb new file mode 100644 index 000000000000..ccb074bfdb21 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_compression.rb @@ -0,0 +1,30 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Compression codec for Kafka messages. 
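# A minimal usage sketch for the new `kafka` destination defined above; only
# the required fields are shown, and the topic and component IDs are
# illustrative:
#
#   kafka_dest = DatadogAPIClient::V2::ObservabilityPipelineKafkaDestination.new(
#     id: 'kafka-destination',
#     inputs: ['dedupe-processor'],
#     topic: 'observability-logs',
#     encoding: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationEncoding::JSON,
#     type: DatadogAPIClient::V2::ObservabilityPipelineKafkaDestinationType::KAFKA
#   )
#
#   # The numeric writers enforce the documented bounds:
#   kafka_dest.socket_timeout_ms = 60_000   # accepted: within 10..300000
#   kafka_dest.socket_timeout_ms = 5        # raises ArgumentError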
+ class ObservabilityPipelineKafkaDestinationCompression + include BaseEnumModel + + NONE = "none".freeze + GZIP = "gzip".freeze + SNAPPY = "snappy".freeze + LZ4 = "lz4".freeze + ZSTD = "zstd".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb new file mode 100644 index 000000000000..34209b47273c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_encoding.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Encoding format for log events. + class ObservabilityPipelineKafkaDestinationEncoding + include BaseEnumModel + + JSON = "json".freeze + RAW_MESSAGE = "raw_message".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb new file mode 100644 index 000000000000..0b2f5afbda5f --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_destination_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The destination type. The value should always be `kafka`. + class ObservabilityPipelineKafkaDestinationType + include BaseEnumModel + + KAFKA = "kafka".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb similarity index 95% rename from lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb index b911c8aaf19a..bf29a784d49c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_librdkafka_option.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_librdkafka_option.rb @@ -17,8 +17,8 @@ require 'time' module DatadogAPIClient::V2 - # Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka sources, such as timeouts, buffer sizes, and security settings. - class ObservabilityPipelineKafkaSourceLibrdkafkaOption + # Represents a key-value pair used to configure low-level `librdkafka` client options for Kafka source and destination, such as timeouts, buffer sizes, and security settings. 
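# A small sketch; the option name is a real librdkafka setting used purely as
# an illustration, and the `name`/`value` accessors are assumed from the
# key-value description above:
#
#   opt = DatadogAPIClient::V2::ObservabilityPipelineKafkaLibrdkafkaOption.new(
#     name: 'message.max.bytes',
#     value: '1048576'
#   )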
+ class ObservabilityPipelineKafkaLibrdkafkaOption include BaseGenericModel # The name of the `librdkafka` configuration option to set. @@ -52,7 +52,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSourceLibrdkafkaOption` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaLibrdkafkaOption` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb similarity index 95% rename from lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb index 886923a1868f..88fb7915b9da 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source_sasl.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl.rb @@ -18,7 +18,7 @@ module DatadogAPIClient::V2 # Specifies the SASL mechanism for authenticating with a Kafka cluster. - class ObservabilityPipelineKafkaSourceSasl + class ObservabilityPipelineKafkaSasl include BaseGenericModel # SASL mechanism used for Kafka authentication. @@ -38,7 +38,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { - :'mechanism' => :'ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + :'mechanism' => :'ObservabilityPipelineKafkaSaslMechanism' } end @@ -47,7 +47,7 @@ def self.openapi_types # @!visibility private def initialize(attributes = {}) if (!attributes.is_a?(Hash)) - fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSourceSasl` initialize method" + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineKafkaSasl` initialize method" end self.additional_properties = {} diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb similarity index 92% rename from lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb rename to lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb index 64187c5011f0..64cdb527b7ab 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_pipeline_kafka_source_sasl_mechanism.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_sasl_mechanism.rb @@ -18,7 +18,7 @@ module DatadogAPIClient::V2 # SASL mechanism used for Kafka authentication. - class ObservabilityPipelinePipelineKafkaSourceSaslMechanism + class ObservabilityPipelineKafkaSaslMechanism include BaseEnumModel PLAIN = "PLAIN".freeze diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb index 33c4b1649b15..ccba9f42446f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_kafka_source.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `kafka` source ingests data from Apache Kafka topics. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineKafkaSource include BaseGenericModel # Consumer group ID used by the Kafka client. attr_reader :group_id - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Optional list of advanced Kafka client configuration options, defined as key-value pairs. @@ -64,8 +66,8 @@ def self.openapi_types { :'group_id' => :'String', :'id' => :'String', - :'librdkafka_options' => :'Array', - :'sasl' => :'ObservabilityPipelineKafkaSourceSasl', + :'librdkafka_options' => :'Array', + :'sasl' => :'ObservabilityPipelineKafkaSasl', :'tls' => :'ObservabilityPipelineTls', :'topics' => :'Array', :'type' => :'ObservabilityPipelineKafkaSourceType' diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb index 3a9550b0bb40..4d20f0e57ea2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_logstash_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `logstash` source ingests logs from a Logstash forwarder. + # + # **Supported pipeline types:** logs class ObservabilityPipelineLogstashSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb new file mode 100644 index 000000000000..f0aec076716b --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor.rb @@ -0,0 +1,229 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `metric_tags` processor filters metrics based on their tags using Datadog tag key patterns. + # + # **Supported pipeline types:** metrics + class ObservabilityPipelineMetricTagsProcessor + include BaseGenericModel + + # The display name for a component. + attr_accessor :display_name + + # Indicates whether the processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query that determines which metrics the processor targets. 
+ attr_reader :include + + # A list of rules for filtering metric tags. + attr_reader :rules + + # The processor type. The value should always be `metric_tags`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'rules' => :'rules', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'rules' => :'Array', + :'type' => :'ObservabilityPipelineMetricTagsProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'rules') + if (value = attributes[:'rules']).is_a?(Array) + self.rules = value + end + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @rules.nil? + return false if @rules.length > 100 + return false if @rules.length < 1 + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param rules [Object] Object to be assigned + # @!visibility private + def rules=(rules) + if rules.nil? + fail ArgumentError, 'invalid value for "rules", rules cannot be nil.' + end + if rules.length > 100 + fail ArgumentError, 'invalid value for "rules", number of items must be less than or equal to 100.' 
+ end + if rules.length < 1 + fail ArgumentError, 'invalid value for "rules", number of items must be greater than or equal to 1.' + end + @rules = rules + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + rules == o.rules && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [display_name, enabled, id, include, rules, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb new file mode 100644 index 000000000000..55e5c467a28e --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule.rb @@ -0,0 +1,188 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Defines a rule for filtering metric tags based on key patterns. + class ObservabilityPipelineMetricTagsProcessorRule + include BaseGenericModel + + # The action to take on tags with matching keys. + attr_reader :action + + # A Datadog search query used to determine which metrics this rule targets. + attr_reader :include + + # A list of tag keys to include or exclude. + attr_reader :keys + + # The processing mode for tag filtering. + attr_reader :mode + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'action' => :'action', + :'include' => :'include', + :'keys' => :'keys', + :'mode' => :'mode' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'action' => :'ObservabilityPipelineMetricTagsProcessorRuleAction', + :'include' => :'String', + :'keys' => :'Array', + :'mode' => :'ObservabilityPipelineMetricTagsProcessorRuleMode' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineMetricTagsProcessorRule` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'action') + self.action = attributes[:'action'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'keys') + if (value = attributes[:'keys']).is_a?(Array) + self.keys = value + end + end + + if attributes.key?(:'mode') + self.mode = attributes[:'mode'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @action.nil? + return false if @include.nil? + return false if @keys.nil? + return false if @mode.nil? + true + end + + # Custom attribute writer method with validation + # @param action [Object] Object to be assigned + # @!visibility private + def action=(action) + if action.nil? + fail ArgumentError, 'invalid value for "action", action cannot be nil.' + end + @action = action + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param keys [Object] Object to be assigned + # @!visibility private + def keys=(keys) + if keys.nil? + fail ArgumentError, 'invalid value for "keys", keys cannot be nil.' + end + @keys = keys + end + + # Custom attribute writer method with validation + # @param mode [Object] Object to be assigned + # @!visibility private + def mode=(mode) + if mode.nil? + fail ArgumentError, 'invalid value for "mode", mode cannot be nil.' + end + @mode = mode + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. 
+ # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + action == o.action && + include == o.include && + keys == o.keys && + mode == o.mode && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [action, include, keys, mode, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb new file mode 100644 index 000000000000..253bdb73f64b --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_action.rb @@ -0,0 +1,27 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The action to take on tags with matching keys. + class ObservabilityPipelineMetricTagsProcessorRuleAction + include BaseEnumModel + + INCLUDE = "include".freeze + EXCLUDE = "exclude".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb new file mode 100644 index 000000000000..019343fe1a3c --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_rule_mode.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processing mode for tag filtering. + class ObservabilityPipelineMetricTagsProcessorRuleMode + include BaseEnumModel + + FILTER = "filter".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb new file mode 100644 index 000000000000..0ddd08d1d7cd --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_metric_tags_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). 
+ Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `metric_tags`. + class ObservabilityPipelineMetricTagsProcessorType + include BaseEnumModel + + METRIC_TAGS = "metric_tags".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb index d8705a24d962..79118217199f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_new_relic_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `new_relic` destination sends logs to the New Relic platform. + # + # **Supported pipeline types:** logs class ObservabilityPipelineNewRelicDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb index a483ca5bbde0..ec44d070a772 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_ocsf_mapper_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `ocsf_mapper` processor transforms logs into the OCSF schema using a predefined mapping configuration. + # + # **Supported pipeline types:** logs class ObservabilityPipelineOcsfMapperProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # The unique identifier for this component. Used to reference this component in other parts of the pipeline. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb index 2120c2eef855..09331c31ee2b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_open_search_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `opensearch` destination writes logs to an OpenSearch cluster. + # + # **Supported pipeline types:** logs class ObservabilityPipelineOpenSearchDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb new file mode 100644 index 000000000000..8404b77f9341 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source.rb @@ -0,0 +1,176 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `opentelemetry` source receives telemetry data using the OpenTelemetry Protocol (OTLP) over gRPC and HTTP. 
+ # + # **Supported pipeline types:** logs + class ObservabilityPipelineOpentelemetrySource + include BaseGenericModel + + # Environment variable name containing the gRPC server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + attr_accessor :grpc_address_key + + # Environment variable name containing the HTTP server address for receiving OTLP data. Must be a valid environment variable name (alphanumeric characters and underscores only). + attr_accessor :http_address_key + + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). + attr_reader :id + + # Configuration for enabling TLS encryption between the pipeline component and external services. + attr_accessor :tls + + # The source type. The value should always be `opentelemetry`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'grpc_address_key' => :'grpc_address_key', + :'http_address_key' => :'http_address_key', + :'id' => :'id', + :'tls' => :'tls', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'grpc_address_key' => :'String', + :'http_address_key' => :'String', + :'id' => :'String', + :'tls' => :'ObservabilityPipelineTls', + :'type' => :'ObservabilityPipelineOpentelemetrySourceType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySource` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'grpc_address_key') + self.grpc_address_key = attributes[:'grpc_address_key'] + end + + if attributes.key?(:'http_address_key') + self.http_address_key = attributes[:'http_address_key'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'tls') + self.tls = attributes[:'tls'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @id.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + grpc_address_key == o.grpc_address_key && + http_address_key == o.http_address_key && + id == o.id && + tls == o.tls && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [grpc_address_key, http_address_key, id, tls, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb new file mode 100644 index 000000000000..30df02a5c2ef --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_opentelemetry_source_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The source type. The value should always be `opentelemetry`. + class ObservabilityPipelineOpentelemetrySourceType + include BaseEnumModel + + OPENTELEMETRY = "opentelemetry".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb index 3fa75b46f8ae..e7f3e1ab34e1 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_grok_processor.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `parse_grok` processor extracts structured fields from unstructured log messages using Grok patterns. + # + # **Supported pipeline types:** logs class ObservabilityPipelineParseGrokProcessor include BaseGenericModel @@ -27,7 +29,7 @@ class ObservabilityPipelineParseGrokProcessor # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # A unique identifier for this processor. 
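For reference, here is how the new `opentelemetry` source model above can be constructed: a minimal sketch using the hash-style initializer shared by these generated models. The component id and the environment variable names are illustrative placeholders, not values defined by the schema.

    require "datadog_api_client"

    # Minimal `opentelemetry` source. The address keys name environment
    # variables (alphanumeric characters and underscores only) that hold the
    # listener addresses; the values below are hypothetical.
    otel_source = DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySource.new({
      id: "opentelemetry-source", # referenced as the `input` of downstream components
      type: DatadogAPIClient::V2::ObservabilityPipelineOpentelemetrySourceType::OPENTELEMETRY,
      grpc_address_key: "OTEL_GRPC_ADDRESS",
      http_address_key: "OTEL_HTTP_ADDRESS",
    })

    otel_source.valid? # => true, since the required `id` and `type` are set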
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb index ef5839c3709c..5ca884aab9fb 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_json_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `parse_json` processor extracts JSON from a specified field and flattens it into the event. This is useful when logs contain embedded JSON as a string. + # + # **Supported pipeline types:** logs class ObservabilityPipelineParseJSONProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # The name of the log field that contains a JSON string. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb new file mode 100644 index 000000000000..400cb4c99eea --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor.rb @@ -0,0 +1,300 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `parse_xml` processor parses XML from a specified field and extracts it into the event. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineParseXMLProcessor + include BaseGenericModel + + # Whether to always use a text key for element content. + attr_accessor :always_use_text_key + + # The prefix to use for XML attributes in the parsed output. + attr_accessor :attr_prefix + + # The display name for a component. + attr_accessor :display_name + + # Indicates whether the processor is enabled. + attr_reader :enabled + + # The name of the log field that contains an XML string. + attr_reader :field + + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. + attr_reader :include + + # Whether to include XML attributes in the parsed output. + attr_accessor :include_attr + + # Whether to parse boolean values from strings. + attr_accessor :parse_bool + + # Whether to parse null values. + attr_accessor :parse_null + + # Whether to parse numeric values from strings. + attr_accessor :parse_number + + # The key name to use for text content within XML elements. Must be at least 1 character if specified. + attr_reader :text_key + + # The processor type. The value should always be `parse_xml`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. 
+ # @!visibility private + def self.attribute_map + { + :'always_use_text_key' => :'always_use_text_key', + :'attr_prefix' => :'attr_prefix', + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'field' => :'field', + :'id' => :'id', + :'include' => :'include', + :'include_attr' => :'include_attr', + :'parse_bool' => :'parse_bool', + :'parse_null' => :'parse_null', + :'parse_number' => :'parse_number', + :'text_key' => :'text_key', + :'type' => :'type' + } + end + + # Attribute type mapping. + # @!visibility private + def self.openapi_types + { + :'always_use_text_key' => :'Boolean', + :'attr_prefix' => :'String', + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'field' => :'String', + :'id' => :'String', + :'include' => :'String', + :'include_attr' => :'Boolean', + :'parse_bool' => :'Boolean', + :'parse_null' => :'Boolean', + :'parse_number' => :'Boolean', + :'text_key' => :'String', + :'type' => :'ObservabilityPipelineParseXMLProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineParseXMLProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'always_use_text_key') + self.always_use_text_key = attributes[:'always_use_text_key'] + end + + if attributes.key?(:'attr_prefix') + self.attr_prefix = attributes[:'attr_prefix'] + end + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'field') + self.field = attributes[:'field'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'include_attr') + self.include_attr = attributes[:'include_attr'] + end + + if attributes.key?(:'parse_bool') + self.parse_bool = attributes[:'parse_bool'] + end + + if attributes.key?(:'parse_null') + self.parse_null = attributes[:'parse_null'] + end + + if attributes.key?(:'parse_number') + self.parse_number = attributes[:'parse_number'] + end + + if attributes.key?(:'text_key') + self.text_key = attributes[:'text_key'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @enabled.nil? + return false if @field.nil? + return false if @id.nil? + return false if @include.nil? + return false if !@text_key.nil? && @text_key.to_s.length < 1 + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' 
+      end
+      @enabled = enabled
+    end
+
+    # Custom attribute writer method with validation
+    # @param field [Object] Object to be assigned
+    # @!visibility private
+    def field=(field)
+      if field.nil?
+        fail ArgumentError, 'invalid value for "field", field cannot be nil.'
+      end
+      @field = field
+    end
+
+    # Custom attribute writer method with validation
+    # @param id [Object] Object to be assigned
+    # @!visibility private
+    def id=(id)
+      if id.nil?
+        fail ArgumentError, 'invalid value for "id", id cannot be nil.'
+      end
+      @id = id
+    end
+
+    # Custom attribute writer method with validation
+    # @param include [Object] Object to be assigned
+    # @!visibility private
+    def include=(include)
+      if include.nil?
+        fail ArgumentError, 'invalid value for "include", include cannot be nil.'
+      end
+      @include = include
+    end
+
+    # Custom attribute writer method with validation
+    # @param text_key [Object] Object to be assigned
+    # @!visibility private
+    def text_key=(text_key)
+      if !text_key.nil? && text_key.to_s.length < 1
+        fail ArgumentError, 'invalid value for "text_key", the character length must be greater than or equal to 1.'
+      end
+      @text_key = text_key
+    end
+
+    # Custom attribute writer method with validation
+    # @param type [Object] Object to be assigned
+    # @!visibility private
+    def type=(type)
+      if type.nil?
+        fail ArgumentError, 'invalid value for "type", type cannot be nil.'
+      end
+      @type = type
+    end
+
+    # Returns the object in the form of hash, with additionalProperties support.
+    # @return [Hash] Returns the object in the form of hash
+    # @!visibility private
+    def to_hash
+      hash = {}
+      self.class.attribute_map.each_pair do |attr, param|
+        value = self.send(attr)
+        if value.nil?
+          is_nullable = self.class.openapi_nullable.include?(attr)
+          next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
+        end
+
+        hash[param] = _to_hash(value)
+      end
+      self.additional_properties.each_pair do |attr, value|
+        hash[attr] = value
+      end
+      hash
+    end
+
+    # Checks equality by comparing each attribute.
+    # @param o [Object] Object to be compared
+    # @!visibility private
+    def ==(o)
+      return true if self.equal?(o)
+      self.class == o.class &&
+          always_use_text_key == o.always_use_text_key &&
+          attr_prefix == o.attr_prefix &&
+          display_name == o.display_name &&
+          enabled == o.enabled &&
+          field == o.field &&
+          id == o.id &&
+          include == o.include &&
+          include_attr == o.include_attr &&
+          parse_bool == o.parse_bool &&
+          parse_null == o.parse_null &&
+          parse_number == o.parse_number &&
+          text_key == o.text_key &&
+          type == o.type &&
+          additional_properties == o.additional_properties
+    end
+
+    # Calculates hash code according to all attributes.
+    # @return [Integer] Hash code
+    # @!visibility private
+    def hash
+      [always_use_text_key, attr_prefix, display_name, enabled, field, id, include, include_attr, parse_bool, parse_null, parse_number, text_key, type, additional_properties].hash
+    end
+  end
+end
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb
new file mode 100644
index 000000000000..c75fd6adfa84
--- /dev/null
+++ b/lib/datadog_api_client/v2/models/observability_pipeline_parse_xml_processor_type.rb
@@ -0,0 +1,26 @@
+=begin
+#Datadog API V2 Collection
+
+#Collection of all Datadog Public endpoints.
+ +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `parse_xml`. + class ObservabilityPipelineParseXMLProcessorType + include BaseEnumModel + + PARSE_XML = "parse_xml".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb index 48b8ef8622a8..f8b1902ab168 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor.rb @@ -17,20 +17,22 @@ require 'time' module DatadogAPIClient::V2 - # The Quota Processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + # The `quota` processor measures logging traffic for logs that match a specified filter. When the configured daily quota is met, the processor can drop or alert. + # + # **Supported pipeline types:** logs class ObservabilityPipelineQuotaProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + # If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. attr_accessor :drop_events - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # If `true`, the processor skips quota checks when partition fields are missing from the logs. @@ -45,7 +47,7 @@ class ObservabilityPipelineQuotaProcessor # Name of the quota. attr_reader :name - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. @@ -57,6 +59,12 @@ class ObservabilityPipelineQuotaProcessor # A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. attr_accessor :partition_fields + # The action to take when the quota or bucket limit is exceeded. Options: + # - `drop`: Drop the event. + # - `no_action`: Let the event pass through. + # - `overflow_routing`: Route to an overflow destination. + attr_accessor :too_many_buckets_action + # The processor type. The value should always be `quota`. 
attr_reader :type @@ -77,6 +85,7 @@ def self.attribute_map :'overflow_action' => :'overflow_action', :'overrides' => :'overrides', :'partition_fields' => :'partition_fields', + :'too_many_buckets_action' => :'too_many_buckets_action', :'type' => :'type' } end @@ -96,6 +105,7 @@ def self.openapi_types :'overflow_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction', :'overrides' => :'Array', :'partition_fields' => :'Array', + :'too_many_buckets_action' => :'ObservabilityPipelineQuotaProcessorOverflowAction', :'type' => :'ObservabilityPipelineQuotaProcessorType' } end @@ -166,6 +176,10 @@ def initialize(attributes = {}) end end + if attributes.key?(:'too_many_buckets_action') + self.too_many_buckets_action = attributes[:'too_many_buckets_action'] + end + if attributes.key?(:'type') self.type = attributes[:'type'] end @@ -281,6 +295,7 @@ def ==(o) overflow_action == o.overflow_action && overrides == o.overrides && partition_fields == o.partition_fields && + too_many_buckets_action == o.too_many_buckets_action && type == o.type && additional_properties == o.additional_properties end @@ -289,7 +304,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, drop_events, enabled, id, ignore_when_missing_partitions, include, limit, name, overflow_action, overrides, partition_fields, type, additional_properties].hash + [display_name, drop_events, enabled, id, ignore_when_missing_partitions, include, limit, name, overflow_action, overrides, partition_fields, too_many_buckets_action, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb index 4990af20ad61..06bc2d264f1f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_quota_processor_overflow_action.rb @@ -17,7 +17,7 @@ require 'time' module DatadogAPIClient::V2 - # The action to take when the quota is exceeded. Options: + # The action to take when the quota or bucket limit is exceeded. Options: # - `drop`: Drop the event. # - `no_action`: Let the event pass through. # - `overflow_routing`: Route to an overflow destination. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb index 1e5b4c0c8db8..2f0a61b720f1 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_reduce_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `reduce` processor aggregates and merges logs based on matching keys and merge strategies. + # + # **Supported pipeline types:** logs class ObservabilityPipelineReduceProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # A list of fields used to group log events for merging. 
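A sketch of how the extended `quota` processor fits together, assuming the existing `ObservabilityPipelineQuotaProcessorLimit` sub-model (not shown in this diff) keeps its `enforce`/`limit` shape; all literal values are illustrative. Note that `drop_events` and `overflow_action` are mutually exclusive, so only the latter is set here.

    require "datadog_api_client"

    V2 = DatadogAPIClient::V2

    quota = V2::ObservabilityPipelineQuotaProcessor.new({
      id: "quota-processor",
      type: V2::ObservabilityPipelineQuotaProcessorType::QUOTA,
      enabled: true,
      include: "service:my-service",
      name: "daily-intake-quota",
      # Assumed shape of the limit sub-model; it is unchanged by this diff.
      limit: V2::ObservabilityPipelineQuotaProcessorLimit.new({
        enforce: V2::ObservabilityPipelineQuotaProcessorLimitEnforceType::EVENTS,
        limit: 1_000_000,
      }),
      partition_fields: ["service"], # quota tracked per unique `service` value
      overflow_action: V2::ObservabilityPipelineQuotaProcessorOverflowAction::OVERFLOW_ROUTING,
      # New field: reuses the overflow action enum to decide what happens when
      # the number of partition buckets, rather than the quota itself, is exceeded.
      too_many_buckets_action: V2::ObservabilityPipelineQuotaProcessorOverflowAction::DROP,
    })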
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb index a6c12fffa560..99c0b12d8c3d 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_remove_fields_processor.rb @@ -18,19 +18,21 @@ module DatadogAPIClient::V2 # The `remove_fields` processor deletes specified fields from logs. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRemoveFieldsProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # A list of field names to be removed from each log event. attr_reader :fields - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # A Datadog search query used to determine which logs this processor targets. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb index a0ab10e93ae8..11f85950ab0b 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rename_fields_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `rename_fields` processor changes field names. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRenameFieldsProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # A list of rename rules specifying which fields to rename in the event, what to rename them to, and whether to preserve the original fields. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb index 4c15ba87ef60..c3151af7153c 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineRsyslogDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb index 51a55a2daf12..c7a88f0e7715 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_rsyslog_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` server using the syslog protocol. 
+ # + # **Supported pipeline types:** logs class ObservabilityPipelineRsyslogSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Protocol used by the syslog source to receive messages. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb index e8e7e7af2b7b..1a8fcf727ca8 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sample_processor.rb @@ -18,26 +18,28 @@ module DatadogAPIClient::V2 # The `sample` processor allows probabilistic sampling of logs at a fixed rate. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSampleProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). + # Optional list of fields to group events by. Each group is sampled independently. + attr_reader :group_by + + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # A Datadog search query used to determine which logs this processor targets. attr_reader :include # The percentage of logs to sample. - attr_accessor :percentage - - # Number of events to sample (1 in N). - attr_reader :rate + attr_reader :percentage # The processor type. The value should always be `sample`. attr_reader :type @@ -50,10 +52,10 @@ def self.attribute_map { :'display_name' => :'display_name', :'enabled' => :'enabled', + :'group_by' => :'group_by', :'id' => :'id', :'include' => :'include', :'percentage' => :'percentage', - :'rate' => :'rate', :'type' => :'type' } end @@ -64,10 +66,10 @@ def self.openapi_types { :'display_name' => :'String', :'enabled' => :'Boolean', + :'group_by' => :'Array', :'id' => :'String', :'include' => :'String', :'percentage' => :'Float', - :'rate' => :'Integer', :'type' => :'ObservabilityPipelineSampleProcessorType' } end @@ -98,6 +100,12 @@ def initialize(attributes = {}) self.enabled = attributes[:'enabled'] end + if attributes.key?(:'group_by') + if (value = attributes[:'group_by']).is_a?(Array) + self.group_by = value + end + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -110,10 +118,6 @@ def initialize(attributes = {}) self.percentage = attributes[:'percentage'] end - if attributes.key?(:'rate') - self.rate = attributes[:'rate'] - end - if attributes.key?(:'type') self.type = attributes[:'type'] end @@ -124,9 +128,10 @@ def initialize(attributes = {}) # @!visibility private def valid? return false if @enabled.nil? + return false if !@group_by.nil? && @group_by.length < 1 return false if @id.nil? return false if @include.nil? - return false if !@rate.nil? && @rate < 1 + return false if @percentage.nil? return false if @type.nil? 
true end @@ -141,6 +146,16 @@ def enabled=(enabled) @enabled = enabled end + # Custom attribute writer method with validation + # @param group_by [Object] Object to be assigned + # @!visibility private + def group_by=(group_by) + if !group_by.nil? && group_by.length < 1 + fail ArgumentError, 'invalid value for "group_by", number of items must be greater than or equal to 1.' + end + @group_by = group_by + end + # Custom attribute writer method with validation # @param id [Object] Object to be assigned # @!visibility private @@ -162,13 +177,13 @@ def include=(include) end # Custom attribute writer method with validation - # @param rate [Object] Object to be assigned + # @param percentage [Object] Object to be assigned # @!visibility private - def rate=(rate) - if !rate.nil? && rate < 1 - fail ArgumentError, 'invalid value for "rate", must be greater than or equal to 1.' + def percentage=(percentage) + if percentage.nil? + fail ArgumentError, 'invalid value for "percentage", percentage cannot be nil.' end - @rate = rate + @percentage = percentage end # Custom attribute writer method with validation @@ -209,10 +224,10 @@ def ==(o) self.class == o.class && display_name == o.display_name && enabled == o.enabled && + group_by == o.group_by && id == o.id && include == o.include && percentage == o.percentage && - rate == o.rate && type == o.type && additional_properties == o.additional_properties end @@ -221,7 +236,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [display_name, enabled, id, include, percentage, rate, type, additional_properties].hash + [display_name, enabled, group_by, id, include, percentage, type, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb index 898739512aa8..b873684faa0e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor.rb @@ -18,16 +18,18 @@ module DatadogAPIClient::V2 # The `sensitive_data_scanner` processor detects and optionally redacts sensitive data in log events. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSensitiveDataScannerProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # A Datadog search query used to determine which logs this processor targets. 
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb index e7da9561356a..bd5edd5fb419 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions include BaseGenericModel + # Human-readable description providing context about a sensitive data scanner rule + attr_accessor :description + # A regular expression used to detect sensitive values. Must be a valid regex. attr_reader :rule @@ -30,6 +33,7 @@ class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions # @!visibility private def self.attribute_map { + :'description' => :'description', :'rule' => :'rule' } end @@ -38,6 +42,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'description' => :'String', :'rule' => :'String' } end @@ -60,6 +65,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'description') + self.description = attributes[:'description'] + end + if attributes.key?(:'rule') self.rule = attributes[:'rule'] end @@ -109,6 +118,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + description == o.description && rule == o.rule && additional_properties == o.additional_properties end @@ -117,7 +127,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [rule, additional_properties].hash + [description, rule, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb index d565d68bb035..b4c3b6d952b4 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.rb @@ -21,6 +21,9 @@ module DatadogAPIClient::V2 class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions include BaseGenericModel + # Human-readable description providing context about a sensitive data scanner rule + attr_accessor :description + # Identifier for a predefined pattern from the sensitive data scanner pattern library. 
attr_reader :id @@ -33,6 +36,7 @@ class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions # @!visibility private def self.attribute_map { + :'description' => :'description', :'id' => :'id', :'use_recommended_keywords' => :'use_recommended_keywords' } @@ -42,6 +46,7 @@ def self.attribute_map # @!visibility private def self.openapi_types { + :'description' => :'String', :'id' => :'String', :'use_recommended_keywords' => :'Boolean' } @@ -65,6 +70,10 @@ def initialize(attributes = {}) end } + if attributes.key?(:'description') + self.description = attributes[:'description'] + end + if attributes.key?(:'id') self.id = attributes[:'id'] end @@ -118,6 +127,7 @@ def to_hash def ==(o) return true if self.equal?(o) self.class == o.class && + description == o.description && id == o.id && use_recommended_keywords == o.use_recommended_keywords && additional_properties == o.additional_properties @@ -127,7 +137,7 @@ def ==(o) # @return [Integer] Hash code # @!visibility private def hash - [id, use_recommended_keywords, additional_properties].hash + [description, id, use_recommended_keywords, additional_properties].hash end end end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb index 9d0af0150f23..4b3548624a76 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sentinel_one_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sentinel_one` destination sends logs to SentinelOne. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSentinelOneDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb index bbe025820793..5a8c561616cc 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `socket` destination sends logs over TCP or UDP to a remote server. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSocketDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb index 17356b2960a3..802310ffad1e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_socket_source.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `socket` source ingests logs over TCP or UDP. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSocketSource include BaseGenericModel # Framing method configuration for the socket source. attr_reader :framing - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Protocol used to receive logs. 
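Both sensitive-data-scanner pattern-option models above gain the same optional `description` string. A short sketch of each, with illustrative rule text and an illustrative library-pattern `id`:

    require "datadog_api_client"

    custom = DatadogAPIClient::V2::ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions.new(
      description: "Matches internal employee IDs", # new optional field
      rule: "EMP-\\d{6}"                            # must be a valid regex
    )

    library = DatadogAPIClient::V2::ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions.new(
      description: "Flags likely credit card numbers", # new optional field
      id: "credit_card",                               # illustrative pattern-library identifier
      use_recommended_keywords: true
    )

Since `description` is an `attr_accessor` with no custom writer, it carries no validation and may be omitted entirely.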
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb new file mode 100644 index 000000000000..66ffbb6a69bd --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor.rb @@ -0,0 +1,229 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The `split_array` processor splits array fields into separate events based on configured rules. + # + # **Supported pipeline types:** logs + class ObservabilityPipelineSplitArrayProcessor + include BaseGenericModel + + # A list of array split configurations. + attr_reader :arrays + + # The display name for a component. + attr_accessor :display_name + + # Indicates whether the processor is enabled. + attr_reader :enabled + + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). + attr_reader :id + + # A Datadog search query used to determine which logs this processor targets. For split_array, this should typically be `*`. + attr_reader :include + + # The processor type. The value should always be `split_array`. + attr_reader :type + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'arrays' => :'arrays', + :'display_name' => :'display_name', + :'enabled' => :'enabled', + :'id' => :'id', + :'include' => :'include', + :'type' => :'type' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'arrays' => :'Array', + :'display_name' => :'String', + :'enabled' => :'Boolean', + :'id' => :'String', + :'include' => :'String', + :'type' => :'ObservabilityPipelineSplitArrayProcessorType' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessor` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'arrays') + if (value = attributes[:'arrays']).is_a?(Array) + self.arrays = value + end + end + + if attributes.key?(:'display_name') + self.display_name = attributes[:'display_name'] + end + + if attributes.key?(:'enabled') + self.enabled = attributes[:'enabled'] + end + + if attributes.key?(:'id') + self.id = attributes[:'id'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + + if attributes.key?(:'type') + self.type = attributes[:'type'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @arrays.nil? + return false if @arrays.length > 15 + return false if @arrays.length < 1 + return false if @enabled.nil? + return false if @id.nil? + return false if @include.nil? + return false if @type.nil? + true + end + + # Custom attribute writer method with validation + # @param arrays [Object] Object to be assigned + # @!visibility private + def arrays=(arrays) + if arrays.nil? + fail ArgumentError, 'invalid value for "arrays", arrays cannot be nil.' + end + if arrays.length > 15 + fail ArgumentError, 'invalid value for "arrays", number of items must be less than or equal to 15.' + end + if arrays.length < 1 + fail ArgumentError, 'invalid value for "arrays", number of items must be greater than or equal to 1.' + end + @arrays = arrays + end + + # Custom attribute writer method with validation + # @param enabled [Object] Object to be assigned + # @!visibility private + def enabled=(enabled) + if enabled.nil? + fail ArgumentError, 'invalid value for "enabled", enabled cannot be nil.' + end + @enabled = enabled + end + + # Custom attribute writer method with validation + # @param id [Object] Object to be assigned + # @!visibility private + def id=(id) + if id.nil? + fail ArgumentError, 'invalid value for "id", id cannot be nil.' + end + @id = id + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Custom attribute writer method with validation + # @param type [Object] Object to be assigned + # @!visibility private + def type=(type) + if type.nil? + fail ArgumentError, 'invalid value for "type", type cannot be nil.' + end + @type = type + end + + # Returns the object in the form of hash, with additionalProperties support. 
+ # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + arrays == o.arrays && + display_name == o.display_name && + enabled == o.enabled && + id == o.id && + include == o.include && + type == o.type && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [arrays, display_name, enabled, id, include, type, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb new file mode 100644 index 000000000000..50595a668515 --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_array_config.rb @@ -0,0 +1,144 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. + This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # Configuration for a single array split operation. + class ObservabilityPipelineSplitArrayProcessorArrayConfig + include BaseGenericModel + + # The path to the array field to split. + attr_reader :field + + # A Datadog search query used to determine which logs this array split operation targets. + attr_reader :include + + attr_accessor :additional_properties + + # Attribute mapping from ruby-style variable name to JSON key. + # @!visibility private + def self.attribute_map + { + :'field' => :'field', + :'include' => :'include' + } + end + + # Attribute type mapping. 
+ # @!visibility private + def self.openapi_types + { + :'field' => :'String', + :'include' => :'String' + } + end + + # Initializes the object + # @param attributes [Hash] Model attributes in the form of hash + # @!visibility private + def initialize(attributes = {}) + if (!attributes.is_a?(Hash)) + fail ArgumentError, "The input argument (attributes) must be a hash in `DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorArrayConfig` initialize method" + end + + self.additional_properties = {} + # check to see if the attribute exists and convert string to symbol for hash key + attributes = attributes.each_with_object({}) { |(k, v), h| + if (!self.class.attribute_map.key?(k.to_sym)) + self.additional_properties[k.to_sym] = v + else + h[k.to_sym] = v + end + } + + if attributes.key?(:'field') + self.field = attributes[:'field'] + end + + if attributes.key?(:'include') + self.include = attributes[:'include'] + end + end + + # Check to see if the all the properties in the model are valid + # @return true if the model is valid + # @!visibility private + def valid? + return false if @field.nil? + return false if @include.nil? + true + end + + # Custom attribute writer method with validation + # @param field [Object] Object to be assigned + # @!visibility private + def field=(field) + if field.nil? + fail ArgumentError, 'invalid value for "field", field cannot be nil.' + end + @field = field + end + + # Custom attribute writer method with validation + # @param include [Object] Object to be assigned + # @!visibility private + def include=(include) + if include.nil? + fail ArgumentError, 'invalid value for "include", include cannot be nil.' + end + @include = include + end + + # Returns the object in the form of hash, with additionalProperties support. + # @return [Hash] Returns the object in the form of hash + # @!visibility private + def to_hash + hash = {} + self.class.attribute_map.each_pair do |attr, param| + value = self.send(attr) + if value.nil? + is_nullable = self.class.openapi_nullable.include?(attr) + next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}")) + end + + hash[param] = _to_hash(value) + end + self.additional_properties.each_pair do |attr, value| + hash[attr] = value + end + hash + end + + # Checks equality by comparing each attribute. + # @param o [Object] Object to be compared + # @!visibility private + def ==(o) + return true if self.equal?(o) + self.class == o.class && + field == o.field && + include == o.include && + additional_properties == o.additional_properties + end + + # Calculates hash code according to all attributes. + # @return [Integer] Hash code + # @!visibility private + def hash + [field, include, additional_properties].hash + end + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb new file mode 100644 index 000000000000..fcb416f78acb --- /dev/null +++ b/lib/datadog_api_client/v2/models/observability_pipeline_split_array_processor_type.rb @@ -0,0 +1,26 @@ +=begin +#Datadog API V2 Collection + +#Collection of all Datadog Public endpoints. + +The version of the OpenAPI document: 1.0 +Contact: support@datadoghq.com +Generated by: https://github.com/DataDog/datadog-api-client-ruby/tree/master/.generator + + Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+ This product includes software developed at Datadog (https://www.datadoghq.com/). + Copyright 2020-Present Datadog, Inc. + +=end + +require 'date' +require 'time' + +module DatadogAPIClient::V2 + # The processor type. The value should always be `split_array`. + class ObservabilityPipelineSplitArrayProcessorType + include BaseEnumModel + + SPLIT_ARRAY = "split_array".freeze + end +end diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb index dd77cfec9255..707c0ff5e122 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `splunk_hec` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkHecDestination include BaseGenericModel @@ -28,7 +30,7 @@ class ObservabilityPipelineSplunkHecDestination # Encoding format for log events. attr_accessor :encoding - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Optional name of the Splunk index where logs are written. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb index f3402f6b6db2..0845f1540745 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_hec_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `splunk_hec` source implements the Splunk HTTP Event Collector (HEC) API. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkHecSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb index 66d3633cd1b2..a78d633227c2 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_splunk_tcp_source.rb @@ -19,10 +19,12 @@ module DatadogAPIClient::V2 # The `splunk_tcp` source receives logs from a Splunk Universal Forwarder over TCP. # TLS is supported for secure transmission. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSplunkTcpSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. 
Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Configuration for enabling TLS encryption between the pipeline component and external services. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb index 6193fb57f020..7a3c31742c5f 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `sumo_logic` destination forwards logs to Sumo Logic. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSumoLogicDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb index e4b3bcf3c833..f43d9b510beb 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_sumo_logic_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `sumo_logic` source receives logs from Sumo Logic collectors. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSumoLogicSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # The source type. The value should always be `sumo_logic`. diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb index 045dcfe90403..132ffd70a899 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_destination.rb @@ -18,6 +18,8 @@ module DatadogAPIClient::V2 # The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSyslogNgDestination include BaseGenericModel diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb index 85f107e721cd..5f59f534b879 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_syslog_ng_source.rb @@ -18,10 +18,12 @@ module DatadogAPIClient::V2 # The `syslog_ng` source listens for logs over TCP or UDP from a `syslog-ng` server using the syslog protocol. + # + # **Supported pipeline types:** logs class ObservabilityPipelineSyslogNgSource include BaseGenericModel - # The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + # The unique identifier for this component. Used in other parts of the pipeline to reference this component (for example, as the `input` to downstream components). attr_reader :id # Protocol used by the syslog source to receive messages. 
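The three new `split_array` files introduced above define the processor, its per-array configuration, and the type enum. A sketch of how they compose; the field path and queries are illustrative, while the `SPLIT_ARRAY` constant comes straight from the new enum file:

    require "datadog_api_client"

    tag_split = DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorArrayConfig.new(
      field: "tags", # illustrative path to the array field to split
      include: "*"
    )

    processor = DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessor.new(
      id: "split-array-processor",
      type: DatadogAPIClient::V2::ObservabilityPipelineSplitArrayProcessorType::SPLIT_ARRAY,
      include: "*",        # per the doc comment, typically `*` for split_array
      enabled: true,
      arrays: [tag_split]  # the writer validates 1 to 15 entries
    )

Each entry in `arrays` carries its own `include` query, so a single processor can split several array fields under different matching rules.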
diff --git a/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb b/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb index 3d19ef6446cf..45982f10c28e 100644 --- a/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb +++ b/lib/datadog_api_client/v2/models/observability_pipeline_throttle_processor.rb @@ -18,13 +18,15 @@ module DatadogAPIClient::V2 # The `throttle` processor limits the number of events that pass through over a given time window. + # + # **Supported pipeline types:** logs class ObservabilityPipelineThrottleProcessor include BaseGenericModel # The display name for a component. attr_accessor :display_name - # Whether this processor is enabled. + # Indicates whether the processor is enabled. attr_reader :enabled # Optional list of fields used to group events before the threshold has been reached.
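For the `throttle` processor, whose hunk above touches only doc comments and shows `display_name`, `enabled`, and the optional grouping field, a sketch under the assumption that the model also carries `threshold` and `window` attributes, which are not shown in this excerpt:

    require "datadog_api_client"

    processor = DatadogAPIClient::V2::ObservabilityPipelineThrottleProcessor.new(
      id: "throttle-processor",
      type: DatadogAPIClient::V2::ObservabilityPipelineThrottleProcessorType::THROTTLE, # assumed constant
      include: "service:my-service",
      enabled: true,
      group_by: ["host"], # optional grouping before the threshold applies
      threshold: 1000,    # assumed field: maximum events allowed per window
      window: 60.0        # assumed field: window length in seconds
    )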