diff --git a/.apigentools-info b/.apigentools-info index 9ee1072176..22556ef6d1 100644 --- a/.apigentools-info +++ b/.apigentools-info @@ -4,13 +4,13 @@ "spec_versions": { "v1": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-29 16:12:55.566790", - "spec_repo_commit": "22937387" + "regenerated": "2025-04-29 18:57:43.853175", + "spec_repo_commit": "d1252b21" }, "v2": { "apigentools_version": "1.6.6", - "regenerated": "2025-04-29 16:12:55.584059", - "spec_repo_commit": "22937387" + "regenerated": "2025-04-29 18:57:43.868755", + "spec_repo_commit": "d1252b21" } } } \ No newline at end of file diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index a024ef35ee..dd3346b50b 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -4357,6 +4357,48 @@ components: required: - data type: object + AzureStorageDestination: + description: The `azure_storage` destination forwards logs to an Azure Blob + Storage container. + properties: + blob_prefix: + description: Optional prefix for blobs written to the container. + example: logs/ + type: string + container_name: + description: The name of the Azure Blob Storage container to store logs + in. + example: my-log-container + type: string + id: + description: The unique identifier for this component. + example: azure-storage-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - processor-id + items: + type: string + type: array + type: + $ref: '#/components/schemas/AzureStorageDestinationType' + required: + - id + - type + - inputs + - container_name + type: object + AzureStorageDestinationType: + default: azure_storage + description: The destination type. The value should always be `azure_storage`. + enum: + - azure_storage + example: azure_storage + type: string + x-enum-varnames: + - AZURE_STORAGE AzureUCConfig: description: Azure config. 
properties: @@ -18998,6 +19040,29 @@ components: meta: $ref: '#/components/schemas/HistoricalJobListMeta' type: object + ListPipelinesResponse: + description: Represents the response payload containing a list of pipelines + and associated metadata. + properties: + data: + description: The `schema` `data`. + items: + $ref: '#/components/schemas/ObservabilityPipelineData' + type: array + meta: + $ref: '#/components/schemas/ListPipelinesResponseMeta' + required: + - data + type: object + ListPipelinesResponseMeta: + description: Metadata about the response. + properties: + totalCount: + description: The total number of pipelines. + example: 42 + format: int64 + type: integer + type: object ListPowerpacksResponse: description: Response object which includes all powerpack configurations. properties: @@ -21653,6 +21718,58 @@ components: - data_source - query type: object + MicrosoftSentinelDestination: + description: The `microsoft_sentinel` destination forwards logs to Microsoft + Sentinel. + properties: + client_id: + description: Azure AD client ID used for authentication. + example: a1b2c3d4-5678-90ab-cdef-1234567890ab + type: string + dcr_immutable_id: + description: The immutable ID of the Data Collection Rule (DCR). + example: dcr-uuid-1234 + type: string + id: + description: The unique identifier for this component. + example: sentinel-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + table: + description: The name of the Log Analytics table where logs are sent. + example: CustomLogsTable + type: string + tenant_id: + description: Azure AD tenant ID. 
+ example: abcdef12-3456-7890-abcd-ef1234567890 + type: string + type: + $ref: '#/components/schemas/MicrosoftSentinelDestinationType' + required: + - id + - type + - inputs + - client_id + - tenant_id + - dcr_immutable_id + - table + type: object + MicrosoftSentinelDestinationType: + default: microsoft_sentinel + description: The destination type. The value should always be `microsoft_sentinel`. + enum: + - microsoft_sentinel + example: microsoft_sentinel + type: string + x-enum-varnames: + - MICROSOFT_SENTINEL MicrosoftTeamsChannelInfoResponseAttributes: description: Channel attributes. properties: @@ -22757,6 +22874,66 @@ components: required: - data type: object + ObservabilityPipelineAddEnvVarsProcessor: + description: The `add_env_vars` processor adds environment variable values to + log events. + properties: + id: + description: The unique identifier for this component. Used to reference + this processor in the pipeline. + example: add-env-vars-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessorType' + variables: + description: A list of environment variable mappings to apply to log fields. + items: + $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessorVariable' + type: array + required: + - id + - type + - include + - inputs + - variables + type: object + ObservabilityPipelineAddEnvVarsProcessorType: + default: add_env_vars + description: The processor type. The value should always be `add_env_vars`. 
+ enum: + - add_env_vars + example: add_env_vars + type: string + x-enum-varnames: + - ADD_ENV_VARS + ObservabilityPipelineAddEnvVarsProcessorVariable: + description: Defines a mapping between an environment variable and a log field. + properties: + field: + description: The target field in the log event. + example: log.environment.region + type: string + name: + description: The name of the environment variable to read. + example: AWS_REGION + type: string + required: + - field + - name + type: object ObservabilityPipelineAddFieldsProcessor: description: The `add_fields` processor adds static key-value fields to logs. properties: @@ -22803,6 +22980,236 @@ components: type: string x-enum-varnames: - ADD_FIELDS + ObservabilityPipelineAmazonDataFirehoseSource: + description: The `amazon_data_firehose` source ingests logs from AWS Data Firehose. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: amazon-firehose-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSourceType' + required: + - id + - type + type: object + ObservabilityPipelineAmazonDataFirehoseSourceType: + default: amazon_data_firehose + description: The source type. The value should always be `amazon_data_firehose`. + enum: + - amazon_data_firehose + example: amazon_data_firehose + type: string + x-enum-varnames: + - AMAZON_DATA_FIREHOSE + ObservabilityPipelineAmazonOpenSearchDestination: + description: The `amazon_opensearch` destination writes logs to Amazon OpenSearch. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + bulk_index: + description: The index to write logs to. 
+ example: logs-index + type: string + id: + description: The unique identifier for this component. + example: elasticsearch-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationType' + required: + - id + - type + - inputs + - auth + type: object + ObservabilityPipelineAmazonOpenSearchDestinationAuth: + description: 'Authentication settings for the Amazon OpenSearch destination. + + The `strategy` field determines whether basic or AWS-based authentication + is used. + + ' + properties: + assume_role: + description: The ARN of the role to assume (used with `aws` strategy). + type: string + aws_region: + description: AWS region + type: string + external_id: + description: External ID for the assumed role (used with `aws` strategy). + type: string + session_name: + description: Session name for the assumed role (used with `aws` strategy). + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy' + required: + - strategy + type: object + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy: + description: The authentication strategy to use. + enum: + - basic + - aws + example: aws + type: string + x-enum-varnames: + - BASIC + - AWS + ObservabilityPipelineAmazonOpenSearchDestinationType: + default: amazon_opensearch + description: The destination type. The value should always be `amazon_opensearch`. + enum: + - amazon_opensearch + example: amazon_opensearch + type: string + x-enum-varnames: + - AMAZON_OPENSEARCH + ObservabilityPipelineAmazonS3Destination: + description: The `amazon_s3` destination sends your logs in Datadog-rehydratable + format to an Amazon S3 bucket for archiving. 
+ properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + bucket: + description: S3 bucket name. + example: error-logs + type: string + id: + description: Unique identifier for the destination component. + example: amazon-s3-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + key_prefix: + description: Optional prefix for object keys. + type: string + region: + description: AWS region of the S3 bucket. + example: us-east-1 + type: string + storage_class: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationStorageClass' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3DestinationType' + required: + - id + - type + - inputs + - bucket + - region + - storage_class + type: object + ObservabilityPipelineAmazonS3DestinationStorageClass: + description: S3 storage class. + enum: + - STANDARD + - REDUCED_REDUNDANCY + - INTELLIGENT_TIERING + - STANDARD_IA + - EXPRESS_ONEZONE + - ONEZONE_IA + - GLACIER + - GLACIER_IR + - DEEP_ARCHIVE + example: STANDARD + type: string + x-enum-varnames: + - STANDARD + - REDUCED_REDUNDANCY + - INTELLIGENT_TIERING + - STANDARD_IA + - EXPRESS_ONEZONE + - ONEZONE_IA + - GLACIER + - GLACIER_IR + - DEEP_ARCHIVE + ObservabilityPipelineAmazonS3DestinationType: + default: amazon_s3 + description: The destination type. Always `amazon_s3`. + enum: + - amazon_s3 + example: amazon_s3 + type: string + x-enum-varnames: + - AMAZON_S3 + ObservabilityPipelineAmazonS3Source: + description: 'The `amazon_s3` source ingests logs from an Amazon S3 bucket. + + It supports AWS authentication and TLS encryption. + + ' + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineAwsAuth' + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: aws-s3-source + type: string + region: + description: AWS region where the S3 bucket resides. + example: us-east-1 + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineAmazonS3SourceType' + required: + - id + - type + - region + type: object + ObservabilityPipelineAmazonS3SourceType: + default: amazon_s3 + description: The source type. Always `amazon_s3`. + enum: + - amazon_s3 + example: amazon_s3 + type: string + x-enum-varnames: + - AMAZON_S3 + ObservabilityPipelineAwsAuth: + description: "AWS authentication credentials used for accessing AWS services + such as S3.\nIf omitted, the system\u2019s default credentials are used (for + example, the IAM role and environment variables).\n" + properties: + assume_role: + description: The Amazon Resource Name (ARN) of the role to assume. + type: string + external_id: + description: A unique identifier for cross-account role assumption. + type: string + session_name: + description: A session identifier used for logging and tracing the assumed + role session. + type: string + type: object ObservabilityPipelineConfig: description: Specifies the pipeline's configuration, including its sources, processors, and destinations. @@ -22838,13 +23245,26 @@ components: type: array required: - sources - - processors - destinations type: object ObservabilityPipelineConfigDestinationItem: description: A destination for the pipeline. 
oneOf: - $ref: '#/components/schemas/ObservabilityPipelineDatadogLogsDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Destination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestination' + - $ref: '#/components/schemas/AzureStorageDestination' + - $ref: '#/components/schemas/MicrosoftSentinelDestination' + - $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestination' + - $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestination' + - $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestination' + - $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestination' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestination' ObservabilityPipelineConfigProcessorItem: description: A processor for the pipeline. 
oneOf: @@ -22854,34 +23274,34 @@ components: - $ref: '#/components/schemas/ObservabilityPipelineAddFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessor' - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSampleProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineAddEnvVarsProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineReduceProcessor' + - $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessor' ObservabilityPipelineConfigSourceItem: description: A data source for the pipeline. oneOf: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSource' - $ref: '#/components/schemas/ObservabilityPipelineDatadogAgentSource' - ObservabilityPipelineCreateRequest: - description: Top-level schema representing a pipeline. - properties: - data: - $ref: '#/components/schemas/ObservabilityPipelineCreateRequestData' - required: - - data - type: object - ObservabilityPipelineCreateRequestData: - description: "Contains the pipeline\u2019s ID, type, and configuration attributes." - properties: - attributes: - $ref: '#/components/schemas/ObservabilityPipelineDataAttributes' - type: - default: pipelines - description: The resource type identifier. For pipeline resources, this - should always be set to `pipelines`. 
- example: pipelines - type: string - required: - - type - - attributes - type: object + - $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSource' + - $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonS3Source' + - $ref: '#/components/schemas/ObservabilityPipelineFluentdSource' + - $ref: '#/components/schemas/ObservabilityPipelineFluentBitSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpServerSource' + - $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSource' + - $ref: '#/components/schemas/ObservabilityPipelineRsyslogSource' + - $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSource' + - $ref: '#/components/schemas/ObservabilityPipelineAmazonDataFirehoseSource' + - $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSource' + - $ref: '#/components/schemas/ObservabilityPipelineHttpClientSource' + - $ref: '#/components/schemas/ObservabilityPipelineLogstashSource' ObservabilityPipelineData: description: "Contains the pipeline\u2019s ID, type, and configuration attributes." properties: @@ -22973,427 +23393,2592 @@ components: type: string x-enum-varnames: - DATADOG_LOGS + ObservabilityPipelineDecoding: + description: The decoding format used to interpret incoming logs. + enum: + - bytes + - gelf + - json + - syslog + example: json + type: string + x-enum-varnames: + - DECODE_BYTES + - DECODE_GELF + - DECODE_JSON + - DECODE_SYSLOG + ObservabilityPipelineDedupeProcessor: + description: The `dedupe` processor removes duplicate fields in log events. + properties: + fields: + description: A list of log field paths to check for duplicates. + example: + - log.message + - log.error + items: + type: string + type: array + id: + description: The unique identifier for this processor. + example: dedupe-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. 
+ example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - parse-json-processor + items: + type: string + type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessorMode' + type: + $ref: '#/components/schemas/ObservabilityPipelineDedupeProcessorType' + required: + - id + - type + - include + - inputs + - fields + - mode + type: object + ObservabilityPipelineDedupeProcessorMode: + description: The deduplication mode to apply to the fields. + enum: + - match + - ignore + example: match + type: string + x-enum-varnames: + - MATCH + - IGNORE + ObservabilityPipelineDedupeProcessorType: + default: dedupe + description: The processor type. The value should always be `dedupe`. + enum: + - dedupe + example: dedupe + type: string + x-enum-varnames: + - DEDUPE + ObservabilityPipelineElasticsearchDestination: + description: The `elasticsearch` destination writes logs to an Elasticsearch + cluster. + properties: + api_version: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' + bulk_index: + description: The index to write logs to in Elasticsearch. + example: logs-index + type: string + id: + description: The unique identifier for this component. + example: elasticsearch-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineElasticsearchDestinationApiVersion: + description: The Elasticsearch API version to use. Set to `auto` to auto-detect. 
+ enum: + - auto + - v6 + - v7 + - v8 + example: auto + type: string + x-enum-varnames: + - AUTO + - V6 + - V7 + - V8 + ObservabilityPipelineElasticsearchDestinationType: + default: elasticsearch + description: The destination type. The value should always be `elasticsearch`. + enum: + - elasticsearch + example: elasticsearch + type: string + x-enum-varnames: + - ELASTICSEARCH + ObservabilityPipelineEnrichmentTableFile: + description: Defines a static enrichment table loaded from a CSV file. + properties: + encoding: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileEncoding' + key: + description: Key fields used to look up enrichment values. + items: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileKeyItems' + type: array + path: + description: Path to the CSV file. + example: /etc/enrichment/lookup.csv + type: string + schema: + description: Schema defining column names and their types. + items: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileSchemaItems' + type: array + required: + - encoding + - key + - path + - schema + type: object + ObservabilityPipelineEnrichmentTableFileEncoding: + description: File encoding format. + properties: + delimiter: + description: The `encoding` `delimiter`. + example: ',' + type: string + includes_headers: + description: The `encoding` `includes_headers`. + example: true + type: boolean + type: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileEncodingType' + required: + - type + - delimiter + - includes_headers + type: object + ObservabilityPipelineEnrichmentTableFileEncodingType: + description: Specifies the encoding format (e.g., CSV) used for enrichment tables. + enum: + - csv + example: csv + type: string + x-enum-varnames: + - CSV + ObservabilityPipelineEnrichmentTableFileKeyItems: + description: Defines how to map log fields to enrichment table columns during + lookups. + properties: + column: + description: The `items` `column`. 
+ example: user_id + type: string + comparison: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileKeyItemsComparison' + field: + description: The `items` `field`. + example: log.user.id + type: string + required: + - column + - comparison + - field + type: object + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison: + description: Defines how to compare key fields for enrichment table lookups. + enum: + - equals + example: equals + type: string + x-enum-varnames: + - EQUALS + ObservabilityPipelineEnrichmentTableFileSchemaItems: + description: Describes a single column and its type in an enrichment table schema. + properties: + column: + description: The `items` `column`. + example: region + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFileSchemaItemsType' + required: + - column + - type + type: object + ObservabilityPipelineEnrichmentTableFileSchemaItemsType: + description: Declares allowed data types for enrichment table columns. + enum: + - string + - boolean + - integer + - float + - date + - timestamp + example: string + type: string + x-enum-varnames: + - STRING + - BOOLEAN + - INTEGER + - FLOAT + - DATE + - TIMESTAMP + ObservabilityPipelineEnrichmentTableGeoIp: + description: Uses a GeoIP database to enrich logs based on an IP field. + properties: + key_field: + description: Path to the IP field in the log. + example: log.source.ip + type: string + locale: + description: Locale used to resolve geographical names. + example: en + type: string + path: + description: Path to the GeoIP database file. + example: /etc/geoip/GeoLite2-City.mmdb + type: string + required: + - key_field + - locale + - path + type: object + ObservabilityPipelineEnrichmentTableProcessor: + description: The `enrichment_table` processor enriches logs using a static CSV + file or GeoIP database. 
+ properties: + file: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableFile' + geoip: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableGeoIp' + id: + description: The unique identifier for this processor. + example: enrichment-table-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: source:my-source + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - add-fields-processor + items: + type: string + type: array + target: + description: Path where enrichment results should be stored in the log. + example: enriched.geoip + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineEnrichmentTableProcessorType' + required: + - id + - type + - include + - inputs + - target + type: object + ObservabilityPipelineEnrichmentTableProcessorType: + default: enrichment_table + description: The processor type. The value should always be `enrichment_table`. + enum: + - enrichment_table + example: enrichment_table + type: string + x-enum-varnames: + - ENRICHMENT_TABLE ObservabilityPipelineFieldValue: description: Represents a static key-value pair used in various processors. properties: - name: - description: The field name. - example: field_name - type: string - value: - description: The field value. - example: field_value + name: + description: The field name. + example: field_name + type: string + value: + description: The field value. + example: field_value + type: string + required: + - name + - value + type: object + ObservabilityPipelineFilterProcessor: + description: The `filter` processor allows conditional processing of logs based + on a Datadog search query. Logs that match the `include` query are passed + through; others are discarded. + properties: + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: filter-processor + type: string + include: + description: A Datadog search query used to determine which logs should + pass through the filter. Logs that match this query continue to downstream + components; others are dropped. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineFilterProcessorType' + required: + - id + - type + - include + - inputs + type: object + ObservabilityPipelineFilterProcessorType: + default: filter + description: The processor type. The value should always be `filter`. + enum: + - filter + example: filter + type: string + x-enum-varnames: + - FILTER + ObservabilityPipelineFluentBitSource: + description: The `fluent_bit` source ingests logs from Fluent Bit. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: fluent-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineFluentBitSourceType' + required: + - id + - type + type: object + ObservabilityPipelineFluentBitSourceType: + default: fluent_bit + description: The source type. The value should always be `fluent_bit`. + enum: + - fluent_bit + example: fluent_bit + type: string + x-enum-varnames: + - FLUENT_BIT + ObservabilityPipelineFluentdSource: + description: The `fluentd` source ingests logs from a Fluentd-compatible service. + properties: + id: + description: The unique identifier for this component. 
Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: fluent-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineFluentdSourceType' + required: + - id + - type + type: object + ObservabilityPipelineFluentdSourceType: + default: fluentd + description: The source type. The value should always be `fluentd`. + enum: + - fluentd + example: fluentd + type: string + x-enum-varnames: + - FLUENTD + ObservabilityPipelineGcpAuth: + description: 'GCP credentials used to authenticate with Google Cloud Storage. + + ' + properties: + credentials_file: + description: Path to the GCP service account key file. + example: /var/secrets/gcp-credentials.json + type: string + required: + - credentials_file + type: object + ObservabilityPipelineGenerateMetricsProcessor: + description: 'The `generate_datadog_metrics` processor creates custom metrics + from logs and sends them to Datadog. + + Metrics can be counters, gauges, or distributions and optionally grouped by + log fields. + + ' + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline. + example: generate-metrics-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this processor. + example: + - source-id + items: + type: string + type: array + metrics: + description: Configuration for generating individual metrics. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetric' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineGenerateMetricsProcessorType' + required: + - id + - type + - inputs + - include + - metrics + type: object + ObservabilityPipelineGenerateMetricsProcessorType: + default: generate_datadog_metrics + description: The processor type. Always `generate_datadog_metrics`. + enum: + - generate_datadog_metrics + example: generate_datadog_metrics + type: string + x-enum-varnames: + - GENERATE_DATADOG_METRICS + ObservabilityPipelineGeneratedMetric: + description: 'Defines a log-based custom metric, including its name, type, filter, + value computation strategy, + + and optional grouping fields. + + ' + properties: + group_by: + description: Optional fields used to group the metric series. + example: + - service + - env + items: + type: string + type: array + include: + description: Datadog filter query to match logs for metric generation. + example: service:billing + type: string + metric_type: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricMetricType' + name: + description: Name of the custom metric to be created. + example: logs.processed + type: string + value: + $ref: '#/components/schemas/ObservabilityPipelineMetricValue' + required: + - name + - include + - metric_type + - value + type: object + ObservabilityPipelineGeneratedMetricIncrementByField: + description: Strategy that increments a generated metric based on the value + of a log field. + properties: + field: + description: Name of the log field containing the numeric value to increment + the metric by. + example: errors + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy' + required: + - strategy + - field + type: object + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy: + description: Uses a numeric field in the log event as the metric increment. 
+ enum: + - increment_by_field + example: increment_by_field + type: string + x-enum-varnames: + - INCREMENT_BY_FIELD + ObservabilityPipelineGeneratedMetricIncrementByOne: + description: Strategy that increments a generated metric by one for each matching + event. + properties: + strategy: + $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOneStrategy' + required: + - strategy + type: object + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy: + description: Increments the metric by 1 for each matching event. + enum: + - increment_by_one + example: increment_by_one + type: string + x-enum-varnames: + - INCREMENT_BY_ONE + ObservabilityPipelineGeneratedMetricMetricType: + description: Type of metric to create. + enum: + - count + - gauge + - distribution + example: count + type: string + x-enum-varnames: + - COUNT + - GAUGE + - DISTRIBUTION + ObservabilityPipelineGoogleChronicleDestination: + description: The `google_chronicle` destination sends logs to Google Chronicle. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + customer_id: + description: The Google Chronicle customer ID. + example: abcdefg123456789 + type: string + encoding: + $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestinationEncoding' + id: + description: The unique identifier for this component. + example: google-chronicle-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - parse-json-processor + items: + type: string + type: array + log_type: + description: The log type metadata associated with the Chronicle destination. 
+ example: nginx_logs + type: string + type: + $ref: '#/components/schemas/ObservabilityPipelineGoogleChronicleDestinationType' + required: + - id + - type + - inputs + - auth + - customer_id + type: object + ObservabilityPipelineGoogleChronicleDestinationEncoding: + description: The encoding format for the logs sent to Chronicle. + enum: + - json + - raw_message + example: json + type: string + x-enum-varnames: + - JSON + - RAW_MESSAGE + ObservabilityPipelineGoogleChronicleDestinationType: + default: google_chronicle + description: The destination type. The value should always be `google_chronicle`. + enum: + - google_chronicle + example: google_chronicle + type: string + x-enum-varnames: + - GOOGLE_CHRONICLE + ObservabilityPipelineGoogleCloudStorageDestination: + description: 'The `google_cloud_storage` destination stores logs in a Google + Cloud Storage (GCS) bucket. + + It requires a bucket name, GCP authentication, and metadata fields. + + ' + properties: + acl: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationAcl' + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + bucket: + description: Name of the GCS bucket. + example: error-logs + type: string + id: + description: Unique identifier for the destination component. + example: gcs-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + key_prefix: + description: Optional prefix for object keys within the GCS bucket. + type: string + metadata: + description: Custom metadata key-value pairs added to each object. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineMetadataEntry' + type: array + storage_class: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationStorageClass' + type: + $ref: '#/components/schemas/ObservabilityPipelineGoogleCloudStorageDestinationType' + required: + - id + - type + - inputs + - bucket + - auth + - storage_class + - acl + - metadata + type: object + ObservabilityPipelineGoogleCloudStorageDestinationAcl: + description: Access control list setting for objects written to the bucket. + enum: + - private + - project-private + - public-read + - authenticated-read + - bucket-owner-read + - bucket-owner-full-control + example: private + type: string + x-enum-varnames: + - PRIVATE + - PROJECTNOT_PRIVATE + - PUBLICNOT_READ + - AUTHENTICATEDNOT_READ + - BUCKETNOT_OWNERNOT_READ + - BUCKETNOT_OWNERNOT_FULLNOT_CONTROL + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass: + description: Storage class used for objects stored in GCS. + enum: + - STANDARD + - NEARLINE + - COLDLINE + - ARCHIVE + example: STANDARD + type: string + x-enum-varnames: + - STANDARD + - NEARLINE + - COLDLINE + - ARCHIVE + ObservabilityPipelineGoogleCloudStorageDestinationType: + default: google_cloud_storage + description: The destination type. Always `google_cloud_storage`. + enum: + - google_cloud_storage + example: google_cloud_storage + type: string + x-enum-varnames: + - GOOGLE_CLOUD_STORAGE + ObservabilityPipelineGooglePubSubSource: + description: The `google_pubsub` source ingests logs from a Google Cloud Pub/Sub + subscription. + properties: + auth: + $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: google-pubsub-source + type: string + project: + description: The GCP project ID that owns the Pub/Sub subscription. + example: my-gcp-project + type: string + subscription: + description: The Pub/Sub subscription name from which messages are consumed. + example: logs-subscription + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineGooglePubSubSourceType' + required: + - id + - type + - auth + - decoding + - project + - subscription + type: object + ObservabilityPipelineGooglePubSubSourceType: + default: google_pubsub + description: The source type. The value should always be `google_pubsub`. + enum: + - google_pubsub + example: google_pubsub + type: string + x-enum-varnames: + - GOOGLE_PUBSUB + ObservabilityPipelineHttpClientSource: + description: The `http_client` source scrapes logs from HTTP endpoints at regular + intervals. + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceAuthStrategy' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: http-client-source + type: string + scrape_interval_secs: + description: The interval (in seconds) between HTTP scrape requests. + example: 60 + format: int64 + type: integer + scrape_timeout_secs: + description: The timeout (in seconds) for each scrape request. + example: 10 + format: int64 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpClientSourceType' + required: + - id + - type + - decoding + type: object + ObservabilityPipelineHttpClientSourceAuthStrategy: + description: Optional authentication strategy for HTTP requests. 
+ enum: + - basic + - bearer + example: basic + type: string + x-enum-varnames: + - BASIC + - BEARER + ObservabilityPipelineHttpClientSourceType: + default: http_client + description: The source type. The value should always be `http_client`. + enum: + - http_client + example: http_client + type: string + x-enum-varnames: + - HTTP_CLIENT + ObservabilityPipelineHttpServerSource: + description: The `http_server` source collects logs over HTTP POST from external + services. + properties: + auth_strategy: + $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceAuthStrategy' + decoding: + $ref: '#/components/schemas/ObservabilityPipelineDecoding' + id: + description: Unique ID for the HTTP server source. + example: http-server-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineHttpServerSourceType' + required: + - id + - type + - auth_strategy + - decoding + type: object + ObservabilityPipelineHttpServerSourceAuthStrategy: + description: HTTP authentication method. + enum: + - none + - plain + example: plain + type: string + x-enum-varnames: + - NONE + - PLAIN + ObservabilityPipelineHttpServerSourceType: + default: http_server + description: The source type. The value should always be `http_server`. + enum: + - http_server + example: http_server + type: string + x-enum-varnames: + - HTTP_SERVER + ObservabilityPipelineKafkaSource: + description: The `kafka` source ingests data from Apache Kafka topics. + properties: + group_id: + description: Consumer group ID used by the Kafka client. + example: consumer-group-0 + type: string + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: kafka-source + type: string + librdkafka_options: + description: Optional list of advanced Kafka client configuration options, + defined as key-value pairs. 
+ items: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' + type: array + sasl: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + topics: + description: A list of Kafka topic names to subscribe to. The source ingests + messages from each topic specified. + example: + - topic1 + - topic2 + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + required: + - id + - type + - group_id + - topics + type: object + ObservabilityPipelineKafkaSourceLibrdkafkaOption: + description: Represents a key-value pair used to configure low-level `librdkafka` + client options for Kafka sources, such as timeouts, buffer sizes, and security + settings. + properties: + name: + description: The name of the `librdkafka` configuration option to set. + example: fetch.message.max.bytes + type: string + value: + description: The value assigned to the specified `librdkafka` configuration + option. + example: '1048576' + type: string + required: + - name + - value + type: object + ObservabilityPipelineKafkaSourceSasl: + description: Specifies the SASL mechanism for authenticating with a Kafka cluster. + properties: + mechanism: + $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + type: object + ObservabilityPipelineKafkaSourceType: + default: kafka + description: The source type. The value should always be `kafka`. + enum: + - kafka + example: kafka + type: string + x-enum-varnames: + - KAFKA + ObservabilityPipelineLogstashSource: + description: The `logstash` source ingests logs from a Logstash forwarder. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: logstash-source + type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineLogstashSourceType' + required: + - id + - type + type: object + ObservabilityPipelineLogstashSourceType: + default: logstash + description: The source type. The value should always be `logstash`. + enum: + - logstash + example: logstash + type: string + x-enum-varnames: + - LOGSTASH + ObservabilityPipelineMetadataEntry: + description: A custom metadata entry to attach to each object uploaded to the + GCS bucket. + properties: + name: + description: The metadata key. + example: environment + type: string + value: + description: The metadata value. + example: production + type: string + required: + - name + - value + type: object + ObservabilityPipelineMetricValue: + description: Specifies how the value of the generated metric is computed. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByOne' + - $ref: '#/components/schemas/ObservabilityPipelineGeneratedMetricIncrementByField' + ObservabilityPipelineNewRelicDestination: + description: The `new_relic` destination sends logs to the New Relic platform. + properties: + id: + description: The unique identifier for this component. + example: new-relic-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - parse-json-processor + items: + type: string + type: array + region: + $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestinationRegion' + type: + $ref: '#/components/schemas/ObservabilityPipelineNewRelicDestinationType' + required: + - id + - type + - inputs + - region + type: object + ObservabilityPipelineNewRelicDestinationRegion: + description: The New Relic region. 
+ enum: + - us + - eu + example: us + type: string + x-enum-varnames: + - US + - EU + ObservabilityPipelineNewRelicDestinationType: + default: new_relic + description: The destination type. The value should always be `new_relic`. + enum: + - new_relic + example: new_relic + type: string + x-enum-varnames: + - NEW_RELIC + ObservabilityPipelineOcsfMapperProcessor: + description: The `ocsf_mapper` processor transforms logs into the OCSF schema + using a predefined mapping configuration. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline. + example: ocsf-mapper-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this processor. + example: + - filter-processor + items: + type: string + type: array + mappings: + description: A list of mapping rules to convert events to the OCSF format. + items: + $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessorMapping' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessorType' + required: + - id + - type + - include + - inputs + - mappings + type: object + ObservabilityPipelineOcsfMapperProcessorMapping: + description: Defines how specific events are transformed to OCSF using a mapping + configuration. + properties: + include: + description: A Datadog search query used to select the logs that this mapping + should apply to. + example: service:my-service + type: string + mapping: + $ref: '#/components/schemas/ObservabilityPipelineOcsfMapperProcessorMappingMapping' + required: + - include + - mapping + type: object + ObservabilityPipelineOcsfMapperProcessorMappingMapping: + description: Defines a single mapping rule for transforming logs into the OCSF + schema. 
+ oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineOcsfMappingLibrary' + ObservabilityPipelineOcsfMapperProcessorType: + default: ocsf_mapper + description: The processor type. The value should always be `ocsf_mapper`. + enum: + - ocsf_mapper + example: ocsf_mapper + type: string + x-enum-varnames: + - OCSF_MAPPER + ObservabilityPipelineOcsfMappingLibrary: + description: Predefined library mappings for common log formats. + enum: + - CloudTrail Account Change + - GCP Cloud Audit CreateBucket + - GCP Cloud Audit CreateSink + - GCP Cloud Audit SetIamPolicy + - GCP Cloud Audit UpdateSink + - Github Audit Log API Activity + - Google Workspace Admin Audit addPrivilege + - Microsoft 365 Defender Incident + - Microsoft 365 Defender UserLoggedIn + - Okta System Log Authentication + - Palo Alto Networks Firewall Traffic + example: CloudTrail Account Change + type: string + x-enum-varnames: + - CLOUDTRAIL_ACCOUNT_CHANGE + - GCP_CLOUD_AUDIT_CREATEBUCKET + - GCP_CLOUD_AUDIT_CREATESINK + - GCP_CLOUD_AUDIT_SETIAMPOLICY + - GCP_CLOUD_AUDIT_UPDATESINK + - GITHUB_AUDIT_LOG_API_ACTIVITY + - GOOGLE_WORKSPACE_ADMIN_AUDIT_ADDPRIVILEGE + - MICROSOFT_365_DEFENDER_INCIDENT + - MICROSOFT_365_DEFENDER_USERLOGGEDIN + - OKTA_SYSTEM_LOG_AUTHENTICATION + - PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC + ObservabilityPipelineOpenSearchDestination: + description: The `opensearch` destination writes logs to an OpenSearch cluster. + properties: + bulk_index: + description: The index to write logs to. + example: logs-index + type: string + id: + description: The unique identifier for this component. + example: opensearch-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. 
+ example: + - filter-processor + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineOpenSearchDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineOpenSearchDestinationType: + default: opensearch + description: The destination type. The value should always be `opensearch`. + enum: + - opensearch + example: opensearch + type: string + x-enum-varnames: + - OPENSEARCH + ObservabilityPipelineParseGrokProcessor: + description: The `parse_grok` processor extracts structured fields from unstructured + log messages using Grok patterns. + properties: + disable_library_rules: + default: false + description: If set to `true`, disables the default Grok rules provided + by Datadog. + example: true + type: boolean + id: + description: A unique identifier for this processor. + example: parse-grok-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + rules: + description: The list of Grok parsing rules. If multiple matching rules + are provided, they are evaluated in order. The first successful match + is applied. + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRule' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorType' + required: + - id + - type + - include + - inputs + - rules + type: object + ObservabilityPipelineParseGrokProcessorRule: + description: 'A Grok parsing rule used in the `parse_grok` processor. Each rule + defines how to extract structured fields + + from a specific log field using Grok patterns. 
+ + ' + properties: + match_rules: + description: 'A list of Grok parsing rules that define how to extract fields + from the source field. + + Each rule must contain a name and a valid Grok pattern. + + ' + example: + - name: MyParsingRule + rule: '%{word:user} connected on %{date("MM/dd/yyyy"):date}' + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRuleMatchRule' + type: array + source: + description: The name of the field in the log event to apply the Grok rules + to. + example: message + type: string + support_rules: + description: 'A list of Grok helper rules that can be referenced by the + parsing rules. + + ' + example: + - name: user + rule: '%{word:user.name}' + items: + $ref: '#/components/schemas/ObservabilityPipelineParseGrokProcessorRuleSupportRule' + type: array + required: + - source + - match_rules + - support_rules + type: object + ObservabilityPipelineParseGrokProcessorRuleMatchRule: + description: 'Defines a Grok parsing rule, which extracts structured fields + from log content using named Grok patterns. + + Each rule must have a unique name and a valid Datadog Grok pattern that will + be applied to the source field. + + ' + properties: + name: + description: The name of the rule. + example: MyParsingRule + type: string + rule: + description: The definition of the Grok rule. + example: '%{word:user} connected on %{date("MM/dd/yyyy"):date}' + type: string + required: + - name + - rule + type: object + ObservabilityPipelineParseGrokProcessorRuleSupportRule: + description: The Grok helper rule referenced in the parsing rules. + properties: + name: + description: The name of the Grok helper rule. + example: user + type: string + rule: + description: The definition of the Grok helper rule. + example: ' %{word:user.name}' + type: string + required: + - name + - rule + type: object + ObservabilityPipelineParseGrokProcessorType: + default: parse_grok + description: The processor type. The value should always be `parse_grok`. 
+ enum: + - parse_grok + example: parse_grok + type: string + x-enum-varnames: + - PARSE_GROK + ObservabilityPipelineParseJSONProcessor: + description: The `parse_json` processor extracts JSON from a specified field + and flattens it into the event. This is useful when logs contain embedded + JSON as a string. + properties: + field: + description: The name of the log field that contains a JSON string. + example: message + type: string + id: + description: A unique identifier for this component. Used to reference this + component in other parts of the pipeline (e.g., as input to downstream + components). + example: parse-json-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessorType' + required: + - id + - type + - include + - field + - inputs + type: object + ObservabilityPipelineParseJSONProcessorType: + default: parse_json + description: The processor type. The value should always be `parse_json`. + enum: + - parse_json + example: parse_json + type: string + x-enum-varnames: + - PARSE_JSON + ObservabilityPipelinePipelineKafkaSourceSaslMechanism: + description: SASL mechanism used for Kafka authentication. + enum: + - PLAIN + - SCRAM-SHA-256 + - SCRAM-SHA-512 + type: string + x-enum-varnames: + - PLAIN + - SCRAMNOT_SHANOT_256 + - SCRAMNOT_SHANOT_512 + ObservabilityPipelineQuotaProcessor: + description: The Quota Processor measures logging traffic for logs that match + a specified filter. When the configured daily quota is met, the processor + can drop or alert. 
+ properties: + drop_events: + description: If set to `true`, logs that matched the quota filter and sent + after the quota has been met are dropped; only logs that did not match + the filter query continue through the pipeline. + example: false + type: boolean + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: quota-processor + type: string + ignore_when_missing_partitions: + description: If `true`, the processor skips quota checks when partition + fields are missing from the logs. + type: boolean + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + limit: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' + name: + description: Name of the quota. + example: MyQuota + type: string + overflow_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' + overrides: + description: A list of alternate quota rules that apply to specific sets + of events, identified by matching field values. Each override can define + a custom limit. + items: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverride' + type: array + partition_fields: + description: A list of fields used to segment log traffic for quota enforcement. + Quotas are tracked independently by unique combinations of these field + values. 
+ items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' + required: + - id + - type + - include + - name + - drop_events + - limit + - inputs + type: object + ObservabilityPipelineQuotaProcessorLimit: + description: The maximum amount of data or number of events allowed before the + quota is enforced. Can be specified in bytes or events. + properties: + enforce: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimitEnforceType' + limit: + description: The limit for quota enforcement. + example: 1000 + format: int64 + type: integer + required: + - enforce + - limit + type: object + ObservabilityPipelineQuotaProcessorLimitEnforceType: + description: Unit for quota enforcement in bytes for data size or events for + count. + enum: + - bytes + - events + example: bytes + type: string + x-enum-varnames: + - BYTES + - EVENTS + ObservabilityPipelineQuotaProcessorOverflowAction: + description: 'The action to take when the quota is exceeded. Options: + + - `drop`: Drop the event. + + - `no_action`: Let the event pass through. + + - `overflow_routing`: Route to an overflow destination. + + ' + enum: + - drop + - no_action + - overflow_routing + example: drop + type: string + x-enum-varnames: + - DROP + - NO_ACTION + - OVERFLOW_ROUTING + ObservabilityPipelineQuotaProcessorOverride: + description: Defines a custom quota limit that applies to specific log events + based on matching field values. + properties: + fields: + description: A list of field matchers used to apply a specific override. + If an event matches all listed key-value pairs, the corresponding override + limit is enforced. + items: + $ref: '#/components/schemas/ObservabilityPipelineFieldValue' + type: array + limit: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' + required: + - fields + - limit + type: object + ObservabilityPipelineQuotaProcessorType: + default: quota + description: The processor type. 
The value should always be `quota`. + enum: + - quota + example: quota + type: string + x-enum-varnames: + - QUOTA + ObservabilityPipelineReduceProcessor: + description: The `reduce` processor aggregates and merges logs based on matching + keys and merge strategies. + properties: + group_by: + description: A list of fields used to group log events for merging. + example: + - log.user.id + - log.device.id + items: + type: string + type: array + id: + description: The unique identifier for this processor. + example: reduce-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: env:prod + type: string + inputs: + description: A list of component IDs whose output is used as the input for + this processor. + example: + - parse-json-processor + items: + type: string + type: array + merge_strategies: + description: List of merge strategies defining how values from grouped events + should be combined. + items: + $ref: '#/components/schemas/ObservabilityPipelineReduceProcessorMergeStrategy' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineReduceProcessorType' + required: + - id + - type + - include + - inputs + - group_by + - merge_strategies + type: object + ObservabilityPipelineReduceProcessorMergeStrategy: + description: Defines how a specific field should be merged across grouped events. + properties: + path: + description: The field path in the log event. + example: log.user.roles + type: string + strategy: + $ref: '#/components/schemas/ObservabilityPipelineReduceProcessorMergeStrategyStrategy' + required: + - path + - strategy + type: object + ObservabilityPipelineReduceProcessorMergeStrategyStrategy: + description: The merge strategy to apply. 
+      enum:
+      - discard
+      - retain
+      - sum
+      - max
+      - min
+      - array
+      - concat
+      - concat_newline
+      - concat_raw
+      - shortest_array
+      - longest_array
+      - flat_unique
+      example: flat_unique
+      type: string
+      x-enum-varnames:
+      - DISCARD
+      - RETAIN
+      - SUM
+      - MAX
+      - MIN
+      - ARRAY
+      - CONCAT
+      - CONCAT_NEWLINE
+      - CONCAT_RAW
+      - SHORTEST_ARRAY
+      - LONGEST_ARRAY
+      - FLAT_UNIQUE
+    ObservabilityPipelineReduceProcessorType:
+      default: reduce
+      description: The processor type. The value should always be `reduce`.
+      enum:
+      - reduce
+      example: reduce
+      type: string
+      x-enum-varnames:
+      - REDUCE
+    ObservabilityPipelineRemoveFieldsProcessor:
+      description: The `remove_fields` processor deletes specified fields from logs.
+      properties:
+        fields:
+          description: A list of field names to be removed from each log event.
+          example:
+          - field1
+          - field2
+          items:
+            type: string
+          type: array
+        id:
+          description: The unique identifier for this component. Used to reference
+            this component in other parts of the pipeline (e.g., as input to downstream
+            components).
+          example: remove-fields-processor
+          type: string
+        include:
+          description: A Datadog search query used to determine which logs this processor
+            targets.
+          example: service:my-service
+          type: string
+        inputs:
+          description: A list of component IDs whose output is used as the `input`
+            for this component.
+          example:
+          - datadog-agent-source
+          items:
+            type: string
+          type: array
+        type:
+          $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessorType'
+      required:
+      - id
+      - type
+      - include
+      - fields
+      - inputs
+      type: object
+    ObservabilityPipelineRemoveFieldsProcessorType:
+      default: remove_fields
+      description: The processor type. The value should always be `remove_fields`.
+      enum:
+      - remove_fields
+      example: remove_fields
+      type: string
+      x-enum-varnames:
+      - REMOVE_FIELDS
+    ObservabilityPipelineRenameFieldsProcessor:
+      description: The `rename_fields` processor changes field names.
+ properties: + fields: + description: A list of rename rules specifying which fields to rename in + the event, what to rename them to, and whether to preserve the original + fields. + items: + $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorField' + type: array + id: + description: A unique identifier for this component. Used to reference this + component in other parts of the pipeline (e.g., as input to downstream + components). + example: rename-fields-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorType' + required: + - id + - type + - include + - fields + - inputs + type: object + ObservabilityPipelineRenameFieldsProcessorField: + description: Defines how to rename a field in log events. + properties: + destination: + description: The field name to assign the renamed value to. + example: destination_field + type: string + preserve_source: + description: Indicates whether the original field, that is received from + the source, should be kept (`true`) or removed (`false`) after renaming. + example: false + type: boolean + source: + description: The original field name in the log event that should be renamed. + example: source_field + type: string + required: + - source + - destination + - preserve_source + type: object + ObservabilityPipelineRenameFieldsProcessorType: + default: rename_fields + description: The processor type. The value should always be `rename_fields`. 
+ enum: + - rename_fields + example: rename_fields + type: string + x-enum-varnames: + - RENAME_FIELDS + ObservabilityPipelineRsyslogDestination: + description: The `rsyslog` destination forwards logs to an external `rsyslog` + server over TCP or UDP using the syslog protocol. + properties: + id: + description: The unique identifier for this component. + example: rsyslog-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + keepalive: + description: Optional socket keepalive duration in milliseconds. + example: 60000 + format: int64 + minimum: 0 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineRsyslogDestinationType' + required: + - id + - type + - inputs + type: object + ObservabilityPipelineRsyslogDestinationType: + default: rsyslog + description: The destination type. The value should always be `rsyslog`. + enum: + - rsyslog + example: rsyslog + type: string + x-enum-varnames: + - RSYSLOG + ObservabilityPipelineRsyslogSource: + description: The `rsyslog` source listens for logs over TCP or UDP from an `rsyslog` + server using the syslog protocol. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: rsyslog-source + type: string + mode: + $ref: '#/components/schemas/ObservabilityPipelineSyslogSourceMode' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineRsyslogSourceType' + required: + - id + - type + - mode + type: object + ObservabilityPipelineRsyslogSourceType: + default: rsyslog + description: The source type. The value should always be `rsyslog`. 
+ enum: + - rsyslog + example: rsyslog + type: string + x-enum-varnames: + - RSYSLOG + ObservabilityPipelineSampleProcessor: + description: The `sample` processor allows probabilistic sampling of logs at + a fixed rate. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (for example, as the `input` + to downstream components). + example: sample-processor + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. + example: service:my-service + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - datadog-agent-source + items: + type: string + type: array + percentage: + description: The percentage of logs to sample. + example: 10.0 + format: double + type: number + rate: + description: Number of events to sample (1 in N). + example: 10 + format: int64 + minimum: 1 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineSampleProcessorType' + required: + - id + - type + - include + - inputs + type: object + ObservabilityPipelineSampleProcessorType: + default: sample + description: The processor type. The value should always be `sample`. + enum: + - sample + example: sample + type: string + x-enum-varnames: + - SAMPLE + ObservabilityPipelineSensitiveDataScannerProcessor: + description: The `sensitive_data_scanner` processor detects and optionally redacts + sensitive data in log events. + properties: + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: sensitive-scanner + type: string + include: + description: A Datadog search query used to determine which logs this processor + targets. 
+ example: source:prod + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - parse-json-processor + items: + type: string + type: array + rules: + description: A list of rules for identifying and acting on sensitive data + patterns. + items: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorRule' + type: array + type: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorType' + required: + - id + - type + - include + - inputs + - rules + type: object + ObservabilityPipelineSensitiveDataScannerProcessorAction: + description: Defines what action to take when sensitive data is matched. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionRedact' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionHash' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact' + ObservabilityPipelineSensitiveDataScannerProcessorActionHash: + description: Configuration for hashing matched sensitive values. + properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction' + options: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorActionHash` + `options`. + type: object + required: + - action + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction: + description: Action type that replaces the matched sensitive data with a hashed + representation, preserving structure while securing content. + enum: + - hash + example: hash + type: string + x-enum-varnames: + - HASH + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact: + description: Configuration for partially redacting matched sensitive data. 
+ properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction' + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions' + required: + - action + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction: + description: Action type that redacts part of the sensitive data while preserving + a configurable number of characters, typically used for masking purposes (e.g., + show last 4 digits of a credit card). + enum: + - partial_redact + example: partial_redact + type: string + x-enum-varnames: + - PARTIAL_REDACT + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions: + description: Controls how partial redaction is applied, including character + count and direction. + properties: + characters: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions` + `characters`. + example: 4 + format: int64 + type: integer + direction: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection' + required: + - characters + - direction + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection: + description: Indicates whether to redact characters from the first or last part + of the matched value. + enum: + - first + - last + example: last + type: string + x-enum-varnames: + - FIRST + - LAST + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact: + description: Configuration for completely redacting matched sensitive data. 
+ properties: + action: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction' + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions' + required: + - action + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction: + description: Action type that completely replaces the matched sensitive data + with a fixed replacement string to remove all visibility. + enum: + - redact + example: redact + type: string + x-enum-varnames: + - REDACT + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions: + description: Configuration for fully redacting sensitive data. + properties: + replace: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions` + `replace`. + example: '***' + type: string + required: + - replace + type: object + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern: + description: Defines a custom regex-based pattern for identifying sensitive + data in logs. + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions' + type: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType' + required: + - type + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions: + description: Options for defining a custom regex pattern. + properties: + rule: + description: A regular expression used to detect sensitive values. Must + be a valid regex. + example: \b\d{16}\b + type: string + required: + - rule + type: object + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType: + description: Indicates a custom regular expression is used for matching. 
+ enum: + - custom + example: custom + type: string + x-enum-varnames: + - CUSTOM + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions: + description: Configuration for keywords used to reinforce sensitive data pattern + detection. + properties: + keywords: + description: A list of keywords to match near the sensitive pattern. + example: + - ssn + - card + - account + items: + type: string + type: array + proximity: + description: Maximum number of tokens between a keyword and a sensitive + value match. + example: 5 + format: int64 + type: integer + required: + - keywords + - proximity + type: object + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern: + description: "Specifies a pattern from Datadog\u2019s sensitive data detection + library to match known sensitive data types." + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions' + type: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType' + required: + - type + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions: + description: Options for selecting a predefined library pattern and enabling + keyword support. + properties: + id: + description: Identifier for a predefined pattern from the sensitive data + scanner pattern library. + example: credit_card + type: string + use_recommended_keywords: + description: Whether to augment the pattern with recommended keywords (optional). + type: boolean + required: + - id + type: object + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType: + description: Indicates that a predefined library pattern is used. 
+ enum: + - library + example: library + type: string + x-enum-varnames: + - LIBRARY + ObservabilityPipelineSensitiveDataScannerProcessorPattern: + description: Pattern detection configuration for identifying sensitive data + using either a custom regex or a library reference. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern' + ObservabilityPipelineSensitiveDataScannerProcessorRule: + description: Defines a rule for detecting sensitive data, including matching + pattern, scope, and the action to take. + properties: + keyword_options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions' + name: + description: A name identifying the rule. + example: Redact Credit Card Numbers + type: string + on_match: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorAction' + pattern: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorPattern' + scope: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScope' + tags: + description: Tags assigned to this rule for filtering and classification. + example: + - pii + - ccn + items: + type: string + type: array + required: + - name + - tags + - pattern + - scope + - on_match + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScope: + description: Determines which parts of the log the pattern-matching rule should + be applied to. + oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude' + - $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeAll' + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll: + description: Applies scanning across all available fields. 
+ properties: + target: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget' + required: + - target + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget: + description: Applies the rule to all fields. + enum: + - all + example: all + type: string + x-enum-varnames: + - ALL + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude: + description: Excludes specific fields from sensitive data scanning. + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions' + target: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget' + required: + - target + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget: + description: Excludes specific fields from processing. + enum: + - exclude + example: exclude + type: string + x-enum-varnames: + - EXCLUDE + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude: + description: Includes only specific fields for sensitive data scanning. + properties: + options: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions' + target: + $ref: '#/components/schemas/ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget' + required: + - target + - options + type: object + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget: + description: Applies the rule only to included fields. + enum: + - include + example: include + type: string + x-enum-varnames: + - INCLUDE + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions: + description: Fields to which the scope rule applies. + properties: + fields: + description: The `ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions` + `fields`. 
+ example: + - '' + items: + type: string + type: array + required: + - fields + type: object + ObservabilityPipelineSensitiveDataScannerProcessorType: + default: sensitive_data_scanner + description: The processor type. The value should always be `sensitive_data_scanner`. + enum: + - sensitive_data_scanner + example: sensitive_data_scanner + type: string + x-enum-varnames: + - SENSITIVE_DATA_SCANNER + ObservabilityPipelineSentinelOneDestination: + description: The `sentinel_one` destination sends logs to SentinelOne. + properties: + id: + description: The unique identifier for this component. + example: sentinelone-destination + type: string + inputs: + description: A list of component IDs whose output is used as the `input` + for this component. + example: + - filter-processor + items: + type: string + type: array + region: + $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestinationRegion' + type: + $ref: '#/components/schemas/ObservabilityPipelineSentinelOneDestinationType' + required: + - id + - type + - inputs + - region + type: object + ObservabilityPipelineSentinelOneDestinationRegion: + description: The SentinelOne region to send logs to. + enum: + - us + - eu + - ca + - data_set_us + example: us + type: string + x-enum-varnames: + - US + - EU + - CA + - DATA_SET_US + ObservabilityPipelineSentinelOneDestinationType: + default: sentinel_one + description: The destination type. The value should always be `sentinel_one`. + enum: + - sentinel_one + example: sentinel_one + type: string + x-enum-varnames: + - SENTINEL_ONE + ObservabilityPipelineSpec: + description: Input schema representing an observability pipeline configuration. + Used in create and validate requests. + properties: + data: + $ref: '#/components/schemas/ObservabilityPipelineSpecData' + required: + - data + type: object + ObservabilityPipelineSpecData: + description: Contains the the pipeline configuration. 
+ properties: + attributes: + $ref: '#/components/schemas/ObservabilityPipelineDataAttributes' + type: + default: pipelines + description: The resource type identifier. For pipeline resources, this + should always be set to `pipelines`. + example: pipelines type: string required: - - name - - value + - type + - attributes type: object - ObservabilityPipelineFilterProcessor: - description: The `filter` processor allows conditional processing of logs based - on a Datadog search query. Logs that match the `include` query are passed - through; others are discarded. + ObservabilityPipelineSplunkHecDestination: + description: 'The `splunk_hec` destination forwards logs to Splunk using the + HTTP Event Collector (HEC). + + ' properties: + auto_extract_timestamp: + description: 'If `true`, Splunk tries to extract timestamps from incoming + log events. + + If `false`, Splunk assigns the time the event was received. + + ' + example: true + type: boolean + encoding: + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` - to downstream components). - example: filter-processor + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: splunk-hec-destination type: string - include: - description: A Datadog search query used to determine which logs should - pass through the filter. Logs that match this query continue to downstream - components; others are dropped. - example: service:my-service + index: + description: Optional name of the Splunk index where logs are written. + example: main type: string inputs: description: A list of component IDs whose output is used as the `input` for this component. 
example: - - datadog-agent-source + - filter-processor items: type: string type: array + sourcetype: + description: The Splunk sourcetype to assign to log events. + example: custom_sourcetype + type: string type: - $ref: '#/components/schemas/ObservabilityPipelineFilterProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationType' required: - id - type - - include - inputs type: object - ObservabilityPipelineFilterProcessorType: - default: filter - description: The processor type. The value should always be `filter`. + ObservabilityPipelineSplunkHecDestinationEncoding: + description: Encoding format for log events. enum: - - filter - example: filter + - json + - raw_message + example: json type: string x-enum-varnames: - - FILTER - ObservabilityPipelineKafkaSource: - description: The `kafka` source ingests data from Apache Kafka topics. + - JSON + - RAW_MESSAGE + ObservabilityPipelineSplunkHecDestinationType: + default: splunk_hec + description: The destination type. Always `splunk_hec`. + enum: + - splunk_hec + example: splunk_hec + type: string + x-enum-varnames: + - SPLUNK_HEC + ObservabilityPipelineSplunkHecSource: + description: 'The `splunk_hec` source implements the Splunk HTTP Event Collector + (HEC) API. + + ' properties: - group_id: - description: Consumer group ID used by the Kafka client. - example: consumer-group-0 - type: string id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). - example: kafka-source + example: splunk-hec-source type: string - librdkafka_options: - description: Optional list of advanced Kafka client configuration options, - defined as key-value pairs. 
- items: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceLibrdkafkaOption' - type: array - sasl: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceSasl' tls: $ref: '#/components/schemas/ObservabilityPipelineTls' - topics: - description: A list of Kafka topic names to subscribe to. The source ingests - messages from each topic specified. - example: - - topic1 - - topic2 - items: - type: string - type: array type: - $ref: '#/components/schemas/ObservabilityPipelineKafkaSourceType' + $ref: '#/components/schemas/ObservabilityPipelineSplunkHecSourceType' required: - id - type - - group_id - - topics type: object - ObservabilityPipelineKafkaSourceLibrdkafkaOption: - description: Represents a key-value pair used to configure low-level `librdkafka` - client options for Kafka sources, such as timeouts, buffer sizes, and security - settings. + ObservabilityPipelineSplunkHecSourceType: + default: splunk_hec + description: The source type. Always `splunk_hec`. + enum: + - splunk_hec + example: splunk_hec + type: string + x-enum-varnames: + - SPLUNK_HEC + ObservabilityPipelineSplunkTcpSource: + description: 'The `splunk_tcp` source receives logs from a Splunk Universal + Forwarder over TCP. + + TLS is supported for secure transmission. + + ' properties: - name: - description: The name of the `librdkafka` configuration option to set. - example: fetch.message.max.bytes - type: string - value: - description: The value assigned to the specified `librdkafka` configuration - option. - example: '1048576' + id: + description: The unique identifier for this component. Used to reference + this component in other parts of the pipeline (e.g., as input to downstream + components). 
+ example: splunk-tcp-source type: string + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' + type: + $ref: '#/components/schemas/ObservabilityPipelineSplunkTcpSourceType' required: - - name - - value - type: object - ObservabilityPipelineKafkaSourceSasl: - description: Specifies the SASL mechanism for authenticating with a Kafka cluster. - properties: - mechanism: - $ref: '#/components/schemas/ObservabilityPipelinePipelineKafkaSourceSaslMechanism' + - id + - type type: object - ObservabilityPipelineKafkaSourceType: - default: kafka - description: The source type. The value should always be `kafka`. + ObservabilityPipelineSplunkTcpSourceType: + default: splunk_tcp + description: The source type. Always `splunk_tcp`. enum: - - kafka - example: kafka + - splunk_tcp + example: splunk_tcp type: string x-enum-varnames: - - KAFKA - ObservabilityPipelineParseJSONProcessor: - description: The `parse_json` processor extracts JSON from a specified field - and flattens it into the event. This is useful when logs contain embedded - JSON as a string. + - SPLUNK_TCP + ObservabilityPipelineSumoLogicDestination: + description: The `sumo_logic` destination forwards logs to Sumo Logic. properties: - field: - description: The name of the log field that contains a JSON string. - example: message + encoding: + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' + header_custom_fields: + description: A list of custom headers to include in the request to Sumo + Logic. + items: + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem' + type: array + header_host_name: + description: Optional override for the host name header. + example: host-123 type: string - id: - description: A unique identifier for this component. Used to reference this - component in other parts of the pipeline (e.g., as input to downstream - components). 
- example: parse-json-processor + header_source_category: + description: Optional override for the source category header. + example: source-category type: string - include: - description: A Datadog search query used to determine which logs this processor - targets. - example: service:my-service + header_source_name: + description: Optional override for the source name header. + example: source-name + type: string + id: + description: The unique identifier for this component. + example: sumo-logic-destination type: string inputs: description: A list of component IDs whose output is used as the `input` for this component. example: - - datadog-agent-source + - filter-processor items: type: string type: array type: - $ref: '#/components/schemas/ObservabilityPipelineParseJSONProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationType' required: - id - type - - include - - field - inputs type: object - ObservabilityPipelineParseJSONProcessorType: - default: parse_json - description: The processor type. The value should always be `parse_json`. + ObservabilityPipelineSumoLogicDestinationEncoding: + description: The output encoding format. enum: - - parse_json - example: parse_json + - json + - raw_message + - logfmt + example: json type: string x-enum-varnames: - - PARSE_JSON - ObservabilityPipelinePipelineKafkaSourceSaslMechanism: - description: SASL mechanism used for Kafka authentication. + - JSON + - RAW_MESSAGE + - LOGFMT + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem: + description: Single key-value pair used as a custom log header for Sumo Logic. + properties: + name: + description: The header field name. + example: X-Sumo-Category + type: string + value: + description: The header field value. + example: my-app-logs + type: string + required: + - name + - value + type: object + ObservabilityPipelineSumoLogicDestinationType: + default: sumo_logic + description: The destination type. 
The value should always be `sumo_logic`. enum: - - PLAIN - - SCRAM-SHA-256 - - SCRAM-SHA-512 + - sumo_logic + example: sumo_logic type: string x-enum-varnames: - - PLAIN - - SCRAMNOT_SHANOT_256 - - SCRAMNOT_SHANOT_512 - ObservabilityPipelineQuotaProcessor: - description: The Quota Processor measures logging traffic for logs that match - a specified filter. When the configured daily quota is met, the processor - can drop or alert. + - SUMO_LOGIC + ObservabilityPipelineSumoLogicSource: + description: The `sumo_logic` source receives logs from Sumo Logic collectors. properties: - drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. - example: false - type: boolean id: description: The unique identifier for this component. Used to reference - this component in other parts of the pipeline (for example, as the `input` - to downstream components). - example: quota-processor + this component in other parts of the pipeline (e.g., as input to downstream + components). + example: sumo-logic-source type: string - ignore_when_missing_partitions: - description: If `true`, the processor skips quota checks when partition - fields are missing from the logs. - type: boolean - include: - description: A Datadog search query used to determine which logs this processor - targets. - example: service:my-service + type: + $ref: '#/components/schemas/ObservabilityPipelineSumoLogicSourceType' + required: + - id + - type + type: object + ObservabilityPipelineSumoLogicSourceType: + default: sumo_logic + description: The source type. The value should always be `sumo_logic`. 
+ enum: + - sumo_logic + example: sumo_logic + type: string + x-enum-varnames: + - SUMO_LOGIC + ObservabilityPipelineSyslogNgDestination: + description: The `syslog_ng` destination forwards logs to an external `syslog-ng` + server over TCP or UDP using the syslog protocol. + properties: + id: + description: The unique identifier for this component. + example: syslog-ng-destination type: string inputs: description: A list of component IDs whose output is used as the `input` for this component. example: - - datadog-agent-source - items: - type: string - type: array - limit: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' - name: - description: Name for identifying the processor. - example: MyPipelineQuotaProcessor - type: string - overrides: - description: A list of alternate quota rules that apply to specific sets - of events, identified by matching field values. Each override can define - a custom limit. - items: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverride' - type: array - partition_fields: - description: A list of fields used to segment log traffic for quota enforcement. - Quotas are tracked independently by unique combinations of these field - values. + - filter-processor items: type: string type: array + keepalive: + description: Optional socket keepalive duration in milliseconds. + example: 60000 + format: int64 + minimum: 0 + type: integer + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' type: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSyslogNgDestinationType' required: - id - type - - include - - name - - drop_events - - limit - inputs type: object - ObservabilityPipelineQuotaProcessorLimit: - description: The maximum amount of data or number of events allowed before the - quota is enforced. Can be specified in bytes or events. 
- properties: - enforce: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimitEnforceType' - limit: - description: The limit for quota enforcement. - example: 1000 - format: int64 - type: integer - required: - - enforce - - limit - type: object - ObservabilityPipelineQuotaProcessorLimitEnforceType: - description: Unit for quota enforcement in bytes for data size or events for - count. - enum: - - bytes - - events - example: bytes - type: string - x-enum-varnames: - - BYTES - - EVENTS - ObservabilityPipelineQuotaProcessorOverride: - description: Defines a custom quota limit that applies to specific log events - based on matching field values. - properties: - fields: - description: A list of field matchers used to apply a specific override. - If an event matches all listed key-value pairs, the corresponding override - limit is enforced. - items: - $ref: '#/components/schemas/ObservabilityPipelineFieldValue' - type: array - limit: - $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorLimit' - required: - - fields - - limit - type: object - ObservabilityPipelineQuotaProcessorType: - default: quota - description: The processor type. The value should always be `quota`. + ObservabilityPipelineSyslogNgDestinationType: + default: syslog_ng + description: The destination type. The value should always be `syslog_ng`. enum: - - quota - example: quota + - syslog_ng + example: syslog_ng type: string x-enum-varnames: - - QUOTA - ObservabilityPipelineRemoveFieldsProcessor: - description: The `remove_fields` processor deletes specified fields from logs. + - SYSLOG_NG + ObservabilityPipelineSyslogNgSource: + description: The `syslog_ng` source listens for logs over TCP or UDP from a + `syslog-ng` server using the syslog protocol. properties: - fields: - description: A list of field names to be removed from each log event. - example: - - field1 - - field2 - items: - type: string - type: array id: description: The unique identifier for this component. 
Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). - example: remove-fields-processor - type: string - include: - description: A Datadog search query used to determine which logs this processor - targets. - example: service:my-service + example: syslog-ng-source type: string - inputs: - description: The `PipelineRemoveFieldsProcessor` `inputs`. - example: - - datadog-agent-source - items: - type: string - type: array + mode: + $ref: '#/components/schemas/ObservabilityPipelineSyslogSourceMode' + tls: + $ref: '#/components/schemas/ObservabilityPipelineTls' type: - $ref: '#/components/schemas/ObservabilityPipelineRemoveFieldsProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineSyslogNgSourceType' required: - id - type - - include - - fields - - inputs + - mode type: object - ObservabilityPipelineRemoveFieldsProcessorType: - default: remove_fields - description: The processor type. The value should always be `remove_fields`. + ObservabilityPipelineSyslogNgSourceType: + default: syslog_ng + description: The source type. The value should always be `syslog_ng`. enum: - - remove_fields - example: remove_fields + - syslog_ng + example: syslog_ng type: string x-enum-varnames: - - REMOVE_FIELDS - ObservabilityPipelineRenameFieldsProcessor: - description: The `rename_fields` processor changes field names. + - SYSLOG_NG + ObservabilityPipelineSyslogSourceMode: + description: Protocol used by the syslog source to receive messages. + enum: + - tcp + - udp + example: tcp + type: string + x-enum-varnames: + - TCP + - UDP + ObservabilityPipelineThrottleProcessor: + description: The `throttle` processor limits the number of events that pass + through over a given time window. properties: - fields: - description: A list of rename rules specifying which fields to rename in - the event, what to rename them to, and whether to preserve the original - fields. 
+ group_by: + description: Optional list of fields used to group events before the threshold + has been reached. + example: + - log.user.id items: - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorField' + type: string type: array id: - description: A unique identifier for this component. Used to reference this - component in other parts of the pipeline (e.g., as input to downstream - components). - example: rename-fields-processor + description: The unique identifier for this processor. + example: throttle-processor type: string include: description: A Datadog search query used to determine which logs this processor targets. - example: service:my-service + example: env:prod type: string inputs: - description: A list of component IDs whose output is used as the `input` - for this component. + description: A list of component IDs whose output is used as the input for + this processor. example: - datadog-agent-source items: type: string type: array + threshold: + description: the number of events allowed in a given time window. Events + sent after the threshold has been reached, are dropped. + example: 1000 + format: int64 + type: integer type: - $ref: '#/components/schemas/ObservabilityPipelineRenameFieldsProcessorType' + $ref: '#/components/schemas/ObservabilityPipelineThrottleProcessorType' + window: + description: The time window in seconds over which the threshold applies. + example: 60.0 + format: double + type: number required: - id - type - include - - fields - inputs + - threshold + - window type: object - ObservabilityPipelineRenameFieldsProcessorField: - description: Defines how to rename a field in log events. - properties: - destination: - description: The field name to assign the renamed value to. - example: destination_field - type: string - preserve_source: - description: Indicates whether the original field, that is received from - the source, should be kept (`true`) or removed (`false`) after renaming. 
- example: false - type: boolean - source: - description: The original field name in the log event that should be renamed. - example: source_field - type: string - required: - - source - - destination - - preserve_source - type: object - ObservabilityPipelineRenameFieldsProcessorType: - default: rename_fields - description: The processor type. The value should always be `rename_fields`. + ObservabilityPipelineThrottleProcessorType: + default: throttle + description: The processor type. The value should always be `throttle`. enum: - - rename_fields - example: rename_fields + - throttle + example: throttle type: string x-enum-varnames: - - RENAME_FIELDS + - THROTTLE ObservabilityPipelineTls: - description: Configuration for enabling TLS encryption. + description: Configuration for enabling TLS encryption between the pipeline + component and external services. properties: ca_file: description: "Path to the Certificate Authority (CA) file used to validate @@ -36294,6 +38879,55 @@ components: type: string x-enum-varnames: - USERS + ValidationError: + description: Represents a single validation error, including a human-readable + title and metadata. + properties: + meta: + $ref: '#/components/schemas/ValidationErrorMeta' + title: + description: A short, human-readable summary of the error. + example: Field 'region' is required + type: string + required: + - title + - meta + type: object + ValidationErrorMeta: + description: Describes additional metadata for validation errors, including + field names and error messages. + properties: + field: + description: The field name that caused the error. + example: region + type: string + id: + description: The ID of the component in which the error occurred. + example: datadog-agent-source + type: string + message: + description: The detailed error message. + example: Field 'region' is required + type: string + required: + - message + type: object + ValidationResponse: + description: Response containing validation errors. 
+ example: + errors: + - meta: + field: region + id: datadog-agent-source + message: Field 'region' is required + title: Field 'region' is required + properties: + errors: + description: The `ValidationResponse` `errors`. + items: + $ref: '#/components/schemas/ValidationError' + type: array + type: object Version: description: Version of the notification rule. It is updated when the rule is modified. @@ -49365,6 +51999,33 @@ paths: tags: - CSM Threats /api/v2/remote_config/products/obs_pipelines/pipelines: + get: + description: Retrieve a list of pipelines. + operationId: ListPipelines + parameters: + - $ref: '#/components/parameters/PageSize' + - $ref: '#/components/parameters/PageNumber' + responses: + '200': + content: + application/json: + schema: + $ref: '#/components/schemas/ListPipelinesResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: List pipelines + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read + x-unstable: '**Note**: This endpoint is in Preview.' post: description: Create a new pipeline. 
operationId: CreatePipeline @@ -49372,7 +52033,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/ObservabilityPipelineCreateRequest' + $ref: '#/components/schemas/ObservabilityPipelineSpec' required: true responses: '201': @@ -49382,28 +52043,56 @@ paths: $ref: '#/components/schemas/ObservabilityPipeline' description: OK '400': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Bad Request + $ref: '#/components/responses/BadRequestResponse' '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden + $ref: '#/components/responses/NotAuthorizedResponse' '409': + $ref: '#/components/responses/ConflictResponse' + '429': + $ref: '#/components/responses/TooManyRequestsResponse' + summary: Create a new pipeline + tags: + - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy + x-unstable: '**Note**: This endpoint is in Preview.' + /api/v2/remote_config/products/obs_pipelines/pipelines/validate: + post: + description: 'Validates a pipeline configuration without creating or updating + any resources. + + Returns a list of validation errors, if any. 
+ + ' + operationId: ValidatePipeline + requestBody: + content: + application/json: + schema: + $ref: '#/components/schemas/ObservabilityPipelineSpec' + required: true + responses: + '200': content: application/json: schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict + $ref: '#/components/schemas/ValidationResponse' + description: OK + '400': + $ref: '#/components/responses/BadRequestResponse' + '403': + $ref: '#/components/responses/NotAuthorizedResponse' '429': $ref: '#/components/responses/TooManyRequestsResponse' - summary: Create a new pipeline + summary: Validate an observability pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read x-unstable: '**Note**: This endpoint is in Preview.' /api/v2/remote_config/products/obs_pipelines/pipelines/{pipeline_id}: delete: @@ -49442,6 +52131,10 @@ paths: summary: Delete a pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_delete x-unstable: '**Note**: This endpoint is in Preview.' get: description: Get a specific pipeline by its ID. @@ -49471,6 +52164,10 @@ paths: summary: Get a specific pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_read x-unstable: '**Note**: This endpoint is in Preview.' put: description: Update a pipeline. 
@@ -49496,34 +52193,22 @@ paths: $ref: '#/components/schemas/ObservabilityPipeline' description: OK '400': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Bad Request + $ref: '#/components/responses/BadRequestResponse' '403': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Forbidden + $ref: '#/components/responses/NotAuthorizedResponse' '404': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Not Found + $ref: '#/components/responses/NotFoundResponse' '409': - content: - application/json: - schema: - $ref: '#/components/schemas/APIErrorResponse' - description: Conflict + $ref: '#/components/responses/ConflictResponse' '429': $ref: '#/components/responses/TooManyRequestsResponse' summary: Update a pipeline tags: - Observability Pipelines + x-permission: + operator: OR + permissions: + - observability_pipelines_deploy x-unstable: '**Note**: This endpoint is in Preview.' /api/v2/restriction_policy/{resource_id}: delete: diff --git a/docs/datadog_api_client.v2.model.rst b/docs/datadog_api_client.v2.model.rst index 14db82e851..86a647f665 100644 --- a/docs/datadog_api_client.v2.model.rst +++ b/docs/datadog_api_client.v2.model.rst @@ -1642,6 +1642,20 @@ datadog\_api\_client.v2.model.aws\_traces\_config module :members: :show-inheritance: +datadog\_api\_client.v2.model.azure\_storage\_destination module +---------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.azure_storage_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.azure\_storage\_destination\_type module +---------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.azure_storage_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.azure\_uc\_config module ------------------------------------------------------ @@ -7991,6 +8005,20 @@ datadog\_api\_client.v2.model.list\_historical\_jobs\_response module :members: :show-inheritance: +datadog\_api\_client.v2.model.list\_pipelines\_response module +-------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.list_pipelines_response + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.list\_pipelines\_response\_meta module +-------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.list_pipelines_response_meta + :members: + :show-inheritance: + datadog\_api\_client.v2.model.list\_powerpacks\_response module --------------------------------------------------------------- @@ -9265,6 +9293,20 @@ datadog\_api\_client.v2.model.metrics\_timeseries\_query module :members: :show-inheritance: +datadog\_api\_client.v2.model.microsoft\_sentinel\_destination module +--------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.microsoft_sentinel_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.microsoft\_sentinel\_destination\_type module +--------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.microsoft_sentinel_destination_type + :members: + :show-inheritance: + datadog\_api\_client.v2.model.microsoft\_teams\_channel\_info\_response\_attributes module ------------------------------------------------------------------------------------------ @@ -9853,6 +9895,27 @@ datadog\_api\_client.v2.model.observability\_pipeline module :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_add\_env\_vars\_processor module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_add\_env\_vars\_processor\_type module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_add\_env\_vars\_processor\_variable module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_variable + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_add\_fields\_processor module ------------------------------------------------------------------------------------ @@ -9867,6 +9930,90 @@ datadog\_api\_client.v2.model.observability\_pipeline\_add\_fields\_processor\_t :members: :show-inheritance: +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_data\_firehose\_source module +-------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_data\_firehose\_source\_type module +-------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_open\_search\_destination module +----------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_open\_search\_destination\_auth module +----------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_open\_search\_destination\_auth\_strategy module +--------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_open\_search\_destination\_type module +----------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_s3\_destination module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_s3\_destination\_storage\_class module +----------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_s3\_destination\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_s3\_source module +-------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_s3_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_amazon\_s3\_source\_type module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_amazon_s3_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_aws\_auth module +----------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_aws_auth + :members: + :show-inheritance: + datadog\_api\_client.v2.model.observability\_pipeline\_config module -------------------------------------------------------------------- @@ -9895,20 +10042,6 @@ datadog\_api\_client.v2.model.observability\_pipeline\_config\_source\_item modu :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_create\_request module ------------------------------------------------------------------------------ - -.. automodule:: datadog_api_client.v2.model.observability_pipeline_create_request - :members: - :show-inheritance: - -datadog\_api\_client.v2.model.observability\_pipeline\_create\_request\_data module ------------------------------------------------------------------------------------ - -.. automodule:: datadog_api_client.v2.model.observability_pipeline_create_request_data - :members: - :show-inheritance: - datadog\_api\_client.v2.model.observability\_pipeline\_data module ------------------------------------------------------------------ @@ -9951,253 +10084,1149 @@ datadog\_api\_client.v2.model.observability\_pipeline\_datadog\_logs\_destinatio :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_field\_value module --------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_decoding module +---------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_field_value +.. automodule:: datadog_api_client.v2.model.observability_pipeline_decoding :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_filter\_processor module +datadog\_api\_client.v2.model.observability\_pipeline\_dedupe\_processor module ------------------------------------------------------------------------------- -.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_filter_processor +.. automodule:: datadog_api_client.v2.model.observability_pipeline_dedupe_processor :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_filter\_processor\_type module +datadog\_api\_client.v2.model.observability\_pipeline\_dedupe\_processor\_mode module ------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_filter_processor_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_dedupe_processor_mode :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module ---------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_dedupe\_processor\_type module +------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source +.. automodule:: datadog_api_client.v2.model.observability_pipeline_dedupe_processor_type :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_librdkafka\_option module ------------------------------------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination module +---------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_sasl module ---------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_elasticsearch\_destination\_type module +---------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl +.. automodule:: datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_type module ---------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file module +------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor module ------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file\_encoding module +----------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_json_processor +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_encoding :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor\_type module ------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file\_encoding\_type module +----------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_encoding_type :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_pipeline\_kafka\_source\_sasl\_mechanism module ------------------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file\_key\_items module +------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_key_items :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor module ------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file\_key\_items\_comparison module +------------------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_key_items_comparison :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_limit module -------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file\_schema\_items module +---------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_limit +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_schema_items :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_limit\_enforce\_type module ----------------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_file\_schema\_items\_type module +---------------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_limit_enforce_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_schema_items_type :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_override module +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_geo\_ip module ---------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_override +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_type module ------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_processor module +------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_remove\_fields\_processor module ---------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_enrichment\_table\_processor\_type module +------------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.observability_pipeline_remove_fields_processor +.. automodule:: datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_remove\_fields\_processor\_type module ---------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_field\_value module +-------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_remove_fields_processor_type +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_field_value :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_rename\_fields\_processor module ---------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_filter\_processor module +------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_rename_fields_processor +.. automodule:: datadog_api_client.v2.model.observability_pipeline_filter_processor :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_rename\_fields\_processor\_field module ----------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_filter\_processor\_type module +------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_rename_fields_processor_field +.. automodule:: datadog_api_client.v2.model.observability_pipeline_filter_processor_type :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_rename\_fields\_processor\_type module ---------------------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_fluent\_bit\_source module +--------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_rename_fields_processor_type +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_fluent_bit_source :members: :show-inheritance: -datadog\_api\_client.v2.model.observability\_pipeline\_tls module ------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_fluent\_bit\_source\_type module +--------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.observability_pipeline_tls +.. automodule:: datadog_api_client.v2.model.observability_pipeline_fluent_bit_source_type :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account module --------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_fluentd\_source module +----------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_account +.. automodule:: datadog_api_client.v2.model.observability_pipeline_fluentd_source :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_attributes module --------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_fluentd\_source\_type module +----------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_account_attributes +.. automodule:: datadog_api_client.v2.model.observability_pipeline_fluentd_source_type :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_request module ------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_gcp\_auth module +----------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_account_request +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_gcp_auth :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_response module ------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generate\_metrics\_processor module +------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.okta_account_response +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_response\_data module ------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generate\_metrics\_processor\_type module +------------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.okta_account_response_data +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor_type :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_type module --------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generated\_metric module +------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_account_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generated_metric :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_update\_request module -------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generated\_metric\_increment\_by\_field module +----------------------------------------------------------------------------------------------------- -.. 
automodule:: datadog_api_client.v2.model.okta_account_update_request +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_update\_request\_attributes module -------------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generated\_metric\_increment\_by\_field\_strategy module +--------------------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_account_update_request_attributes +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field_strategy :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_account\_update\_request\_data module -------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generated\_metric\_increment\_by\_one module +--------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_account_update_request_data +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one :members: :show-inheritance: -datadog\_api\_client.v2.model.okta\_accounts\_response module -------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_generated\_metric\_increment\_by\_one\_strategy module +------------------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.okta_accounts_response +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one_strategy :members: :show-inheritance: -datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap module ------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_generated\_metric\_metric\_type module +--------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap +.. automodule:: datadog_api_client.v2.model.observability_pipeline_generated_metric_metric_type :members: :show-inheritance: -datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap\_attributes module ------------------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_google\_chronicle\_destination module +-------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap_attributes +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination :members: :show-inheritance: -datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap\_response module ---------------------------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_google\_chronicle\_destination\_encoding module +------------------------------------------------------------------------------------------------------ -.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap_response +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding :members: :show-inheritance: -datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap\_type module ------------------------------------------------------------------------ +datadog\_api\_client.v2.model.observability\_pipeline\_google\_chronicle\_destination\_type module +-------------------------------------------------------------------------------------------------- -.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap_type +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_type :members: :show-inheritance: -datadog\_api\_client.v2.model.open\_api\_endpoint module --------------------------------------------------------- +datadog\_api\_client.v2.model.observability\_pipeline\_google\_cloud\_storage\_destination module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_google\_cloud\_storage\_destination\_acl module +------------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_acl + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_google\_cloud\_storage\_destination\_storage\_class module +----------------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_google\_cloud\_storage\_destination\_type module +------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_google\_pub\_sub\_source module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_google\_pub\_sub\_source\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_source module +---------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_source\_auth\_strategy module +-------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_client\_source\_type module +---------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_http_client_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_server\_source module +---------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_server_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_server\_source\_auth\_strategy module +-------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_server_source_auth_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_http\_server\_source\_type module +---------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_http_server_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source module +--------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_librdkafka\_option module +----------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_sasl module +--------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_kafka\_source\_type module +--------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_kafka_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_logstash\_source module +------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_logstash_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_logstash\_source\_type module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_logstash_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metadata\_entry module +----------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metadata_entry + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_metric\_value module +--------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_metric_value + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_new\_relic\_destination module +------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_new_relic_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_new\_relic\_destination\_region module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_new\_relic\_destination\_type module +------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_new_relic_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_ocsf\_mapper\_processor module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_ocsf\_mapper\_processor\_mapping module +---------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_ocsf\_mapper\_processor\_mapping\_mapping module +------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping_mapping + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_ocsf\_mapper\_processor\_type module +------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_ocsf\_mapping\_library module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_ocsf_mapping_library + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_open\_search\_destination module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_open_search_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_open\_search\_destination\_type module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_open_search_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_grok_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor\_rule module +------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor\_rule\_match\_rule module +------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_match_rule + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor\_rule\_support\_rule module +--------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_support_rule + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_grok\_processor\_type module +------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_json_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_parse\_json\_processor\_type module +------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_parse_json_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_pipeline\_kafka\_source\_sasl\_mechanism module +------------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_pipeline_kafka_source_sasl_mechanism + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor module +------------------------------------------------------------------------------ + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_limit module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_limit + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_limit\_enforce\_type module +---------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_limit_enforce_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_overflow\_action module +------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_overflow_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_override module +---------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_override + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_quota\_processor\_type module +------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_quota_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_reduce\_processor module +------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_reduce_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_reduce\_processor\_merge\_strategy module +------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_reduce\_processor\_merge\_strategy\_strategy module +---------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy_strategy + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_reduce\_processor\_type module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_reduce_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_remove\_fields\_processor module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_remove_fields_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_remove\_fields\_processor\_type module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_remove_fields_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rename\_fields\_processor module +--------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_rename_fields_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rename\_fields\_processor\_field module +---------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_rename_fields_processor_field + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rename\_fields\_processor\_type module +--------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_rename_fields_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rsyslog\_destination module +---------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_rsyslog_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rsyslog\_destination\_type module +---------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rsyslog\_source module +----------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_rsyslog_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_rsyslog\_source\_type module +----------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_rsyslog_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sample\_processor module +------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sample_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sample\_processor\_type module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sample_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor module +------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action module +--------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_hash module +--------------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_hash\_action module +----------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_partial\_redact module +-------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_partial\_redact\_action module +---------------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_partial\_redact\_options module +----------------------------------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_partial\_redact\_options\_direction module +---------------------------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options_direction + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_redact module +----------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_redact\_action module +------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_action + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_action\_redact\_options module +-------------------------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_custom\_pattern module +------------------------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_custom\_pattern\_options module +--------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_custom\_pattern\_type module +------------------------------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_keyword\_options module +------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_keyword_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_library\_pattern module +------------------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_library\_pattern\_options module +---------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_library\_pattern\_type module +------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_pattern module +---------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_pattern + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_rule module +------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_rule + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope module +-------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_all module +------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_all\_target module +--------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all_target + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_exclude module +----------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_exclude\_target module +------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_include module +----------------------------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_include\_target module +------------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include_target + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_scope\_options module +----------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_options + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sensitive\_data\_scanner\_processor\_type module +------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sentinel\_one\_destination module +---------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sentinel\_one\_destination\_region module +------------------------------------------------------------------------------------------------ + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sentinel\_one\_destination\_type module +---------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_spec module +------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_spec + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_spec\_data module +------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_spec_data + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination\_encoding module +------------------------------------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_destination\_type module +-------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_source module +--------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_hec_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_hec\_source\_type module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_hec_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_tcp\_source module +--------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_splunk\_tcp\_source\_type module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sumo\_logic\_destination module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sumo\_logic\_destination\_encoding module +------------------------------------------------------------------------------------------------ + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sumo\_logic\_destination\_header\_custom\_fields\_item module +-------------------------------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_header_custom_fields_item + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sumo\_logic\_destination\_type module +-------------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sumo\_logic\_source module +--------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sumo_logic_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_sumo\_logic\_source\_type module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_sumo_logic_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_syslog\_ng\_destination module +------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_syslog\_ng\_destination\_type module +------------------------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_syslog\_ng\_source module +-------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_syslog_ng_source + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_syslog\_ng\_source\_type module +-------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_syslog_ng_source_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_syslog\_source\_mode module +---------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_syslog_source_mode + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_throttle\_processor module +--------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_throttle_processor + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_throttle\_processor\_type module +--------------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_throttle_processor_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.observability\_pipeline\_tls module +----------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.observability_pipeline_tls + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account module +-------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.okta_account + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_attributes module +-------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.okta_account_attributes + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_request module +----------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.okta_account_request + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_response module +------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.okta_account_response + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_response\_data module +------------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.okta_account_response_data + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_type module +-------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.okta_account_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_update\_request module +------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.okta_account_update_request + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_update\_request\_attributes module +------------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.okta_account_update_request_attributes + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_account\_update\_request\_data module +------------------------------------------------------------------------- + +.. 
automodule:: datadog_api_client.v2.model.okta_account_update_request_data + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.okta\_accounts\_response module +------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.okta_accounts_response + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap module +----------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap\_attributes module +----------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap_attributes + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap\_response module +--------------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap_response + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.on\_demand\_concurrency\_cap\_type module +----------------------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.on_demand_concurrency_cap_type + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.open\_api\_endpoint module +-------------------------------------------------------- .. automodule:: datadog_api_client.v2.model.open_api_endpoint :members: @@ -15999,6 +17028,27 @@ datadog\_api\_client.v2.model.users\_type module :members: :show-inheritance: +datadog\_api\_client.v2.model.validation\_error module +------------------------------------------------------ + +.. 
automodule:: datadog_api_client.v2.model.validation_error + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.validation\_error\_meta module +------------------------------------------------------------ + +.. automodule:: datadog_api_client.v2.model.validation_error_meta + :members: + :show-inheritance: + +datadog\_api\_client.v2.model.validation\_response module +--------------------------------------------------------- + +.. automodule:: datadog_api_client.v2.model.validation_response + :members: + :show-inheritance: + datadog\_api\_client.v2.model.vulnerabilities\_type module ---------------------------------------------------------- diff --git a/examples/v2/observability-pipelines/CreatePipeline.py b/examples/v2/observability-pipelines/CreatePipeline.py index 12008e4acd..a6303525e7 100644 --- a/examples/v2/observability-pipelines/CreatePipeline.py +++ b/examples/v2/observability-pipelines/CreatePipeline.py @@ -5,10 +5,6 @@ from datadog_api_client import ApiClient, Configuration from datadog_api_client.v2.api.observability_pipelines_api import ObservabilityPipelinesApi from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig -from datadog_api_client.v2.model.observability_pipeline_create_request import ObservabilityPipelineCreateRequest -from datadog_api_client.v2.model.observability_pipeline_create_request_data import ( - ObservabilityPipelineCreateRequestData, -) from datadog_api_client.v2.model.observability_pipeline_data_attributes import ObservabilityPipelineDataAttributes from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, @@ -26,9 +22,11 @@ from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import ( ObservabilityPipelineFilterProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec +from datadog_api_client.v2.model.observability_pipeline_spec_data 
import ObservabilityPipelineSpecData -body = ObservabilityPipelineCreateRequest( - data=ObservabilityPipelineCreateRequestData( +body = ObservabilityPipelineSpec( + data=ObservabilityPipelineSpecData( attributes=ObservabilityPipelineDataAttributes( config=ObservabilityPipelineConfig( destinations=[ diff --git a/examples/v2/observability-pipelines/ListPipelines.py b/examples/v2/observability-pipelines/ListPipelines.py new file mode 100644 index 0000000000..eb90067488 --- /dev/null +++ b/examples/v2/observability-pipelines/ListPipelines.py @@ -0,0 +1,14 @@ +""" +List pipelines returns "OK" response +""" + +from datadog_api_client import ApiClient, Configuration +from datadog_api_client.v2.api.observability_pipelines_api import ObservabilityPipelinesApi + +configuration = Configuration() +configuration.unstable_operations["list_pipelines"] = True +with ApiClient(configuration) as api_client: + api_instance = ObservabilityPipelinesApi(api_client) + response = api_instance.list_pipelines() + + print(response) diff --git a/examples/v2/observability-pipelines/ValidatePipeline.py b/examples/v2/observability-pipelines/ValidatePipeline.py new file mode 100644 index 0000000000..31cd6775b3 --- /dev/null +++ b/examples/v2/observability-pipelines/ValidatePipeline.py @@ -0,0 +1,70 @@ +""" +Validate an observability pipeline returns "OK" response +""" + +from datadog_api_client import ApiClient, Configuration +from datadog_api_client.v2.api.observability_pipelines_api import ObservabilityPipelinesApi +from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig +from datadog_api_client.v2.model.observability_pipeline_data_attributes import ObservabilityPipelineDataAttributes +from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( + ObservabilityPipelineDatadogAgentSource, +) +from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source_type import ( + ObservabilityPipelineDatadogAgentSourceType, +) 
+from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( + ObservabilityPipelineDatadogLogsDestination, +) +from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( + ObservabilityPipelineDatadogLogsDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor +from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import ( + ObservabilityPipelineFilterProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec +from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData + +body = ObservabilityPipelineSpec( + data=ObservabilityPipelineSpecData( + attributes=ObservabilityPipelineDataAttributes( + config=ObservabilityPipelineConfig( + destinations=[ + ObservabilityPipelineDatadogLogsDestination( + id="datadog-logs-destination", + inputs=[ + "filter-processor", + ], + type=ObservabilityPipelineDatadogLogsDestinationType.DATADOG_LOGS, + ), + ], + processors=[ + ObservabilityPipelineFilterProcessor( + id="filter-processor", + include="service:my-service", + inputs=[ + "datadog-agent-source", + ], + type=ObservabilityPipelineFilterProcessorType.FILTER, + ), + ], + sources=[ + ObservabilityPipelineDatadogAgentSource( + id="datadog-agent-source", + type=ObservabilityPipelineDatadogAgentSourceType.DATADOG_AGENT, + ), + ], + ), + name="Main Observability Pipeline", + ), + type="pipelines", + ), +) + +configuration = Configuration() +configuration.unstable_operations["validate_pipeline"] = True +with ApiClient(configuration) as api_client: + api_instance = ObservabilityPipelinesApi(api_client) + response = api_instance.validate_pipeline(body=body) + + print(response) diff --git a/src/datadog_api_client/configuration.py b/src/datadog_api_client/configuration.py index 3cd8dbc7df..6487bbd3fa 100644 --- 
a/src/datadog_api_client/configuration.py +++ b/src/datadog_api_client/configuration.py @@ -299,7 +299,9 @@ def __init__( "v2.create_pipeline": False, "v2.delete_pipeline": False, "v2.get_pipeline": False, + "v2.list_pipelines": False, "v2.update_pipeline": False, + "v2.validate_pipeline": False, "v2.create_scorecard_outcomes_batch": False, "v2.create_scorecard_rule": False, "v2.delete_scorecard_rule": False, diff --git a/src/datadog_api_client/v2/api/observability_pipelines_api.py b/src/datadog_api_client/v2/api/observability_pipelines_api.py index 40874f39b8..2c14f8497a 100644 --- a/src/datadog_api_client/v2/api/observability_pipelines_api.py +++ b/src/datadog_api_client/v2/api/observability_pipelines_api.py @@ -3,12 +3,18 @@ # Copyright 2019-Present Datadog, Inc. from __future__ import annotations -from typing import Any, Dict +from typing import Any, Dict, Union from datadog_api_client.api_client import ApiClient, Endpoint as _Endpoint from datadog_api_client.configuration import Configuration +from datadog_api_client.model_utils import ( + UnsetType, + unset, +) +from datadog_api_client.v2.model.list_pipelines_response import ListPipelinesResponse from datadog_api_client.v2.model.observability_pipeline import ObservabilityPipeline -from datadog_api_client.v2.model.observability_pipeline_create_request import ObservabilityPipelineCreateRequest +from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec +from datadog_api_client.v2.model.validation_response import ValidationResponse class ObservabilityPipelinesApi: @@ -33,7 +39,7 @@ def __init__(self, api_client=None): params_map={ "body": { "required": True, - "openapi_types": (ObservabilityPipelineCreateRequest,), + "openapi_types": (ObservabilityPipelineSpec,), "location": "body", }, }, @@ -87,6 +93,33 @@ def __init__(self, api_client=None): api_client=api_client, ) + self._list_pipelines_endpoint = _Endpoint( + settings={ + "response_type": (ListPipelinesResponse,), + 
"auth": ["apiKeyAuth", "appKeyAuth"], + "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines", + "operation_id": "list_pipelines", + "http_method": "GET", + "version": "v2", + }, + params_map={ + "page_size": { + "openapi_types": (int,), + "attribute": "page[size]", + "location": "query", + }, + "page_number": { + "openapi_types": (int,), + "attribute": "page[number]", + "location": "query", + }, + }, + headers_map={ + "accept": ["application/json"], + }, + api_client=api_client, + ) + self._update_pipeline_endpoint = _Endpoint( settings={ "response_type": (ObservabilityPipeline,), @@ -113,15 +146,35 @@ def __init__(self, api_client=None): api_client=api_client, ) + self._validate_pipeline_endpoint = _Endpoint( + settings={ + "response_type": (ValidationResponse,), + "auth": ["apiKeyAuth", "appKeyAuth"], + "endpoint_path": "/api/v2/remote_config/products/obs_pipelines/pipelines/validate", + "operation_id": "validate_pipeline", + "http_method": "POST", + "version": "v2", + }, + params_map={ + "body": { + "required": True, + "openapi_types": (ObservabilityPipelineSpec,), + "location": "body", + }, + }, + headers_map={"accept": ["application/json"], "content_type": ["application/json"]}, + api_client=api_client, + ) + def create_pipeline( self, - body: ObservabilityPipelineCreateRequest, + body: ObservabilityPipelineSpec, ) -> ObservabilityPipeline: """Create a new pipeline. Create a new pipeline. - :type body: ObservabilityPipelineCreateRequest + :type body: ObservabilityPipelineSpec :rtype: ObservabilityPipeline """ kwargs: Dict[str, Any] = {} @@ -163,6 +216,31 @@ def get_pipeline( return self._get_pipeline_endpoint.call_with_http_info(**kwargs) + def list_pipelines( + self, + *, + page_size: Union[int, UnsetType] = unset, + page_number: Union[int, UnsetType] = unset, + ) -> ListPipelinesResponse: + """List pipelines. + + Retrieve a list of pipelines. + + :param page_size: Size for a given page. The maximum allowed value is 100. 
+ :type page_size: int, optional + :param page_number: Specific page number to return. + :type page_number: int, optional + :rtype: ListPipelinesResponse + """ + kwargs: Dict[str, Any] = {} + if page_size is not unset: + kwargs["page_size"] = page_size + + if page_number is not unset: + kwargs["page_number"] = page_number + + return self._list_pipelines_endpoint.call_with_http_info(**kwargs) + def update_pipeline( self, pipeline_id: str, @@ -183,3 +261,20 @@ def update_pipeline( kwargs["body"] = body return self._update_pipeline_endpoint.call_with_http_info(**kwargs) + + def validate_pipeline( + self, + body: ObservabilityPipelineSpec, + ) -> ValidationResponse: + """Validate an observability pipeline. + + Validates a pipeline configuration without creating or updating any resources. + Returns a list of validation errors, if any. + + :type body: ObservabilityPipelineSpec + :rtype: ValidationResponse + """ + kwargs: Dict[str, Any] = {} + kwargs["body"] = body + + return self._validate_pipeline_endpoint.call_with_http_info(**kwargs) diff --git a/src/datadog_api_client/v2/model/azure_storage_destination.py b/src/datadog_api_client/v2/model/azure_storage_destination.py new file mode 100644 index 0000000000..283e2df4aa --- /dev/null +++ b/src/datadog_api_client/v2/model/azure_storage_destination.py @@ -0,0 +1,75 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType + + +class AzureStorageDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType + + return { + "blob_prefix": (str,), + "container_name": (str,), + "id": (str,), + "inputs": ([str],), + "type": (AzureStorageDestinationType,), + } + + attribute_map = { + "blob_prefix": "blob_prefix", + "container_name": "container_name", + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__( + self_, + container_name: str, + id: str, + inputs: List[str], + type: AzureStorageDestinationType, + blob_prefix: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``azure_storage`` destination forwards logs to an Azure Blob Storage container. + + :param blob_prefix: Optional prefix for blobs written to the container. + :type blob_prefix: str, optional + + :param container_name: The name of the Azure Blob Storage container to store logs in. + :type container_name: str + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``azure_storage``. 
+ :type type: AzureStorageDestinationType + """ + if blob_prefix is not unset: + kwargs["blob_prefix"] = blob_prefix + super().__init__(kwargs) + + self_.container_name = container_name + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/azure_storage_destination_type.py b/src/datadog_api_client/v2/model/azure_storage_destination_type.py new file mode 100644 index 0000000000..2a42adb254 --- /dev/null +++ b/src/datadog_api_client/v2/model/azure_storage_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class AzureStorageDestinationType(ModelSimple): + """ + The destination type. The value should always be `azure_storage`. + + :param value: If omitted defaults to "azure_storage". Must be one of ["azure_storage"]. + :type value: str + """ + + allowed_values = { + "azure_storage", + } + AZURE_STORAGE: ClassVar["AzureStorageDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +AzureStorageDestinationType.AZURE_STORAGE = AzureStorageDestinationType("azure_storage") diff --git a/src/datadog_api_client/v2/model/list_pipelines_response.py b/src/datadog_api_client/v2/model/list_pipelines_response.py new file mode 100644 index 0000000000..ef6db90568 --- /dev/null +++ b/src/datadog_api_client/v2/model/list_pipelines_response.py @@ -0,0 +1,56 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_data import ObservabilityPipelineData + from datadog_api_client.v2.model.list_pipelines_response_meta import ListPipelinesResponseMeta + + +class ListPipelinesResponse(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_data import ObservabilityPipelineData + from datadog_api_client.v2.model.list_pipelines_response_meta import ListPipelinesResponseMeta + + return { + "data": ([ObservabilityPipelineData],), + "meta": (ListPipelinesResponseMeta,), + } + + attribute_map = { + "data": "data", + "meta": "meta", + } + + def __init__( + self_, + data: List[ObservabilityPipelineData], + meta: Union[ListPipelinesResponseMeta, UnsetType] = unset, + **kwargs, + ): + """ + Represents the response payload containing a list of pipelines and associated metadata. + + :param data: The ``schema`` ``data``. + :type data: [ObservabilityPipelineData] + + :param meta: Metadata about the response. + :type meta: ListPipelinesResponseMeta, optional + """ + if meta is not unset: + kwargs["meta"] = meta + super().__init__(kwargs) + + self_.data = data diff --git a/src/datadog_api_client/v2/model/list_pipelines_response_meta.py b/src/datadog_api_client/v2/model/list_pipelines_response_meta.py new file mode 100644 index 0000000000..808e1e97ea --- /dev/null +++ b/src/datadog_api_client/v2/model/list_pipelines_response_meta.py @@ -0,0 +1,36 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ListPipelinesResponseMeta(ModelNormal): + @cached_property + def openapi_types(_): + return { + "total_count": (int,), + } + + attribute_map = { + "total_count": "totalCount", + } + + def __init__(self_, total_count: Union[int, UnsetType] = unset, **kwargs): + """ + Metadata about the response. + + :param total_count: The total number of pipelines. + :type total_count: int, optional + """ + if total_count is not unset: + kwargs["total_count"] = total_count + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py new file mode 100644 index 0000000000..44868adb77 --- /dev/null +++ b/src/datadog_api_client/v2/model/microsoft_sentinel_destination.py @@ -0,0 +1,86 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType + + +class MicrosoftSentinelDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType + + return { + "client_id": (str,), + "dcr_immutable_id": (str,), + "id": (str,), + "inputs": ([str],), + "table": (str,), + "tenant_id": (str,), + "type": (MicrosoftSentinelDestinationType,), + } + + attribute_map = { + "client_id": "client_id", + "dcr_immutable_id": "dcr_immutable_id", + "id": "id", + "inputs": "inputs", + "table": "table", + "tenant_id": "tenant_id", + "type": "type", + } + + def __init__( + self_, + client_id: str, + dcr_immutable_id: str, + id: str, + inputs: List[str], + table: str, + tenant_id: str, + type: MicrosoftSentinelDestinationType, + **kwargs, + ): + """ + The ``microsoft_sentinel`` destination forwards logs to Microsoft Sentinel. + + :param client_id: Azure AD client ID used for authentication. + :type client_id: str + + :param dcr_immutable_id: The immutable ID of the Data Collection Rule (DCR). + :type dcr_immutable_id: str + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param table: The name of the Log Analytics table where logs are sent. + :type table: str + + :param tenant_id: Azure AD tenant ID. + :type tenant_id: str + + :param type: The destination type. The value should always be ``microsoft_sentinel``. 
+ :type type: MicrosoftSentinelDestinationType + """ + super().__init__(kwargs) + + self_.client_id = client_id + self_.dcr_immutable_id = dcr_immutable_id + self_.id = id + self_.inputs = inputs + self_.table = table + self_.tenant_id = tenant_id + self_.type = type diff --git a/src/datadog_api_client/v2/model/microsoft_sentinel_destination_type.py b/src/datadog_api_client/v2/model/microsoft_sentinel_destination_type.py new file mode 100644 index 0000000000..0b3bc49c33 --- /dev/null +++ b/src/datadog_api_client/v2/model/microsoft_sentinel_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class MicrosoftSentinelDestinationType(ModelSimple): + """ + The destination type. The value should always be `microsoft_sentinel`. + + :param value: If omitted defaults to "microsoft_sentinel". Must be one of ["microsoft_sentinel"]. 
+ :type value: str + """ + + allowed_values = { + "microsoft_sentinel", + } + MICROSOFT_SENTINEL: ClassVar["MicrosoftSentinelDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +MicrosoftSentinelDestinationType.MICROSOFT_SENTINEL = MicrosoftSentinelDestinationType("microsoft_sentinel") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py new file mode 100644 index 0000000000..ab11ebe286 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor.py @@ -0,0 +1,82 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_type import ( + ObservabilityPipelineAddEnvVarsProcessorType, + ) + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_variable import ( + ObservabilityPipelineAddEnvVarsProcessorVariable, + ) + + +class ObservabilityPipelineAddEnvVarsProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_type import ( + ObservabilityPipelineAddEnvVarsProcessorType, + ) + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_variable import ( + ObservabilityPipelineAddEnvVarsProcessorVariable, + ) + + return { + "id": (str,), + "include": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineAddEnvVarsProcessorType,), + "variables": 
([ObservabilityPipelineAddEnvVarsProcessorVariable],), + } + + attribute_map = { + "id": "id", + "include": "include", + "inputs": "inputs", + "type": "type", + "variables": "variables", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + type: ObservabilityPipelineAddEnvVarsProcessorType, + variables: List[ObservabilityPipelineAddEnvVarsProcessorVariable], + **kwargs, + ): + """ + The ``add_env_vars`` processor adds environment variable values to log events. + + :param id: The unique identifier for this component. Used to reference this processor in the pipeline. + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the input for this processor. + :type inputs: [str] + + :param type: The processor type. The value should always be ``add_env_vars``. + :type type: ObservabilityPipelineAddEnvVarsProcessorType + + :param variables: A list of environment variable mappings to apply to log fields. + :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] + """ + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.type = type + self_.variables = variables diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor_type.py new file mode 100644 index 0000000000..f5424ca125 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAddEnvVarsProcessorType(ModelSimple): + """ + The processor type. The value should always be `add_env_vars`. + + :param value: If omitted defaults to "add_env_vars". Must be one of ["add_env_vars"]. + :type value: str + """ + + allowed_values = { + "add_env_vars", + } + ADD_ENV_VARS: ClassVar["ObservabilityPipelineAddEnvVarsProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAddEnvVarsProcessorType.ADD_ENV_VARS = ObservabilityPipelineAddEnvVarsProcessorType("add_env_vars") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor_variable.py b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor_variable.py new file mode 100644 index 0000000000..df0a1f63e2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_add_env_vars_processor_variable.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineAddEnvVarsProcessorVariable(ModelNormal): + @cached_property + def openapi_types(_): + return { + "field": (str,), + "name": (str,), + } + + attribute_map = { + "field": "field", + "name": "name", + } + + def __init__(self_, field: str, name: str, **kwargs): + """ + Defines a mapping between an environment variable and a log field. + + :param field: The target field in the log event. + :type field: str + + :param name: The name of the environment variable to read. 
+ :type name: str + """ + super().__init__(kwargs) + + self_.field = field + self_.name = name diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py new file mode 100644 index 0000000000..cdb35acf00 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source.py @@ -0,0 +1,78 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source_type import ( + ObservabilityPipelineAmazonDataFirehoseSourceType, + ) + + +class ObservabilityPipelineAmazonDataFirehoseSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source_type import ( + ObservabilityPipelineAmazonDataFirehoseSourceType, + ) + + return { + "auth": (ObservabilityPipelineAwsAuth,), + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineAmazonDataFirehoseSourceType,), + } + + attribute_map = { + "auth": "auth", + "id": "id", + "tls": "tls", + "type": "type", + } + + def 
__init__( + self_, + id: str, + type: ObservabilityPipelineAmazonDataFirehoseSourceType, + auth: Union[ObservabilityPipelineAwsAuth, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``amazon_data_firehose`` source ingests logs from AWS Data Firehose. + + :param auth: AWS authentication credentials used for accessing AWS services such as S3. + If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). + :type auth: ObservabilityPipelineAwsAuth, optional + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``amazon_data_firehose``. + :type type: ObservabilityPipelineAmazonDataFirehoseSourceType + """ + if auth is not unset: + kwargs["auth"] = auth + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source_type.py new file mode 100644 index 0000000000..222e1a96a5 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_data_firehose_source_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAmazonDataFirehoseSourceType(ModelSimple): + """ + The source type. The value should always be `amazon_data_firehose`. + + :param value: If omitted defaults to "amazon_data_firehose". Must be one of ["amazon_data_firehose"]. + :type value: str + """ + + allowed_values = { + "amazon_data_firehose", + } + AMAZON_DATA_FIREHOSE: ClassVar["ObservabilityPipelineAmazonDataFirehoseSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAmazonDataFirehoseSourceType.AMAZON_DATA_FIREHOSE = ( + ObservabilityPipelineAmazonDataFirehoseSourceType("amazon_data_firehose") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py new file mode 100644 index 0000000000..639d156f8b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination.py @@ -0,0 +1,86 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( + ObservabilityPipelineAmazonOpenSearchDestinationAuth, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( + ObservabilityPipelineAmazonOpenSearchDestinationType, + ) + + +class ObservabilityPipelineAmazonOpenSearchDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( + ObservabilityPipelineAmazonOpenSearchDestinationAuth, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( + ObservabilityPipelineAmazonOpenSearchDestinationType, + ) + + return { + "auth": (ObservabilityPipelineAmazonOpenSearchDestinationAuth,), + "bulk_index": (str,), + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineAmazonOpenSearchDestinationType,), + } + + attribute_map = { + "auth": "auth", + "bulk_index": "bulk_index", + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__( + self_, + auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth, + id: str, + inputs: List[str], + type: ObservabilityPipelineAmazonOpenSearchDestinationType, + bulk_index: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``amazon_opensearch`` destination writes logs to Amazon OpenSearch. + + :param auth: Authentication settings for the Amazon OpenSearch destination. + The ``strategy`` field determines whether basic or AWS-based authentication is used. + :type auth: ObservabilityPipelineAmazonOpenSearchDestinationAuth + + :param bulk_index: The index to write logs to. 
+ :type bulk_index: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``amazon_opensearch``. + :type type: ObservabilityPipelineAmazonOpenSearchDestinationType + """ + if bulk_index is not unset: + kwargs["bulk_index"] = bulk_index + super().__init__(kwargs) + + self_.auth = auth + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_auth.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_auth.py new file mode 100644 index 0000000000..2165d0a596 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_auth.py @@ -0,0 +1,83 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth_strategy import ( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy, + ) + + +class ObservabilityPipelineAmazonOpenSearchDestinationAuth(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth_strategy import ( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy, + ) + + return { + "assume_role": (str,), + "aws_region": (str,), + "external_id": (str,), + "session_name": (str,), + "strategy": (ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy,), + } + + attribute_map = { + "assume_role": "assume_role", + "aws_region": "aws_region", + "external_id": "external_id", + "session_name": "session_name", + "strategy": "strategy", + } + + def __init__( + self_, + strategy: ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy, + assume_role: Union[str, UnsetType] = unset, + aws_region: Union[str, UnsetType] = unset, + external_id: Union[str, UnsetType] = unset, + session_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + Authentication settings for the Amazon OpenSearch destination. + The ``strategy`` field determines whether basic or AWS-based authentication is used. + + :param assume_role: The ARN of the role to assume (used with ``aws`` strategy). + :type assume_role: str, optional + + :param aws_region: AWS region + :type aws_region: str, optional + + :param external_id: External ID for the assumed role (used with ``aws`` strategy). + :type external_id: str, optional + + :param session_name: Session name for the assumed role (used with ``aws`` strategy). 
+ :type session_name: str, optional + + :param strategy: The authentication strategy to use. + :type strategy: ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy + """ + if assume_role is not unset: + kwargs["assume_role"] = assume_role + if aws_region is not unset: + kwargs["aws_region"] = aws_region + if external_id is not unset: + kwargs["external_id"] = external_id + if session_name is not unset: + kwargs["session_name"] = session_name + super().__init__(kwargs) + + self_.strategy = strategy diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_auth_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_auth_strategy.py new file mode 100644 index 0000000000..7c1a186e32 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_auth_strategy.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy(ModelSimple): + """ + The authentication strategy to use. + + :param value: Must be one of ["basic", "aws"]. 
+ :type value: str + """ + + allowed_values = { + "basic", + "aws", + } + BASIC: ClassVar["ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy"] + AWS: ClassVar["ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy.BASIC = ( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy("basic") +) +ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy.AWS = ( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy("aws") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_type.py new file mode 100644 index 0000000000..2023887797 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_open_search_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAmazonOpenSearchDestinationType(ModelSimple): + """ + The destination type. The value should always be `amazon_opensearch`. + + :param value: If omitted defaults to "amazon_opensearch". Must be one of ["amazon_opensearch"]. 
+ :type value: str + """ + + allowed_values = { + "amazon_opensearch", + } + AMAZON_OPENSEARCH: ClassVar["ObservabilityPipelineAmazonOpenSearchDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAmazonOpenSearchDestinationType.AMAZON_OPENSEARCH = ( + ObservabilityPipelineAmazonOpenSearchDestinationType("amazon_opensearch") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py new file mode 100644 index 0000000000..1a7af99838 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination.py @@ -0,0 +1,120 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( + ObservabilityPipelineAmazonS3DestinationStorageClass, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_type import ( + ObservabilityPipelineAmazonS3DestinationType, + ) + + +class ObservabilityPipelineAmazonS3Destination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from 
datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( + ObservabilityPipelineAmazonS3DestinationStorageClass, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_type import ( + ObservabilityPipelineAmazonS3DestinationType, + ) + + return { + "auth": (ObservabilityPipelineAwsAuth,), + "bucket": (str,), + "id": (str,), + "inputs": ([str],), + "key_prefix": (str,), + "region": (str,), + "storage_class": (ObservabilityPipelineAmazonS3DestinationStorageClass,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineAmazonS3DestinationType,), + } + + attribute_map = { + "auth": "auth", + "bucket": "bucket", + "id": "id", + "inputs": "inputs", + "key_prefix": "key_prefix", + "region": "region", + "storage_class": "storage_class", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + bucket: str, + id: str, + inputs: List[str], + region: str, + storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass, + type: ObservabilityPipelineAmazonS3DestinationType, + auth: Union[ObservabilityPipelineAwsAuth, UnsetType] = unset, + key_prefix: Union[str, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``amazon_s3`` destination sends your logs in Datadog-rehydratable format to an Amazon S3 bucket for archiving. + + :param auth: AWS authentication credentials used for accessing AWS services such as S3. + If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). + :type auth: ObservabilityPipelineAwsAuth, optional + + :param bucket: S3 bucket name. + :type bucket: str + + :param id: Unique identifier for the destination component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. 
+ :type inputs: [str] + + :param key_prefix: Optional prefix for object keys. + :type key_prefix: str, optional + + :param region: AWS region of the S3 bucket. + :type region: str + + :param storage_class: S3 storage class. + :type storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The destination type. Always ``amazon_s3``. + :type type: ObservabilityPipelineAmazonS3DestinationType + """ + if auth is not unset: + kwargs["auth"] = auth + if key_prefix is not unset: + kwargs["key_prefix"] = key_prefix + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.bucket = bucket + self_.id = id + self_.inputs = inputs + self_.region = region + self_.storage_class = storage_class + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination_storage_class.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination_storage_class.py new file mode 100644 index 0000000000..8f2d537f47 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination_storage_class.py @@ -0,0 +1,77 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAmazonS3DestinationStorageClass(ModelSimple): + """ + S3 storage class. + + :param value: Must be one of ["STANDARD", "REDUCED_REDUNDANCY", "INTELLIGENT_TIERING", "STANDARD_IA", "EXPRESS_ONEZONE", "ONEZONE_IA", "GLACIER", "GLACIER_IR", "DEEP_ARCHIVE"]. 
+ :type value: str + """ + + allowed_values = { + "STANDARD", + "REDUCED_REDUNDANCY", + "INTELLIGENT_TIERING", + "STANDARD_IA", + "EXPRESS_ONEZONE", + "ONEZONE_IA", + "GLACIER", + "GLACIER_IR", + "DEEP_ARCHIVE", + } + STANDARD: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + REDUCED_REDUNDANCY: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + INTELLIGENT_TIERING: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + STANDARD_IA: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + EXPRESS_ONEZONE: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + ONEZONE_IA: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + GLACIER: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + GLACIER_IR: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + DEEP_ARCHIVE: ClassVar["ObservabilityPipelineAmazonS3DestinationStorageClass"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAmazonS3DestinationStorageClass.STANDARD = ObservabilityPipelineAmazonS3DestinationStorageClass( + "STANDARD" +) +ObservabilityPipelineAmazonS3DestinationStorageClass.REDUCED_REDUNDANCY = ( + ObservabilityPipelineAmazonS3DestinationStorageClass("REDUCED_REDUNDANCY") +) +ObservabilityPipelineAmazonS3DestinationStorageClass.INTELLIGENT_TIERING = ( + ObservabilityPipelineAmazonS3DestinationStorageClass("INTELLIGENT_TIERING") +) +ObservabilityPipelineAmazonS3DestinationStorageClass.STANDARD_IA = ObservabilityPipelineAmazonS3DestinationStorageClass( + "STANDARD_IA" +) +ObservabilityPipelineAmazonS3DestinationStorageClass.EXPRESS_ONEZONE = ( + ObservabilityPipelineAmazonS3DestinationStorageClass("EXPRESS_ONEZONE") +) +ObservabilityPipelineAmazonS3DestinationStorageClass.ONEZONE_IA = ObservabilityPipelineAmazonS3DestinationStorageClass( + "ONEZONE_IA" +) +ObservabilityPipelineAmazonS3DestinationStorageClass.GLACIER = 
ObservabilityPipelineAmazonS3DestinationStorageClass( + "GLACIER" +) +ObservabilityPipelineAmazonS3DestinationStorageClass.GLACIER_IR = ObservabilityPipelineAmazonS3DestinationStorageClass( + "GLACIER_IR" +) +ObservabilityPipelineAmazonS3DestinationStorageClass.DEEP_ARCHIVE = ( + ObservabilityPipelineAmazonS3DestinationStorageClass("DEEP_ARCHIVE") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination_type.py new file mode 100644 index 0000000000..f3e589f5a1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAmazonS3DestinationType(ModelSimple): + """ + The destination type. Always `amazon_s3`. + + :param value: If omitted defaults to "amazon_s3". Must be one of ["amazon_s3"]. 
+ :type value: str + """ + + allowed_values = { + "amazon_s3", + } + AMAZON_S3: ClassVar["ObservabilityPipelineAmazonS3DestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAmazonS3DestinationType.AMAZON_S3 = ObservabilityPipelineAmazonS3DestinationType("amazon_s3") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py new file mode 100644 index 0000000000..fe4730a921 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source.py @@ -0,0 +1,86 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source_type import ( + ObservabilityPipelineAmazonS3SourceType, + ) + + +class ObservabilityPipelineAmazonS3Source(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_aws_auth import ObservabilityPipelineAwsAuth + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source_type import ( + ObservabilityPipelineAmazonS3SourceType, + ) + + return { + "auth": (ObservabilityPipelineAwsAuth,), + "id": (str,), + "region": (str,), + "tls": 
(ObservabilityPipelineTls,), + "type": (ObservabilityPipelineAmazonS3SourceType,), + } + + attribute_map = { + "auth": "auth", + "id": "id", + "region": "region", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + region: str, + type: ObservabilityPipelineAmazonS3SourceType, + auth: Union[ObservabilityPipelineAwsAuth, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``amazon_s3`` source ingests logs from an Amazon S3 bucket. + It supports AWS authentication and TLS encryption. + + :param auth: AWS authentication credentials used for accessing AWS services such as S3. + If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). + :type auth: ObservabilityPipelineAwsAuth, optional + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param region: AWS region where the S3 bucket resides. + :type region: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. Always ``amazon_s3``. + :type type: ObservabilityPipelineAmazonS3SourceType + """ + if auth is not unset: + kwargs["auth"] = auth + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.region = region + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source_type.py new file mode 100644 index 0000000000..357857a2c7 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_amazon_s3_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineAmazonS3SourceType(ModelSimple): + """ + The source type. Always `amazon_s3`. + + :param value: If omitted defaults to "amazon_s3". Must be one of ["amazon_s3"]. + :type value: str + """ + + allowed_values = { + "amazon_s3", + } + AMAZON_S3: ClassVar["ObservabilityPipelineAmazonS3SourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineAmazonS3SourceType.AMAZON_S3 = ObservabilityPipelineAmazonS3SourceType("amazon_s3") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_aws_auth.py b/src/datadog_api_client/v2/model/observability_pipeline_aws_auth.py new file mode 100644 index 0000000000..89196a5301 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_aws_auth.py @@ -0,0 +1,57 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ObservabilityPipelineAwsAuth(ModelNormal): + @cached_property + def openapi_types(_): + return { + "assume_role": (str,), + "external_id": (str,), + "session_name": (str,), + } + + attribute_map = { + "assume_role": "assume_role", + "external_id": "external_id", + "session_name": "session_name", + } + + def __init__( + self_, + assume_role: Union[str, UnsetType] = unset, + external_id: Union[str, UnsetType] = unset, + session_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + AWS authentication credentials used for accessing AWS services such as S3. + If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). + + :param assume_role: The Amazon Resource Name (ARN) of the role to assume. + :type assume_role: str, optional + + :param external_id: A unique identifier for cross-account role assumption. + :type external_id: str, optional + + :param session_name: A session identifier used for logging and tracing the assumed role session. 
+ :type session_name: str, optional + """ + if assume_role is not unset: + kwargs["assume_role"] = assume_role + if external_id is not unset: + kwargs["external_id"] = external_id + if session_name is not unset: + kwargs["session_name"] = session_name + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config.py b/src/datadog_api_client/v2/model/observability_pipeline_config.py index af1d64f311..31d48480c4 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config.py @@ -8,6 +8,8 @@ from datadog_api_client.model_utils import ( ModelNormal, cached_property, + unset, + UnsetType, ) @@ -24,6 +26,44 @@ from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( ObservabilityPipelineDatadogLogsDestination, ) + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( + ObservabilityPipelineAmazonS3Destination, + ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( + ObservabilityPipelineElasticsearchDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, + ) + from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination + from 
datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination + from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( + ObservabilityPipelineGoogleChronicleDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( + ObservabilityPipelineNewRelicDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( + ObservabilityPipelineSentinelOneDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( + ObservabilityPipelineOpenSearchDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, + ) from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( ObservabilityPipelineParseJSONProcessor, @@ -38,10 +78,63 @@ from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( ObservabilityPipelineRenameFieldsProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor 
import ( + ObservabilityPipelineAddEnvVarsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor + from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( + ObservabilityPipelineThrottleProcessor, + ) from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, ) + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( + ObservabilityPipelineSplunkTcpSource, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( + ObservabilityPipelineSplunkHecSource, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ObservabilityPipelineAmazonS3Source + from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource + from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ( + ObservabilityPipelineFluentBitSource, + ) + from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( + ObservabilityPipelineHttpServerSource, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( + ObservabilityPipelineSumoLogicSource, + ) + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ObservabilityPipelineSyslogNgSource + from 
datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, + ) + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( + ObservabilityPipelineGooglePubSubSource, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_source import ( + ObservabilityPipelineHttpClientSource, + ) + from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource class ObservabilityPipelineConfig(ModelNormal): @@ -72,17 +165,23 @@ def openapi_types(_): def __init__( self_, destinations: List[ - Union[ObservabilityPipelineConfigDestinationItem, ObservabilityPipelineDatadogLogsDestination] - ], - processors: List[ Union[ - ObservabilityPipelineConfigProcessorItem, - ObservabilityPipelineFilterProcessor, - ObservabilityPipelineParseJSONProcessor, - ObservabilityPipelineQuotaProcessor, - ObservabilityPipelineAddFieldsProcessor, - ObservabilityPipelineRemoveFieldsProcessor, - ObservabilityPipelineRenameFieldsProcessor, + ObservabilityPipelineConfigDestinationItem, + ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineAmazonS3Destination, + ObservabilityPipelineGoogleCloudStorageDestination, + ObservabilityPipelineSplunkHecDestination, + ObservabilityPipelineSumoLogicDestination, + ObservabilityPipelineElasticsearchDestination, + ObservabilityPipelineRsyslogDestination, + ObservabilityPipelineSyslogNgDestination, + AzureStorageDestination, + MicrosoftSentinelDestination, + ObservabilityPipelineGoogleChronicleDestination, + ObservabilityPipelineNewRelicDestination, + ObservabilityPipelineSentinelOneDestination, + ObservabilityPipelineOpenSearchDestination, + ObservabilityPipelineAmazonOpenSearchDestination, ] ], sources: List[ @@ -90,8 +189,45 @@ def __init__( ObservabilityPipelineConfigSourceItem, ObservabilityPipelineKafkaSource, ObservabilityPipelineDatadogAgentSource, + 
ObservabilityPipelineSplunkTcpSource, + ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineAmazonS3Source, + ObservabilityPipelineFluentdSource, + ObservabilityPipelineFluentBitSource, + ObservabilityPipelineHttpServerSource, + ObservabilityPipelineSumoLogicSource, + ObservabilityPipelineRsyslogSource, + ObservabilityPipelineSyslogNgSource, + ObservabilityPipelineAmazonDataFirehoseSource, + ObservabilityPipelineGooglePubSubSource, + ObservabilityPipelineHttpClientSource, + ObservabilityPipelineLogstashSource, ] ], + processors: Union[ + List[ + Union[ + ObservabilityPipelineConfigProcessorItem, + ObservabilityPipelineFilterProcessor, + ObservabilityPipelineParseJSONProcessor, + ObservabilityPipelineQuotaProcessor, + ObservabilityPipelineAddFieldsProcessor, + ObservabilityPipelineRemoveFieldsProcessor, + ObservabilityPipelineRenameFieldsProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineSampleProcessor, + ObservabilityPipelineParseGrokProcessor, + ObservabilityPipelineSensitiveDataScannerProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, + ObservabilityPipelineReduceProcessor, + ObservabilityPipelineThrottleProcessor, + ] + ], + UnsetType, + ] = unset, **kwargs, ): """ @@ -101,13 +237,14 @@ def __init__( :type destinations: [ObservabilityPipelineConfigDestinationItem] :param processors: A list of processors that transform or enrich log data. - :type processors: [ObservabilityPipelineConfigProcessorItem] + :type processors: [ObservabilityPipelineConfigProcessorItem], optional :param sources: A list of configured data sources for the pipeline. 
:type sources: [ObservabilityPipelineConfigSourceItem] """ + if processors is not unset: + kwargs["processors"] = processors super().__init__(kwargs) self_.destinations = destinations - self_.processors = processors self_.sources = sources diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py index 01cf940587..c4a23e1136 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_destination_item.py @@ -23,6 +23,91 @@ def __init__(self, **kwargs): :param type: The destination type. The value should always be `datadog_logs`. :type type: ObservabilityPipelineDatadogLogsDestinationType + + :param auth: AWS authentication credentials used for accessing AWS services such as S3. + If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). + + :type auth: ObservabilityPipelineAwsAuth, optional + + :param bucket: S3 bucket name. + :type bucket: str + + :param key_prefix: Optional prefix for object keys. + :type key_prefix: str, optional + + :param region: AWS region of the S3 bucket. + :type region: str + + :param storage_class: S3 storage class. + :type storage_class: ObservabilityPipelineAmazonS3DestinationStorageClass + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param acl: Access control list setting for objects written to the bucket. + :type acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl + + :param metadata: Custom metadata key-value pairs added to each object. + :type metadata: [ObservabilityPipelineMetadataEntry] + + :param auto_extract_timestamp: If `true`, Splunk tries to extract timestamps from incoming log events. 
+ If `false`, Splunk assigns the time the event was received. + + :type auto_extract_timestamp: bool, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional + + :param index: Optional name of the Splunk index where logs are written. + :type index: str, optional + + :param sourcetype: The Splunk sourcetype to assign to log events. + :type sourcetype: str, optional + + :param header_custom_fields: A list of custom headers to include in the request to Sumo Logic. + :type header_custom_fields: [ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], optional + + :param header_host_name: Optional override for the host name header. + :type header_host_name: str, optional + + :param header_source_category: Optional override for the source category header. + :type header_source_category: str, optional + + :param header_source_name: Optional override for the source name header. + :type header_source_name: str, optional + + :param api_version: The Elasticsearch API version to use. Set to `auto` to auto-detect. + :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + + :param bulk_index: The index to write logs to in Elasticsearch. + :type bulk_index: str, optional + + :param keepalive: Optional socket keepalive duration in milliseconds. + :type keepalive: int, optional + + :param blob_prefix: Optional prefix for blobs written to the container. + :type blob_prefix: str, optional + + :param container_name: The name of the Azure Blob Storage container to store logs in. + :type container_name: str + + :param client_id: Azure AD client ID used for authentication. + :type client_id: str + + :param dcr_immutable_id: The immutable ID of the Data Collection Rule (DCR). + :type dcr_immutable_id: str + + :param table: The name of the Log Analytics table where logs are sent. + :type table: str + + :param tenant_id: Azure AD tenant ID. 
+ :type tenant_id: str + + :param customer_id: The Google Chronicle customer ID. + :type customer_id: str + + :param log_type: The log type metadata associated with the Chronicle destination. + :type log_type: str, optional """ super().__init__(kwargs) @@ -38,9 +123,61 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination import ( ObservabilityPipelineDatadogLogsDestination, ) + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( + ObservabilityPipelineAmazonS3Destination, + ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( + ObservabilityPipelineElasticsearchDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, + ) + from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination + from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination + from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( + ObservabilityPipelineGoogleChronicleDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( + ObservabilityPipelineNewRelicDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( 
+ ObservabilityPipelineSentinelOneDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( + ObservabilityPipelineOpenSearchDestination, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, + ) return { "oneOf": [ ObservabilityPipelineDatadogLogsDestination, + ObservabilityPipelineAmazonS3Destination, + ObservabilityPipelineGoogleCloudStorageDestination, + ObservabilityPipelineSplunkHecDestination, + ObservabilityPipelineSumoLogicDestination, + ObservabilityPipelineElasticsearchDestination, + ObservabilityPipelineRsyslogDestination, + ObservabilityPipelineSyslogNgDestination, + AzureStorageDestination, + MicrosoftSentinelDestination, + ObservabilityPipelineGoogleChronicleDestination, + ObservabilityPipelineNewRelicDestination, + ObservabilityPipelineSentinelOneDestination, + ObservabilityPipelineOpenSearchDestination, + ObservabilityPipelineAmazonOpenSearchDestination, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py index 174a2ff327..a761a8beb8 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_processor_item.py @@ -39,9 +39,16 @@ def __init__(self, **kwargs): :param limit: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. :type limit: ObservabilityPipelineQuotaProcessorLimit - :param name: Name for identifying the processor. + :param name: Name of the quota. :type name: str + :param overflow_action: The action to take when the quota is exceeded. Options: + - `drop`: Drop the event. + - `no_action`: Let the event pass through. + - `overflow_routing`: Route to an overflow destination. 
+ + :type overflow_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param overrides: A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit. :type overrides: [ObservabilityPipelineQuotaProcessorOverride], optional @@ -50,6 +57,51 @@ def __init__(self, **kwargs): :param fields: A list of static fields (key-value pairs) that is added to each log event processed by this component. :type fields: [ObservabilityPipelineFieldValue] + + :param metrics: Configuration for generating individual metrics. + :type metrics: [ObservabilityPipelineGeneratedMetric] + + :param percentage: The percentage of logs to sample. + :type percentage: float, optional + + :param rate: Number of events to sample (1 in N). + :type rate: int, optional + + :param disable_library_rules: If set to `true`, disables the default Grok rules provided by Datadog. + :type disable_library_rules: bool, optional + + :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + :type rules: [ObservabilityPipelineParseGrokProcessorRule] + + :param mappings: A list of mapping rules to convert events to the OCSF format. + :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] + + :param variables: A list of environment variable mappings to apply to log fields. + :type variables: [ObservabilityPipelineAddEnvVarsProcessorVariable] + + :param mode: The deduplication mode to apply to the fields. + :type mode: ObservabilityPipelineDedupeProcessorMode + + :param file: Defines a static enrichment table loaded from a CSV file. + :type file: ObservabilityPipelineEnrichmentTableFile, optional + + :param geoip: Uses a GeoIP database to enrich logs based on an IP field. 
+ :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional + + :param target: Path where enrichment results should be stored in the log. + :type target: str + + :param group_by: A list of fields used to group log events for merging. + :type group_by: [str] + + :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. + :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] + + :param threshold: the number of events allowed in a given time window. Events sent after the threshold has been reached, are dropped. + :type threshold: int + + :param window: The time window in seconds over which the threshold applies. + :type window: float """ super().__init__(kwargs) @@ -80,6 +132,36 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor import ( ObservabilityPipelineRenameFieldsProcessor, ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sample_processor import ( + ObservabilityPipelineSampleProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ( + ObservabilityPipelineDedupeProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + 
ObservabilityPipelineEnrichmentTableProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ( + ObservabilityPipelineReduceProcessor, + ) + from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ( + ObservabilityPipelineThrottleProcessor, + ) return { "oneOf": [ @@ -89,5 +171,15 @@ def _composed_schemas(_): ObservabilityPipelineAddFieldsProcessor, ObservabilityPipelineRemoveFieldsProcessor, ObservabilityPipelineRenameFieldsProcessor, + ObservabilityPipelineGenerateMetricsProcessor, + ObservabilityPipelineSampleProcessor, + ObservabilityPipelineParseGrokProcessor, + ObservabilityPipelineSensitiveDataScannerProcessor, + ObservabilityPipelineOcsfMapperProcessor, + ObservabilityPipelineAddEnvVarsProcessor, + ObservabilityPipelineDedupeProcessor, + ObservabilityPipelineEnrichmentTableProcessor, + ObservabilityPipelineReduceProcessor, + ObservabilityPipelineThrottleProcessor, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py index 77d5f5f430..396b6f6a4d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_config_source_item.py @@ -27,7 +27,7 @@ def __init__(self, **kwargs): :param sasl: Specifies the SASL mechanism for authenticating with a Kafka cluster. :type sasl: ObservabilityPipelineKafkaSourceSasl, optional - :param tls: Configuration for enabling TLS encryption. + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional :param topics: A list of Kafka topic names to subscribe to. The source ingests messages from each topic specified. @@ -35,6 +35,35 @@ def __init__(self, **kwargs): :param type: The source type. The value should always be `kafka`. 
:type type: ObservabilityPipelineKafkaSourceType + + :param auth: AWS authentication credentials used for accessing AWS services such as S3. + If omitted, the system’s default credentials are used (for example, the IAM role and environment variables). + + :type auth: ObservabilityPipelineAwsAuth, optional + + :param region: AWS region where the S3 bucket resides. + :type region: str + + :param auth_strategy: HTTP authentication method. + :type auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy + + :param decoding: The decoding format used to interpret incoming logs. + :type decoding: ObservabilityPipelineDecoding + + :param mode: Protocol used by the syslog source to receive messages. + :type mode: ObservabilityPipelineSyslogSourceMode + + :param project: The GCP project ID that owns the Pub/Sub subscription. + :type project: str + + :param subscription: The Pub/Sub subscription name from which messages are consumed. + :type subscription: str + + :param scrape_interval_secs: The interval (in seconds) between HTTP scrape requests. + :type scrape_interval_secs: int, optional + + :param scrape_timeout_secs: The timeout (in seconds) for each scrape request. 
+ :type scrape_timeout_secs: int, optional """ super().__init__(kwargs) @@ -51,10 +80,58 @@ def _composed_schemas(_): from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( ObservabilityPipelineDatadogAgentSource, ) + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ( + ObservabilityPipelineSplunkTcpSource, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ( + ObservabilityPipelineSplunkHecSource, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ( + ObservabilityPipelineAmazonS3Source, + ) + from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource + from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ( + ObservabilityPipelineFluentBitSource, + ) + from datadog_api_client.v2.model.observability_pipeline_http_server_source import ( + ObservabilityPipelineHttpServerSource, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ( + ObservabilityPipelineSumoLogicSource, + ) + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ( + ObservabilityPipelineSyslogNgSource, + ) + from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, + ) + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( + ObservabilityPipelineGooglePubSubSource, + ) + from datadog_api_client.v2.model.observability_pipeline_http_client_source import ( + ObservabilityPipelineHttpClientSource, + ) + from datadog_api_client.v2.model.observability_pipeline_logstash_source import ( + ObservabilityPipelineLogstashSource, + ) return { "oneOf": [ ObservabilityPipelineKafkaSource, 
ObservabilityPipelineDatadogAgentSource, + ObservabilityPipelineSplunkTcpSource, + ObservabilityPipelineSplunkHecSource, + ObservabilityPipelineAmazonS3Source, + ObservabilityPipelineFluentdSource, + ObservabilityPipelineFluentBitSource, + ObservabilityPipelineHttpServerSource, + ObservabilityPipelineSumoLogicSource, + ObservabilityPipelineRsyslogSource, + ObservabilityPipelineSyslogNgSource, + ObservabilityPipelineAmazonDataFirehoseSource, + ObservabilityPipelineGooglePubSubSource, + ObservabilityPipelineHttpClientSource, + ObservabilityPipelineLogstashSource, ], } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py b/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py index 4cb08863d0..3614768ec1 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_datadog_agent_source.py @@ -53,7 +53,7 @@ def __init__( :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). :type id: str - :param tls: Configuration for enabling TLS encryption. + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. :type tls: ObservabilityPipelineTls, optional :param type: The source type. The value should always be ``datadog_agent``. diff --git a/src/datadog_api_client/v2/model/observability_pipeline_decoding.py b/src/datadog_api_client/v2/model/observability_pipeline_decoding.py new file mode 100644 index 0000000000..53c48f8342 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_decoding.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineDecoding(ModelSimple): + """ + The decoding format used to interpret incoming logs. + + :param value: Must be one of ["bytes", "gelf", "json", "syslog"]. + :type value: str + """ + + allowed_values = { + "bytes", + "gelf", + "json", + "syslog", + } + DECODE_BYTES: ClassVar["ObservabilityPipelineDecoding"] + DECODE_GELF: ClassVar["ObservabilityPipelineDecoding"] + DECODE_JSON: ClassVar["ObservabilityPipelineDecoding"] + DECODE_SYSLOG: ClassVar["ObservabilityPipelineDecoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineDecoding.DECODE_BYTES = ObservabilityPipelineDecoding("bytes") +ObservabilityPipelineDecoding.DECODE_GELF = ObservabilityPipelineDecoding("gelf") +ObservabilityPipelineDecoding.DECODE_JSON = ObservabilityPipelineDecoding("json") +ObservabilityPipelineDecoding.DECODE_SYSLOG = ObservabilityPipelineDecoding("syslog") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py new file mode 100644 index 0000000000..9021940cfc --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_dedupe_processor.py @@ -0,0 +1,89 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
class ObservabilityPipelineDedupeProcessor(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_dedupe_processor_mode import (
            ObservabilityPipelineDedupeProcessorMode,
        )
        from datadog_api_client.v2.model.observability_pipeline_dedupe_processor_type import (
            ObservabilityPipelineDedupeProcessorType,
        )

        return {
            "fields": ([str],),
            "id": (str,),
            "include": (str,),
            "inputs": ([str],),
            "mode": (ObservabilityPipelineDedupeProcessorMode,),
            "type": (ObservabilityPipelineDedupeProcessorType,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "fields": "fields",
        "id": "id",
        "include": "include",
        "inputs": "inputs",
        "mode": "mode",
        "type": "type",
    }

    def __init__(
        self_,
        fields: List[str],
        id: str,
        include: str,
        inputs: List[str],
        mode: ObservabilityPipelineDedupeProcessorMode,
        type: ObservabilityPipelineDedupeProcessorType,
        **kwargs,
    ):
        """
        The ``dedupe`` processor removes duplicate fields in log events.

        :param fields: A list of log field paths to check for duplicates.
        :type fields: [str]

        :param id: The unique identifier for this processor.
        :type id: str

        :param include: A Datadog search query used to determine which logs this processor targets.
        :type include: str

        :param inputs: A list of component IDs whose output is used as the input for this processor.
        :type inputs: [str]

        :param mode: The deduplication mode to apply to the fields.
        :type mode: ObservabilityPipelineDedupeProcessorMode

        :param type: The processor type. The value should always be ``dedupe``.
        :type type: ObservabilityPipelineDedupeProcessorType
        """
        super().__init__(kwargs)

        self_.fields = fields
        self_.id = id
        self_.include = include
        self_.inputs = inputs
        self_.mode = mode
        self_.type = type
class ObservabilityPipelineDedupeProcessorType(ModelSimple):
    """
    The processor type. The value should always be `dedupe`.

    :param value: If omitted defaults to "dedupe". Must be one of ["dedupe"].
    :type value: str
    """

    # Single-value enum: the OpenAPI spec fixes this discriminator to "dedupe".
    allowed_values = {
        "dedupe",
    }
    # Singleton member, assigned after the class body below.
    DEDUPE: ClassVar["ObservabilityPipelineDedupeProcessorType"]

    @cached_property
    def openapi_types(_):
        # The wrapped enum value is always a plain string.
        return {
            "value": (str,),
        }


ObservabilityPipelineDedupeProcessorType.DEDUPE = ObservabilityPipelineDedupeProcessorType("dedupe")
(str,), + "inputs": ([str],), + "type": (ObservabilityPipelineElasticsearchDestinationType,), + } + + attribute_map = { + "api_version": "api_version", + "bulk_index": "bulk_index", + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineElasticsearchDestinationType, + api_version: Union[ObservabilityPipelineElasticsearchDestinationApiVersion, UnsetType] = unset, + bulk_index: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``elasticsearch`` destination writes logs to an Elasticsearch cluster. + + :param api_version: The Elasticsearch API version to use. Set to ``auto`` to auto-detect. + :type api_version: ObservabilityPipelineElasticsearchDestinationApiVersion, optional + + :param bulk_index: The index to write logs to in Elasticsearch. + :type bulk_index: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``elasticsearch``. + :type type: ObservabilityPipelineElasticsearchDestinationType + """ + if api_version is not unset: + kwargs["api_version"] = api_version + if bulk_index is not unset: + kwargs["bulk_index"] = bulk_index + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_api_version.py b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_api_version.py new file mode 100644 index 0000000000..a38d49ddff --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_elasticsearch_destination_api_version.py @@ -0,0 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
class ObservabilityPipelineElasticsearchDestinationApiVersion(ModelSimple):
    """
    The Elasticsearch API version to use. Set to `auto` to auto-detect.

    :param value: Must be one of ["auto", "v6", "v7", "v8"].
    :type value: str
    """

    # The set of values the OpenAPI enum permits for this type.
    allowed_values = {
        "auto",
        "v6",
        "v7",
        "v8",
    }
    # Enum member singletons, assigned after the class body below.
    AUTO: ClassVar["ObservabilityPipelineElasticsearchDestinationApiVersion"]
    V6: ClassVar["ObservabilityPipelineElasticsearchDestinationApiVersion"]
    V7: ClassVar["ObservabilityPipelineElasticsearchDestinationApiVersion"]
    V8: ClassVar["ObservabilityPipelineElasticsearchDestinationApiVersion"]

    @cached_property
    def openapi_types(_):
        # The wrapped enum value is always a plain string.
        return {
            "value": (str,),
        }


ObservabilityPipelineElasticsearchDestinationApiVersion.AUTO = ObservabilityPipelineElasticsearchDestinationApiVersion(
    "auto"
)
ObservabilityPipelineElasticsearchDestinationApiVersion.V6 = ObservabilityPipelineElasticsearchDestinationApiVersion(
    "v6"
)
ObservabilityPipelineElasticsearchDestinationApiVersion.V7 = ObservabilityPipelineElasticsearchDestinationApiVersion(
    "v7"
)
ObservabilityPipelineElasticsearchDestinationApiVersion.V8 = ObservabilityPipelineElasticsearchDestinationApiVersion(
    "v8"
)
class ObservabilityPipelineElasticsearchDestinationType(ModelSimple):
    """
    The destination type. The value should always be `elasticsearch`.

    :param value: If omitted defaults to "elasticsearch". Must be one of ["elasticsearch"].
    :type value: str
    """

    # Single-value enum: the OpenAPI spec fixes this discriminator to "elasticsearch".
    allowed_values = {
        "elasticsearch",
    }
    # Singleton member, assigned after the class body below.
    ELASTICSEARCH: ClassVar["ObservabilityPipelineElasticsearchDestinationType"]

    @cached_property
    def openapi_types(_):
        # The wrapped enum value is always a plain string.
        return {
            "value": (str,),
        }


ObservabilityPipelineElasticsearchDestinationType.ELASTICSEARCH = ObservabilityPipelineElasticsearchDestinationType(
    "elasticsearch"
)
class ObservabilityPipelineEnrichmentTableFile(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_encoding import (
            ObservabilityPipelineEnrichmentTableFileEncoding,
        )
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_key_items import (
            ObservabilityPipelineEnrichmentTableFileKeyItems,
        )
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_schema_items import (
            ObservabilityPipelineEnrichmentTableFileSchemaItems,
        )

        return {
            "encoding": (ObservabilityPipelineEnrichmentTableFileEncoding,),
            "key": ([ObservabilityPipelineEnrichmentTableFileKeyItems],),
            "path": (str,),
            "schema": ([ObservabilityPipelineEnrichmentTableFileSchemaItems],),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "encoding": "encoding",
        "key": "key",
        "path": "path",
        "schema": "schema",
    }

    def __init__(
        self_,
        encoding: ObservabilityPipelineEnrichmentTableFileEncoding,
        key: List[ObservabilityPipelineEnrichmentTableFileKeyItems],
        path: str,
        schema: List[ObservabilityPipelineEnrichmentTableFileSchemaItems],
        **kwargs,
    ):
        """
        Defines a static enrichment table loaded from a CSV file.

        :param encoding: File encoding format.
        :type encoding: ObservabilityPipelineEnrichmentTableFileEncoding

        :param key: Key fields used to look up enrichment values.
        :type key: [ObservabilityPipelineEnrichmentTableFileKeyItems]

        :param path: Path to the CSV file.
        :type path: str

        :param schema: Schema defining column names and their types.
        :type schema: [ObservabilityPipelineEnrichmentTableFileSchemaItems]
        """
        super().__init__(kwargs)

        self_.encoding = encoding
        self_.key = key
        self_.path = path
        self_.schema = schema
class ObservabilityPipelineEnrichmentTableFileEncoding(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_encoding_type import (
            ObservabilityPipelineEnrichmentTableFileEncodingType,
        )

        return {
            "delimiter": (str,),
            "includes_headers": (bool,),
            "type": (ObservabilityPipelineEnrichmentTableFileEncodingType,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "delimiter": "delimiter",
        "includes_headers": "includes_headers",
        "type": "type",
    }

    def __init__(
        self_,
        delimiter: str,
        includes_headers: bool,
        type: ObservabilityPipelineEnrichmentTableFileEncodingType,
        **kwargs,
    ):
        """
        File encoding format.

        :param delimiter: The ``encoding`` ``delimiter``.
        :type delimiter: str

        :param includes_headers: The ``encoding`` ``includes_headers``.
        :type includes_headers: bool

        :param type: Specifies the encoding format (e.g., CSV) used for enrichment tables.
        :type type: ObservabilityPipelineEnrichmentTableFileEncodingType
        """
        super().__init__(kwargs)

        self_.delimiter = delimiter
        self_.includes_headers = includes_headers
        self_.type = type
class ObservabilityPipelineEnrichmentTableFileKeyItems(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_key_items_comparison import (
            ObservabilityPipelineEnrichmentTableFileKeyItemsComparison,
        )

        return {
            "column": (str,),
            "comparison": (ObservabilityPipelineEnrichmentTableFileKeyItemsComparison,),
            "field": (str,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "column": "column",
        "comparison": "comparison",
        "field": "field",
    }

    def __init__(
        self_, column: str, comparison: ObservabilityPipelineEnrichmentTableFileKeyItemsComparison, field: str, **kwargs
    ):
        """
        Defines how to map log fields to enrichment table columns during lookups.

        :param column: The ``items`` ``column``.
        :type column: str

        :param comparison: Defines how to compare key fields for enrichment table lookups.
        :type comparison: ObservabilityPipelineEnrichmentTableFileKeyItemsComparison

        :param field: The ``items`` ``field``.
        :type field: str
        """
        super().__init__(kwargs)

        self_.column = column
        self_.comparison = comparison
        self_.field = field
class ObservabilityPipelineEnrichmentTableFileSchemaItems(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_schema_items_type import (
            ObservabilityPipelineEnrichmentTableFileSchemaItemsType,
        )

        return {
            "column": (str,),
            "type": (ObservabilityPipelineEnrichmentTableFileSchemaItemsType,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "column": "column",
        "type": "type",
    }

    def __init__(self_, column: str, type: ObservabilityPipelineEnrichmentTableFileSchemaItemsType, **kwargs):
        """
        Describes a single column and its type in an enrichment table schema.

        :param column: The ``items`` ``column``.
        :type column: str

        :param type: Declares allowed data types for enrichment table columns.
        :type type: ObservabilityPipelineEnrichmentTableFileSchemaItemsType
        """
        super().__init__(kwargs)

        self_.column = column
        self_.type = type
+ :type value: str + """ + + allowed_values = { + "string", + "boolean", + "integer", + "float", + "date", + "timestamp", + } + STRING: ClassVar["ObservabilityPipelineEnrichmentTableFileSchemaItemsType"] + BOOLEAN: ClassVar["ObservabilityPipelineEnrichmentTableFileSchemaItemsType"] + INTEGER: ClassVar["ObservabilityPipelineEnrichmentTableFileSchemaItemsType"] + FLOAT: ClassVar["ObservabilityPipelineEnrichmentTableFileSchemaItemsType"] + DATE: ClassVar["ObservabilityPipelineEnrichmentTableFileSchemaItemsType"] + TIMESTAMP: ClassVar["ObservabilityPipelineEnrichmentTableFileSchemaItemsType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineEnrichmentTableFileSchemaItemsType.STRING = ( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType("string") +) +ObservabilityPipelineEnrichmentTableFileSchemaItemsType.BOOLEAN = ( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType("boolean") +) +ObservabilityPipelineEnrichmentTableFileSchemaItemsType.INTEGER = ( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType("integer") +) +ObservabilityPipelineEnrichmentTableFileSchemaItemsType.FLOAT = ObservabilityPipelineEnrichmentTableFileSchemaItemsType( + "float" +) +ObservabilityPipelineEnrichmentTableFileSchemaItemsType.DATE = ObservabilityPipelineEnrichmentTableFileSchemaItemsType( + "date" +) +ObservabilityPipelineEnrichmentTableFileSchemaItemsType.TIMESTAMP = ( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType("timestamp") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_geo_ip.py b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_geo_ip.py new file mode 100644 index 0000000000..665e21464c --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_enrichment_table_geo_ip.py @@ -0,0 +1,45 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
class ObservabilityPipelineEnrichmentTableGeoIp(ModelNormal):
    @cached_property
    def openapi_types(_):
        # All three properties are plain strings; no cross-model imports needed.
        return {
            "key_field": (str,),
            "locale": (str,),
            "path": (str,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "key_field": "key_field",
        "locale": "locale",
        "path": "path",
    }

    def __init__(self_, key_field: str, locale: str, path: str, **kwargs):
        """
        Uses a GeoIP database to enrich logs based on an IP field.

        :param key_field: Path to the IP field in the log.
        :type key_field: str

        :param locale: Locale used to resolve geographical names.
        :type locale: str

        :param path: Path to the GeoIP database file.
        :type path: str
        """
        super().__init__(kwargs)

        self_.key_field = key_field
        self_.locale = locale
        self_.path = path
class ObservabilityPipelineEnrichmentTableProcessor(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file import (
            ObservabilityPipelineEnrichmentTableFile,
        )
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import (
            ObservabilityPipelineEnrichmentTableGeoIp,
        )
        from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import (
            ObservabilityPipelineEnrichmentTableProcessorType,
        )

        return {
            "file": (ObservabilityPipelineEnrichmentTableFile,),
            "geoip": (ObservabilityPipelineEnrichmentTableGeoIp,),
            "id": (str,),
            "include": (str,),
            "inputs": ([str],),
            "target": (str,),
            "type": (ObservabilityPipelineEnrichmentTableProcessorType,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "file": "file",
        "geoip": "geoip",
        "id": "id",
        "include": "include",
        "inputs": "inputs",
        "target": "target",
        "type": "type",
    }

    def __init__(
        self_,
        id: str,
        include: str,
        inputs: List[str],
        target: str,
        type: ObservabilityPipelineEnrichmentTableProcessorType,
        file: Union[ObservabilityPipelineEnrichmentTableFile, UnsetType] = unset,
        geoip: Union[ObservabilityPipelineEnrichmentTableGeoIp, UnsetType] = unset,
        **kwargs,
    ):
        """
        The ``enrichment_table`` processor enriches logs using a static CSV file or GeoIP database.

        :param file: Defines a static enrichment table loaded from a CSV file.
        :type file: ObservabilityPipelineEnrichmentTableFile, optional

        :param geoip: Uses a GeoIP database to enrich logs based on an IP field.
        :type geoip: ObservabilityPipelineEnrichmentTableGeoIp, optional

        :param id: The unique identifier for this processor.
        :type id: str

        :param include: A Datadog search query used to determine which logs this processor targets.
        :type include: str

        :param inputs: A list of component IDs whose output is used as the input for this processor.
        :type inputs: [str]

        :param target: Path where enrichment results should be stored in the log.
        :type target: str

        :param type: The processor type. The value should always be ``enrichment_table``.
        :type type: ObservabilityPipelineEnrichmentTableProcessorType
        """
        # Optional properties are only serialized when explicitly provided
        # (unset sentinels are filtered out here rather than stored as None).
        if file is not unset:
            kwargs["file"] = file
        if geoip is not unset:
            kwargs["geoip"] = geoip
        super().__init__(kwargs)

        self_.id = id
        self_.include = include
        self_.inputs = inputs
        self_.target = target
        self_.type = type
class ObservabilityPipelineEnrichmentTableProcessorType(ModelSimple):
    """
    The processor type. The value should always be `enrichment_table`.

    :param value: If omitted defaults to "enrichment_table". Must be one of ["enrichment_table"].
    :type value: str
    """

    # Single-value enum: the OpenAPI spec fixes this discriminator to "enrichment_table".
    allowed_values = {
        "enrichment_table",
    }
    # Singleton member, assigned after the class body below.
    ENRICHMENT_TABLE: ClassVar["ObservabilityPipelineEnrichmentTableProcessorType"]

    @cached_property
    def openapi_types(_):
        # The wrapped enum value is always a plain string.
        return {
            "value": (str,),
        }


ObservabilityPipelineEnrichmentTableProcessorType.ENRICHMENT_TABLE = ObservabilityPipelineEnrichmentTableProcessorType(
    "enrichment_table"
)
class ObservabilityPipelineFluentBitSource(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily inside the property (presumably to avoid circular
        # imports between generated model modules — TODO confirm).
        from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls
        from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source_type import (
            ObservabilityPipelineFluentBitSourceType,
        )

        return {
            "id": (str,),
            "tls": (ObservabilityPipelineTls,),
            "type": (ObservabilityPipelineFluentBitSourceType,),
        }

    # Maps Python attribute names to their JSON wire names (identical here).
    attribute_map = {
        "id": "id",
        "tls": "tls",
        "type": "type",
    }

    def __init__(
        self_,
        id: str,
        type: ObservabilityPipelineFluentBitSourceType,
        tls: Union[ObservabilityPipelineTls, UnsetType] = unset,
        **kwargs,
    ):
        """
        The ``fluent_bit`` source ingests logs from Fluent Bit.

        :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components).
        :type id: str

        :param tls: Configuration for enabling TLS encryption between the pipeline component and external services.
        :type tls: ObservabilityPipelineTls, optional

        :param type: The source type. The value should always be ``fluent_bit``.
        :type type: ObservabilityPipelineFluentBitSourceType
        """
        # The optional TLS block is only serialized when explicitly provided
        # (the unset sentinel is filtered out rather than stored as None).
        if tls is not unset:
            kwargs["tls"] = tls
        super().__init__(kwargs)

        self_.id = id
        self_.type = type
+# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_fluentd_source_type import ( + ObservabilityPipelineFluentdSourceType, + ) + + +class ObservabilityPipelineFluentdSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_fluentd_source_type import ( + ObservabilityPipelineFluentdSourceType, + ) + + return { + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineFluentdSourceType,), + } + + attribute_map = { + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineFluentdSourceType, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``fluentd`` source ingests logs from a Fluentd-compatible service. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be `fluentd. 
+ :type type: ObservabilityPipelineFluentdSourceType + """ + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source_type.py new file mode 100644 index 0000000000..5a78fba80f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_fluentd_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineFluentdSourceType(ModelSimple): + """ + The source type. The value should always be `fluentd. + + :param value: If omitted defaults to "fluentd". Must be one of ["fluentd"]. + :type value: str + """ + + allowed_values = { + "fluentd", + } + FLUENTD: ClassVar["ObservabilityPipelineFluentdSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineFluentdSourceType.FLUENTD = ObservabilityPipelineFluentdSourceType("fluentd") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_gcp_auth.py b/src/datadog_api_client/v2/model/observability_pipeline_gcp_auth.py new file mode 100644 index 0000000000..2b2fe04481 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_gcp_auth.py @@ -0,0 +1,33 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineGcpAuth(ModelNormal): + @cached_property + def openapi_types(_): + return { + "credentials_file": (str,), + } + + attribute_map = { + "credentials_file": "credentials_file", + } + + def __init__(self_, credentials_file: str, **kwargs): + """ + GCP credentials used to authenticate with Google Cloud Storage. + + :param credentials_file: Path to the GCP service account key file. + :type credentials_file: str + """ + super().__init__(kwargs) + + self_.credentials_file = credentials_file diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py new file mode 100644 index 0000000000..b0eee92908 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor.py @@ -0,0 +1,81 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_generated_metric import ObservabilityPipelineGeneratedMetric + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor_type import ( + ObservabilityPipelineGenerateMetricsProcessorType, + ) + + +class ObservabilityPipelineGenerateMetricsProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_generated_metric import ( + ObservabilityPipelineGeneratedMetric, + ) + from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor_type import ( + ObservabilityPipelineGenerateMetricsProcessorType, + ) + + return { + "id": (str,), + "include": (str,), + "inputs": ([str],), + "metrics": ([ObservabilityPipelineGeneratedMetric],), + "type": (ObservabilityPipelineGenerateMetricsProcessorType,), + } + + attribute_map = { + "id": "id", + "include": "include", + "inputs": "inputs", + "metrics": "metrics", + "type": "type", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + metrics: List[ObservabilityPipelineGeneratedMetric], + type: ObservabilityPipelineGenerateMetricsProcessorType, + **kwargs, + ): + """ + The ``generate_datadog_metrics`` processor creates custom metrics from logs and sends them to Datadog. + Metrics can be counters, gauges, or distributions and optionally grouped by log fields. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline. + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this processor. 
+ :type inputs: [str] + + :param metrics: Configuration for generating individual metrics. + :type metrics: [ObservabilityPipelineGeneratedMetric] + + :param type: The processor type. Always ``generate_datadog_metrics``. + :type type: ObservabilityPipelineGenerateMetricsProcessorType + """ + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.metrics = metrics + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor_type.py new file mode 100644 index 0000000000..7b4ad58f75 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generate_metrics_processor_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGenerateMetricsProcessorType(ModelSimple): + """ + The processor type. Always `generate_datadog_metrics`. + + :param value: If omitted defaults to "generate_datadog_metrics". Must be one of ["generate_datadog_metrics"]. 
+ :type value: str + """ + + allowed_values = { + "generate_datadog_metrics", + } + GENERATE_DATADOG_METRICS: ClassVar["ObservabilityPipelineGenerateMetricsProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineGenerateMetricsProcessorType.GENERATE_DATADOG_METRICS = ( + ObservabilityPipelineGenerateMetricsProcessorType("generate_datadog_metrics") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generated_metric.py b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric.py new file mode 100644 index 0000000000..08a43d374b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric.py @@ -0,0 +1,92 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_generated_metric_metric_type import ( + ObservabilityPipelineGeneratedMetricMetricType, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one import ( + ObservabilityPipelineGeneratedMetricIncrementByOne, + ) + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field import ( + ObservabilityPipelineGeneratedMetricIncrementByField, + ) + + +class ObservabilityPipelineGeneratedMetric(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_generated_metric_metric_type import ( + 
ObservabilityPipelineGeneratedMetricMetricType, + ) + from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue + + return { + "group_by": ([str],), + "include": (str,), + "metric_type": (ObservabilityPipelineGeneratedMetricMetricType,), + "name": (str,), + "value": (ObservabilityPipelineMetricValue,), + } + + attribute_map = { + "group_by": "group_by", + "include": "include", + "metric_type": "metric_type", + "name": "name", + "value": "value", + } + + def __init__( + self_, + include: str, + metric_type: ObservabilityPipelineGeneratedMetricMetricType, + name: str, + value: Union[ + ObservabilityPipelineMetricValue, + ObservabilityPipelineGeneratedMetricIncrementByOne, + ObservabilityPipelineGeneratedMetricIncrementByField, + ], + group_by: Union[List[str], UnsetType] = unset, + **kwargs, + ): + """ + Defines a log-based custom metric, including its name, type, filter, value computation strategy, + and optional grouping fields. + + :param group_by: Optional fields used to group the metric series. + :type group_by: [str], optional + + :param include: Datadog filter query to match logs for metric generation. + :type include: str + + :param metric_type: Type of metric to create. + :type metric_type: ObservabilityPipelineGeneratedMetricMetricType + + :param name: Name of the custom metric to be created. + :type name: str + + :param value: Specifies how the value of the generated metric is computed. 
+ :type value: ObservabilityPipelineMetricValue + """ + if group_by is not unset: + kwargs["group_by"] = group_by + super().__init__(kwargs) + + self_.include = include + self_.metric_type = metric_type + self_.name = name + self_.value = value diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_field.py b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_field.py new file mode 100644 index 0000000000..590a5a8723 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_field.py @@ -0,0 +1,50 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field_strategy import ( + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy, + ) + + +class ObservabilityPipelineGeneratedMetricIncrementByField(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field_strategy import ( + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy, + ) + + return { + "field": (str,), + "strategy": (ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy,), + } + + attribute_map = { + "field": "field", + "strategy": "strategy", + } + + def __init__(self_, field: str, strategy: ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy, **kwargs): + """ + Strategy that increments a generated metric based on the value of a log field. 
+ + :param field: Name of the log field containing the numeric value to increment the metric by. + :type field: str + + :param strategy: Uses a numeric field in the log event as the metric increment. + :type strategy: ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy + """ + super().__init__(kwargs) + + self_.field = field + self_.strategy = strategy diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_field_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_field_strategy.py new file mode 100644 index 0000000000..2de91d35b4 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_field_strategy.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy(ModelSimple): + """ + Uses a numeric field in the log event as the metric increment. + + :param value: If omitted defaults to "increment_by_field". Must be one of ["increment_by_field"]. 
+ :type value: str + """ + + allowed_values = { + "increment_by_field", + } + INCREMENT_BY_FIELD: ClassVar["ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy.INCREMENT_BY_FIELD = ( + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy("increment_by_field") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_one.py b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_one.py new file mode 100644 index 0000000000..e52d180bb7 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_one.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one_strategy import ( + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy, + ) + + +class ObservabilityPipelineGeneratedMetricIncrementByOne(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one_strategy import ( + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy, + ) + + return { + "strategy": (ObservabilityPipelineGeneratedMetricIncrementByOneStrategy,), + } + + attribute_map = { + "strategy": "strategy", + } + + def __init__(self_, strategy: ObservabilityPipelineGeneratedMetricIncrementByOneStrategy, **kwargs): + """ + Strategy that increments a generated metric by one for each matching event. + + :param strategy: Increments the metric by 1 for each matching event. + :type strategy: ObservabilityPipelineGeneratedMetricIncrementByOneStrategy + """ + super().__init__(kwargs) + + self_.strategy = strategy diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_one_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_one_strategy.py new file mode 100644 index 0000000000..a5134b292a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_increment_by_one_strategy.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGeneratedMetricIncrementByOneStrategy(ModelSimple): + """ + Increments the metric by 1 for each matching event. + + :param value: If omitted defaults to "increment_by_one". Must be one of ["increment_by_one"]. + :type value: str + """ + + allowed_values = { + "increment_by_one", + } + INCREMENT_BY_ONE: ClassVar["ObservabilityPipelineGeneratedMetricIncrementByOneStrategy"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineGeneratedMetricIncrementByOneStrategy.INCREMENT_BY_ONE = ( + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy("increment_by_one") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_metric_type.py b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_metric_type.py new file mode 100644 index 0000000000..297eb804c8 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_generated_metric_metric_type.py @@ -0,0 +1,43 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGeneratedMetricMetricType(ModelSimple): + """ + Type of metric to create. + + :param value: Must be one of ["count", "gauge", "distribution"]. 
+ :type value: str + """ + + allowed_values = { + "count", + "gauge", + "distribution", + } + COUNT: ClassVar["ObservabilityPipelineGeneratedMetricMetricType"] + GAUGE: ClassVar["ObservabilityPipelineGeneratedMetricMetricType"] + DISTRIBUTION: ClassVar["ObservabilityPipelineGeneratedMetricMetricType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineGeneratedMetricMetricType.COUNT = ObservabilityPipelineGeneratedMetricMetricType("count") +ObservabilityPipelineGeneratedMetricMetricType.GAUGE = ObservabilityPipelineGeneratedMetricMetricType("gauge") +ObservabilityPipelineGeneratedMetricMetricType.DISTRIBUTION = ObservabilityPipelineGeneratedMetricMetricType( + "distribution" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py new file mode 100644 index 0000000000..cdfbc1b816 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination.py @@ -0,0 +1,102 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( + ObservabilityPipelineGoogleChronicleDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_type import ( + ObservabilityPipelineGoogleChronicleDestinationType, + ) + + +class ObservabilityPipelineGoogleChronicleDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( + ObservabilityPipelineGoogleChronicleDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_type import ( + ObservabilityPipelineGoogleChronicleDestinationType, + ) + + return { + "auth": (ObservabilityPipelineGcpAuth,), + "customer_id": (str,), + "encoding": (ObservabilityPipelineGoogleChronicleDestinationEncoding,), + "id": (str,), + "inputs": ([str],), + "log_type": (str,), + "type": (ObservabilityPipelineGoogleChronicleDestinationType,), + } + + attribute_map = { + "auth": "auth", + "customer_id": "customer_id", + "encoding": "encoding", + "id": "id", + "inputs": "inputs", + "log_type": "log_type", + "type": "type", + } + + def __init__( + self_, + auth: ObservabilityPipelineGcpAuth, + customer_id: str, + id: str, + inputs: List[str], + type: ObservabilityPipelineGoogleChronicleDestinationType, + encoding: Union[ObservabilityPipelineGoogleChronicleDestinationEncoding, UnsetType] = unset, + log_type: Union[str, UnsetType] = unset, + **kwargs, + ): + """ 
+ The ``google_chronicle`` destination sends logs to Google Chronicle. + + :param auth: GCP credentials used to authenticate with Google Cloud Storage. + :type auth: ObservabilityPipelineGcpAuth + + :param customer_id: The Google Chronicle customer ID. + :type customer_id: str + + :param encoding: The encoding format for the logs sent to Chronicle. + :type encoding: ObservabilityPipelineGoogleChronicleDestinationEncoding, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param log_type: The log type metadata associated with the Chronicle destination. + :type log_type: str, optional + + :param type: The destination type. The value should always be ``google_chronicle``. + :type type: ObservabilityPipelineGoogleChronicleDestinationType + """ + if encoding is not unset: + kwargs["encoding"] = encoding + if log_type is not unset: + kwargs["log_type"] = log_type + super().__init__(kwargs) + + self_.auth = auth + self_.customer_id = customer_id + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination_encoding.py new file mode 100644 index 0000000000..7dca6ae4cb --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination_encoding.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGoogleChronicleDestinationEncoding(ModelSimple): + """ + The encoding format for the logs sent to Chronicle. + + :param value: Must be one of ["json", "raw_message"]. + :type value: str + """ + + allowed_values = { + "json", + "raw_message", + } + JSON: ClassVar["ObservabilityPipelineGoogleChronicleDestinationEncoding"] + RAW_MESSAGE: ClassVar["ObservabilityPipelineGoogleChronicleDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineGoogleChronicleDestinationEncoding.JSON = ObservabilityPipelineGoogleChronicleDestinationEncoding( + "json" +) +ObservabilityPipelineGoogleChronicleDestinationEncoding.RAW_MESSAGE = ( + ObservabilityPipelineGoogleChronicleDestinationEncoding("raw_message") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination_type.py new file mode 100644 index 0000000000..b18aec6d32 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_chronicle_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGoogleChronicleDestinationType(ModelSimple): + """ + The destination type. The value should always be `google_chronicle`. + + :param value: If omitted defaults to "google_chronicle". Must be one of ["google_chronicle"]. 
+ :type value: str + """ + + allowed_values = { + "google_chronicle", + } + GOOGLE_CHRONICLE: ClassVar["ObservabilityPipelineGoogleChronicleDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineGoogleChronicleDestinationType.GOOGLE_CHRONICLE = ( + ObservabilityPipelineGoogleChronicleDestinationType("google_chronicle") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py new file mode 100644 index 0000000000..0103d477e2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination.py @@ -0,0 +1,124 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_acl import ( + ObservabilityPipelineGoogleCloudStorageDestinationAcl, + ) + from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, + ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_type import ( + ObservabilityPipelineGoogleCloudStorageDestinationType, + ) + + +class ObservabilityPipelineGoogleCloudStorageDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_acl import ( + ObservabilityPipelineGoogleCloudStorageDestinationAcl, + ) + from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, + ) + from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_type import ( + ObservabilityPipelineGoogleCloudStorageDestinationType, + ) + + return { + "acl": (ObservabilityPipelineGoogleCloudStorageDestinationAcl,), + "auth": (ObservabilityPipelineGcpAuth,), + "bucket": (str,), + "id": (str,), + "inputs": ([str],), + "key_prefix": 
(str,), + "metadata": ([ObservabilityPipelineMetadataEntry],), + "storage_class": (ObservabilityPipelineGoogleCloudStorageDestinationStorageClass,), + "type": (ObservabilityPipelineGoogleCloudStorageDestinationType,), + } + + attribute_map = { + "acl": "acl", + "auth": "auth", + "bucket": "bucket", + "id": "id", + "inputs": "inputs", + "key_prefix": "key_prefix", + "metadata": "metadata", + "storage_class": "storage_class", + "type": "type", + } + + def __init__( + self_, + acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl, + auth: ObservabilityPipelineGcpAuth, + bucket: str, + id: str, + inputs: List[str], + metadata: List[ObservabilityPipelineMetadataEntry], + storage_class: ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, + type: ObservabilityPipelineGoogleCloudStorageDestinationType, + key_prefix: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``google_cloud_storage`` destination stores logs in a Google Cloud Storage (GCS) bucket. + It requires a bucket name, GCP authentication, and metadata fields. + + :param acl: Access control list setting for objects written to the bucket. + :type acl: ObservabilityPipelineGoogleCloudStorageDestinationAcl + + :param auth: GCP credentials used to authenticate with Google Cloud Storage. + :type auth: ObservabilityPipelineGcpAuth + + :param bucket: Name of the GCS bucket. + :type bucket: str + + :param id: Unique identifier for the destination component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param key_prefix: Optional prefix for object keys within the GCS bucket. + :type key_prefix: str, optional + + :param metadata: Custom metadata key-value pairs added to each object. + :type metadata: [ObservabilityPipelineMetadataEntry] + + :param storage_class: Storage class used for objects stored in GCS. 
+ :type storage_class: ObservabilityPipelineGoogleCloudStorageDestinationStorageClass + + :param type: The destination type. Always ``google_cloud_storage``. + :type type: ObservabilityPipelineGoogleCloudStorageDestinationType + """ + if key_prefix is not unset: + kwargs["key_prefix"] = key_prefix + super().__init__(kwargs) + + self_.acl = acl + self_.auth = auth + self_.bucket = bucket + self_.id = id + self_.inputs = inputs + self_.metadata = metadata + self_.storage_class = storage_class + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination_acl.py b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination_acl.py new file mode 100644 index 0000000000..6f24bc47bd --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_cloud_storage_destination_acl.py @@ -0,0 +1,62 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineGoogleCloudStorageDestinationAcl(ModelSimple): + """ + Access control list setting for objects written to the bucket. + + :param value: Must be one of ["private", "project-private", "public-read", "authenticated-read", "bucket-owner-read", "bucket-owner-full-control"]. 
class ObservabilityPipelineGoogleCloudStorageDestinationAcl(ModelSimple):
    """
    Access control list setting for objects written to the bucket.

    :param value: Must be one of ["private", "project-private", "public-read", "authenticated-read", "bucket-owner-read", "bucket-owner-full-control"].
    :type value: str
    """

    # Accepted wire values for this enum.
    allowed_values = {
        "private",
        "project-private",
        "public-read",
        "authenticated-read",
        "bucket-owner-read",
        "bucket-owner-full-control",
    }
    # NOTE(review): the member names below (e.g. PROJECTNOT_PRIVATE) come from
    # the code generator's "-" sanitization; they are kept as-is for
    # backward compatibility with existing callers.
    PRIVATE: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationAcl"]
    PROJECTNOT_PRIVATE: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationAcl"]
    PUBLICNOT_READ: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationAcl"]
    AUTHENTICATEDNOT_READ: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationAcl"]
    BUCKETNOT_OWNERNOT_READ: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationAcl"]
    BUCKETNOT_OWNERNOT_FULLNOT_CONTROL: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationAcl"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


# Singleton-style enum members, attached after the class exists.
_cls = ObservabilityPipelineGoogleCloudStorageDestinationAcl
_cls.PRIVATE = _cls("private")
_cls.PROJECTNOT_PRIVATE = _cls("project-private")
_cls.PUBLICNOT_READ = _cls("public-read")
_cls.AUTHENTICATEDNOT_READ = _cls("authenticated-read")
_cls.BUCKETNOT_OWNERNOT_READ = _cls("bucket-owner-read")
_cls.BUCKETNOT_OWNERNOT_FULLNOT_CONTROL = _cls("bucket-owner-full-control")
del _cls
class ObservabilityPipelineGoogleCloudStorageDestinationStorageClass(ModelSimple):
    """
    Storage class used for objects stored in GCS.

    :param value: Must be one of ["STANDARD", "NEARLINE", "COLDLINE", "ARCHIVE"].
    :type value: str
    """

    # Accepted wire values for this enum.
    allowed_values = {"STANDARD", "NEARLINE", "COLDLINE", "ARCHIVE"}

    STANDARD: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationStorageClass"]
    NEARLINE: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationStorageClass"]
    COLDLINE: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationStorageClass"]
    ARCHIVE: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationStorageClass"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


# Singleton-style enum members, attached after the class exists.
_cls = ObservabilityPipelineGoogleCloudStorageDestinationStorageClass
_cls.STANDARD = _cls("STANDARD")
_cls.NEARLINE = _cls("NEARLINE")
_cls.COLDLINE = _cls("COLDLINE")
_cls.ARCHIVE = _cls("ARCHIVE")
del _cls
class ObservabilityPipelineGoogleCloudStorageDestinationType(ModelSimple):
    """
    The destination type. Always `google_cloud_storage`.

    :param value: If omitted defaults to "google_cloud_storage". Must be one of ["google_cloud_storage"].
    :type value: str
    """

    # Single accepted wire value for this discriminator enum.
    allowed_values = {"google_cloud_storage"}

    GOOGLE_CLOUD_STORAGE: ClassVar["ObservabilityPipelineGoogleCloudStorageDestinationType"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


ObservabilityPipelineGoogleCloudStorageDestinationType.GOOGLE_CLOUD_STORAGE = (
    ObservabilityPipelineGoogleCloudStorageDestinationType("google_cloud_storage")
)
+# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type import ( + ObservabilityPipelineGooglePubSubSourceType, + ) + + +class ObservabilityPipelineGooglePubSubSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth + from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type import ( + ObservabilityPipelineGooglePubSubSourceType, + ) + + return { + "auth": (ObservabilityPipelineGcpAuth,), + "decoding": (ObservabilityPipelineDecoding,), + "id": (str,), + "project": (str,), + "subscription": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineGooglePubSubSourceType,), + } + + attribute_map = { + "auth": "auth", + "decoding": "decoding", + "id": "id", + "project": "project", + "subscription": "subscription", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + auth: ObservabilityPipelineGcpAuth, + decoding: ObservabilityPipelineDecoding, + id: str, + project: str, + subscription: str, + type: ObservabilityPipelineGooglePubSubSourceType, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The 
``google_pubsub`` source ingests logs from a Google Cloud Pub/Sub subscription. + + :param auth: GCP credentials used to authenticate with Google Cloud Storage. + :type auth: ObservabilityPipelineGcpAuth + + :param decoding: The decoding format used to interpret incoming logs. + :type decoding: ObservabilityPipelineDecoding + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param project: The GCP project ID that owns the Pub/Sub subscription. + :type project: str + + :param subscription: The Pub/Sub subscription name from which messages are consumed. + :type subscription: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``google_pubsub``. + :type type: ObservabilityPipelineGooglePubSubSourceType + """ + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.auth = auth + self_.decoding = decoding + self_.id = id + self_.project = project + self_.subscription = subscription + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source_type.py new file mode 100644 index 0000000000..bec2b92b5b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_google_pub_sub_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
class ObservabilityPipelineGooglePubSubSourceType(ModelSimple):
    """
    The source type. The value should always be `google_pubsub`.

    :param value: If omitted defaults to "google_pubsub". Must be one of ["google_pubsub"].
    :type value: str
    """

    # Single accepted wire value for this discriminator enum.
    allowed_values = {"google_pubsub"}

    GOOGLE_PUBSUB: ClassVar["ObservabilityPipelineGooglePubSubSourceType"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


ObservabilityPipelineGooglePubSubSourceType.GOOGLE_PUBSUB = ObservabilityPipelineGooglePubSubSourceType("google_pubsub")
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy import ( + ObservabilityPipelineHttpClientSourceAuthStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_http_client_source_type import ( + ObservabilityPipelineHttpClientSourceType, + ) + + +class ObservabilityPipelineHttpClientSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy import ( + ObservabilityPipelineHttpClientSourceAuthStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_http_client_source_type import ( + ObservabilityPipelineHttpClientSourceType, + ) + + return { + "auth_strategy": (ObservabilityPipelineHttpClientSourceAuthStrategy,), + "decoding": (ObservabilityPipelineDecoding,), + "id": (str,), + "scrape_interval_secs": (int,), + "scrape_timeout_secs": (int,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineHttpClientSourceType,), + } + + attribute_map = { + "auth_strategy": "auth_strategy", + "decoding": "decoding", + "id": "id", + "scrape_interval_secs": "scrape_interval_secs", + "scrape_timeout_secs": "scrape_timeout_secs", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + decoding: ObservabilityPipelineDecoding, + id: str, + type: ObservabilityPipelineHttpClientSourceType, + 
auth_strategy: Union[ObservabilityPipelineHttpClientSourceAuthStrategy, UnsetType] = unset, + scrape_interval_secs: Union[int, UnsetType] = unset, + scrape_timeout_secs: Union[int, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``http_client`` source scrapes logs from HTTP endpoints at regular intervals. + + :param auth_strategy: Optional authentication strategy for HTTP requests. + :type auth_strategy: ObservabilityPipelineHttpClientSourceAuthStrategy, optional + + :param decoding: The decoding format used to interpret incoming logs. + :type decoding: ObservabilityPipelineDecoding + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param scrape_interval_secs: The interval (in seconds) between HTTP scrape requests. + :type scrape_interval_secs: int, optional + + :param scrape_timeout_secs: The timeout (in seconds) for each scrape request. + :type scrape_timeout_secs: int, optional + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``http_client``. 
+ :type type: ObservabilityPipelineHttpClientSourceType + """ + if auth_strategy is not unset: + kwargs["auth_strategy"] = auth_strategy + if scrape_interval_secs is not unset: + kwargs["scrape_interval_secs"] = scrape_interval_secs + if scrape_timeout_secs is not unset: + kwargs["scrape_timeout_secs"] = scrape_timeout_secs + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.decoding = decoding + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py new file mode 100644 index 0000000000..d2cd326f0a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_http_client_source_auth_strategy.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineHttpClientSourceAuthStrategy(ModelSimple): + """ + Optional authentication strategy for HTTP requests. + + :param value: Must be one of ["basic", "bearer"]. 
class ObservabilityPipelineHttpClientSourceAuthStrategy(ModelSimple):
    """
    Optional authentication strategy for HTTP requests.

    :param value: Must be one of ["basic", "bearer"].
    :type value: str
    """

    # Accepted wire values for this enum.
    allowed_values = {"basic", "bearer"}

    BASIC: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"]
    BEARER: ClassVar["ObservabilityPipelineHttpClientSourceAuthStrategy"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


# Singleton-style enum members, attached after the class exists.
ObservabilityPipelineHttpClientSourceAuthStrategy.BASIC = ObservabilityPipelineHttpClientSourceAuthStrategy("basic")
ObservabilityPipelineHttpClientSourceAuthStrategy.BEARER = ObservabilityPipelineHttpClientSourceAuthStrategy("bearer")
class ObservabilityPipelineHttpClientSourceType(ModelSimple):
    """
    The source type. The value should always be `http_client`.

    :param value: If omitted defaults to "http_client". Must be one of ["http_client"].
    :type value: str
    """

    # Single accepted wire value for this discriminator enum.
    allowed_values = {"http_client"}

    HTTP_CLIENT: ClassVar["ObservabilityPipelineHttpClientSourceType"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


ObservabilityPipelineHttpClientSourceType.HTTP_CLIENT = ObservabilityPipelineHttpClientSourceType("http_client")
class ObservabilityPipelineHttpServerSource(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Lazy imports avoid circular-import errors at package load time.
        from datadog_api_client.v2.model.observability_pipeline_http_server_source_auth_strategy import (
            ObservabilityPipelineHttpServerSourceAuthStrategy,
        )
        from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding
        from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls
        from datadog_api_client.v2.model.observability_pipeline_http_server_source_type import (
            ObservabilityPipelineHttpServerSourceType,
        )

        return {
            "auth_strategy": (ObservabilityPipelineHttpServerSourceAuthStrategy,),
            "decoding": (ObservabilityPipelineDecoding,),
            "id": (str,),
            "tls": (ObservabilityPipelineTls,),
            "type": (ObservabilityPipelineHttpServerSourceType,),
        }

    # Python attribute name -> JSON field name (identical here).
    attribute_map = dict(
        auth_strategy="auth_strategy",
        decoding="decoding",
        id="id",
        tls="tls",
        type="type",
    )

    def __init__(
        self_,
        auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy,
        decoding: ObservabilityPipelineDecoding,
        id: str,
        type: ObservabilityPipelineHttpServerSourceType,
        tls: Union[ObservabilityPipelineTls, UnsetType] = unset,
        **kwargs,
    ):
        """
        The ``http_server`` source collects logs over HTTP POST from external services.

        :param auth_strategy: HTTP authentication method.
        :type auth_strategy: ObservabilityPipelineHttpServerSourceAuthStrategy

        :param decoding: The decoding format used to interpret incoming logs.
        :type decoding: ObservabilityPipelineDecoding

        :param id: Unique ID for the HTTP server source.
        :type id: str

        :param tls: Configuration for enabling TLS encryption between the pipeline component and external services.
        :type tls: ObservabilityPipelineTls, optional

        :param type: The source type. The value should always be ``http_server``.
        :type type: ObservabilityPipelineHttpServerSourceType
        """
        # Only serialize `tls` when the caller supplied it.
        if tls is not unset:
            kwargs["tls"] = tls
        super().__init__(kwargs)

        self_.auth_strategy = auth_strategy
        self_.decoding = decoding
        self_.id = id
        self_.type = type
class ObservabilityPipelineHttpServerSourceAuthStrategy(ModelSimple):
    """
    HTTP authentication method.

    :param value: Must be one of ["none", "plain"].
    :type value: str
    """

    # Accepted wire values for this enum.
    allowed_values = {"none", "plain"}

    NONE: ClassVar["ObservabilityPipelineHttpServerSourceAuthStrategy"]
    PLAIN: ClassVar["ObservabilityPipelineHttpServerSourceAuthStrategy"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


# Singleton-style enum members, attached after the class exists.
ObservabilityPipelineHttpServerSourceAuthStrategy.NONE = ObservabilityPipelineHttpServerSourceAuthStrategy("none")
ObservabilityPipelineHttpServerSourceAuthStrategy.PLAIN = ObservabilityPipelineHttpServerSourceAuthStrategy("plain")
class ObservabilityPipelineHttpServerSourceType(ModelSimple):
    """
    The source type. The value should always be `http_server`.

    :param value: If omitted defaults to "http_server". Must be one of ["http_server"].
    :type value: str
    """

    # Single accepted wire value for this discriminator enum.
    allowed_values = {"http_server"}

    HTTP_SERVER: ClassVar["ObservabilityPipelineHttpServerSourceType"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


ObservabilityPipelineHttpServerSourceType.HTTP_SERVER = ObservabilityPipelineHttpServerSourceType("http_server")
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( + ObservabilityPipelineLogstashSourceType, + ) + + +class ObservabilityPipelineLogstashSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( + ObservabilityPipelineLogstashSourceType, + ) + + return { + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineLogstashSourceType,), + } + + attribute_map = { + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineLogstashSourceType, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``logstash`` source ingests logs from a Logstash forwarder. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``logstash``. 
class ObservabilityPipelineLogstashSourceType(ModelSimple):
    """
    The source type. The value should always be `logstash`.

    :param value: If omitted defaults to "logstash". Must be one of ["logstash"].
    :type value: str
    """

    # Single accepted wire value for this discriminator enum.
    allowed_values = {"logstash"}

    LOGSTASH: ClassVar["ObservabilityPipelineLogstashSourceType"]

    @cached_property
    def openapi_types(_):
        return {"value": (str,)}


ObservabilityPipelineLogstashSourceType.LOGSTASH = ObservabilityPipelineLogstashSourceType("logstash")
class ObservabilityPipelineMetadataEntry(ModelNormal):
    @cached_property
    def openapi_types(_):
        return {
            "name": (str,),
            "value": (str,),
        }

    # Python attribute name -> JSON field name (identical here).
    attribute_map = dict(
        name="name",
        value="value",
    )

    def __init__(self_, name: str, value: str, **kwargs):
        """
        A custom metadata entry to attach to each object uploaded to the GCS bucket.

        :param name: The metadata key.
        :type name: str

        :param value: The metadata value.
        :type value: str
        """
        super().__init__(kwargs)

        self_.name = name
        self_.value = value
+ :type field: str + """ + super().__init__(kwargs) + + @cached_property + def _composed_schemas(_): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one import ( + ObservabilityPipelineGeneratedMetricIncrementByOne, + ) + from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field import ( + ObservabilityPipelineGeneratedMetricIncrementByField, + ) + + return { + "oneOf": [ + ObservabilityPipelineGeneratedMetricIncrementByOne, + ObservabilityPipelineGeneratedMetricIncrementByField, + ], + } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py new file mode 100644 index 0000000000..ec0cb39481 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination.py @@ -0,0 +1,75 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( + ObservabilityPipelineNewRelicDestinationRegion, + ) + from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_type import ( + ObservabilityPipelineNewRelicDestinationType, + ) + + +class ObservabilityPipelineNewRelicDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( + ObservabilityPipelineNewRelicDestinationRegion, + ) + from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_type import ( + ObservabilityPipelineNewRelicDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "region": (ObservabilityPipelineNewRelicDestinationRegion,), + "type": (ObservabilityPipelineNewRelicDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "region": "region", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + region: ObservabilityPipelineNewRelicDestinationRegion, + type: ObservabilityPipelineNewRelicDestinationType, + **kwargs, + ): + """ + The ``new_relic`` destination sends logs to the New Relic platform. + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param region: The New Relic region. + :type region: ObservabilityPipelineNewRelicDestinationRegion + + :param type: The destination type. The value should always be ``new_relic``. 
+ :type type: ObservabilityPipelineNewRelicDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.region = region + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination_region.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination_region.py new file mode 100644 index 0000000000..e1ab37cea3 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination_region.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineNewRelicDestinationRegion(ModelSimple): + """ + The New Relic region. + + :param value: Must be one of ["us", "eu"]. 
+ :type value: str + """ + + allowed_values = { + "us", + "eu", + } + US: ClassVar["ObservabilityPipelineNewRelicDestinationRegion"] + EU: ClassVar["ObservabilityPipelineNewRelicDestinationRegion"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineNewRelicDestinationRegion.US = ObservabilityPipelineNewRelicDestinationRegion("us") +ObservabilityPipelineNewRelicDestinationRegion.EU = ObservabilityPipelineNewRelicDestinationRegion("eu") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination_type.py new file mode 100644 index 0000000000..54fb38e81e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_new_relic_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineNewRelicDestinationType(ModelSimple): + """ + The destination type. The value should always be `new_relic`. + + :param value: If omitted defaults to "new_relic". Must be one of ["new_relic"]. 
+ :type value: str + """ + + allowed_values = { + "new_relic", + } + NEW_RELIC: ClassVar["ObservabilityPipelineNewRelicDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineNewRelicDestinationType.NEW_RELIC = ObservabilityPipelineNewRelicDestinationType("new_relic") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py new file mode 100644 index 0000000000..7917f1c81e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor.py @@ -0,0 +1,82 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping import ( + ObservabilityPipelineOcsfMapperProcessorMapping, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_type import ( + ObservabilityPipelineOcsfMapperProcessorType, + ) + + +class ObservabilityPipelineOcsfMapperProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping import ( + ObservabilityPipelineOcsfMapperProcessorMapping, + ) + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_type import ( + ObservabilityPipelineOcsfMapperProcessorType, + ) + + return { + "id": (str,), + "include": (str,), + "inputs": ([str],), + "mappings": ([ObservabilityPipelineOcsfMapperProcessorMapping],), + "type": 
(ObservabilityPipelineOcsfMapperProcessorType,), + } + + attribute_map = { + "id": "id", + "include": "include", + "inputs": "inputs", + "mappings": "mappings", + "type": "type", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + mappings: List[ObservabilityPipelineOcsfMapperProcessorMapping], + type: ObservabilityPipelineOcsfMapperProcessorType, + **kwargs, + ): + """ + The ``ocsf_mapper`` processor transforms logs into the OCSF schema using a predefined mapping configuration. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline. + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this processor. + :type inputs: [str] + + :param mappings: A list of mapping rules to convert events to the OCSF format. + :type mappings: [ObservabilityPipelineOcsfMapperProcessorMapping] + + :param type: The processor type. The value should always be ``ocsf_mapper``. + :type type: ObservabilityPipelineOcsfMapperProcessorType + """ + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.mappings = mappings + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_mapping.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_mapping.py new file mode 100644 index 0000000000..aec241a988 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_mapping.py @@ -0,0 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping_mapping import ( + ObservabilityPipelineOcsfMapperProcessorMappingMapping, + ) + + +class ObservabilityPipelineOcsfMapperProcessorMapping(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping_mapping import ( + ObservabilityPipelineOcsfMapperProcessorMappingMapping, + ) + + return { + "include": (str,), + "mapping": (ObservabilityPipelineOcsfMapperProcessorMappingMapping,), + } + + attribute_map = { + "include": "include", + "mapping": "mapping", + } + + def __init__( + self_, include: str, mapping: Union[ObservabilityPipelineOcsfMapperProcessorMappingMapping, str], **kwargs + ): + """ + Defines how specific events are transformed to OCSF using a mapping configuration. + + :param include: A Datadog search query used to select the logs that this mapping should apply to. + :type include: str + + :param mapping: Defines a single mapping rule for transforming logs into the OCSF schema. + :type mapping: ObservabilityPipelineOcsfMapperProcessorMappingMapping + """ + super().__init__(kwargs) + + self_.include = include + self_.mapping = mapping diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_mapping_mapping.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_mapping_mapping.py new file mode 100644 index 0000000000..680dd496f2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_mapping_mapping.py @@ -0,0 +1,33 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelComposed, + cached_property, +) + + +class ObservabilityPipelineOcsfMapperProcessorMappingMapping(ModelComposed): + def __init__(self, **kwargs): + """ + Defines a single mapping rule for transforming logs into the OCSF schema. + """ + super().__init__(kwargs) + + @cached_property + def _composed_schemas(_): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + return { + "oneOf": [ + str, + ], + } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_type.py new file mode 100644 index 0000000000..37b5ce59ed --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapper_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineOcsfMapperProcessorType(ModelSimple): + """ + The processor type. The value should always be `ocsf_mapper`. + + :param value: If omitted defaults to "ocsf_mapper". Must be one of ["ocsf_mapper"]. 
+ :type value: str + """ + + allowed_values = { + "ocsf_mapper", + } + OCSF_MAPPER: ClassVar["ObservabilityPipelineOcsfMapperProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineOcsfMapperProcessorType.OCSF_MAPPER = ObservabilityPipelineOcsfMapperProcessorType("ocsf_mapper") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapping_library.py b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapping_library.py new file mode 100644 index 0000000000..3f7c8794c0 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_ocsf_mapping_library.py @@ -0,0 +1,87 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineOcsfMappingLibrary(ModelSimple): + """ + Predefined library mappings for common log formats. + + :param value: Must be one of ["CloudTrail Account Change", "GCP Cloud Audit CreateBucket", "GCP Cloud Audit CreateSink", "GCP Cloud Audit SetIamPolicy", "GCP Cloud Audit UpdateSink", "Github Audit Log API Activity", "Google Workspace Admin Audit addPrivilege", "Microsoft 365 Defender Incident", "Microsoft 365 Defender UserLoggedIn", "Okta System Log Authentication", "Palo Alto Networks Firewall Traffic"]. 
+ :type value: str + """ + + allowed_values = { + "CloudTrail Account Change", + "GCP Cloud Audit CreateBucket", + "GCP Cloud Audit CreateSink", + "GCP Cloud Audit SetIamPolicy", + "GCP Cloud Audit UpdateSink", + "Github Audit Log API Activity", + "Google Workspace Admin Audit addPrivilege", + "Microsoft 365 Defender Incident", + "Microsoft 365 Defender UserLoggedIn", + "Okta System Log Authentication", + "Palo Alto Networks Firewall Traffic", + } + CLOUDTRAIL_ACCOUNT_CHANGE: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + GCP_CLOUD_AUDIT_CREATEBUCKET: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + GCP_CLOUD_AUDIT_CREATESINK: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + GCP_CLOUD_AUDIT_SETIAMPOLICY: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + GCP_CLOUD_AUDIT_UPDATESINK: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + GITHUB_AUDIT_LOG_API_ACTIVITY: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + GOOGLE_WORKSPACE_ADMIN_AUDIT_ADDPRIVILEGE: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + MICROSOFT_365_DEFENDER_INCIDENT: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + MICROSOFT_365_DEFENDER_USERLOGGEDIN: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + OKTA_SYSTEM_LOG_AUTHENTICATION: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC: ClassVar["ObservabilityPipelineOcsfMappingLibrary"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineOcsfMappingLibrary.CLOUDTRAIL_ACCOUNT_CHANGE = ObservabilityPipelineOcsfMappingLibrary( + "CloudTrail Account Change" +) +ObservabilityPipelineOcsfMappingLibrary.GCP_CLOUD_AUDIT_CREATEBUCKET = ObservabilityPipelineOcsfMappingLibrary( + "GCP Cloud Audit CreateBucket" +) +ObservabilityPipelineOcsfMappingLibrary.GCP_CLOUD_AUDIT_CREATESINK = ObservabilityPipelineOcsfMappingLibrary( + "GCP Cloud Audit CreateSink" +) +ObservabilityPipelineOcsfMappingLibrary.GCP_CLOUD_AUDIT_SETIAMPOLICY = 
ObservabilityPipelineOcsfMappingLibrary( + "GCP Cloud Audit SetIamPolicy" +) +ObservabilityPipelineOcsfMappingLibrary.GCP_CLOUD_AUDIT_UPDATESINK = ObservabilityPipelineOcsfMappingLibrary( + "GCP Cloud Audit UpdateSink" +) +ObservabilityPipelineOcsfMappingLibrary.GITHUB_AUDIT_LOG_API_ACTIVITY = ObservabilityPipelineOcsfMappingLibrary( + "Github Audit Log API Activity" +) +ObservabilityPipelineOcsfMappingLibrary.GOOGLE_WORKSPACE_ADMIN_AUDIT_ADDPRIVILEGE = ( + ObservabilityPipelineOcsfMappingLibrary("Google Workspace Admin Audit addPrivilege") +) +ObservabilityPipelineOcsfMappingLibrary.MICROSOFT_365_DEFENDER_INCIDENT = ObservabilityPipelineOcsfMappingLibrary( + "Microsoft 365 Defender Incident" +) +ObservabilityPipelineOcsfMappingLibrary.MICROSOFT_365_DEFENDER_USERLOGGEDIN = ObservabilityPipelineOcsfMappingLibrary( + "Microsoft 365 Defender UserLoggedIn" +) +ObservabilityPipelineOcsfMappingLibrary.OKTA_SYSTEM_LOG_AUTHENTICATION = ObservabilityPipelineOcsfMappingLibrary( + "Okta System Log Authentication" +) +ObservabilityPipelineOcsfMappingLibrary.PALO_ALTO_NETWORKS_FIREWALL_TRAFFIC = ObservabilityPipelineOcsfMappingLibrary( + "Palo Alto Networks Firewall Traffic" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py new file mode 100644 index 0000000000..d817a4b558 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination.py @@ -0,0 +1,72 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( + ObservabilityPipelineOpenSearchDestinationType, + ) + + +class ObservabilityPipelineOpenSearchDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( + ObservabilityPipelineOpenSearchDestinationType, + ) + + return { + "bulk_index": (str,), + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineOpenSearchDestinationType,), + } + + attribute_map = { + "bulk_index": "bulk_index", + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineOpenSearchDestinationType, + bulk_index: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``opensearch`` destination writes logs to an OpenSearch cluster. + + :param bulk_index: The index to write logs to. + :type bulk_index: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``opensearch``. 
+ :type type: ObservabilityPipelineOpenSearchDestinationType + """ + if bulk_index is not unset: + kwargs["bulk_index"] = bulk_index + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination_type.py new file mode 100644 index 0000000000..21551eaa7e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_open_search_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineOpenSearchDestinationType(ModelSimple): + """ + The destination type. The value should always be `opensearch`. + + :param value: If omitted defaults to "opensearch". Must be one of ["opensearch"]. + :type value: str + """ + + allowed_values = { + "opensearch", + } + OPENSEARCH: ClassVar["ObservabilityPipelineOpenSearchDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineOpenSearchDestinationType.OPENSEARCH = ObservabilityPipelineOpenSearchDestinationType("opensearch") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py new file mode 100644 index 0000000000..c7b2f65cfc --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor.py @@ -0,0 +1,92 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule import ( + ObservabilityPipelineParseGrokProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_type import ( + ObservabilityPipelineParseGrokProcessorType, + ) + + +class ObservabilityPipelineParseGrokProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule import ( + ObservabilityPipelineParseGrokProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_type import ( + ObservabilityPipelineParseGrokProcessorType, + ) + + return { + "disable_library_rules": (bool,), + "id": (str,), + "include": (str,), + "inputs": ([str],), + "rules": ([ObservabilityPipelineParseGrokProcessorRule],), + "type": (ObservabilityPipelineParseGrokProcessorType,), + } + + attribute_map = { + "disable_library_rules": "disable_library_rules", + "id": "id", + "include": "include", + "inputs": "inputs", + "rules": "rules", + "type": "type", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + rules: List[ObservabilityPipelineParseGrokProcessorRule], + type: ObservabilityPipelineParseGrokProcessorType, + disable_library_rules: Union[bool, UnsetType] = unset, + **kwargs, + ): + """ + The ``parse_grok`` processor extracts structured fields from unstructured log messages using Grok patterns. + + :param disable_library_rules: If set to ``true`` , disables the default Grok rules provided by Datadog. 
+ :type disable_library_rules: bool, optional + + :param id: A unique identifier for this processor. + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param rules: The list of Grok parsing rules. If multiple matching rules are provided, they are evaluated in order. The first successful match is applied. + :type rules: [ObservabilityPipelineParseGrokProcessorRule] + + :param type: The processor type. The value should always be ``parse_grok``. + :type type: ObservabilityPipelineParseGrokProcessorType + """ + if disable_library_rules is not unset: + kwargs["disable_library_rules"] = disable_library_rules + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.rules = rules + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule.py new file mode 100644 index 0000000000..231e75e657 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule.py @@ -0,0 +1,70 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_match_rule import ( + ObservabilityPipelineParseGrokProcessorRuleMatchRule, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_support_rule import ( + ObservabilityPipelineParseGrokProcessorRuleSupportRule, + ) + + +class ObservabilityPipelineParseGrokProcessorRule(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_match_rule import ( + ObservabilityPipelineParseGrokProcessorRuleMatchRule, + ) + from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_support_rule import ( + ObservabilityPipelineParseGrokProcessorRuleSupportRule, + ) + + return { + "match_rules": ([ObservabilityPipelineParseGrokProcessorRuleMatchRule],), + "source": (str,), + "support_rules": ([ObservabilityPipelineParseGrokProcessorRuleSupportRule],), + } + + attribute_map = { + "match_rules": "match_rules", + "source": "source", + "support_rules": "support_rules", + } + + def __init__( + self_, + match_rules: List[ObservabilityPipelineParseGrokProcessorRuleMatchRule], + source: str, + support_rules: List[ObservabilityPipelineParseGrokProcessorRuleSupportRule], + **kwargs, + ): + """ + A Grok parsing rule used in the ``parse_grok`` processor. Each rule defines how to extract structured fields + from a specific log field using Grok patterns. + + :param match_rules: A list of Grok parsing rules that define how to extract fields from the source field. + Each rule must contain a name and a valid Grok pattern. + :type match_rules: [ObservabilityPipelineParseGrokProcessorRuleMatchRule] + + :param source: The name of the field in the log event to apply the Grok rules to. 
+ :type source: str + + :param support_rules: A list of Grok helper rules that can be referenced by the parsing rules. + :type support_rules: [ObservabilityPipelineParseGrokProcessorRuleSupportRule] + """ + super().__init__(kwargs) + + self_.match_rules = match_rules + self_.source = source + self_.support_rules = support_rules diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule_match_rule.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule_match_rule.py new file mode 100644 index 0000000000..44b2458f8f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule_match_rule.py @@ -0,0 +1,40 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineParseGrokProcessorRuleMatchRule(ModelNormal): + @cached_property + def openapi_types(_): + return { + "name": (str,), + "rule": (str,), + } + + attribute_map = { + "name": "name", + "rule": "rule", + } + + def __init__(self_, name: str, rule: str, **kwargs): + """ + Defines a Grok parsing rule, which extracts structured fields from log content using named Grok patterns. + Each rule must have a unique name and a valid Datadog Grok pattern that will be applied to the source field. + + :param name: The name of the rule. + :type name: str + + :param rule: The definition of the Grok rule. 
+ :type rule: str + """ + super().__init__(kwargs) + + self_.name = name + self_.rule = rule diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule_support_rule.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule_support_rule.py new file mode 100644 index 0000000000..9cdec2e77b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_rule_support_rule.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineParseGrokProcessorRuleSupportRule(ModelNormal): + @cached_property + def openapi_types(_): + return { + "name": (str,), + "rule": (str,), + } + + attribute_map = { + "name": "name", + "rule": "rule", + } + + def __init__(self_, name: str, rule: str, **kwargs): + """ + The Grok helper rule referenced in the parsing rules. + + :param name: The name of the Grok helper rule. + :type name: str + + :param rule: The definition of the Grok helper rule. + :type rule: str + """ + super().__init__(kwargs) + + self_.name = name + self_.rule = rule diff --git a/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_type.py new file mode 100644 index 0000000000..594e0a258c --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_parse_grok_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). 
+# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineParseGrokProcessorType(ModelSimple): + """ + The processor type. The value should always be `parse_grok`. + + :param value: If omitted defaults to "parse_grok". Must be one of ["parse_grok"]. + :type value: str + """ + + allowed_values = { + "parse_grok", + } + PARSE_GROK: ClassVar["ObservabilityPipelineParseGrokProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineParseGrokProcessorType.PARSE_GROK = ObservabilityPipelineParseGrokProcessorType("parse_grok") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py index f30e26077a..aa4be5a8ca 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor.py @@ -17,6 +17,9 @@ from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import ( ObservabilityPipelineQuotaProcessorLimit, ) + from datadog_api_client.v2.model.observability_pipeline_quota_processor_overflow_action import ( + ObservabilityPipelineQuotaProcessorOverflowAction, + ) from datadog_api_client.v2.model.observability_pipeline_quota_processor_override import ( ObservabilityPipelineQuotaProcessorOverride, ) @@ -31,6 +34,9 @@ def openapi_types(_): from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit import ( ObservabilityPipelineQuotaProcessorLimit, ) + from datadog_api_client.v2.model.observability_pipeline_quota_processor_overflow_action import ( + ObservabilityPipelineQuotaProcessorOverflowAction, + ) from datadog_api_client.v2.model.observability_pipeline_quota_processor_override import ( 
ObservabilityPipelineQuotaProcessorOverride, ) @@ -46,6 +52,7 @@ def openapi_types(_): "inputs": ([str],), "limit": (ObservabilityPipelineQuotaProcessorLimit,), "name": (str,), + "overflow_action": (ObservabilityPipelineQuotaProcessorOverflowAction,), "overrides": ([ObservabilityPipelineQuotaProcessorOverride],), "partition_fields": ([str],), "type": (ObservabilityPipelineQuotaProcessorType,), @@ -59,6 +66,7 @@ def openapi_types(_): "inputs": "inputs", "limit": "limit", "name": "name", + "overflow_action": "overflow_action", "overrides": "overrides", "partition_fields": "partition_fields", "type": "type", @@ -74,6 +82,7 @@ def __init__( name: str, type: ObservabilityPipelineQuotaProcessorType, ignore_when_missing_partitions: Union[bool, UnsetType] = unset, + overflow_action: Union[ObservabilityPipelineQuotaProcessorOverflowAction, UnsetType] = unset, overrides: Union[List[ObservabilityPipelineQuotaProcessorOverride], UnsetType] = unset, partition_fields: Union[List[str], UnsetType] = unset, **kwargs, @@ -99,9 +108,16 @@ def __init__( :param limit: The maximum amount of data or number of events allowed before the quota is enforced. Can be specified in bytes or events. :type limit: ObservabilityPipelineQuotaProcessorLimit - :param name: Name for identifying the processor. + :param name: Name of the quota. :type name: str + :param overflow_action: The action to take when the quota is exceeded. Options: + + * ``drop`` : Drop the event. + * ``no_action`` : Let the event pass through. + * ``overflow_routing`` : Route to an overflow destination. + :type overflow_action: ObservabilityPipelineQuotaProcessorOverflowAction, optional + :param overrides: A list of alternate quota rules that apply to specific sets of events, identified by matching field values. Each override can define a custom limit. 
:type overrides: [ObservabilityPipelineQuotaProcessorOverride], optional @@ -113,6 +129,8 @@ def __init__( """ if ignore_when_missing_partitions is not unset: kwargs["ignore_when_missing_partitions"] = ignore_when_missing_partitions + if overflow_action is not unset: + kwargs["overflow_action"] = overflow_action if overrides is not unset: kwargs["overrides"] = overrides if partition_fields is not unset: diff --git a/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py new file mode 100644 index 0000000000..f8181e4685 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_quota_processor_overflow_action.py @@ -0,0 +1,49 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineQuotaProcessorOverflowAction(ModelSimple): + """ + The action to take when the quota is exceeded. Options: + - `drop`: Drop the event. + - `no_action`: Let the event pass through. + - `overflow_routing`: Route to an overflow destination. + + + :param value: Must be one of ["drop", "no_action", "overflow_routing"]. 
+ :type value: str + """ + + allowed_values = { + "drop", + "no_action", + "overflow_routing", + } + DROP: ClassVar["ObservabilityPipelineQuotaProcessorOverflowAction"] + NO_ACTION: ClassVar["ObservabilityPipelineQuotaProcessorOverflowAction"] + OVERFLOW_ROUTING: ClassVar["ObservabilityPipelineQuotaProcessorOverflowAction"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineQuotaProcessorOverflowAction.DROP = ObservabilityPipelineQuotaProcessorOverflowAction("drop") +ObservabilityPipelineQuotaProcessorOverflowAction.NO_ACTION = ObservabilityPipelineQuotaProcessorOverflowAction( + "no_action" +) +ObservabilityPipelineQuotaProcessorOverflowAction.OVERFLOW_ROUTING = ObservabilityPipelineQuotaProcessorOverflowAction( + "overflow_routing" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py new file mode 100644 index 0000000000..bcddc2844d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor.py @@ -0,0 +1,89 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy import ( + ObservabilityPipelineReduceProcessorMergeStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_reduce_processor_type import ( + ObservabilityPipelineReduceProcessorType, + ) + + +class ObservabilityPipelineReduceProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy import ( + ObservabilityPipelineReduceProcessorMergeStrategy, + ) + from datadog_api_client.v2.model.observability_pipeline_reduce_processor_type import ( + ObservabilityPipelineReduceProcessorType, + ) + + return { + "group_by": ([str],), + "id": (str,), + "include": (str,), + "inputs": ([str],), + "merge_strategies": ([ObservabilityPipelineReduceProcessorMergeStrategy],), + "type": (ObservabilityPipelineReduceProcessorType,), + } + + attribute_map = { + "group_by": "group_by", + "id": "id", + "include": "include", + "inputs": "inputs", + "merge_strategies": "merge_strategies", + "type": "type", + } + + def __init__( + self_, + group_by: List[str], + id: str, + include: str, + inputs: List[str], + merge_strategies: List[ObservabilityPipelineReduceProcessorMergeStrategy], + type: ObservabilityPipelineReduceProcessorType, + **kwargs, + ): + """ + The ``reduce`` processor aggregates and merges logs based on matching keys and merge strategies. + + :param group_by: A list of fields used to group log events for merging. + :type group_by: [str] + + :param id: The unique identifier for this processor. + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. 
+ :type include: str + + :param inputs: A list of component IDs whose output is used as the input for this processor. + :type inputs: [str] + + :param merge_strategies: List of merge strategies defining how values from grouped events should be combined. + :type merge_strategies: [ObservabilityPipelineReduceProcessorMergeStrategy] + + :param type: The processor type. The value should always be ``reduce``. + :type type: ObservabilityPipelineReduceProcessorType + """ + super().__init__(kwargs) + + self_.group_by = group_by + self_.id = id + self_.include = include + self_.inputs = inputs + self_.merge_strategies = merge_strategies + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_merge_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_merge_strategy.py new file mode 100644 index 0000000000..2f75411a89 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_merge_strategy.py @@ -0,0 +1,50 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy_strategy import ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy, + ) + + +class ObservabilityPipelineReduceProcessorMergeStrategy(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy_strategy import ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy, + ) + + return { + "path": (str,), + "strategy": (ObservabilityPipelineReduceProcessorMergeStrategyStrategy,), + } + + attribute_map = { + "path": "path", + "strategy": "strategy", + } + + def __init__(self_, path: str, strategy: ObservabilityPipelineReduceProcessorMergeStrategyStrategy, **kwargs): + """ + Defines how a specific field should be merged across grouped events. + + :param path: The field path in the log event. + :type path: str + + :param strategy: The merge strategy to apply. + :type strategy: ObservabilityPipelineReduceProcessorMergeStrategyStrategy + """ + super().__init__(kwargs) + + self_.path = path + self_.strategy = strategy diff --git a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_merge_strategy_strategy.py b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_merge_strategy_strategy.py new file mode 100644 index 0000000000..bda36e8a6d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_merge_strategy_strategy.py @@ -0,0 +1,92 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineReduceProcessorMergeStrategyStrategy(ModelSimple): + """ + The merge strategy to apply. + + :param value: Must be one of ["discard", "retain", "sum", "max", "min", "array", "concat", "concat_newline", "concat_raw", "shortest_array", "longest_array", "flat_unique"]. + :type value: str + """ + + allowed_values = { + "discard", + "retain", + "sum", + "max", + "min", + "array", + "concat", + "concat_newline", + "concat_raw", + "shortest_array", + "longest_array", + "flat_unique", + } + DISCARD: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + RETAIN: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + SUM: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + MAX: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + MIN: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + ARRAY: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + CONCAT: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + CONCAT_NEWLINE: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + CONCAT_RAW: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + SHORTEST_ARRAY: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + LONGEST_ARRAY: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + FLAT_UNIQUE: ClassVar["ObservabilityPipelineReduceProcessorMergeStrategyStrategy"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.DISCARD = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("discard") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.RETAIN = ( + 
ObservabilityPipelineReduceProcessorMergeStrategyStrategy("retain") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.SUM = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("sum") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.MAX = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("max") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.MIN = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("min") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.ARRAY = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("array") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.CONCAT = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("concat") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.CONCAT_NEWLINE = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("concat_newline") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.CONCAT_RAW = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("concat_raw") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.SHORTEST_ARRAY = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("shortest_array") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.LONGEST_ARRAY = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("longest_array") +) +ObservabilityPipelineReduceProcessorMergeStrategyStrategy.FLAT_UNIQUE = ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy("flat_unique") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_type.py new file mode 100644 index 0000000000..74fbe18c7f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_reduce_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineReduceProcessorType(ModelSimple): + """ + The processor type. The value should always be `reduce`. + + :param value: If omitted defaults to "reduce". Must be one of ["reduce"]. + :type value: str + """ + + allowed_values = { + "reduce", + } + REDUCE: ClassVar["ObservabilityPipelineReduceProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineReduceProcessorType.REDUCE = ObservabilityPipelineReduceProcessorType("reduce") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py new file mode 100644 index 0000000000..486442d4aa --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination.py @@ -0,0 +1,88 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( + ObservabilityPipelineRsyslogDestinationType, + ) + + +class ObservabilityPipelineRsyslogDestination(ModelNormal): + validations = { + "keepalive": { + "inclusive_minimum": 0, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( + ObservabilityPipelineRsyslogDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "keepalive": (int,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineRsyslogDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "keepalive": "keepalive", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineRsyslogDestinationType, + keepalive: Union[int, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``rsyslog`` destination forwards logs to an external ``rsyslog`` server over TCP or UDP using the syslog protocol. + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param keepalive: Optional socket keepalive duration in milliseconds. + :type keepalive: int, optional + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. 
+ :type tls: ObservabilityPipelineTls, optional + + :param type: The destination type. The value should always be ``rsyslog``. + :type type: ObservabilityPipelineRsyslogDestinationType + """ + if keepalive is not unset: + kwargs["keepalive"] = keepalive + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination_type.py new file mode 100644 index 0000000000..77bf9956e8 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineRsyslogDestinationType(ModelSimple): + """ + The destination type. The value should always be `rsyslog`. + + :param value: If omitted defaults to "rsyslog". Must be one of ["rsyslog"]. 
+ :type value: str + """ + + allowed_values = { + "rsyslog", + } + RSYSLOG: ClassVar["ObservabilityPipelineRsyslogDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineRsyslogDestinationType.RSYSLOG = ObservabilityPipelineRsyslogDestinationType("rsyslog") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py new file mode 100644 index 0000000000..90c9a6dc6b --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source.py @@ -0,0 +1,80 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_syslog_source_mode import ( + ObservabilityPipelineSyslogSourceMode, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source_type import ( + ObservabilityPipelineRsyslogSourceType, + ) + + +class ObservabilityPipelineRsyslogSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_syslog_source_mode import ( + ObservabilityPipelineSyslogSourceMode, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_rsyslog_source_type import ( + ObservabilityPipelineRsyslogSourceType, + ) + + return { + "id": (str,), + "mode": (ObservabilityPipelineSyslogSourceMode,), + "tls": 
(ObservabilityPipelineTls,), + "type": (ObservabilityPipelineRsyslogSourceType,), + } + + attribute_map = { + "id": "id", + "mode": "mode", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + mode: ObservabilityPipelineSyslogSourceMode, + type: ObservabilityPipelineRsyslogSourceType, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``rsyslog`` source listens for logs over TCP or UDP from an ``rsyslog`` server using the syslog protocol. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param mode: Protocol used by the syslog source to receive messages. + :type mode: ObservabilityPipelineSyslogSourceMode + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``rsyslog``. + :type type: ObservabilityPipelineRsyslogSourceType + """ + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.mode = mode + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source_type.py new file mode 100644 index 0000000000..0263bcebf1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_rsyslog_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineRsyslogSourceType(ModelSimple): + """ + The source type. The value should always be `rsyslog`. + + :param value: If omitted defaults to "rsyslog". Must be one of ["rsyslog"]. + :type value: str + """ + + allowed_values = { + "rsyslog", + } + RSYSLOG: ClassVar["ObservabilityPipelineRsyslogSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineRsyslogSourceType.RSYSLOG = ObservabilityPipelineRsyslogSourceType("rsyslog") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py new file mode 100644 index 0000000000..e6f16ce99d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor.py @@ -0,0 +1,93 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sample_processor_type import ( + ObservabilityPipelineSampleProcessorType, + ) + + +class ObservabilityPipelineSampleProcessor(ModelNormal): + validations = { + "rate": { + "inclusive_minimum": 1, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sample_processor_type import ( + ObservabilityPipelineSampleProcessorType, + ) + + return { + "id": (str,), + "include": (str,), + "inputs": ([str],), + "percentage": (float,), + "rate": (int,), + "type": (ObservabilityPipelineSampleProcessorType,), + } + + attribute_map = { + "id": "id", + "include": "include", + "inputs": "inputs", + "percentage": "percentage", + "rate": "rate", + "type": "type", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + type: ObservabilityPipelineSampleProcessorType, + percentage: Union[float, UnsetType] = unset, + rate: Union[int, UnsetType] = unset, + **kwargs, + ): + """ + The ``sample`` processor allows probabilistic sampling of logs at a fixed rate. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the ``input`` to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param percentage: The percentage of logs to sample. + :type percentage: float, optional + + :param rate: Number of events to sample (1 in N). + :type rate: int, optional + + :param type: The processor type. The value should always be ``sample``. 
+ :type type: ObservabilityPipelineSampleProcessorType + """ + if percentage is not unset: + kwargs["percentage"] = percentage + if rate is not unset: + kwargs["rate"] = rate + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sample_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor_type.py new file mode 100644 index 0000000000..851e78ae53 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sample_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSampleProcessorType(ModelSimple): + """ + The processor type. The value should always be `sample`. + + :param value: If omitted defaults to "sample". Must be one of ["sample"]. 
+ :type value: str + """ + + allowed_values = { + "sample", + } + SAMPLE: ClassVar["ObservabilityPipelineSampleProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSampleProcessorType.SAMPLE = ObservabilityPipelineSampleProcessorType("sample") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py new file mode 100644 index 0000000000..9d78e97932 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor.py @@ -0,0 +1,82 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_rule import ( + ObservabilityPipelineSensitiveDataScannerProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_type import ( + ObservabilityPipelineSensitiveDataScannerProcessorType, + ) + + +class ObservabilityPipelineSensitiveDataScannerProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_rule import ( + ObservabilityPipelineSensitiveDataScannerProcessorRule, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_type import ( + ObservabilityPipelineSensitiveDataScannerProcessorType, + ) + + return { + "id": (str,), + "include": (str,), + "inputs": ([str],), + "rules": 
([ObservabilityPipelineSensitiveDataScannerProcessorRule],), + "type": (ObservabilityPipelineSensitiveDataScannerProcessorType,), + } + + attribute_map = { + "id": "id", + "include": "include", + "inputs": "inputs", + "rules": "rules", + "type": "type", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + rules: List[ObservabilityPipelineSensitiveDataScannerProcessorRule], + type: ObservabilityPipelineSensitiveDataScannerProcessorType, + **kwargs, + ): + """ + The ``sensitive_data_scanner`` processor detects and optionally redacts sensitive data in log events. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param rules: A list of rules for identifying and acting on sensitive data patterns. + :type rules: [ObservabilityPipelineSensitiveDataScannerProcessorRule] + + :param type: The processor type. The value should always be ``sensitive_data_scanner``. + :type type: ObservabilityPipelineSensitiveDataScannerProcessorType + """ + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.rules = rules + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action.py new file mode 100644 index 0000000000..49cf39280e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action.py @@ -0,0 +1,51 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelComposed, + cached_property, +) + + +class ObservabilityPipelineSensitiveDataScannerProcessorAction(ModelComposed): + def __init__(self, **kwargs): + """ + Defines what action to take when sensitive data is matched. + + :param action: Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility. + :type action: ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction + + :param options: Configuration for fully redacting sensitive data. + :type options: ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions + """ + super().__init__(kwargs) + + @cached_property + def _composed_schemas(_): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. 
If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact, + ) + + return { + "oneOf": [ + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, + ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact, + ], + } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_hash.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_hash.py new file mode 100644 index 0000000000..b076cda3ca --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_hash.py @@ -0,0 +1,58 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
from __future__ import annotations

from typing import Union, TYPE_CHECKING

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
    unset,
    UnsetType,
)


if TYPE_CHECKING:
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash_action import (
        ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction,
    )


class ObservabilityPipelineSensitiveDataScannerProcessorActionHash(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Imported lazily (inside the property, not at module level) to avoid
        # circular imports between generated model modules.
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash_action import (
            ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction,
        )

        return {
            "action": (ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction,),
            "options": (dict,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "action": "action",
        "options": "options",
    }

    def __init__(
        self_,
        action: ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction,
        options: Union[dict, UnsetType] = unset,
        **kwargs,
    ):
        """
        Configuration for hashing matched sensitive values.

        :param action: Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content.
        :type action: ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction

        :param options: Hashing options. The schema declares no fixed keys for
            this object (it is typed as a free-form ``dict`` in ``openapi_types``).
        :type options: dict, optional
        """
        # Optional attributes are only forwarded to the model machinery when
        # explicitly supplied, so "unset" and "set to a value" stay distinct.
        if options is not unset:
            kwargs["options"] = options
        super().__init__(kwargs)

        self_.action = action


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_hash_action.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelSimple,
    cached_property,
)

from typing import ClassVar


class ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction(ModelSimple):
    """
    Action type that replaces the matched sensitive data with a hashed representation, preserving structure while securing content.

    :param value: If omitted defaults to "hash". Must be one of ["hash"].
    :type value: str
    """

    allowed_values = {
        "hash",
    }
    # Enum-style convenience constant; assigned after the class body below.
    HASH: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction"]

    @cached_property
    def openapi_types(_):
        return {
            "value": (str,),
        }


ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction.HASH = (
    ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction("hash")
)


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_partial_redact.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import TYPE_CHECKING

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


if TYPE_CHECKING:
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_action import (
        ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction,
    )
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options import (
        ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions,
    )


class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Lazy imports avoid circular dependencies between generated modules.
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_action import (
            ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction,
        )
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options import (
            ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions,
        )

        return {
            "action": (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction,),
            "options": (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "action": "action",
        "options": "options",
    }

    def __init__(
        self_,
        action: ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction,
        options: ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions,
        **kwargs,
    ):
        """
        Configuration for partially redacting matched sensitive data.

        :param action: Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card).
        :type action: ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction

        :param options: Controls how partial redaction is applied, including character count and direction.
        :type options: ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions
        """
        super().__init__(kwargs)

        self_.action = action
        self_.options = options


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_action.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelSimple,
    cached_property,
)

from typing import ClassVar


class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction(ModelSimple):
    """
    Action type that redacts part of the sensitive data while preserving a configurable number of characters, typically used for masking purposes (e.g., show last 4 digits of a credit card).

    :param value: If omitted defaults to "partial_redact". Must be one of ["partial_redact"].
    :type value: str
    """

    allowed_values = {
        "partial_redact",
    }
    # Enum-style convenience constant; assigned after the class body below.
    PARTIAL_REDACT: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction"]

    @cached_property
    def openapi_types(_):
        return {
            "value": (str,),
        }


ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction.PARTIAL_REDACT = (
    ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction("partial_redact")
)


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import TYPE_CHECKING

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


if TYPE_CHECKING:
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options_direction import (
        ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection,
    )


class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Lazy import avoids a circular dependency between generated modules.
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options_direction import (
            ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection,
        )

        return {
            "characters": (int,),
            "direction": (ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "characters": "characters",
        "direction": "direction",
    }

    def __init__(
        self_,
        characters: int,
        direction: ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection,
        **kwargs,
    ):
        """
        Controls how partial redaction is applied, including character count and direction.

        :param characters: The number of characters affected by the partial
            redaction (presumably the count left visible or redacted at one end
            of the match — the schema description is not visible here; verify
            against the API docs).
        :type characters: int

        :param direction: Indicates whether to redact characters from the first or last part of the matched value.
        :type direction: ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection
        """
        super().__init__(kwargs)

        self_.characters = characters
        self_.direction = direction


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options_direction.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelSimple,
    cached_property,
)

from typing import ClassVar


class ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection(ModelSimple):
    """
    Indicates whether to redact characters from the first or last part of the matched value.

    :param value: Must be one of ["first", "last"].
    :type value: str
    """

    allowed_values = {
        "first",
        "last",
    }
    # Enum-style convenience constants; assigned after the class body below.
    FIRST: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection"]
    LAST: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection"]

    @cached_property
    def openapi_types(_):
        return {
            "value": (str,),
        }


ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection.FIRST = (
    ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection("first")
)
ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection.LAST = (
    ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection("last")
)


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_redact.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import TYPE_CHECKING

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


if TYPE_CHECKING:
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_action import (
        ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction,
    )
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_options import (
        ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions,
    )


class ObservabilityPipelineSensitiveDataScannerProcessorActionRedact(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Lazy imports avoid circular dependencies between generated modules.
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_action import (
            ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction,
        )
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_options import (
            ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions,
        )

        return {
            "action": (ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction,),
            "options": (ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "action": "action",
        "options": "options",
    }

    def __init__(
        self_,
        action: ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction,
        options: ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions,
        **kwargs,
    ):
        """
        Configuration for completely redacting matched sensitive data.

        :param action: Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility.
        :type action: ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction

        :param options: Configuration for fully redacting sensitive data.
        :type options: ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions
        """
        super().__init__(kwargs)

        self_.action = action
        self_.options = options


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_redact_action.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelSimple,
    cached_property,
)

from typing import ClassVar


class ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction(ModelSimple):
    """
    Action type that completely replaces the matched sensitive data with a fixed replacement string to remove all visibility.

    :param value: If omitted defaults to "redact". Must be one of ["redact"].
    :type value: str
    """

    allowed_values = {
        "redact",
    }
    # Enum-style convenience constant; assigned after the class body below.
    REDACT: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction"]

    @cached_property
    def openapi_types(_):
        return {
            "value": (str,),
        }


ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction.REDACT = (
    ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction("redact")
)


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_action_redact_options.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


class ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions(ModelNormal):
    @cached_property
    def openapi_types(_):
        return {
            "replace": (str,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "replace": "replace",
    }

    def __init__(self_, replace: str, **kwargs):
        """
        Configuration for fully redacting sensitive data.

        :param replace: The ``ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions`` ``replace``
            value (presumably the literal replacement string written in place of
            the match — the schema description is not visible here; verify
            against the API docs).
        :type replace: str
        """
        super().__init__(kwargs)

        self_.replace = replace


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import TYPE_CHECKING

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


if TYPE_CHECKING:
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options import (
        ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions,
    )
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_type import (
        ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType,
    )


class ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Lazy imports avoid circular dependencies between generated modules.
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options import (
            ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions,
        )
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_type import (
            ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType,
        )

        return {
            "options": (ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions,),
            "type": (ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "options": "options",
        "type": "type",
    }

    def __init__(
        self_,
        options: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions,
        type: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType,
        **kwargs,
    ):
        """
        Defines a custom regex-based pattern for identifying sensitive data in logs.

        :param options: Options for defining a custom regex pattern.
        :type options: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions

        :param type: Indicates a custom regular expression is used for matching.
        :type type: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType
        """
        super().__init__(kwargs)

        self_.options = options
        self_.type = type


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions(ModelNormal):
    @cached_property
    def openapi_types(_):
        return {
            "rule": (str,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "rule": "rule",
    }

    def __init__(self_, rule: str, **kwargs):
        """
        Options for defining a custom regex pattern.

        :param rule: A regular expression used to detect sensitive values. Must be a valid regex.
        :type rule: str
        """
        super().__init__(kwargs)

        self_.rule = rule


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_custom_pattern_type.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelSimple,
    cached_property,
)

from typing import ClassVar


class ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType(ModelSimple):
    """
    Indicates a custom regular expression is used for matching.

    :param value: If omitted defaults to "custom". Must be one of ["custom"].
    :type value: str
    """

    allowed_values = {
        "custom",
    }
    # Enum-style convenience constant; assigned after the class body below.
    CUSTOM: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType"]

    @cached_property
    def openapi_types(_):
        return {
            "value": (str,),
        }


ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType.CUSTOM = (
    ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType("custom")
)


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_keyword_options.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import List

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


class ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions(ModelNormal):
    @cached_property
    def openapi_types(_):
        return {
            "keywords": ([str],),
            "proximity": (int,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "keywords": "keywords",
        "proximity": "proximity",
    }

    def __init__(self_, keywords: List[str], proximity: int, **kwargs):
        """
        Configuration for keywords used to reinforce sensitive data pattern detection.

        :param keywords: A list of keywords to match near the sensitive pattern.
        :type keywords: [str]

        :param proximity: Maximum number of tokens between a keyword and a sensitive value match.
        :type proximity: int
        """
        super().__init__(kwargs)

        self_.keywords = keywords
        self_.proximity = proximity


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import TYPE_CHECKING

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
)


if TYPE_CHECKING:
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_options import (
        ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions,
    )
    from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_type import (
        ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType,
    )


class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern(ModelNormal):
    @cached_property
    def openapi_types(_):
        # Lazy imports avoid circular dependencies between generated modules.
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_options import (
            ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions,
        )
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_type import (
            ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType,
        )

        return {
            "options": (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions,),
            "type": (ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "options": "options",
        "type": "type",
    }

    def __init__(
        self_,
        options: ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions,
        type: ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType,
        **kwargs,
    ):
        """
        Specifies a pattern from Datadog’s sensitive data detection library to match known sensitive data types.

        :param options: Options for selecting a predefined library pattern and enabling keyword support.
        :type options: ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions

        :param type: Indicates that a predefined library pattern is used.
        :type type: ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType
        """
        super().__init__(kwargs)

        self_.options = options
        self_.type = type


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_options.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations

from typing import Union

from datadog_api_client.model_utils import (
    ModelNormal,
    cached_property,
    unset,
    UnsetType,
)


class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions(ModelNormal):
    @cached_property
    def openapi_types(_):
        return {
            "id": (str,),
            "use_recommended_keywords": (bool,),
        }

    # Python attribute name -> JSON key (identical here).
    attribute_map = {
        "id": "id",
        "use_recommended_keywords": "use_recommended_keywords",
    }

    def __init__(self_, id: str, use_recommended_keywords: Union[bool, UnsetType] = unset, **kwargs):
        """
        Options for selecting a predefined library pattern and enabling keyword support.

        :param id: Identifier for a predefined pattern from the sensitive data scanner pattern library.
        :type id: str

        :param use_recommended_keywords: Whether to augment the pattern with recommended keywords (optional).
        :type use_recommended_keywords: bool, optional
        """
        # Optional attribute is only forwarded when explicitly supplied, so
        # "unset" and "set to a value" stay distinct in the model machinery.
        if use_recommended_keywords is not unset:
            kwargs["use_recommended_keywords"] = use_recommended_keywords
        super().__init__(kwargs)

        self_.id = id


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_library_pattern_type.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelSimple,
    cached_property,
)

from typing import ClassVar


class ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType(ModelSimple):
    """
    Indicates that a predefined library pattern is used.

    :param value: If omitted defaults to "library". Must be one of ["library"].
    :type value: str
    """

    allowed_values = {
        "library",
    }
    # Enum-style convenience constant; assigned after the class body below.
    LIBRARY: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType"]

    @cached_property
    def openapi_types(_):
        return {
            "value": (str,),
        }


ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType.LIBRARY = (
    ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType("library")
)


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_pattern.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
from __future__ import annotations


from datadog_api_client.model_utils import (
    ModelComposed,
    cached_property,
)


class ObservabilityPipelineSensitiveDataScannerProcessorPattern(ModelComposed):
    def __init__(self, **kwargs):
        """
        Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference.

        This is a ``oneOf`` composed model: a payload must match exactly one of
        ``ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern`` or
        ``ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern``
        (see ``_composed_schemas`` below). The parameters documented here are
        those of the custom-regex variant only.

        :param options: Options for defining a custom regex pattern.
        :type options: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions

        :param type: Indicates a custom regular expression is used for matching.
        :type type: ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType
        """
        super().__init__(kwargs)

    @cached_property
    def _composed_schemas(_):
        # we need this here to make our import statements work
        # we must store _composed_schemas in here so the code is only run
        # when we invoke this method. If we kept this at the class
        # level we would get an error because the class level
        # code would be run when this module is imported, and these composed
        # classes don't exist yet because their module has not finished
        # loading
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern import (
            ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern,
        )
        from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern import (
            ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern,
        )

        # Exactly one of these schemas must validate against the payload.
        return {
            "oneOf": [
                ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern,
                ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern,
            ],
        }


# ----- src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_rule.py (new file in this patch) -----
# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License.
# This product includes software developed at Datadog (https://www.datadoghq.com/).
# Copyright 2019-Present Datadog, Inc.
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_keyword_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action import ( + ObservabilityPipelineSensitiveDataScannerProcessorAction, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorPattern, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope import ( + ObservabilityPipelineSensitiveDataScannerProcessorScope, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include import ( + 
ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll, + ) + + +class ObservabilityPipelineSensitiveDataScannerProcessorRule(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_keyword_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action import ( + ObservabilityPipelineSensitiveDataScannerProcessorAction, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorPattern, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope import ( + ObservabilityPipelineSensitiveDataScannerProcessorScope, + ) + + return { + "keyword_options": (ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions,), + "name": (str,), + "on_match": (ObservabilityPipelineSensitiveDataScannerProcessorAction,), + "pattern": (ObservabilityPipelineSensitiveDataScannerProcessorPattern,), + "scope": (ObservabilityPipelineSensitiveDataScannerProcessorScope,), + "tags": ([str],), + } + + attribute_map = { + "keyword_options": "keyword_options", + "name": "name", + "on_match": "on_match", + "pattern": "pattern", + "scope": "scope", + "tags": "tags", + } + + def __init__( + self_, + name: str, + on_match: Union[ + ObservabilityPipelineSensitiveDataScannerProcessorAction, + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, + 
ObservabilityPipelineSensitiveDataScannerProcessorActionHash, + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact, + ], + pattern: Union[ + ObservabilityPipelineSensitiveDataScannerProcessorPattern, + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern, + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern, + ], + scope: Union[ + ObservabilityPipelineSensitiveDataScannerProcessorScope, + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll, + ], + tags: List[str], + keyword_options: Union[ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions, UnsetType] = unset, + **kwargs, + ): + """ + Defines a rule for detecting sensitive data, including matching pattern, scope, and the action to take. + + :param keyword_options: Configuration for keywords used to reinforce sensitive data pattern detection. + :type keyword_options: ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions, optional + + :param name: A name identifying the rule. + :type name: str + + :param on_match: Defines what action to take when sensitive data is matched. + :type on_match: ObservabilityPipelineSensitiveDataScannerProcessorAction + + :param pattern: Pattern detection configuration for identifying sensitive data using either a custom regex or a library reference. + :type pattern: ObservabilityPipelineSensitiveDataScannerProcessorPattern + + :param scope: Determines which parts of the log the pattern-matching rule should be applied to. + :type scope: ObservabilityPipelineSensitiveDataScannerProcessorScope + + :param tags: Tags assigned to this rule for filtering and classification. 
+ :type tags: [str] + """ + if keyword_options is not unset: + kwargs["keyword_options"] = keyword_options + super().__init__(kwargs) + + self_.name = name + self_.on_match = on_match + self_.pattern = pattern + self_.scope = scope + self_.tags = tags diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope.py new file mode 100644 index 0000000000..720ecc6df2 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope.py @@ -0,0 +1,51 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelComposed, + cached_property, +) + + +class ObservabilityPipelineSensitiveDataScannerProcessorScope(ModelComposed): + def __init__(self, **kwargs): + """ + Determines which parts of the log the pattern-matching rule should be applied to. + + :param options: Fields to which the scope rule applies. + :type options: ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions + + :param target: Applies the rule only to included fields. + :type target: ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget + """ + super().__init__(kwargs) + + @cached_property + def _composed_schemas(_): + # we need this here to make our import statements work + # we must store _composed_schemas in here so the code is only run + # when we invoke this method. 
If we kept this at the class + # level we would get an error because the class level + # code would be run when this module is imported, and these composed + # classes don't exist yet because their module has not finished + # loading + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll, + ) + + return { + "oneOf": [ + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, + ObservabilityPipelineSensitiveDataScannerProcessorScopeAll, + ], + } diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_all.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_all.py new file mode 100644 index 0000000000..d50553078d --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_all.py @@ -0,0 +1,44 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget, + ) + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeAll(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget, + ) + + return { + "target": (ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget,), + } + + attribute_map = { + "target": "target", + } + + def __init__(self_, target: ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget, **kwargs): + """ + Applies scanning across all available fields. + + :param target: Applies the rule to all fields. + :type target: ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget + """ + super().__init__(kwargs) + + self_.target = target diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_all_target.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_all_target.py new file mode 100644 index 0000000000..849a232292 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_all_target.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget(ModelSimple): + """ + Applies the rule to all fields. + + :param value: If omitted defaults to "all". Must be one of ["all"]. + :type value: str + """ + + allowed_values = { + "all", + } + ALL: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget.ALL = ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget("all") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_exclude.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_exclude.py new file mode 100644 index 0000000000..0806f61f3e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_exclude.py @@ -0,0 +1,61 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget, + ) + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget, + ) + + return { + "options": (ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions,), + "target": (ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget,), + } + + attribute_map = { + "options": "options", + "target": "target", + } + + def __init__( + self_, + options: ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, + target: ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget, + **kwargs, + ): + """ + Excludes specific fields from sensitive data scanning. + + :param options: Fields to which the scope rule applies. + :type options: ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions + + :param target: Excludes specific fields from processing. 
+ :type target: ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget + """ + super().__init__(kwargs) + + self_.options = options + self_.target = target diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target.py new file mode 100644 index 0000000000..c81aa3cda9 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget(ModelSimple): + """ + Excludes specific fields from processing. + + :param value: If omitted defaults to "exclude". Must be one of ["exclude"]. 
+ :type value: str + """ + + allowed_values = { + "exclude", + } + EXCLUDE: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget.EXCLUDE = ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget("exclude") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_include.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_include.py new file mode 100644 index 0000000000..31f8a96c2f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_include.py @@ -0,0 +1,61 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget, + ) + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, + ) + from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget, + ) + + return { + "options": (ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions,), + "target": (ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget,), + } + + attribute_map = { + "options": "options", + "target": "target", + } + + def __init__( + self_, + options: ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, + target: ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget, + **kwargs, + ): + """ + Includes only specific fields for sensitive data scanning. + + :param options: Fields to which the scope rule applies. + :type options: ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions + + :param target: Applies the rule only to included fields. 
+ :type target: ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget + """ + super().__init__(kwargs) + + self_.options = options + self_.target = target diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_include_target.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_include_target.py new file mode 100644 index 0000000000..866edc2d4a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_include_target.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget(ModelSimple): + """ + Applies the rule only to included fields. + + :param value: If omitted defaults to "include". Must be one of ["include"]. 
+ :type value: str + """ + + allowed_values = { + "include", + } + INCLUDE: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget.INCLUDE = ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget("include") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_options.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_options.py new file mode 100644 index 0000000000..dc9c0914d4 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_scope_options.py @@ -0,0 +1,34 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions(ModelNormal): + @cached_property + def openapi_types(_): + return { + "fields": ([str],), + } + + attribute_map = { + "fields": "fields", + } + + def __init__(self_, fields: List[str], **kwargs): + """ + Fields to which the scope rule applies. + + :param fields: The ``ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions`` ``fields``. 
+ :type fields: [str] + """ + super().__init__(kwargs) + + self_.fields = fields diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_type.py new file mode 100644 index 0000000000..2eb255bb6e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sensitive_data_scanner_processor_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSensitiveDataScannerProcessorType(ModelSimple): + """ + The processor type. The value should always be `sensitive_data_scanner`. + + :param value: If omitted defaults to "sensitive_data_scanner". Must be one of ["sensitive_data_scanner"]. 
+ :type value: str + """ + + allowed_values = { + "sensitive_data_scanner", + } + SENSITIVE_DATA_SCANNER: ClassVar["ObservabilityPipelineSensitiveDataScannerProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSensitiveDataScannerProcessorType.SENSITIVE_DATA_SCANNER = ( + ObservabilityPipelineSensitiveDataScannerProcessorType("sensitive_data_scanner") +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py new file mode 100644 index 0000000000..924aa9b0f5 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination.py @@ -0,0 +1,75 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( + ObservabilityPipelineSentinelOneDestinationRegion, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_type import ( + ObservabilityPipelineSentinelOneDestinationType, + ) + + +class ObservabilityPipelineSentinelOneDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( + ObservabilityPipelineSentinelOneDestinationRegion, + ) + from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_type import ( + ObservabilityPipelineSentinelOneDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "region": (ObservabilityPipelineSentinelOneDestinationRegion,), + "type": (ObservabilityPipelineSentinelOneDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "region": "region", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + region: ObservabilityPipelineSentinelOneDestinationRegion, + type: ObservabilityPipelineSentinelOneDestinationType, + **kwargs, + ): + """ + The ``sentinel_one`` destination sends logs to SentinelOne. + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param region: The SentinelOne region to send logs to. + :type region: ObservabilityPipelineSentinelOneDestinationRegion + + :param type: The destination type. The value should always be ``sentinel_one``. 
+ :type type: ObservabilityPipelineSentinelOneDestinationType + """ + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.region = region + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination_region.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination_region.py new file mode 100644 index 0000000000..e96699f347 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination_region.py @@ -0,0 +1,46 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSentinelOneDestinationRegion(ModelSimple): + """ + The SentinelOne region to send logs to. + + :param value: Must be one of ["us", "eu", "ca", "data_set_us"]. 
+ :type value: str + """ + + allowed_values = { + "us", + "eu", + "ca", + "data_set_us", + } + US: ClassVar["ObservabilityPipelineSentinelOneDestinationRegion"] + EU: ClassVar["ObservabilityPipelineSentinelOneDestinationRegion"] + CA: ClassVar["ObservabilityPipelineSentinelOneDestinationRegion"] + DATA_SET_US: ClassVar["ObservabilityPipelineSentinelOneDestinationRegion"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSentinelOneDestinationRegion.US = ObservabilityPipelineSentinelOneDestinationRegion("us") +ObservabilityPipelineSentinelOneDestinationRegion.EU = ObservabilityPipelineSentinelOneDestinationRegion("eu") +ObservabilityPipelineSentinelOneDestinationRegion.CA = ObservabilityPipelineSentinelOneDestinationRegion("ca") +ObservabilityPipelineSentinelOneDestinationRegion.DATA_SET_US = ObservabilityPipelineSentinelOneDestinationRegion( + "data_set_us" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination_type.py new file mode 100644 index 0000000000..ec5377f48e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sentinel_one_destination_type.py @@ -0,0 +1,37 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSentinelOneDestinationType(ModelSimple): + """ + The destination type. The value should always be `sentinel_one`. + + :param value: If omitted defaults to "sentinel_one". Must be one of ["sentinel_one"]. 
+ :type value: str + """ + + allowed_values = { + "sentinel_one", + } + SENTINEL_ONE: ClassVar["ObservabilityPipelineSentinelOneDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSentinelOneDestinationType.SENTINEL_ONE = ObservabilityPipelineSentinelOneDestinationType( + "sentinel_one" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_create_request.py b/src/datadog_api_client/v2/model/observability_pipeline_spec.py similarity index 52% rename from src/datadog_api_client/v2/model/observability_pipeline_create_request.py rename to src/datadog_api_client/v2/model/observability_pipeline_spec.py index aa6c0a505c..d748ff00fa 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_create_request.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_spec.py @@ -12,32 +12,28 @@ if TYPE_CHECKING: - from datadog_api_client.v2.model.observability_pipeline_create_request_data import ( - ObservabilityPipelineCreateRequestData, - ) + from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData -class ObservabilityPipelineCreateRequest(ModelNormal): +class ObservabilityPipelineSpec(ModelNormal): @cached_property def openapi_types(_): - from datadog_api_client.v2.model.observability_pipeline_create_request_data import ( - ObservabilityPipelineCreateRequestData, - ) + from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData return { - "data": (ObservabilityPipelineCreateRequestData,), + "data": (ObservabilityPipelineSpecData,), } attribute_map = { "data": "data", } - def __init__(self_, data: ObservabilityPipelineCreateRequestData, **kwargs): + def __init__(self_, data: ObservabilityPipelineSpecData, **kwargs): """ - Top-level schema representing a pipeline. + Input schema representing an observability pipeline configuration. Used in create and validate requests. 
-        :param data: Contains the pipeline’s ID, type, and configuration attributes.
-        :type data: ObservabilityPipelineCreateRequestData
+        :param data: Contains the pipeline configuration.
+        :type data: ObservabilityPipelineSpecData
         """
         super().__init__(kwargs)
diff --git a/src/datadog_api_client/v2/model/observability_pipeline_create_request_data.py b/src/datadog_api_client/v2/model/observability_pipeline_spec_data.py
similarity index 91%
rename from src/datadog_api_client/v2/model/observability_pipeline_create_request_data.py
rename to src/datadog_api_client/v2/model/observability_pipeline_spec_data.py
index 45d2c7b825..6b5755a6a4 100644
--- a/src/datadog_api_client/v2/model/observability_pipeline_create_request_data.py
+++ b/src/datadog_api_client/v2/model/observability_pipeline_spec_data.py
@@ -15,7 +15,7 @@
     from datadog_api_client.v2.model.observability_pipeline_data_attributes import ObservabilityPipelineDataAttributes


-class ObservabilityPipelineCreateRequestData(ModelNormal):
+class ObservabilityPipelineSpecData(ModelNormal):
     @cached_property
     def openapi_types(_):
         from datadog_api_client.v2.model.observability_pipeline_data_attributes import (
             ObservabilityPipelineDataAttributes,
@@ -34,7 +34,7 @@ def openapi_types(_):

     def __init__(self_, attributes: ObservabilityPipelineDataAttributes, **kwargs):
         """
-        Contains the pipeline’s ID, type, and configuration attributes.
+        Contains the pipeline configuration.

        :param attributes: Defines the pipeline’s name and its components (sources, processors, and destinations).
:type attributes: ObservabilityPipelineDataAttributes diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py new file mode 100644 index 0000000000..e36b4aee74 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination.py @@ -0,0 +1,103 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( + ObservabilityPipelineSplunkHecDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_type import ( + ObservabilityPipelineSplunkHecDestinationType, + ) + + +class ObservabilityPipelineSplunkHecDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( + ObservabilityPipelineSplunkHecDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_type import ( + ObservabilityPipelineSplunkHecDestinationType, + ) + + return { + "auto_extract_timestamp": (bool,), + "encoding": (ObservabilityPipelineSplunkHecDestinationEncoding,), + "id": (str,), + "index": (str,), + "inputs": ([str],), + "sourcetype": (str,), + "type": (ObservabilityPipelineSplunkHecDestinationType,), + } + + attribute_map = { + "auto_extract_timestamp": "auto_extract_timestamp", + "encoding": "encoding", + "id": "id", + "index": "index", + "inputs": "inputs", + 
"sourcetype": "sourcetype", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineSplunkHecDestinationType, + auto_extract_timestamp: Union[bool, UnsetType] = unset, + encoding: Union[ObservabilityPipelineSplunkHecDestinationEncoding, UnsetType] = unset, + index: Union[str, UnsetType] = unset, + sourcetype: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``splunk_hec`` destination forwards logs to Splunk using the HTTP Event Collector (HEC). + + :param auto_extract_timestamp: If ``true`` , Splunk tries to extract timestamps from incoming log events. + If ``false`` , Splunk assigns the time the event was received. + :type auto_extract_timestamp: bool, optional + + :param encoding: Encoding format for log events. + :type encoding: ObservabilityPipelineSplunkHecDestinationEncoding, optional + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param index: Optional name of the Splunk index where logs are written. + :type index: str, optional + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param sourcetype: The Splunk sourcetype to assign to log events. + :type sourcetype: str, optional + + :param type: The destination type. Always ``splunk_hec``. 
+ :type type: ObservabilityPipelineSplunkHecDestinationType + """ + if auto_extract_timestamp is not unset: + kwargs["auto_extract_timestamp"] = auto_extract_timestamp + if encoding is not unset: + kwargs["encoding"] = encoding + if index is not unset: + kwargs["index"] = index + if sourcetype is not unset: + kwargs["sourcetype"] = sourcetype + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination_encoding.py new file mode 100644 index 0000000000..75981185dd --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination_encoding.py @@ -0,0 +1,40 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplunkHecDestinationEncoding(ModelSimple): + """ + Encoding format for log events. + + :param value: Must be one of ["json", "raw_message"]. 
+ :type value: str + """ + + allowed_values = { + "json", + "raw_message", + } + JSON: ClassVar["ObservabilityPipelineSplunkHecDestinationEncoding"] + RAW_MESSAGE: ClassVar["ObservabilityPipelineSplunkHecDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplunkHecDestinationEncoding.JSON = ObservabilityPipelineSplunkHecDestinationEncoding("json") +ObservabilityPipelineSplunkHecDestinationEncoding.RAW_MESSAGE = ObservabilityPipelineSplunkHecDestinationEncoding( + "raw_message" +) diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination_type.py new file mode 100644 index 0000000000..3f3c81a054 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplunkHecDestinationType(ModelSimple): + """ + The destination type. Always `splunk_hec`. + + :param value: If omitted defaults to "splunk_hec". Must be one of ["splunk_hec"]. 
+ :type value: str + """ + + allowed_values = { + "splunk_hec", + } + SPLUNK_HEC: ClassVar["ObservabilityPipelineSplunkHecDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplunkHecDestinationType.SPLUNK_HEC = ObservabilityPipelineSplunkHecDestinationType("splunk_hec") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py new file mode 100644 index 0000000000..bf8e2f976f --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source.py @@ -0,0 +1,67 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source_type import ( + ObservabilityPipelineSplunkHecSourceType, + ) + + +class ObservabilityPipelineSplunkHecSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source_type import ( + ObservabilityPipelineSplunkHecSourceType, + ) + + return { + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineSplunkHecSourceType,), + } + + attribute_map = { + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineSplunkHecSourceType, + tls: 
Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``splunk_hec`` source implements the Splunk HTTP Event Collector (HEC) API. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. Always ``splunk_hec``. + :type type: ObservabilityPipelineSplunkHecSourceType + """ + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source_type.py new file mode 100644 index 0000000000..0bae034160 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_hec_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplunkHecSourceType(ModelSimple): + """ + The source type. Always `splunk_hec`. + + :param value: If omitted defaults to "splunk_hec". Must be one of ["splunk_hec"]. 
+ :type value: str + """ + + allowed_values = { + "splunk_hec", + } + SPLUNK_HEC: ClassVar["ObservabilityPipelineSplunkHecSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplunkHecSourceType.SPLUNK_HEC = ObservabilityPipelineSplunkHecSourceType("splunk_hec") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py new file mode 100644 index 0000000000..5cd32dc425 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source.py @@ -0,0 +1,68 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source_type import ( + ObservabilityPipelineSplunkTcpSourceType, + ) + + +class ObservabilityPipelineSplunkTcpSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source_type import ( + ObservabilityPipelineSplunkTcpSourceType, + ) + + return { + "id": (str,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineSplunkTcpSourceType,), + } + + attribute_map = { + "id": "id", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + type: ObservabilityPipelineSplunkTcpSourceType, + tls: Union[ObservabilityPipelineTls, 
UnsetType] = unset, + **kwargs, + ): + """ + The ``splunk_tcp`` source receives logs from a Splunk Universal Forwarder over TCP. + TLS is supported for secure transmission. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. Always ``splunk_tcp``. + :type type: ObservabilityPipelineSplunkTcpSourceType + """ + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source_type.py new file mode 100644 index 0000000000..e4e2368cfc --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_splunk_tcp_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSplunkTcpSourceType(ModelSimple): + """ + The source type. Always `splunk_tcp`. + + :param value: If omitted defaults to "splunk_tcp". Must be one of ["splunk_tcp"]. 
+ :type value: str + """ + + allowed_values = { + "splunk_tcp", + } + SPLUNK_TCP: ClassVar["ObservabilityPipelineSplunkTcpSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSplunkTcpSourceType.SPLUNK_TCP = ObservabilityPipelineSplunkTcpSourceType("splunk_tcp") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py new file mode 100644 index 0000000000..d49e3044f8 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination.py @@ -0,0 +1,118 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( + ObservabilityPipelineSumoLogicDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_header_custom_fields_item import ( + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_type import ( + ObservabilityPipelineSumoLogicDestinationType, + ) + + +class ObservabilityPipelineSumoLogicDestination(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( + ObservabilityPipelineSumoLogicDestinationEncoding, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_header_custom_fields_item import ( + 
ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem, + ) + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_type import ( + ObservabilityPipelineSumoLogicDestinationType, + ) + + return { + "encoding": (ObservabilityPipelineSumoLogicDestinationEncoding,), + "header_custom_fields": ([ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem],), + "header_host_name": (str,), + "header_source_category": (str,), + "header_source_name": (str,), + "id": (str,), + "inputs": ([str],), + "type": (ObservabilityPipelineSumoLogicDestinationType,), + } + + attribute_map = { + "encoding": "encoding", + "header_custom_fields": "header_custom_fields", + "header_host_name": "header_host_name", + "header_source_category": "header_source_category", + "header_source_name": "header_source_name", + "id": "id", + "inputs": "inputs", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineSumoLogicDestinationType, + encoding: Union[ObservabilityPipelineSumoLogicDestinationEncoding, UnsetType] = unset, + header_custom_fields: Union[ + List[ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], UnsetType + ] = unset, + header_host_name: Union[str, UnsetType] = unset, + header_source_category: Union[str, UnsetType] = unset, + header_source_name: Union[str, UnsetType] = unset, + **kwargs, + ): + """ + The ``sumo_logic`` destination forwards logs to Sumo Logic. + + :param encoding: The output encoding format. + :type encoding: ObservabilityPipelineSumoLogicDestinationEncoding, optional + + :param header_custom_fields: A list of custom headers to include in the request to Sumo Logic. + :type header_custom_fields: [ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem], optional + + :param header_host_name: Optional override for the host name header. + :type header_host_name: str, optional + + :param header_source_category: Optional override for the source category header. 
+ :type header_source_category: str, optional + + :param header_source_name: Optional override for the source name header. + :type header_source_name: str, optional + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param type: The destination type. The value should always be ``sumo_logic``. + :type type: ObservabilityPipelineSumoLogicDestinationType + """ + if encoding is not unset: + kwargs["encoding"] = encoding + if header_custom_fields is not unset: + kwargs["header_custom_fields"] = header_custom_fields + if header_host_name is not unset: + kwargs["header_host_name"] = header_host_name + if header_source_category is not unset: + kwargs["header_source_category"] = header_source_category + if header_source_name is not unset: + kwargs["header_source_name"] = header_source_name + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_encoding.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_encoding.py new file mode 100644 index 0000000000..10d126e168 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_encoding.py @@ -0,0 +1,43 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSumoLogicDestinationEncoding(ModelSimple): + """ + The output encoding format. + + :param value: Must be one of ["json", "raw_message", "logfmt"]. 
+ :type value: str + """ + + allowed_values = { + "json", + "raw_message", + "logfmt", + } + JSON: ClassVar["ObservabilityPipelineSumoLogicDestinationEncoding"] + RAW_MESSAGE: ClassVar["ObservabilityPipelineSumoLogicDestinationEncoding"] + LOGFMT: ClassVar["ObservabilityPipelineSumoLogicDestinationEncoding"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSumoLogicDestinationEncoding.JSON = ObservabilityPipelineSumoLogicDestinationEncoding("json") +ObservabilityPipelineSumoLogicDestinationEncoding.RAW_MESSAGE = ObservabilityPipelineSumoLogicDestinationEncoding( + "raw_message" +) +ObservabilityPipelineSumoLogicDestinationEncoding.LOGFMT = ObservabilityPipelineSumoLogicDestinationEncoding("logfmt") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_header_custom_fields_item.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_header_custom_fields_item.py new file mode 100644 index 0000000000..b8ba9249e3 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_header_custom_fields_item.py @@ -0,0 +1,39 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +class ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem(ModelNormal): + @cached_property + def openapi_types(_): + return { + "name": (str,), + "value": (str,), + } + + attribute_map = { + "name": "name", + "value": "value", + } + + def __init__(self_, name: str, value: str, **kwargs): + """ + Single key-value pair used as a custom log header for Sumo Logic. + + :param name: The header field name. 
+ :type name: str + + :param value: The header field value. + :type value: str + """ + super().__init__(kwargs) + + self_.name = name + self_.value = value diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_type.py new file mode 100644 index 0000000000..337f00fee9 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSumoLogicDestinationType(ModelSimple): + """ + The destination type. The value should always be `sumo_logic`. + + :param value: If omitted defaults to "sumo_logic". Must be one of ["sumo_logic"]. + :type value: str + """ + + allowed_values = { + "sumo_logic", + } + SUMO_LOGIC: ClassVar["ObservabilityPipelineSumoLogicDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSumoLogicDestinationType.SUMO_LOGIC = ObservabilityPipelineSumoLogicDestinationType("sumo_logic") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py new file mode 100644 index 0000000000..c02e14c8c1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source.py @@ -0,0 +1,50 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. 
+# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source_type import ( + ObservabilityPipelineSumoLogicSourceType, + ) + + +class ObservabilityPipelineSumoLogicSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source_type import ( + ObservabilityPipelineSumoLogicSourceType, + ) + + return { + "id": (str,), + "type": (ObservabilityPipelineSumoLogicSourceType,), + } + + attribute_map = { + "id": "id", + "type": "type", + } + + def __init__(self_, id: str, type: ObservabilityPipelineSumoLogicSourceType, **kwargs): + """ + The ``sumo_logic`` source receives logs from Sumo Logic collectors. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param type: The source type. The value should always be ``sumo_logic``. + :type type: ObservabilityPipelineSumoLogicSourceType + """ + super().__init__(kwargs) + + self_.id = id + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source_type.py new file mode 100644 index 0000000000..9742e87d23 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_sumo_logic_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSumoLogicSourceType(ModelSimple): + """ + The source type. The value should always be `sumo_logic`. + + :param value: If omitted defaults to "sumo_logic". Must be one of ["sumo_logic"]. + :type value: str + """ + + allowed_values = { + "sumo_logic", + } + SUMO_LOGIC: ClassVar["ObservabilityPipelineSumoLogicSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSumoLogicSourceType.SUMO_LOGIC = ObservabilityPipelineSumoLogicSourceType("sumo_logic") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py new file mode 100644 index 0000000000..4984e69b5e --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination.py @@ -0,0 +1,88 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( + ObservabilityPipelineSyslogNgDestinationType, + ) + + +class ObservabilityPipelineSyslogNgDestination(ModelNormal): + validations = { + "keepalive": { + "inclusive_minimum": 0, + }, + } + + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( + ObservabilityPipelineSyslogNgDestinationType, + ) + + return { + "id": (str,), + "inputs": ([str],), + "keepalive": (int,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineSyslogNgDestinationType,), + } + + attribute_map = { + "id": "id", + "inputs": "inputs", + "keepalive": "keepalive", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + inputs: List[str], + type: ObservabilityPipelineSyslogNgDestinationType, + keepalive: Union[int, UnsetType] = unset, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``syslog_ng`` destination forwards logs to an external ``syslog-ng`` server over TCP or UDP using the syslog protocol. + + :param id: The unique identifier for this component. + :type id: str + + :param inputs: A list of component IDs whose output is used as the ``input`` for this component. + :type inputs: [str] + + :param keepalive: Optional socket keepalive duration in milliseconds. + :type keepalive: int, optional + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. 
+ :type tls: ObservabilityPipelineTls, optional + + :param type: The destination type. The value should always be ``syslog_ng``. + :type type: ObservabilityPipelineSyslogNgDestinationType + """ + if keepalive is not unset: + kwargs["keepalive"] = keepalive + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.inputs = inputs + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination_type.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination_type.py new file mode 100644 index 0000000000..4f17a09c03 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_destination_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSyslogNgDestinationType(ModelSimple): + """ + The destination type. The value should always be `syslog_ng`. + + :param value: If omitted defaults to "syslog_ng". Must be one of ["syslog_ng"]. 
+ :type value: str + """ + + allowed_values = { + "syslog_ng", + } + SYSLOG_NG: ClassVar["ObservabilityPipelineSyslogNgDestinationType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSyslogNgDestinationType.SYSLOG_NG = ObservabilityPipelineSyslogNgDestinationType("syslog_ng") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py new file mode 100644 index 0000000000..5f3e91d9a1 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source.py @@ -0,0 +1,80 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + +from typing import Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_syslog_source_mode import ( + ObservabilityPipelineSyslogSourceMode, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source_type import ( + ObservabilityPipelineSyslogNgSourceType, + ) + + +class ObservabilityPipelineSyslogNgSource(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_syslog_source_mode import ( + ObservabilityPipelineSyslogSourceMode, + ) + from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls + from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source_type import ( + ObservabilityPipelineSyslogNgSourceType, + ) + + return { + "id": (str,), + "mode": 
(ObservabilityPipelineSyslogSourceMode,), + "tls": (ObservabilityPipelineTls,), + "type": (ObservabilityPipelineSyslogNgSourceType,), + } + + attribute_map = { + "id": "id", + "mode": "mode", + "tls": "tls", + "type": "type", + } + + def __init__( + self_, + id: str, + mode: ObservabilityPipelineSyslogSourceMode, + type: ObservabilityPipelineSyslogNgSourceType, + tls: Union[ObservabilityPipelineTls, UnsetType] = unset, + **kwargs, + ): + """ + The ``syslog_ng`` source listens for logs over TCP or UDP from a ``syslog-ng`` server using the syslog protocol. + + :param id: The unique identifier for this component. Used to reference this component in other parts of the pipeline (e.g., as input to downstream components). + :type id: str + + :param mode: Protocol used by the syslog source to receive messages. + :type mode: ObservabilityPipelineSyslogSourceMode + + :param tls: Configuration for enabling TLS encryption between the pipeline component and external services. + :type tls: ObservabilityPipelineTls, optional + + :param type: The source type. The value should always be ``syslog_ng``. + :type type: ObservabilityPipelineSyslogNgSourceType + """ + if tls is not unset: + kwargs["tls"] = tls + super().__init__(kwargs) + + self_.id = id + self_.mode = mode + self_.type = type diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source_type.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source_type.py new file mode 100644 index 0000000000..3edc780615 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_ng_source_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSyslogNgSourceType(ModelSimple): + """ + The source type. The value should always be `syslog_ng`. + + :param value: If omitted defaults to "syslog_ng". Must be one of ["syslog_ng"]. + :type value: str + """ + + allowed_values = { + "syslog_ng", + } + SYSLOG_NG: ClassVar["ObservabilityPipelineSyslogNgSourceType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSyslogNgSourceType.SYSLOG_NG = ObservabilityPipelineSyslogNgSourceType("syslog_ng") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_syslog_source_mode.py b/src/datadog_api_client/v2/model/observability_pipeline_syslog_source_mode.py new file mode 100644 index 0000000000..1e4f2f846a --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_syslog_source_mode.py @@ -0,0 +1,38 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineSyslogSourceMode(ModelSimple): + """ + Protocol used by the syslog source to receive messages. + + :param value: Must be one of ["tcp", "udp"]. 
+ :type value: str + """ + + allowed_values = { + "tcp", + "udp", + } + TCP: ClassVar["ObservabilityPipelineSyslogSourceMode"] + UDP: ClassVar["ObservabilityPipelineSyslogSourceMode"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineSyslogSourceMode.TCP = ObservabilityPipelineSyslogSourceMode("tcp") +ObservabilityPipelineSyslogSourceMode.UDP = ObservabilityPipelineSyslogSourceMode("udp") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py new file mode 100644 index 0000000000..aeaa56e6cb --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor.py @@ -0,0 +1,93 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.observability_pipeline_throttle_processor_type import ( + ObservabilityPipelineThrottleProcessorType, + ) + + +class ObservabilityPipelineThrottleProcessor(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.observability_pipeline_throttle_processor_type import ( + ObservabilityPipelineThrottleProcessorType, + ) + + return { + "group_by": ([str],), + "id": (str,), + "include": (str,), + "inputs": ([str],), + "threshold": (int,), + "type": (ObservabilityPipelineThrottleProcessorType,), + "window": (float,), + } + + attribute_map = { + "group_by": "group_by", + "id": "id", + "include": "include", + "inputs": "inputs", + "threshold": "threshold", + "type": "type", + "window": "window", + } + + def __init__( + self_, + id: str, + include: str, + inputs: List[str], + threshold: int, + type: ObservabilityPipelineThrottleProcessorType, + window: float, + group_by: Union[List[str], UnsetType] = unset, + **kwargs, + ): + """ + The ``throttle`` processor limits the number of events that pass through over a given time window. + + :param group_by: Optional list of fields used to group events before the threshold has been reached. + :type group_by: [str], optional + + :param id: The unique identifier for this processor. + :type id: str + + :param include: A Datadog search query used to determine which logs this processor targets. + :type include: str + + :param inputs: A list of component IDs whose output is used as the input for this processor. + :type inputs: [str] + + :param threshold: the number of events allowed in a given time window. Events sent after the threshold has been reached, are dropped. + :type threshold: int + + :param type: The processor type. 
The value should always be ``throttle``. + :type type: ObservabilityPipelineThrottleProcessorType + + :param window: The time window in seconds over which the threshold applies. + :type window: float + """ + if group_by is not unset: + kwargs["group_by"] = group_by + super().__init__(kwargs) + + self_.id = id + self_.include = include + self_.inputs = inputs + self_.threshold = threshold + self_.type = type + self_.window = window diff --git a/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor_type.py b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor_type.py new file mode 100644 index 0000000000..63dac52864 --- /dev/null +++ b/src/datadog_api_client/v2/model/observability_pipeline_throttle_processor_type.py @@ -0,0 +1,35 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. +from __future__ import annotations + + +from datadog_api_client.model_utils import ( + ModelSimple, + cached_property, +) + +from typing import ClassVar + + +class ObservabilityPipelineThrottleProcessorType(ModelSimple): + """ + The processor type. The value should always be `throttle`. + + :param value: If omitted defaults to "throttle". Must be one of ["throttle"]. 
+ :type value: str + """ + + allowed_values = { + "throttle", + } + THROTTLE: ClassVar["ObservabilityPipelineThrottleProcessorType"] + + @cached_property + def openapi_types(_): + return { + "value": (str,), + } + + +ObservabilityPipelineThrottleProcessorType.THROTTLE = ObservabilityPipelineThrottleProcessorType("throttle") diff --git a/src/datadog_api_client/v2/model/observability_pipeline_tls.py b/src/datadog_api_client/v2/model/observability_pipeline_tls.py index 46aec65a60..0560611b9d 100644 --- a/src/datadog_api_client/v2/model/observability_pipeline_tls.py +++ b/src/datadog_api_client/v2/model/observability_pipeline_tls.py @@ -32,7 +32,7 @@ def __init__( self_, crt_file: str, ca_file: Union[str, UnsetType] = unset, key_file: Union[str, UnsetType] = unset, **kwargs ): """ - Configuration for enabling TLS encryption. + Configuration for enabling TLS encryption between the pipeline component and external services. :param ca_file: Path to the Certificate Authority (CA) file used to validate the server’s TLS certificate. :type ca_file: str, optional diff --git a/src/datadog_api_client/v2/model/validation_error.py b/src/datadog_api_client/v2/model/validation_error.py new file mode 100644 index 0000000000..dc07a54508 --- /dev/null +++ b/src/datadog_api_client/v2/model/validation_error.py @@ -0,0 +1,46 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.validation_error_meta import ValidationErrorMeta + + +class ValidationError(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.validation_error_meta import ValidationErrorMeta + + return { + "meta": (ValidationErrorMeta,), + "title": (str,), + } + + attribute_map = { + "meta": "meta", + "title": "title", + } + + def __init__(self_, meta: ValidationErrorMeta, title: str, **kwargs): + """ + Represents a single validation error, including a human-readable title and metadata. + + :param meta: Describes additional metadata for validation errors, including field names and error messages. + :type meta: ValidationErrorMeta + + :param title: A short, human-readable summary of the error. + :type title: str + """ + super().__init__(kwargs) + + self_.meta = meta + self_.title = title diff --git a/src/datadog_api_client/v2/model/validation_error_meta.py b/src/datadog_api_client/v2/model/validation_error_meta.py new file mode 100644 index 0000000000..4b5c9b08cd --- /dev/null +++ b/src/datadog_api_client/v2/model/validation_error_meta.py @@ -0,0 +1,52 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import Union + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +class ValidationErrorMeta(ModelNormal): + @cached_property + def openapi_types(_): + return { + "field": (str,), + "id": (str,), + "message": (str,), + } + + attribute_map = { + "field": "field", + "id": "id", + "message": "message", + } + + def __init__( + self_, message: str, field: Union[str, UnsetType] = unset, id: Union[str, UnsetType] = unset, **kwargs + ): + """ + Describes additional metadata for validation errors, including field names and error messages. + + :param field: The field name that caused the error. + :type field: str, optional + + :param id: The ID of the component in which the error occurred. + :type id: str, optional + + :param message: The detailed error message. + :type message: str + """ + if field is not unset: + kwargs["field"] = field + if id is not unset: + kwargs["id"] = id + super().__init__(kwargs) + + self_.message = message diff --git a/src/datadog_api_client/v2/model/validation_response.py b/src/datadog_api_client/v2/model/validation_response.py new file mode 100644 index 0000000000..1ab8097825 --- /dev/null +++ b/src/datadog_api_client/v2/model/validation_response.py @@ -0,0 +1,42 @@ +# Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +# This product includes software developed at Datadog (https://www.datadoghq.com/). +# Copyright 2019-Present Datadog, Inc. 
+from __future__ import annotations + +from typing import List, Union, TYPE_CHECKING + +from datadog_api_client.model_utils import ( + ModelNormal, + cached_property, + unset, + UnsetType, +) + + +if TYPE_CHECKING: + from datadog_api_client.v2.model.validation_error import ValidationError + + +class ValidationResponse(ModelNormal): + @cached_property + def openapi_types(_): + from datadog_api_client.v2.model.validation_error import ValidationError + + return { + "errors": ([ValidationError],), + } + + attribute_map = { + "errors": "errors", + } + + def __init__(self_, errors: Union[List[ValidationError], UnsetType] = unset, **kwargs): + """ + Response containing validation errors. + + :param errors: The ``ValidationResponse`` ``errors``. + :type errors: [ValidationError], optional + """ + if errors is not unset: + kwargs["errors"] = errors + super().__init__(kwargs) diff --git a/src/datadog_api_client/v2/models/__init__.py b/src/datadog_api_client/v2/models/__init__.py index 32f7bc3da7..e4cebd23b3 100644 --- a/src/datadog_api_client/v2/models/__init__.py +++ b/src/datadog_api_client/v2/models/__init__.py @@ -305,6 +305,8 @@ from datadog_api_client.v2.model.aws_scan_options_update_attributes import AwsScanOptionsUpdateAttributes from datadog_api_client.v2.model.aws_scan_options_update_data import AwsScanOptionsUpdateData from datadog_api_client.v2.model.aws_scan_options_update_request import AwsScanOptionsUpdateRequest +from datadog_api_client.v2.model.azure_storage_destination import AzureStorageDestination +from datadog_api_client.v2.model.azure_storage_destination_type import AzureStorageDestinationType from datadog_api_client.v2.model.azure_uc_config import AzureUCConfig from datadog_api_client.v2.model.azure_uc_config_pair import AzureUCConfigPair from datadog_api_client.v2.model.azure_uc_config_pair_attributes import AzureUCConfigPairAttributes @@ -1478,6 +1480,8 @@ from datadog_api_client.v2.model.list_findings_page import ListFindingsPage from 
datadog_api_client.v2.model.list_findings_response import ListFindingsResponse from datadog_api_client.v2.model.list_historical_jobs_response import ListHistoricalJobsResponse +from datadog_api_client.v2.model.list_pipelines_response import ListPipelinesResponse +from datadog_api_client.v2.model.list_pipelines_response_meta import ListPipelinesResponseMeta from datadog_api_client.v2.model.list_powerpacks_response import ListPowerpacksResponse from datadog_api_client.v2.model.list_rules_response import ListRulesResponse from datadog_api_client.v2.model.list_rules_response_data_item import ListRulesResponseDataItem @@ -1676,6 +1680,8 @@ from datadog_api_client.v2.model.metrics_list_response_links import MetricsListResponseLinks from datadog_api_client.v2.model.metrics_scalar_query import MetricsScalarQuery from datadog_api_client.v2.model.metrics_timeseries_query import MetricsTimeseriesQuery +from datadog_api_client.v2.model.microsoft_sentinel_destination import MicrosoftSentinelDestination +from datadog_api_client.v2.model.microsoft_sentinel_destination_type import MicrosoftSentinelDestinationType from datadog_api_client.v2.model.microsoft_teams_channel_info_response_attributes import ( MicrosoftTeamsChannelInfoResponseAttributes, ) @@ -1830,12 +1836,53 @@ from datadog_api_client.v2.model.nullable_user_relationship import NullableUserRelationship from datadog_api_client.v2.model.nullable_user_relationship_data import NullableUserRelationshipData from datadog_api_client.v2.model.observability_pipeline import ObservabilityPipeline +from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor import ( + ObservabilityPipelineAddEnvVarsProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_type import ( + ObservabilityPipelineAddEnvVarsProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_add_env_vars_processor_variable import ( + ObservabilityPipelineAddEnvVarsProcessorVariable, +) from 
datadog_api_client.v2.model.observability_pipeline_add_fields_processor import ( ObservabilityPipelineAddFieldsProcessor, ) from datadog_api_client.v2.model.observability_pipeline_add_fields_processor_type import ( ObservabilityPipelineAddFieldsProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source import ( + ObservabilityPipelineAmazonDataFirehoseSource, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_data_firehose_source_type import ( + ObservabilityPipelineAmazonDataFirehoseSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination import ( + ObservabilityPipelineAmazonOpenSearchDestination, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth import ( + ObservabilityPipelineAmazonOpenSearchDestinationAuth, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_auth_strategy import ( + ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_open_search_destination_type import ( + ObservabilityPipelineAmazonOpenSearchDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination import ( + ObservabilityPipelineAmazonS3Destination, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_storage_class import ( + ObservabilityPipelineAmazonS3DestinationStorageClass, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_s3_destination_type import ( + ObservabilityPipelineAmazonS3DestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source import ObservabilityPipelineAmazonS3Source +from datadog_api_client.v2.model.observability_pipeline_amazon_s3_source_type import ( + ObservabilityPipelineAmazonS3SourceType, +) +from datadog_api_client.v2.model.observability_pipeline_aws_auth import 
ObservabilityPipelineAwsAuth from datadog_api_client.v2.model.observability_pipeline_config import ObservabilityPipelineConfig from datadog_api_client.v2.model.observability_pipeline_config_destination_item import ( ObservabilityPipelineConfigDestinationItem, @@ -1844,10 +1891,6 @@ ObservabilityPipelineConfigProcessorItem, ) from datadog_api_client.v2.model.observability_pipeline_config_source_item import ObservabilityPipelineConfigSourceItem -from datadog_api_client.v2.model.observability_pipeline_create_request import ObservabilityPipelineCreateRequest -from datadog_api_client.v2.model.observability_pipeline_create_request_data import ( - ObservabilityPipelineCreateRequestData, -) from datadog_api_client.v2.model.observability_pipeline_data import ObservabilityPipelineData from datadog_api_client.v2.model.observability_pipeline_data_attributes import ObservabilityPipelineDataAttributes from datadog_api_client.v2.model.observability_pipeline_datadog_agent_source import ( @@ -1862,17 +1905,187 @@ from datadog_api_client.v2.model.observability_pipeline_datadog_logs_destination_type import ( ObservabilityPipelineDatadogLogsDestinationType, ) +from datadog_api_client.v2.model.observability_pipeline_decoding import ObservabilityPipelineDecoding +from datadog_api_client.v2.model.observability_pipeline_dedupe_processor import ObservabilityPipelineDedupeProcessor +from datadog_api_client.v2.model.observability_pipeline_dedupe_processor_mode import ( + ObservabilityPipelineDedupeProcessorMode, +) +from datadog_api_client.v2.model.observability_pipeline_dedupe_processor_type import ( + ObservabilityPipelineDedupeProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination import ( + ObservabilityPipelineElasticsearchDestination, +) +from datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_api_version import ( + ObservabilityPipelineElasticsearchDestinationApiVersion, +) +from 
datadog_api_client.v2.model.observability_pipeline_elasticsearch_destination_type import ( + ObservabilityPipelineElasticsearchDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file import ( + ObservabilityPipelineEnrichmentTableFile, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_encoding import ( + ObservabilityPipelineEnrichmentTableFileEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_encoding_type import ( + ObservabilityPipelineEnrichmentTableFileEncodingType, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_key_items import ( + ObservabilityPipelineEnrichmentTableFileKeyItems, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_key_items_comparison import ( + ObservabilityPipelineEnrichmentTableFileKeyItemsComparison, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_schema_items import ( + ObservabilityPipelineEnrichmentTableFileSchemaItems, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_file_schema_items_type import ( + ObservabilityPipelineEnrichmentTableFileSchemaItemsType, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_geo_ip import ( + ObservabilityPipelineEnrichmentTableGeoIp, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor import ( + ObservabilityPipelineEnrichmentTableProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_enrichment_table_processor_type import ( + ObservabilityPipelineEnrichmentTableProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_field_value import ObservabilityPipelineFieldValue from datadog_api_client.v2.model.observability_pipeline_filter_processor import ObservabilityPipelineFilterProcessor from datadog_api_client.v2.model.observability_pipeline_filter_processor_type import ( 
ObservabilityPipelineFilterProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source import ObservabilityPipelineFluentBitSource +from datadog_api_client.v2.model.observability_pipeline_fluent_bit_source_type import ( + ObservabilityPipelineFluentBitSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_fluentd_source import ObservabilityPipelineFluentdSource +from datadog_api_client.v2.model.observability_pipeline_fluentd_source_type import ( + ObservabilityPipelineFluentdSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_gcp_auth import ObservabilityPipelineGcpAuth +from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor import ( + ObservabilityPipelineGenerateMetricsProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_generate_metrics_processor_type import ( + ObservabilityPipelineGenerateMetricsProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_generated_metric import ObservabilityPipelineGeneratedMetric +from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field import ( + ObservabilityPipelineGeneratedMetricIncrementByField, +) +from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_field_strategy import ( + ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one import ( + ObservabilityPipelineGeneratedMetricIncrementByOne, +) +from datadog_api_client.v2.model.observability_pipeline_generated_metric_increment_by_one_strategy import ( + ObservabilityPipelineGeneratedMetricIncrementByOneStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_generated_metric_metric_type import ( + ObservabilityPipelineGeneratedMetricMetricType, +) +from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination import ( + 
ObservabilityPipelineGoogleChronicleDestination, +) +from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_encoding import ( + ObservabilityPipelineGoogleChronicleDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_google_chronicle_destination_type import ( + ObservabilityPipelineGoogleChronicleDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination import ( + ObservabilityPipelineGoogleCloudStorageDestination, +) +from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_acl import ( + ObservabilityPipelineGoogleCloudStorageDestinationAcl, +) +from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_storage_class import ( + ObservabilityPipelineGoogleCloudStorageDestinationStorageClass, +) +from datadog_api_client.v2.model.observability_pipeline_google_cloud_storage_destination_type import ( + ObservabilityPipelineGoogleCloudStorageDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source import ( + ObservabilityPipelineGooglePubSubSource, +) +from datadog_api_client.v2.model.observability_pipeline_google_pub_sub_source_type import ( + ObservabilityPipelineGooglePubSubSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_source import ObservabilityPipelineHttpClientSource +from datadog_api_client.v2.model.observability_pipeline_http_client_source_auth_strategy import ( + ObservabilityPipelineHttpClientSourceAuthStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_http_client_source_type import ( + ObservabilityPipelineHttpClientSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_http_server_source import ObservabilityPipelineHttpServerSource +from datadog_api_client.v2.model.observability_pipeline_http_server_source_auth_strategy import ( + 
ObservabilityPipelineHttpServerSourceAuthStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_http_server_source_type import ( + ObservabilityPipelineHttpServerSourceType, +) from datadog_api_client.v2.model.observability_pipeline_kafka_source import ObservabilityPipelineKafkaSource from datadog_api_client.v2.model.observability_pipeline_kafka_source_librdkafka_option import ( ObservabilityPipelineKafkaSourceLibrdkafkaOption, ) from datadog_api_client.v2.model.observability_pipeline_kafka_source_sasl import ObservabilityPipelineKafkaSourceSasl from datadog_api_client.v2.model.observability_pipeline_kafka_source_type import ObservabilityPipelineKafkaSourceType +from datadog_api_client.v2.model.observability_pipeline_logstash_source import ObservabilityPipelineLogstashSource +from datadog_api_client.v2.model.observability_pipeline_logstash_source_type import ( + ObservabilityPipelineLogstashSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_metadata_entry import ObservabilityPipelineMetadataEntry +from datadog_api_client.v2.model.observability_pipeline_metric_value import ObservabilityPipelineMetricValue +from datadog_api_client.v2.model.observability_pipeline_new_relic_destination import ( + ObservabilityPipelineNewRelicDestination, +) +from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_region import ( + ObservabilityPipelineNewRelicDestinationRegion, +) +from datadog_api_client.v2.model.observability_pipeline_new_relic_destination_type import ( + ObservabilityPipelineNewRelicDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor import ( + ObservabilityPipelineOcsfMapperProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping import ( + ObservabilityPipelineOcsfMapperProcessorMapping, +) +from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_mapping_mapping import ( + 
ObservabilityPipelineOcsfMapperProcessorMappingMapping, +) +from datadog_api_client.v2.model.observability_pipeline_ocsf_mapper_processor_type import ( + ObservabilityPipelineOcsfMapperProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_ocsf_mapping_library import ( + ObservabilityPipelineOcsfMappingLibrary, +) +from datadog_api_client.v2.model.observability_pipeline_open_search_destination import ( + ObservabilityPipelineOpenSearchDestination, +) +from datadog_api_client.v2.model.observability_pipeline_open_search_destination_type import ( + ObservabilityPipelineOpenSearchDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor import ( + ObservabilityPipelineParseGrokProcessor, +) +from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule import ( + ObservabilityPipelineParseGrokProcessorRule, +) +from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_match_rule import ( + ObservabilityPipelineParseGrokProcessorRuleMatchRule, +) +from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_rule_support_rule import ( + ObservabilityPipelineParseGrokProcessorRuleSupportRule, +) +from datadog_api_client.v2.model.observability_pipeline_parse_grok_processor_type import ( + ObservabilityPipelineParseGrokProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_parse_json_processor import ( ObservabilityPipelineParseJSONProcessor, ) @@ -1889,12 +2102,25 @@ from datadog_api_client.v2.model.observability_pipeline_quota_processor_limit_enforce_type import ( ObservabilityPipelineQuotaProcessorLimitEnforceType, ) +from datadog_api_client.v2.model.observability_pipeline_quota_processor_overflow_action import ( + ObservabilityPipelineQuotaProcessorOverflowAction, +) from datadog_api_client.v2.model.observability_pipeline_quota_processor_override import ( ObservabilityPipelineQuotaProcessorOverride, ) from 
datadog_api_client.v2.model.observability_pipeline_quota_processor_type import ( ObservabilityPipelineQuotaProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_reduce_processor import ObservabilityPipelineReduceProcessor +from datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy import ( + ObservabilityPipelineReduceProcessorMergeStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_reduce_processor_merge_strategy_strategy import ( + ObservabilityPipelineReduceProcessorMergeStrategyStrategy, +) +from datadog_api_client.v2.model.observability_pipeline_reduce_processor_type import ( + ObservabilityPipelineReduceProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_remove_fields_processor import ( ObservabilityPipelineRemoveFieldsProcessor, ) @@ -1910,6 +2136,166 @@ from datadog_api_client.v2.model.observability_pipeline_rename_fields_processor_type import ( ObservabilityPipelineRenameFieldsProcessorType, ) +from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination import ( + ObservabilityPipelineRsyslogDestination, +) +from datadog_api_client.v2.model.observability_pipeline_rsyslog_destination_type import ( + ObservabilityPipelineRsyslogDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_rsyslog_source import ObservabilityPipelineRsyslogSource +from datadog_api_client.v2.model.observability_pipeline_rsyslog_source_type import ( + ObservabilityPipelineRsyslogSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_sample_processor import ObservabilityPipelineSampleProcessor +from datadog_api_client.v2.model.observability_pipeline_sample_processor_type import ( + ObservabilityPipelineSampleProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor import ( + ObservabilityPipelineSensitiveDataScannerProcessor, +) +from 
datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action import ( + ObservabilityPipelineSensitiveDataScannerProcessorAction, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionHash, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_hash_action import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_action import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_partial_redact_options_direction import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedact, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_action import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_action_redact_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions, +) +from 
datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_custom_pattern_type import ( + ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_keyword_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_library_pattern_type import ( + ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_pattern import ( + ObservabilityPipelineSensitiveDataScannerProcessorPattern, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_rule import ( + ObservabilityPipelineSensitiveDataScannerProcessorRule, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope import ( + ObservabilityPipelineSensitiveDataScannerProcessorScope, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all import ( + 
ObservabilityPipelineSensitiveDataScannerProcessorScopeAll, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_all_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_exclude_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_include_target import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_scope_options import ( + ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions, +) +from datadog_api_client.v2.model.observability_pipeline_sensitive_data_scanner_processor_type import ( + ObservabilityPipelineSensitiveDataScannerProcessorType, +) +from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination import ( + ObservabilityPipelineSentinelOneDestination, +) +from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_region import ( + ObservabilityPipelineSentinelOneDestinationRegion, +) +from datadog_api_client.v2.model.observability_pipeline_sentinel_one_destination_type import ( + ObservabilityPipelineSentinelOneDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_spec import ObservabilityPipelineSpec +from datadog_api_client.v2.model.observability_pipeline_spec_data import ObservabilityPipelineSpecData 
+from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination import ( + ObservabilityPipelineSplunkHecDestination, +) +from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_encoding import ( + ObservabilityPipelineSplunkHecDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_splunk_hec_destination_type import ( + ObservabilityPipelineSplunkHecDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source import ObservabilityPipelineSplunkHecSource +from datadog_api_client.v2.model.observability_pipeline_splunk_hec_source_type import ( + ObservabilityPipelineSplunkHecSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source import ObservabilityPipelineSplunkTcpSource +from datadog_api_client.v2.model.observability_pipeline_splunk_tcp_source_type import ( + ObservabilityPipelineSplunkTcpSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination import ( + ObservabilityPipelineSumoLogicDestination, +) +from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_encoding import ( + ObservabilityPipelineSumoLogicDestinationEncoding, +) +from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_header_custom_fields_item import ( + ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem, +) +from datadog_api_client.v2.model.observability_pipeline_sumo_logic_destination_type import ( + ObservabilityPipelineSumoLogicDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source import ObservabilityPipelineSumoLogicSource +from datadog_api_client.v2.model.observability_pipeline_sumo_logic_source_type import ( + ObservabilityPipelineSumoLogicSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination import ( + ObservabilityPipelineSyslogNgDestination, +) +from 
datadog_api_client.v2.model.observability_pipeline_syslog_ng_destination_type import ( + ObservabilityPipelineSyslogNgDestinationType, +) +from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source import ObservabilityPipelineSyslogNgSource +from datadog_api_client.v2.model.observability_pipeline_syslog_ng_source_type import ( + ObservabilityPipelineSyslogNgSourceType, +) +from datadog_api_client.v2.model.observability_pipeline_syslog_source_mode import ObservabilityPipelineSyslogSourceMode +from datadog_api_client.v2.model.observability_pipeline_throttle_processor import ObservabilityPipelineThrottleProcessor +from datadog_api_client.v2.model.observability_pipeline_throttle_processor_type import ( + ObservabilityPipelineThrottleProcessorType, +) from datadog_api_client.v2.model.observability_pipeline_tls import ObservabilityPipelineTls from datadog_api_client.v2.model.okta_account import OktaAccount from datadog_api_client.v2.model.okta_account_attributes import OktaAccountAttributes @@ -2977,6 +3363,9 @@ from datadog_api_client.v2.model.users_relationship import UsersRelationship from datadog_api_client.v2.model.users_response import UsersResponse from datadog_api_client.v2.model.users_type import UsersType +from datadog_api_client.v2.model.validation_error import ValidationError +from datadog_api_client.v2.model.validation_error_meta import ValidationErrorMeta +from datadog_api_client.v2.model.validation_response import ValidationResponse from datadog_api_client.v2.model.vulnerabilities_type import VulnerabilitiesType from datadog_api_client.v2.model.vulnerability import Vulnerability from datadog_api_client.v2.model.vulnerability_attributes import VulnerabilityAttributes @@ -3257,6 +3646,8 @@ "AwsScanOptionsUpdateAttributes", "AwsScanOptionsUpdateData", "AwsScanOptionsUpdateRequest", + "AzureStorageDestination", + "AzureStorageDestinationType", "AzureUCConfig", "AzureUCConfigPair", "AzureUCConfigPairAttributes", @@ -4166,6 +4557,8 @@ 
"ListFindingsPage", "ListFindingsResponse", "ListHistoricalJobsResponse", + "ListPipelinesResponse", + "ListPipelinesResponseMeta", "ListPowerpacksResponse", "ListRulesResponse", "ListRulesResponseDataItem", @@ -4350,6 +4743,8 @@ "MetricsListResponseLinks", "MetricsScalarQuery", "MetricsTimeseriesQuery", + "MicrosoftSentinelDestination", + "MicrosoftSentinelDestinationType", "MicrosoftTeamsChannelInfoResponseAttributes", "MicrosoftTeamsChannelInfoResponseData", "MicrosoftTeamsChannelInfoType", @@ -4432,40 +4827,182 @@ "NullableUserRelationship", "NullableUserRelationshipData", "ObservabilityPipeline", + "ObservabilityPipelineAddEnvVarsProcessor", + "ObservabilityPipelineAddEnvVarsProcessorType", + "ObservabilityPipelineAddEnvVarsProcessorVariable", "ObservabilityPipelineAddFieldsProcessor", "ObservabilityPipelineAddFieldsProcessorType", + "ObservabilityPipelineAmazonDataFirehoseSource", + "ObservabilityPipelineAmazonDataFirehoseSourceType", + "ObservabilityPipelineAmazonOpenSearchDestination", + "ObservabilityPipelineAmazonOpenSearchDestinationAuth", + "ObservabilityPipelineAmazonOpenSearchDestinationAuthStrategy", + "ObservabilityPipelineAmazonOpenSearchDestinationType", + "ObservabilityPipelineAmazonS3Destination", + "ObservabilityPipelineAmazonS3DestinationStorageClass", + "ObservabilityPipelineAmazonS3DestinationType", + "ObservabilityPipelineAmazonS3Source", + "ObservabilityPipelineAmazonS3SourceType", + "ObservabilityPipelineAwsAuth", "ObservabilityPipelineConfig", "ObservabilityPipelineConfigDestinationItem", "ObservabilityPipelineConfigProcessorItem", "ObservabilityPipelineConfigSourceItem", - "ObservabilityPipelineCreateRequest", - "ObservabilityPipelineCreateRequestData", "ObservabilityPipelineData", "ObservabilityPipelineDataAttributes", "ObservabilityPipelineDatadogAgentSource", "ObservabilityPipelineDatadogAgentSourceType", "ObservabilityPipelineDatadogLogsDestination", "ObservabilityPipelineDatadogLogsDestinationType", + 
"ObservabilityPipelineDecoding", + "ObservabilityPipelineDedupeProcessor", + "ObservabilityPipelineDedupeProcessorMode", + "ObservabilityPipelineDedupeProcessorType", + "ObservabilityPipelineElasticsearchDestination", + "ObservabilityPipelineElasticsearchDestinationApiVersion", + "ObservabilityPipelineElasticsearchDestinationType", + "ObservabilityPipelineEnrichmentTableFile", + "ObservabilityPipelineEnrichmentTableFileEncoding", + "ObservabilityPipelineEnrichmentTableFileEncodingType", + "ObservabilityPipelineEnrichmentTableFileKeyItems", + "ObservabilityPipelineEnrichmentTableFileKeyItemsComparison", + "ObservabilityPipelineEnrichmentTableFileSchemaItems", + "ObservabilityPipelineEnrichmentTableFileSchemaItemsType", + "ObservabilityPipelineEnrichmentTableGeoIp", + "ObservabilityPipelineEnrichmentTableProcessor", + "ObservabilityPipelineEnrichmentTableProcessorType", "ObservabilityPipelineFieldValue", "ObservabilityPipelineFilterProcessor", "ObservabilityPipelineFilterProcessorType", + "ObservabilityPipelineFluentBitSource", + "ObservabilityPipelineFluentBitSourceType", + "ObservabilityPipelineFluentdSource", + "ObservabilityPipelineFluentdSourceType", + "ObservabilityPipelineGcpAuth", + "ObservabilityPipelineGenerateMetricsProcessor", + "ObservabilityPipelineGenerateMetricsProcessorType", + "ObservabilityPipelineGeneratedMetric", + "ObservabilityPipelineGeneratedMetricIncrementByField", + "ObservabilityPipelineGeneratedMetricIncrementByFieldStrategy", + "ObservabilityPipelineGeneratedMetricIncrementByOne", + "ObservabilityPipelineGeneratedMetricIncrementByOneStrategy", + "ObservabilityPipelineGeneratedMetricMetricType", + "ObservabilityPipelineGoogleChronicleDestination", + "ObservabilityPipelineGoogleChronicleDestinationEncoding", + "ObservabilityPipelineGoogleChronicleDestinationType", + "ObservabilityPipelineGoogleCloudStorageDestination", + "ObservabilityPipelineGoogleCloudStorageDestinationAcl", + 
"ObservabilityPipelineGoogleCloudStorageDestinationStorageClass", + "ObservabilityPipelineGoogleCloudStorageDestinationType", + "ObservabilityPipelineGooglePubSubSource", + "ObservabilityPipelineGooglePubSubSourceType", + "ObservabilityPipelineHttpClientSource", + "ObservabilityPipelineHttpClientSourceAuthStrategy", + "ObservabilityPipelineHttpClientSourceType", + "ObservabilityPipelineHttpServerSource", + "ObservabilityPipelineHttpServerSourceAuthStrategy", + "ObservabilityPipelineHttpServerSourceType", "ObservabilityPipelineKafkaSource", "ObservabilityPipelineKafkaSourceLibrdkafkaOption", "ObservabilityPipelineKafkaSourceSasl", "ObservabilityPipelineKafkaSourceType", + "ObservabilityPipelineLogstashSource", + "ObservabilityPipelineLogstashSourceType", + "ObservabilityPipelineMetadataEntry", + "ObservabilityPipelineMetricValue", + "ObservabilityPipelineNewRelicDestination", + "ObservabilityPipelineNewRelicDestinationRegion", + "ObservabilityPipelineNewRelicDestinationType", + "ObservabilityPipelineOcsfMapperProcessor", + "ObservabilityPipelineOcsfMapperProcessorMapping", + "ObservabilityPipelineOcsfMapperProcessorMappingMapping", + "ObservabilityPipelineOcsfMapperProcessorType", + "ObservabilityPipelineOcsfMappingLibrary", + "ObservabilityPipelineOpenSearchDestination", + "ObservabilityPipelineOpenSearchDestinationType", + "ObservabilityPipelineParseGrokProcessor", + "ObservabilityPipelineParseGrokProcessorRule", + "ObservabilityPipelineParseGrokProcessorRuleMatchRule", + "ObservabilityPipelineParseGrokProcessorRuleSupportRule", + "ObservabilityPipelineParseGrokProcessorType", "ObservabilityPipelineParseJSONProcessor", "ObservabilityPipelineParseJSONProcessorType", "ObservabilityPipelinePipelineKafkaSourceSaslMechanism", "ObservabilityPipelineQuotaProcessor", "ObservabilityPipelineQuotaProcessorLimit", "ObservabilityPipelineQuotaProcessorLimitEnforceType", + "ObservabilityPipelineQuotaProcessorOverflowAction", "ObservabilityPipelineQuotaProcessorOverride", 
"ObservabilityPipelineQuotaProcessorType", + "ObservabilityPipelineReduceProcessor", + "ObservabilityPipelineReduceProcessorMergeStrategy", + "ObservabilityPipelineReduceProcessorMergeStrategyStrategy", + "ObservabilityPipelineReduceProcessorType", "ObservabilityPipelineRemoveFieldsProcessor", "ObservabilityPipelineRemoveFieldsProcessorType", "ObservabilityPipelineRenameFieldsProcessor", "ObservabilityPipelineRenameFieldsProcessorField", "ObservabilityPipelineRenameFieldsProcessorType", + "ObservabilityPipelineRsyslogDestination", + "ObservabilityPipelineRsyslogDestinationType", + "ObservabilityPipelineRsyslogSource", + "ObservabilityPipelineRsyslogSourceType", + "ObservabilityPipelineSampleProcessor", + "ObservabilityPipelineSampleProcessorType", + "ObservabilityPipelineSensitiveDataScannerProcessor", + "ObservabilityPipelineSensitiveDataScannerProcessorAction", + "ObservabilityPipelineSensitiveDataScannerProcessorActionHash", + "ObservabilityPipelineSensitiveDataScannerProcessorActionHashAction", + "ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedact", + "ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactAction", + "ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptions", + "ObservabilityPipelineSensitiveDataScannerProcessorActionPartialRedactOptionsDirection", + "ObservabilityPipelineSensitiveDataScannerProcessorActionRedact", + "ObservabilityPipelineSensitiveDataScannerProcessorActionRedactAction", + "ObservabilityPipelineSensitiveDataScannerProcessorActionRedactOptions", + "ObservabilityPipelineSensitiveDataScannerProcessorCustomPattern", + "ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternOptions", + "ObservabilityPipelineSensitiveDataScannerProcessorCustomPatternType", + "ObservabilityPipelineSensitiveDataScannerProcessorKeywordOptions", + "ObservabilityPipelineSensitiveDataScannerProcessorLibraryPattern", + "ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternOptions", + 
"ObservabilityPipelineSensitiveDataScannerProcessorLibraryPatternType", + "ObservabilityPipelineSensitiveDataScannerProcessorPattern", + "ObservabilityPipelineSensitiveDataScannerProcessorRule", + "ObservabilityPipelineSensitiveDataScannerProcessorScope", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeAll", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeAllTarget", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeExclude", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeExcludeTarget", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeInclude", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeIncludeTarget", + "ObservabilityPipelineSensitiveDataScannerProcessorScopeOptions", + "ObservabilityPipelineSensitiveDataScannerProcessorType", + "ObservabilityPipelineSentinelOneDestination", + "ObservabilityPipelineSentinelOneDestinationRegion", + "ObservabilityPipelineSentinelOneDestinationType", + "ObservabilityPipelineSpec", + "ObservabilityPipelineSpecData", + "ObservabilityPipelineSplunkHecDestination", + "ObservabilityPipelineSplunkHecDestinationEncoding", + "ObservabilityPipelineSplunkHecDestinationType", + "ObservabilityPipelineSplunkHecSource", + "ObservabilityPipelineSplunkHecSourceType", + "ObservabilityPipelineSplunkTcpSource", + "ObservabilityPipelineSplunkTcpSourceType", + "ObservabilityPipelineSumoLogicDestination", + "ObservabilityPipelineSumoLogicDestinationEncoding", + "ObservabilityPipelineSumoLogicDestinationHeaderCustomFieldsItem", + "ObservabilityPipelineSumoLogicDestinationType", + "ObservabilityPipelineSumoLogicSource", + "ObservabilityPipelineSumoLogicSourceType", + "ObservabilityPipelineSyslogNgDestination", + "ObservabilityPipelineSyslogNgDestinationType", + "ObservabilityPipelineSyslogNgSource", + "ObservabilityPipelineSyslogNgSourceType", + "ObservabilityPipelineSyslogSourceMode", + "ObservabilityPipelineThrottleProcessor", + "ObservabilityPipelineThrottleProcessorType", 
"ObservabilityPipelineTls", "OktaAccount", "OktaAccountAttributes", @@ -5315,6 +5852,9 @@ "UsersRelationship", "UsersResponse", "UsersType", + "ValidationError", + "ValidationErrorMeta", + "ValidationResponse", "VulnerabilitiesType", "Vulnerability", "VulnerabilityAttributes", diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen index d1b8c94c60..71a6a09485 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-04-04T10:36:58.031Z \ No newline at end of file +2025-04-25T17:29:21.625Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen index 0fc9b398ff..a6261e736f 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-04-04T10:36:58.537Z \ No newline at end of file +2025-04-25T17:29:23.837Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml index 888fd4a49c..165507fcc0 100644 --- a/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_create_a_new_pipeline_returns_ok_response.yaml @@ -11,7 +11,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: 
'{"data":{"id":"be354bf0-1140-11f0-9a6b-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"d891e45c-21fa-11f0-96dc-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -27,7 +27,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/be354bf0-1140-11f0-9a6b-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/d891e45c-21fa-11f0-96dc-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen index 4f32c245d6..033a0755d0 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_not_found_response.frozen @@ -1 +1 @@ -2025-04-04T10:36:59.510Z \ No newline at end of file +2025-04-25T17:29:25.292Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen index 5489af2d1a..9cbc728e00 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-04-04T10:37:00.184Z \ No newline at end of file +2025-04-25T17:29:26.847Z \ No newline at end of file diff --git 
a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml index f184ed4e96..a06d7942c9 100644 --- a/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_delete_a_pipeline_returns_ok_response.yaml @@ -11,7 +11,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"bf2c23da-1140-11f0-9a95-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"da5f2218-21fa-11f0-96de-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -27,7 +27,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bf2c23da-1140-11f0-9a95-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/da5f2218-21fa-11f0-96de-da7ad0900002 response: body: string: '' @@ -43,7 +43,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/bf2c23da-1140-11f0-9a95-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/da5f2218-21fa-11f0-96de-da7ad0900002 response: body: string: '{"errors":[{"title":"Resource Not Found"}]} diff --git a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen index 
5002994582..f863fd1ac2 100644 --- a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-04-04T10:37:02.118Z \ No newline at end of file +2025-04-25T17:29:29.196Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml index 274bc14412..c680773358 100644 --- a/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_get_a_specific_pipeline_returns_ok_response.yaml @@ -11,7 +11,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"c06831ee-1140-11f0-9fe5-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"dbbff3d0-21fa-11f0-96e0-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -27,10 +27,10 @@ interactions: accept: - application/json method: GET - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c06831ee-1140-11f0-9fe5-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/dbbff3d0-21fa-11f0-96e0-da7ad0900002 response: body: - string: '{"data":{"id":"c06831ee-1140-11f0-9fe5-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"dbbff3d0-21fa-11f0-96e0-da7ad0900002","type":"pipelines","attributes":{"name":"Main 
Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -46,7 +46,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c06831ee-1140-11f0-9fe5-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/dbbff3d0-21fa-11f0-96e0-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen new file mode 100644 index 0000000000..3a7978da8a --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.frozen @@ -0,0 +1 @@ +2025-04-25T17:29:31.769Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml new file mode 100644 index 0000000000..7bcaf73401 --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_bad_request_response.yaml @@ -0,0 +1,20 @@ +interactions: +- request: + body: null + headers: + accept: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines?page%5Bsize%5D=0 + response: + body: + string: '{"errors":[{"title":"page[size] must be a number between 1 and 50"}]} + + ' + headers: + content-type: + - application/json + status: + code: 400 + message: Bad Request +version: 1 diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen 
b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen new file mode 100644 index 0000000000..4c9f3cfcbc --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.frozen @@ -0,0 +1 @@ +2025-04-25T17:29:32.322Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml new file mode 100644 index 0000000000..3c0f76dafe --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_list_pipelines_returns_ok_response.yaml @@ -0,0 +1,83 @@ +interactions: +- request: + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + Observability Pipeline"},"type":"pipelines"}}' + headers: + accept: + - application/json + content-type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + response: + body: + string: '{"data":{"id":"dd87c652-21fa-11f0-96e2-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} + + ' + headers: + content-type: + - application/json + status: + code: 201 + message: Created +- request: + body: null + headers: + accept: + - application/json + method: GET + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines + response: + body: + string: 
'{"data":[{"id":"8d85d864-0f09-11f0-9711-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"3f339054-10ab-11f0-88a2-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"b8c068de-10ab-11f0-88a8-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"32e2f90a-1139-11f0-8501-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"67e79020-1139-11f0-98d1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"30966e06-113a-11f0-98e1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"3e3f9382-138f-11f0-8cf0-da7ad0900002","type":"pipelines","attributes":{"name":"test + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"service:my-service","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","tls":{"crt_file":"/path/to/cert.crt"},"type":"datadog_agent"}]}}},{"id":"42159650-138f-11f0-a2aa-da7ad0900002","type":"pipelines","attributes":{"name":"test + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"service:my-service","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","tls":{"crt_file":"/path/to/cert.crt"},"type":"datadog_agent"}]}}},{"id":"6cc001f8-1392-11f0-9e35-da7ad0900002","type":"pipelines","attributes":{"name":"agent + with 
tls","config":{"destinations":[{"id":"destination-1","inputs":["source-with-tls"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"source-with-tls","tls":{"ca_file":"/etc/certs/ca.crt","crt_file":"/etc/certs/agent.crt","key_file":"/etc/certs/agent.key"},"type":"datadog_agent"}]}}},{"id":"bdf5078e-139d-11f0-8e4f-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"e67402d2-139d-11f0-8e51-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"e9849f22-139d-11f0-8e53-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"f9845bb0-139d-11f0-b101-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"2ebd82ca-139e-11f0-b103-da7ad0900002","type":"pipelines","
attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"31ba5a34-139e-11f0-8e55-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"3aa802c2-139e-11f0-8e57-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"7b3daaf8-139e-11f0-8e59-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"7e72d32e-139e-11f0-8e5b-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"818
a8728-139e-11f0-8e5d-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"bed86e88-139e-11f0-8e5f-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"c202b050-139e-11f0-b107-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"c5111084-139e-11f0-8e61-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"e01ae6de-139e-11f0-8e63-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[
{"id":"source-1","type":"datadog_agent"}]}}},{"id":"e33ad356-139e-11f0-b109-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"e6783e78-139e-11f0-b10b-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"04407bfa-139f-11f0-8e65-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"126be958-139f-11f0-8e67-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"97607354-139f-11f0-8e6b-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*",
"inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"305f9a88-13a2-11f0-8dd9-da7ad0900002","type":"pipelines","attributes":{"name":"parse-json-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"env:parse","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"45669120-13a2-11f0-8ddb-da7ad0900002","type":"pipelines","attributes":{"name":"parse-json-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"env:parse","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"c00f7548-13a4-11f0-9315-da7ad0900002","type":"pipelines","attributes":{"name":"test + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"service:my-service","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"c07cf206-13a6-11f0-949d-da7ad0900002","type":"pipelines","attributes":{"name":"test + 
pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"service:my-service","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"8ff06e36-13ad-11f0-8243-da7ad0900002","type":"pipelines","attributes":{"name":"add-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["add-fields-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"name":"custom.field","value":"hello-world"}],"id":"add-fields-1","include":"*","inputs":["source-1"],"type":"add_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"8ff1a508-13ad-11f0-b934-da7ad0900002","type":"pipelines","attributes":{"name":"agent + with tls","config":{"destinations":[{"id":"destination-1","inputs":["source-with-tls"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"source-with-tls","tls":{"crt_file":"/etc/certs/agent.crt","key_file":"/etc/certs/agent.key"},"type":"datadog_agent"}]}}},{"id":"8ff38a08-13ad-11f0-8245-da7ad0900002","type":"pipelines","attributes":{"name":"parse-json-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["parser-1"],"type":"datadog_logs"}],"processors":[{"field":"message","id":"parser-1","include":"env:parse","inputs":["source-1"],"type":"parse_json"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"8ff54f0a-13ad-11f0-8247-da7ad0900002","type":"pipelines","attributes":{"name":"kafka + pipeline","config":{"destinations":[{"id":"destination-1","inputs":["kafka-source-1"],"type":"datadog_logs"}],"processors":[],"sources":[{"group_id":"consumer-group-1","id":"kafka-source-1","sasl":{"mechanism":"PLAIN"},"tls":{"ca_file":"","crt_file":"/path/to/kafka.crt"},"topics":["topic-a","topic-b"],"type":"kafka"}]}}},{"id":"f814235e-13ad-11f0-b942-da7ad0900002","type":"pipelines","attributes":{"name":"agent + with 
tls","config":{"destinations":[{"id":"destination-1","inputs":["source-with-tls"],"type":"datadog_logs"}],"processors":[],"sources":[{"id":"source-with-tls","tls":{"crt_file":"/etc/certs/agent.crt","key_file":"/etc/certs/agent.key"},"type":"datadog_agent"}]}}},{"id":"5b1a9314-13ce-11f0-a9de-da7ad0900002","type":"pipelines","attributes":{"name":"rename-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["rename-1"],"type":"datadog_logs"}],"processors":[{"fields":[{"destination":"new.field","preserve_source":true,"source":"old.field"}],"id":"rename-1","include":"*","inputs":["source-1"],"type":"rename_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"1e356802-1419-11f0-8834-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"14156a86-142b-11f0-96f3-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"a5280a06-143a-11f0-aac4-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"5005f6c2-1481-11f0-8faa-da7ad0900002","type":"pipelines","attributes":{"name":"remove-fields-pipeline","config":{"destinations":[{"id":"destination-1","inputs":["remove-1"],"type":"datadog_logs"}],"processors":[{"fields":["temp.debug","internal.trace_id"],"id":"remove-1","include":"*","inputs":["source-1"],"type":"remove_fields"}],"sources":[{"id":"source-1","type":"datadog_agent"}]}}},{"id":"9c1776c0-14d7-11f0-87c9-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"b1e4c58c-1501-11f0-b0bd-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"43a8a4cc-15c3-11f0-b111-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"45042a58-15c3-11f0-b113-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"f9de3956-15cc-11f0-ac43-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"b3efaa52-1693-11f0-89e0-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"24c83620-1696-11f0-89e6-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability 
Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}},{"id":"5d2875d6-17a2-11f0-9bd1-da7ad0900002","type":"pipelines","attributes":{"name":"Main + Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}],"meta":{"totalCount":128}} + + ' + headers: + content-type: + - application/vnd.api+json + status: + code: 200 + message: OK +- request: + body: null + headers: + accept: + - '*/*' + method: DELETE + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/dd87c652-21fa-11f0-96e2-da7ad0900002 + response: + body: + string: '' + headers: + content-type: + - application/json + status: + code: 204 + message: No Content +version: 1 diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen index 01305de98f..a8aa863f11 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.frozen @@ -1 +1 @@ -2025-04-04T10:37:04.190Z \ No newline at end of file +2025-04-25T17:29:34.672Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml index 9a861c39a7..6db97980e1 100644 --- 
a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_bad_request_response.yaml @@ -11,7 +11,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"c193d7da-1140-11f0-9a6d-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"deeea5f6-21fa-11f0-96e4-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -30,7 +30,7 @@ interactions: content-type: - application/json method: PUT - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c193d7da-1140-11f0-9a6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/deeea5f6-21fa-11f0-96e4-da7ad0900002 response: body: string: '{"errors":[{"title":"Component with ID filter-processor is an unknown @@ -52,7 +52,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c193d7da-1140-11f0-9a6d-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/deeea5f6-21fa-11f0-96e4-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen index b453d330d3..81821e325e 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen +++ 
b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_not_found_response.frozen @@ -1 +1 @@ -2025-04-04T10:37:06.031Z \ No newline at end of file +2025-04-25T17:29:36.564Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen index 4fba4617c1..8195f92c72 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.frozen @@ -1 +1 @@ -2025-04-04T10:37:06.485Z \ No newline at end of file +2025-04-25T17:29:36.987Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml index 176f7aff9e..115d89402b 100644 --- a/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml +++ b/tests/v2/cassettes/test_scenarios/test_update_a_pipeline_returns_ok_response.yaml @@ -11,7 +11,7 @@ interactions: uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines response: body: - string: '{"data":{"id":"c2ee25ae-1140-11f0-9fe7-da7ad0900002","type":"pipelines","attributes":{"name":"Main + string: '{"data":{"id":"e04d5230-21fa-11f0-96e6-da7ad0900002","type":"pipelines","attributes":{"name":"Main Observability Pipeline","config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -30,10 +30,10 @@ interactions: content-type: - application/json method: PUT - uri: 
https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c2ee25ae-1140-11f0-9fe7-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/e04d5230-21fa-11f0-96e6-da7ad0900002 response: body: - string: '{"data":{"id":"c2ee25ae-1140-11f0-9fe7-da7ad0900002","type":"pipelines","attributes":{"name":"Updated + string: '{"data":{"id":"e04d5230-21fa-11f0-96e6-da7ad0900002","type":"pipelines","attributes":{"name":"Updated Pipeline Name","config":{"destinations":[{"id":"updated-datadog-logs-destination-id","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]}}}} ' @@ -49,7 +49,7 @@ interactions: accept: - '*/*' method: DELETE - uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/c2ee25ae-1140-11f0-9fe7-da7ad0900002 + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/e04d5230-21fa-11f0-96e6-da7ad0900002 response: body: string: '' diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen new file mode 100644 index 0000000000..f19e635b27 --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.frozen @@ -0,0 +1 @@ +2025-04-25T17:29:39.236Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml new file mode 100644 index 0000000000..7a8f9daae3 --- /dev/null +++ 
b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_bad_request_response.yaml @@ -0,0 +1,24 @@ +interactions: +- request: + body: '{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + Observability Pipeline"},"type":"pipelines"}}' + headers: + accept: + - application/json + content-type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/validate + response: + body: + string: '{"errors":[{"title":"Field ''include'' is required","meta":{"field":"include","id":"filter-processor","message":"Field + ''include'' is required"}}]} + + ' + headers: + content-type: + - application/json + status: + code: 400 + message: Bad Request +version: 1 diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen new file mode 100644 index 0000000000..c2e1e3c736 --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.frozen @@ -0,0 +1 @@ +2025-04-25T17:29:39.613Z \ No newline at end of file diff --git a/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml new file mode 100644 index 0000000000..e1cbf59d65 --- /dev/null +++ b/tests/v2/cassettes/test_scenarios/test_validate_an_observability_pipeline_returns_ok_response.yaml @@ -0,0 +1,23 @@ +interactions: +- request: + body: 
'{"data":{"attributes":{"config":{"destinations":[{"id":"datadog-logs-destination","inputs":["filter-processor"],"type":"datadog_logs"}],"processors":[{"id":"filter-processor","include":"service:my-service","inputs":["datadog-agent-source"],"type":"filter"}],"sources":[{"id":"datadog-agent-source","type":"datadog_agent"}]},"name":"Main + Observability Pipeline"},"type":"pipelines"}}' + headers: + accept: + - application/json + content-type: + - application/json + method: POST + uri: https://api.datadoghq.com/api/v2/remote_config/products/obs_pipelines/pipelines/validate + response: + body: + string: '{"errors":[]} + + ' + headers: + content-type: + - application/json + status: + code: 200 + message: OK +version: 1 diff --git a/tests/v2/features/observability_pipelines.feature b/tests/v2/features/observability_pipelines.feature index f66f9bc618..c7149b1e4f 100644 --- a/tests/v2/features/observability_pipelines.feature +++ b/tests/v2/features/observability_pipelines.feature @@ -78,6 +78,28 @@ Feature: Observability Pipelines And the response "data.attributes.config.processors" has length 1 And the response "data.attributes.config.destinations" has length 1 + @team:DataDog/observability-pipelines + Scenario: List pipelines returns "Bad Request" response + Given operation "ListPipelines" enabled + And new "ListPipelines" request + And request contains "page[size]" parameter with value 0 + When the request is sent + Then the response status is 400 Bad Request + + @team:DataDog/observability-pipelines + Scenario: List pipelines returns "OK" response + Given operation "ListPipelines" enabled + And there is a valid "pipeline" in the system + And new "ListPipelines" request + When the request is sent + Then the response status is 200 OK + And the response "data[0]" has field "id" + And the response "data[0].type" is equal to "pipelines" + And the response "data[0].attributes.name" is equal to "Main Observability Pipeline" + And the response 
"data[0].attributes.config.sources" has length 1 + And the response "data[0].attributes.config.processors" has length 1 + And the response "data[0].attributes.config.destinations" has length 1 + @team:DataDog/observability-pipelines Scenario: Update a pipeline returns "Bad Request" response Given operation "UpdatePipeline" enabled @@ -122,3 +144,24 @@ Feature: Observability Pipelines And the response "data.attributes.config.processors" has length 1 And the response "data.attributes.config.destinations" has length 1 And the response "data.attributes.config.destinations[0].id" is equal to "updated-datadog-logs-destination-id" + + @team:DataDog/observability-pipelines + Scenario: Validate an observability pipeline returns "Bad Request" response + Given operation "ValidatePipeline" enabled + And new "ValidatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": "filter-processor", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 400 Bad Request + And the response "errors[0].title" is equal to "Field 'include' is required" + And the response "errors[0].meta.field" is equal to "include" + And the response "errors[0].meta.id" is equal to "filter-processor" + And the response "errors[0].meta.message" is equal to "Field 'include' is required" + + @team:DataDog/observability-pipelines + Scenario: Validate an observability pipeline returns "OK" response + Given operation "ValidatePipeline" enabled + And new "ValidatePipeline" request + And body with value {"data": {"attributes": {"config": {"destinations": [{"id": "datadog-logs-destination", "inputs": ["filter-processor"], "type": "datadog_logs"}], "processors": [{"id": 
"filter-processor", "include": "service:my-service", "inputs": ["datadog-agent-source"], "type": "filter"}], "sources": [{"id": "datadog-agent-source", "type": "datadog_agent"}]}, "name": "Main Observability Pipeline"}, "type": "pipelines"}} + When the request is sent + Then the response status is 200 OK + And the response "errors" has length 0 diff --git a/tests/v2/features/undo.json b/tests/v2/features/undo.json index 67dca81fb6..4aba315c34 100644 --- a/tests/v2/features/undo.json +++ b/tests/v2/features/undo.json @@ -2098,6 +2098,12 @@ "type": "safe" } }, + "ListPipelines": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "CreatePipeline": { "tag": "Observability Pipelines", "undo": { @@ -2111,6 +2117,12 @@ "type": "unsafe" } }, + "ValidatePipeline": { + "tag": "Observability Pipelines", + "undo": { + "type": "safe" + } + }, "DeletePipeline": { "tag": "Observability Pipelines", "undo": {