diff --git a/.generated-info b/.generated-info index 60f157a2c..44599f873 100644 --- a/.generated-info +++ b/.generated-info @@ -1,4 +1,4 @@ { - "spec_repo_commit": "4413e63", - "generated": "2025-08-19 20:33:59.967" + "spec_repo_commit": "ca16233", + "generated": "2025-08-21 17:14:19.116" } diff --git a/.generator/schemas/v2/openapi.yaml b/.generator/schemas/v2/openapi.yaml index 5dc52e4a2..7be522b93 100644 --- a/.generator/schemas/v2/openapi.yaml +++ b/.generator/schemas/v2/openapi.yaml @@ -4983,6 +4983,8 @@ components: description: Optional prefix for blobs written to the container. example: logs/ type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' container_name: description: The name of the Azure Blob Storage container to store logs in. @@ -25037,6 +25039,8 @@ components: description: The `microsoft_sentinel` destination forwards logs to Microsoft Sentinel. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' client_id: description: Azure AD client ID used for authentication. example: a1b2c3d4-5678-90ab-cdef-1234567890ab @@ -26669,6 +26673,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineAmazonOpenSearchDestinationAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -26747,6 +26753,8 @@ components: description: S3 bucket name. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: amazon-s3-destination @@ -26919,6 +26927,30 @@ components: role session. type: string type: object + ObservabilityPipelineBufferOptions: + description: Configuration for buffer settings on destination components. 
+ oneOf: + - $ref: '#/components/schemas/ObservabilityPipelineDiskBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferOptions' + - $ref: '#/components/schemas/ObservabilityPipelineMemoryBufferSizeOptions' + ObservabilityPipelineBufferOptionsDiskType: + default: disk + description: Specifies the buffer type to configure. This option supports only + a disk buffer. + enum: + - disk + type: string + x-enum-varnames: + - DISK + ObservabilityPipelineBufferOptionsMemoryType: + default: memory + description: Specifies the buffer type to configure. This option supports only + a memory buffer. + enum: + - memory + type: string + x-enum-varnames: + - MEMORY ObservabilityPipelineConfig: description: Specifies the pipeline's configuration, including its sources, processors, and destinations. @@ -27232,6 +27264,8 @@ components: ObservabilityPipelineDatadogLogsDestination: description: The `datadog_logs` destination forwards logs to Datadog Log Management. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: datadog-logs-destination @@ -27407,12 +27441,25 @@ components: type: string x-enum-varnames: - DEDUPE + ObservabilityPipelineDiskBufferOptions: + description: Options for configuring a disk buffer. + properties: + max_size: + description: Maximum size of the disk buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsDiskType' + type: object ObservabilityPipelineElasticsearchDestination: description: The `elasticsearch` destination writes logs to an Elasticsearch cluster. properties: api_version: $ref: '#/components/schemas/ObservabilityPipelineElasticsearchDestinationApiVersion' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to in Elasticsearch. 
example: logs-index @@ -27897,6 +27944,8 @@ components: properties: auth: $ref: '#/components/schemas/ObservabilityPipelineGcpAuth' + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' customer_id: description: The Google Chronicle customer ID. example: abcdefg123456789 @@ -27963,6 +28012,8 @@ components: description: Name of the GCS bucket. example: error-logs type: string + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: Unique identifier for the destination component. example: gcs-destination @@ -28273,6 +28324,28 @@ components: type: string x-enum-varnames: - LOGSTASH + ObservabilityPipelineMemoryBufferOptions: + description: Options for configuring a memory buffer by byte size. + properties: + max_size: + description: Maximum size of the memory buffer. + example: 4096 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object + ObservabilityPipelineMemoryBufferSizeOptions: + description: Options for configuring a memory buffer by queue length. + properties: + max_events: + description: Maximum events for the memory buffer. + example: 500 + format: int64 + type: integer + type: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptionsMemoryType' + type: object ObservabilityPipelineMetadataEntry: description: A custom metadata entry. properties: @@ -28296,6 +28369,8 @@ components: ObservabilityPipelineNewRelicDestination: description: The `new_relic` destination sends logs to the New Relic platform. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: new-relic-destination @@ -28433,6 +28508,8 @@ components: ObservabilityPipelineOpenSearchDestination: description: The `opensearch` destination writes logs to an OpenSearch cluster. 
properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' bulk_index: description: The index to write logs to. example: logs-index @@ -28657,9 +28734,10 @@ components: can drop or alert. properties: drop_events: - description: If set to `true`, logs that matched the quota filter and sent - after the quota has been met are dropped; only logs that did not match - the filter query continue through the pipeline. + description: 'If set to `true`, logs that match the quota filter and are + sent after the quota is exceeded are dropped. Logs that do not match the + filter continue through the pipeline. **Note**: You can set either `drop_events` + or `overflow_action`, but not both.' example: false type: boolean id: @@ -28707,6 +28785,8 @@ components: items: type: string type: array + too_many_buckets_action: + $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorOverflowAction' type: $ref: '#/components/schemas/ObservabilityPipelineQuotaProcessorType' required: @@ -28714,7 +28794,6 @@ components: - type - include - name - - drop_events - limit - inputs type: object @@ -28745,7 +28824,8 @@ components: - BYTES - EVENTS ObservabilityPipelineQuotaProcessorOverflowAction: - description: 'The action to take when the quota is exceeded. Options: + description: 'The action to take when the quota or bucket limit is exceeded. + Options: - `drop`: Drop the event. @@ -29006,6 +29086,8 @@ components: description: The `rsyslog` destination forwards logs to an external `rsyslog` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: rsyslog-destination @@ -29076,6 +29158,16 @@ components: description: The `sample` processor allows probabilistic sampling of logs at a fixed rate. properties: + group_by: + description: Optional list of fields to group events by. Each group is sampled + independently. 
+ example: + - service + - host + items: + type: string + minItems: 1 + type: array id: description: The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` @@ -29480,6 +29572,8 @@ components: ObservabilityPipelineSentinelOneDestination: description: The `sentinel_one` destination sends logs to SentinelOne. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. example: sentinelone-destination @@ -29836,6 +29930,8 @@ components: ' example: true type: boolean + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSplunkHecDestinationEncoding' id: @@ -29949,6 +30045,8 @@ components: ObservabilityPipelineSumoLogicDestination: description: The `sumo_logic` destination forwards logs to Sumo Logic. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' encoding: $ref: '#/components/schemas/ObservabilityPipelineSumoLogicDestinationEncoding' header_custom_fields: @@ -30052,6 +30150,8 @@ components: description: The `syslog_ng` destination forwards logs to an external `syslog-ng` server over TCP or UDP using the syslog protocol. properties: + buffer: + $ref: '#/components/schemas/ObservabilityPipelineBufferOptions' id: description: The unique identifier for this component. 
example: syslog-ng-destination diff --git a/src/datadogV2/model/mod.rs b/src/datadogV2/model/mod.rs index 5be05b819..734901091 100644 --- a/src/datadogV2/model/mod.rs +++ b/src/datadogV2/model/mod.rs @@ -3814,6 +3814,18 @@ pub mod model_observability_pipeline_config; pub use self::model_observability_pipeline_config::ObservabilityPipelineConfig; pub mod model_observability_pipeline_datadog_logs_destination; pub use self::model_observability_pipeline_datadog_logs_destination::ObservabilityPipelineDatadogLogsDestination; +pub mod model_observability_pipeline_disk_buffer_options; +pub use self::model_observability_pipeline_disk_buffer_options::ObservabilityPipelineDiskBufferOptions; +pub mod model_observability_pipeline_buffer_options_disk_type; +pub use self::model_observability_pipeline_buffer_options_disk_type::ObservabilityPipelineBufferOptionsDiskType; +pub mod model_observability_pipeline_memory_buffer_options; +pub use self::model_observability_pipeline_memory_buffer_options::ObservabilityPipelineMemoryBufferOptions; +pub mod model_observability_pipeline_buffer_options_memory_type; +pub use self::model_observability_pipeline_buffer_options_memory_type::ObservabilityPipelineBufferOptionsMemoryType; +pub mod model_observability_pipeline_memory_buffer_size_options; +pub use self::model_observability_pipeline_memory_buffer_size_options::ObservabilityPipelineMemoryBufferSizeOptions; +pub mod model_observability_pipeline_buffer_options; +pub use self::model_observability_pipeline_buffer_options::ObservabilityPipelineBufferOptions; pub mod model_observability_pipeline_datadog_logs_destination_type; pub use self::model_observability_pipeline_datadog_logs_destination_type::ObservabilityPipelineDatadogLogsDestinationType; pub mod model_observability_pipeline_amazon_s3_destination; diff --git a/src/datadogV2/model/model_azure_storage_destination.rs b/src/datadogV2/model/model_azure_storage_destination.rs index 3725cf911..277b3c34a 100644 --- 
a/src/datadogV2/model/model_azure_storage_destination.rs +++ b/src/datadogV2/model/model_azure_storage_destination.rs @@ -14,6 +14,9 @@ pub struct AzureStorageDestination { /// Optional prefix for blobs written to the container. #[serde(rename = "blob_prefix")] pub blob_prefix: Option, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The name of the Azure Blob Storage container to store logs in. #[serde(rename = "container_name")] pub container_name: String, @@ -42,6 +45,7 @@ impl AzureStorageDestination { ) -> AzureStorageDestination { AzureStorageDestination { blob_prefix: None, + buffer: None, container_name, id, inputs, @@ -56,6 +60,14 @@ impl AzureStorageDestination { self } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn additional_properties( mut self, value: std::collections::BTreeMap, @@ -83,6 +95,9 @@ impl<'de> Deserialize<'de> for AzureStorageDestination { M: MapAccess<'a>, { let mut blob_prefix: Option = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut container_name: Option = None; let mut id: Option = None; let mut inputs: Option> = None; @@ -102,6 +117,20 @@ impl<'de> Deserialize<'de> for AzureStorageDestination { blob_prefix = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "container_name" => { container_name = Some(serde_json::from_value(v).map_err(M::Error::custom)?); @@ -138,6 +167,7 @@ impl<'de> Deserialize<'de> for AzureStorageDestination { let content = 
AzureStorageDestination { blob_prefix, + buffer, container_name, id, inputs, diff --git a/src/datadogV2/model/model_microsoft_sentinel_destination.rs b/src/datadogV2/model/model_microsoft_sentinel_destination.rs index 0f6414e05..f99dac779 100644 --- a/src/datadogV2/model/model_microsoft_sentinel_destination.rs +++ b/src/datadogV2/model/model_microsoft_sentinel_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct MicrosoftSentinelDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// Azure AD client ID used for authentication. #[serde(rename = "client_id")] pub client_id: String, @@ -50,6 +53,7 @@ impl MicrosoftSentinelDestination { type_: crate::datadogV2::model::MicrosoftSentinelDestinationType, ) -> MicrosoftSentinelDestination { MicrosoftSentinelDestination { + buffer: None, client_id, dcr_immutable_id, id, @@ -62,6 +66,14 @@ impl MicrosoftSentinelDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn additional_properties( mut self, value: std::collections::BTreeMap, @@ -88,6 +100,9 @@ impl<'de> Deserialize<'de> for MicrosoftSentinelDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut client_id: Option = None; let mut dcr_immutable_id: Option = None; let mut id: Option = None; @@ -104,6 +119,20 @@ impl<'de> Deserialize<'de> for MicrosoftSentinelDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "client_id" => { client_id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -151,6 +180,7 @@ impl<'de> Deserialize<'de> for MicrosoftSentinelDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = MicrosoftSentinelDestination { + buffer, client_id, dcr_immutable_id, id, diff --git a/src/datadogV2/model/model_observability_pipeline_amazon_open_search_destination.rs b/src/datadogV2/model/model_observability_pipeline_amazon_open_search_destination.rs index e19fe9bcc..50da1880f 100644 --- a/src/datadogV2/model/model_observability_pipeline_amazon_open_search_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_amazon_open_search_destination.rs @@ -16,6 +16,9 @@ pub struct ObservabilityPipelineAmazonOpenSearchDestination { /// #[serde(rename = "auth")] pub auth: crate::datadogV2::model::ObservabilityPipelineAmazonOpenSearchDestinationAuth, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The index to write logs to. 
#[serde(rename = "bulk_index")] pub bulk_index: Option, @@ -44,6 +47,7 @@ impl ObservabilityPipelineAmazonOpenSearchDestination { ) -> ObservabilityPipelineAmazonOpenSearchDestination { ObservabilityPipelineAmazonOpenSearchDestination { auth, + buffer: None, bulk_index: None, id, inputs, @@ -53,6 +57,14 @@ impl ObservabilityPipelineAmazonOpenSearchDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn bulk_index(mut self, value: String) -> Self { self.bulk_index = Some(value); self @@ -87,6 +99,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineAmazonOpenSearchDestination let mut auth: Option< crate::datadogV2::model::ObservabilityPipelineAmazonOpenSearchDestinationAuth, > = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut bulk_index: Option = None; let mut id: Option = None; let mut inputs: Option> = None; @@ -104,6 +119,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineAmazonOpenSearchDestination "auth" => { auth = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "bulk_index" => { if v.is_null() { continue; @@ -141,6 +170,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineAmazonOpenSearchDestination let content = ObservabilityPipelineAmazonOpenSearchDestination { auth, + buffer, bulk_index, id, inputs, diff --git a/src/datadogV2/model/model_observability_pipeline_amazon_s3_destination.rs b/src/datadogV2/model/model_observability_pipeline_amazon_s3_destination.rs index fe4d0937c..1ef05b3ce 100644 --- 
a/src/datadogV2/model/model_observability_pipeline_amazon_s3_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_amazon_s3_destination.rs @@ -19,6 +19,9 @@ pub struct ObservabilityPipelineAmazonS3Destination { /// S3 bucket name. #[serde(rename = "bucket")] pub bucket: String, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// Unique identifier for the destination component. #[serde(rename = "id")] pub id: String, @@ -60,6 +63,7 @@ impl ObservabilityPipelineAmazonS3Destination { ObservabilityPipelineAmazonS3Destination { auth: None, bucket, + buffer: None, id, inputs, key_prefix: None, @@ -77,6 +81,14 @@ impl ObservabilityPipelineAmazonS3Destination { self } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn key_prefix(mut self, value: String) -> Self { self.key_prefix = Some(value); self @@ -115,6 +127,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineAmazonS3Destination { { let mut auth: Option = None; let mut bucket: Option = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut key_prefix: Option = None; @@ -143,6 +158,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineAmazonS3Destination { "bucket" => { bucket = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -205,6 +234,7 @@ impl<'de> Deserialize<'de> for 
ObservabilityPipelineAmazonS3Destination { let content = ObservabilityPipelineAmazonS3Destination { auth, bucket, + buffer, id, inputs, key_prefix, diff --git a/src/datadogV2/model/model_observability_pipeline_buffer_options.rs b/src/datadogV2/model/model_observability_pipeline_buffer_options.rs new file mode 100644 index 000000000..78ee487f8 --- /dev/null +++ b/src/datadogV2/model/model_observability_pipeline_buffer_options.rs @@ -0,0 +1,64 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::{Deserialize, Deserializer, Serialize}; + +/// Configuration for buffer settings on destination components. +#[non_exhaustive] +#[derive(Clone, Debug, PartialEq, Serialize)] +#[serde(untagged)] +pub enum ObservabilityPipelineBufferOptions { + ObservabilityPipelineDiskBufferOptions( + Box, + ), + ObservabilityPipelineMemoryBufferOptions( + Box, + ), + ObservabilityPipelineMemoryBufferSizeOptions( + Box, + ), + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl<'de> Deserialize<'de> for ObservabilityPipelineBufferOptions { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let value: serde_json::Value = Deserialize::deserialize(deserializer)?; + if let Ok(_v) = serde_json::from_value::< + Box, + >(value.clone()) + { + if !_v._unparsed { + return Ok( + ObservabilityPipelineBufferOptions::ObservabilityPipelineDiskBufferOptions(_v), + ); + } + } + if let Ok(_v) = serde_json::from_value::< + Box, + >(value.clone()) + { + if !_v._unparsed { + return Ok( + ObservabilityPipelineBufferOptions::ObservabilityPipelineMemoryBufferOptions( + _v, + ), + ); + } + } + if let Ok(_v) = serde_json::from_value::< + Box, + >(value.clone()) + { + if !_v._unparsed { + return Ok(ObservabilityPipelineBufferOptions::ObservabilityPipelineMemoryBufferSizeOptions(_v)); + 
} + } + + return Ok(ObservabilityPipelineBufferOptions::UnparsedObject( + crate::datadog::UnparsedObject { value }, + )); + } +} diff --git a/src/datadogV2/model/model_observability_pipeline_buffer_options_disk_type.rs b/src/datadogV2/model/model_observability_pipeline_buffer_options_disk_type.rs new file mode 100644 index 000000000..1a66bb008 --- /dev/null +++ b/src/datadogV2/model/model_observability_pipeline_buffer_options_disk_type.rs @@ -0,0 +1,48 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ObservabilityPipelineBufferOptionsDiskType { + DISK, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for ObservabilityPipelineBufferOptionsDiskType { + fn to_string(&self) -> String { + match self { + Self::DISK => String::from("disk"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for ObservabilityPipelineBufferOptionsDiskType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for ObservabilityPipelineBufferOptionsDiskType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "disk" => Self::DISK, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_observability_pipeline_buffer_options_memory_type.rs b/src/datadogV2/model/model_observability_pipeline_buffer_options_memory_type.rs 
new file mode 100644 index 000000000..0b742549a --- /dev/null +++ b/src/datadogV2/model/model_observability_pipeline_buffer_options_memory_type.rs @@ -0,0 +1,48 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. + +use serde::{Deserialize, Deserializer, Serialize, Serializer}; + +#[non_exhaustive] +#[derive(Clone, Debug, Eq, PartialEq)] +pub enum ObservabilityPipelineBufferOptionsMemoryType { + MEMORY, + UnparsedObject(crate::datadog::UnparsedObject), +} + +impl ToString for ObservabilityPipelineBufferOptionsMemoryType { + fn to_string(&self) -> String { + match self { + Self::MEMORY => String::from("memory"), + Self::UnparsedObject(v) => v.value.to_string(), + } + } +} + +impl Serialize for ObservabilityPipelineBufferOptionsMemoryType { + fn serialize(&self, serializer: S) -> Result + where + S: Serializer, + { + match self { + Self::UnparsedObject(v) => v.serialize(serializer), + _ => serializer.serialize_str(self.to_string().as_str()), + } + } +} + +impl<'de> Deserialize<'de> for ObservabilityPipelineBufferOptionsMemoryType { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + let s: String = String::deserialize(deserializer)?; + Ok(match s.as_str() { + "memory" => Self::MEMORY, + _ => Self::UnparsedObject(crate::datadog::UnparsedObject { + value: serde_json::Value::String(s.into()), + }), + }) + } +} diff --git a/src/datadogV2/model/model_observability_pipeline_datadog_logs_destination.rs b/src/datadogV2/model/model_observability_pipeline_datadog_logs_destination.rs index a6ca36604..6521694cb 100644 --- a/src/datadogV2/model/model_observability_pipeline_datadog_logs_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_datadog_logs_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] 
#[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineDatadogLogsDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The unique identifier for this component. #[serde(rename = "id")] pub id: String, @@ -34,6 +37,7 @@ impl ObservabilityPipelineDatadogLogsDestination { type_: crate::datadogV2::model::ObservabilityPipelineDatadogLogsDestinationType, ) -> ObservabilityPipelineDatadogLogsDestination { ObservabilityPipelineDatadogLogsDestination { + buffer: None, id, inputs, type_, @@ -42,6 +46,14 @@ impl ObservabilityPipelineDatadogLogsDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn additional_properties( mut self, value: std::collections::BTreeMap, @@ -68,6 +80,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineDatadogLogsDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut type_: Option< @@ -81,6 +96,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineDatadogLogsDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -110,6 +139,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineDatadogLogsDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineDatadogLogsDestination { + buffer, id, inputs, type_, diff --git a/src/datadogV2/model/model_observability_pipeline_disk_buffer_options.rs b/src/datadogV2/model/model_observability_pipeline_disk_buffer_options.rs new file mode 100644 index 000000000..0d8d76962 --- /dev/null +++ b/src/datadogV2/model/model_observability_pipeline_disk_buffer_options.rs @@ -0,0 +1,135 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Options for configuring a disk buffer. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ObservabilityPipelineDiskBufferOptions { + /// Maximum size of the disk buffer. + #[serde(rename = "max_size")] + pub max_size: Option, + /// Specifies the buffer type to configure. This option supports only a disk buffer. 
+ #[serde(rename = "type")] + pub type_: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ObservabilityPipelineDiskBufferOptions { + pub fn new() -> ObservabilityPipelineDiskBufferOptions { + ObservabilityPipelineDiskBufferOptions { + max_size: None, + type_: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn max_size(mut self, value: i64) -> Self { + self.max_size = Some(value); + self + } + + pub fn type_( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptionsDiskType, + ) -> Self { + self.type_ = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for ObservabilityPipelineDiskBufferOptions { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for ObservabilityPipelineDiskBufferOptions { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ObservabilityPipelineDiskBufferOptionsVisitor; + impl<'a> Visitor<'a> for ObservabilityPipelineDiskBufferOptionsVisitor { + type Value = ObservabilityPipelineDiskBufferOptions; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut max_size: Option = None; + let mut type_: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptionsDiskType, + > = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "max_size" => { + if v.is_null() { + continue; + } + max_size = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + if v.is_null() { + continue; + } + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::ObservabilityPipelineBufferOptionsDiskType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = ObservabilityPipelineDiskBufferOptions { + max_size, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ObservabilityPipelineDiskBufferOptionsVisitor) + } +} diff --git a/src/datadogV2/model/model_observability_pipeline_elasticsearch_destination.rs b/src/datadogV2/model/model_observability_pipeline_elasticsearch_destination.rs index e104a3eac..7b7578e39 100644 --- a/src/datadogV2/model/model_observability_pipeline_elasticsearch_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_elasticsearch_destination.rs @@ -15,6 +15,9 @@ pub struct ObservabilityPipelineElasticsearchDestination { #[serde(rename = "api_version")] pub api_version: Option, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The index to write logs to in Elasticsearch. 
#[serde(rename = "bulk_index")] pub bulk_index: Option, @@ -42,6 +45,7 @@ impl ObservabilityPipelineElasticsearchDestination { ) -> ObservabilityPipelineElasticsearchDestination { ObservabilityPipelineElasticsearchDestination { api_version: None, + buffer: None, bulk_index: None, id, inputs, @@ -59,6 +63,14 @@ impl ObservabilityPipelineElasticsearchDestination { self } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn bulk_index(mut self, value: String) -> Self { self.bulk_index = Some(value); self @@ -91,6 +103,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineElasticsearchDestination { M: MapAccess<'a>, { let mut api_version: Option = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut bulk_index: Option = None; let mut id: Option = None; let mut inputs: Option> = None; @@ -120,6 +135,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineElasticsearchDestination { } } } + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "bulk_index" => { if v.is_null() { continue; @@ -156,6 +185,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineElasticsearchDestination { let content = ObservabilityPipelineElasticsearchDestination { api_version, + buffer, bulk_index, id, inputs, diff --git a/src/datadogV2/model/model_observability_pipeline_google_chronicle_destination.rs b/src/datadogV2/model/model_observability_pipeline_google_chronicle_destination.rs index fe867545b..a854b0b0a 100644 --- a/src/datadogV2/model/model_observability_pipeline_google_chronicle_destination.rs +++ 
b/src/datadogV2/model/model_observability_pipeline_google_chronicle_destination.rs @@ -15,6 +15,9 @@ pub struct ObservabilityPipelineGoogleChronicleDestination { /// #[serde(rename = "auth")] pub auth: crate::datadogV2::model::ObservabilityPipelineGcpAuth, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The Google Chronicle customer ID. #[serde(rename = "customer_id")] pub customer_id: String, @@ -51,6 +54,7 @@ impl ObservabilityPipelineGoogleChronicleDestination { ) -> ObservabilityPipelineGoogleChronicleDestination { ObservabilityPipelineGoogleChronicleDestination { auth, + buffer: None, customer_id, encoding: None, id, @@ -62,6 +66,14 @@ impl ObservabilityPipelineGoogleChronicleDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn encoding( mut self, value: crate::datadogV2::model::ObservabilityPipelineGoogleChronicleDestinationEncoding, @@ -102,6 +114,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineGoogleChronicleDestination { M: MapAccess<'a>, { let mut auth: Option = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut customer_id: Option = None; let mut encoding: Option = None; let mut id: Option = None; @@ -121,6 +136,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineGoogleChronicleDestination { "auth" => { auth = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "customer_id" => { customer_id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); @@ 
-178,6 +207,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineGoogleChronicleDestination { let content = ObservabilityPipelineGoogleChronicleDestination { auth, + buffer, customer_id, encoding, id, diff --git a/src/datadogV2/model/model_observability_pipeline_google_cloud_storage_destination.rs b/src/datadogV2/model/model_observability_pipeline_google_cloud_storage_destination.rs index a39b55f08..cdb13a269 100644 --- a/src/datadogV2/model/model_observability_pipeline_google_cloud_storage_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_google_cloud_storage_destination.rs @@ -23,6 +23,9 @@ pub struct ObservabilityPipelineGoogleCloudStorageDestination { /// Name of the GCS bucket. #[serde(rename = "bucket")] pub bucket: String, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// Unique identifier for the destination component. #[serde(rename = "id")] pub id: String, @@ -63,6 +66,7 @@ impl ObservabilityPipelineGoogleCloudStorageDestination { acl, auth, bucket, + buffer: None, id, inputs, key_prefix: None, @@ -74,6 +78,14 @@ impl ObservabilityPipelineGoogleCloudStorageDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn key_prefix(mut self, value: String) -> Self { self.key_prefix = Some(value); self @@ -118,6 +130,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineGoogleCloudStorageDestinatio > = None; let mut auth: Option = None; let mut bucket: Option = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut key_prefix: Option = None; @@ -153,6 +168,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineGoogleCloudStorageDestinatio "bucket" => { bucket = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + 
"buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -214,6 +243,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineGoogleCloudStorageDestinatio acl, auth, bucket, + buffer, id, inputs, key_prefix, diff --git a/src/datadogV2/model/model_observability_pipeline_memory_buffer_options.rs b/src/datadogV2/model/model_observability_pipeline_memory_buffer_options.rs new file mode 100644 index 000000000..a268e5623 --- /dev/null +++ b/src/datadogV2/model/model_observability_pipeline_memory_buffer_options.rs @@ -0,0 +1,135 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Options for configuring a memory buffer by byte size. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ObservabilityPipelineMemoryBufferOptions { + /// Maximum size of the disk buffer. + #[serde(rename = "max_size")] + pub max_size: Option, + /// Specifies the buffer type to configure. This option supports only a memory buffer. 
+ #[serde(rename = "type")] + pub type_: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ObservabilityPipelineMemoryBufferOptions { + pub fn new() -> ObservabilityPipelineMemoryBufferOptions { + ObservabilityPipelineMemoryBufferOptions { + max_size: None, + type_: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn max_size(mut self, value: i64) -> Self { + self.max_size = Some(value); + self + } + + pub fn type_( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptionsMemoryType, + ) -> Self { + self.type_ = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for ObservabilityPipelineMemoryBufferOptions { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for ObservabilityPipelineMemoryBufferOptions { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ObservabilityPipelineMemoryBufferOptionsVisitor; + impl<'a> Visitor<'a> for ObservabilityPipelineMemoryBufferOptionsVisitor { + type Value = ObservabilityPipelineMemoryBufferOptions; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut max_size: Option = None; + let mut type_: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptionsMemoryType, + > = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "max_size" => { + if v.is_null() { + continue; + } + max_size = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + if v.is_null() { + continue; + } + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::ObservabilityPipelineBufferOptionsMemoryType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = ObservabilityPipelineMemoryBufferOptions { + max_size, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ObservabilityPipelineMemoryBufferOptionsVisitor) + } +} diff --git a/src/datadogV2/model/model_observability_pipeline_memory_buffer_size_options.rs b/src/datadogV2/model/model_observability_pipeline_memory_buffer_size_options.rs new file mode 100644 index 000000000..77e3ca6f2 --- /dev/null +++ b/src/datadogV2/model/model_observability_pipeline_memory_buffer_size_options.rs @@ -0,0 +1,135 @@ +// Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. +// This product includes software developed at Datadog (https://www.datadoghq.com/). +// Copyright 2019-Present Datadog, Inc. +use serde::de::{Error, MapAccess, Visitor}; +use serde::{Deserialize, Deserializer, Serialize}; +use serde_with::skip_serializing_none; +use std::fmt::{self, Formatter}; + +/// Options for configuring a memory buffer by queue length. +#[non_exhaustive] +#[skip_serializing_none] +#[derive(Clone, Debug, PartialEq, Serialize)] +pub struct ObservabilityPipelineMemoryBufferSizeOptions { + /// Maximum events for the memory buffer. + #[serde(rename = "max_events")] + pub max_events: Option, + /// Specifies the buffer type to configure. This option supports only a memory buffer. 
+ #[serde(rename = "type")] + pub type_: Option, + #[serde(flatten)] + pub additional_properties: std::collections::BTreeMap, + #[serde(skip)] + #[serde(default)] + pub(crate) _unparsed: bool, +} + +impl ObservabilityPipelineMemoryBufferSizeOptions { + pub fn new() -> ObservabilityPipelineMemoryBufferSizeOptions { + ObservabilityPipelineMemoryBufferSizeOptions { + max_events: None, + type_: None, + additional_properties: std::collections::BTreeMap::new(), + _unparsed: false, + } + } + + pub fn max_events(mut self, value: i64) -> Self { + self.max_events = Some(value); + self + } + + pub fn type_( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptionsMemoryType, + ) -> Self { + self.type_ = Some(value); + self + } + + pub fn additional_properties( + mut self, + value: std::collections::BTreeMap, + ) -> Self { + self.additional_properties = value; + self + } +} + +impl Default for ObservabilityPipelineMemoryBufferSizeOptions { + fn default() -> Self { + Self::new() + } +} + +impl<'de> Deserialize<'de> for ObservabilityPipelineMemoryBufferSizeOptions { + fn deserialize(deserializer: D) -> Result + where + D: Deserializer<'de>, + { + struct ObservabilityPipelineMemoryBufferSizeOptionsVisitor; + impl<'a> Visitor<'a> for ObservabilityPipelineMemoryBufferSizeOptionsVisitor { + type Value = ObservabilityPipelineMemoryBufferSizeOptions; + + fn expecting(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.write_str("a mapping") + } + + fn visit_map(self, mut map: M) -> Result + where + M: MapAccess<'a>, + { + let mut max_events: Option = None; + let mut type_: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptionsMemoryType, + > = None; + let mut additional_properties: std::collections::BTreeMap< + String, + serde_json::Value, + > = std::collections::BTreeMap::new(); + let mut _unparsed = false; + + while let Some((k, v)) = map.next_entry::()? 
{ + match k.as_str() { + "max_events" => { + if v.is_null() { + continue; + } + max_events = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } + "type" => { + if v.is_null() { + continue; + } + type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _type_) = type_ { + match _type_ { + crate::datadogV2::model::ObservabilityPipelineBufferOptionsMemoryType::UnparsedObject(_type_) => { + _unparsed = true; + }, + _ => {} + } + } + } + &_ => { + if let Ok(value) = serde_json::from_value(v.clone()) { + additional_properties.insert(k, value); + } + } + } + } + + let content = ObservabilityPipelineMemoryBufferSizeOptions { + max_events, + type_, + additional_properties, + _unparsed, + }; + + Ok(content) + } + } + + deserializer.deserialize_any(ObservabilityPipelineMemoryBufferSizeOptionsVisitor) + } +} diff --git a/src/datadogV2/model/model_observability_pipeline_new_relic_destination.rs b/src/datadogV2/model/model_observability_pipeline_new_relic_destination.rs index 954143c2e..3dfe294ce 100644 --- a/src/datadogV2/model/model_observability_pipeline_new_relic_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_new_relic_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineNewRelicDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The unique identifier for this component. 
#[serde(rename = "id")] pub id: String, @@ -38,6 +41,7 @@ impl ObservabilityPipelineNewRelicDestination { type_: crate::datadogV2::model::ObservabilityPipelineNewRelicDestinationType, ) -> ObservabilityPipelineNewRelicDestination { ObservabilityPipelineNewRelicDestination { + buffer: None, id, inputs, region, @@ -47,6 +51,14 @@ impl ObservabilityPipelineNewRelicDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn additional_properties( mut self, value: std::collections::BTreeMap, @@ -73,6 +85,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineNewRelicDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut region: Option< @@ -89,6 +104,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineNewRelicDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -130,6 +159,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineNewRelicDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineNewRelicDestination { + buffer, id, inputs, region, diff --git a/src/datadogV2/model/model_observability_pipeline_open_search_destination.rs b/src/datadogV2/model/model_observability_pipeline_open_search_destination.rs index 063cc4913..bda3562c7 100644 --- a/src/datadogV2/model/model_observability_pipeline_open_search_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_open_search_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineOpenSearchDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The index to write logs to. 
#[serde(rename = "bulk_index")] pub bulk_index: Option, @@ -37,6 +40,7 @@ impl ObservabilityPipelineOpenSearchDestination { type_: crate::datadogV2::model::ObservabilityPipelineOpenSearchDestinationType, ) -> ObservabilityPipelineOpenSearchDestination { ObservabilityPipelineOpenSearchDestination { + buffer: None, bulk_index: None, id, inputs, @@ -46,6 +50,14 @@ impl ObservabilityPipelineOpenSearchDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn bulk_index(mut self, value: String) -> Self { self.bulk_index = Some(value); self @@ -77,6 +89,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineOpenSearchDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut bulk_index: Option = None; let mut id: Option = None; let mut inputs: Option> = None; @@ -91,6 +106,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineOpenSearchDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "bulk_index" => { if v.is_null() { continue; @@ -126,6 +155,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineOpenSearchDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineOpenSearchDestination { + buffer, bulk_index, id, inputs, diff --git a/src/datadogV2/model/model_observability_pipeline_quota_processor.rs b/src/datadogV2/model/model_observability_pipeline_quota_processor.rs index 44675d753..a2611c69c 100644 --- a/src/datadogV2/model/model_observability_pipeline_quota_processor.rs +++ b/src/datadogV2/model/model_observability_pipeline_quota_processor.rs @@ -11,9 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineQuotaProcessor { - /// If set to `true`, logs that matched the quota filter and sent after the quota has been met are dropped; only logs that did not match the filter query continue through the pipeline. + /// If set to `true`, logs that match the quota filter and are sent after the quota is exceeded are dropped. Logs that do not match the filter continue through the pipeline. **Note**: You can set either `drop_events` or `overflow_action`, but not both. #[serde(rename = "drop_events")] - pub drop_events: bool, + pub drop_events: Option, /// The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). #[serde(rename = "id")] pub id: String, @@ -32,7 +32,7 @@ pub struct ObservabilityPipelineQuotaProcessor { /// Name of the quota. 
#[serde(rename = "name")] pub name: String, - /// The action to take when the quota is exceeded. Options: + /// The action to take when the quota or bucket limit is exceeded. Options: /// - `drop`: Drop the event. /// - `no_action`: Let the event pass through. /// - `overflow_routing`: Route to an overflow destination. @@ -47,6 +47,14 @@ pub struct ObservabilityPipelineQuotaProcessor { /// A list of fields used to segment log traffic for quota enforcement. Quotas are tracked independently by unique combinations of these field values. #[serde(rename = "partition_fields")] pub partition_fields: Option>, + /// The action to take when the quota or bucket limit is exceeded. Options: + /// - `drop`: Drop the event. + /// - `no_action`: Let the event pass through. + /// - `overflow_routing`: Route to an overflow destination. + /// + #[serde(rename = "too_many_buckets_action")] + pub too_many_buckets_action: + Option, /// The processor type. The value should always be `quota`. #[serde(rename = "type")] pub type_: crate::datadogV2::model::ObservabilityPipelineQuotaProcessorType, @@ -59,7 +67,6 @@ pub struct ObservabilityPipelineQuotaProcessor { impl ObservabilityPipelineQuotaProcessor { pub fn new( - drop_events: bool, id: String, include: String, inputs: Vec, @@ -68,7 +75,7 @@ impl ObservabilityPipelineQuotaProcessor { type_: crate::datadogV2::model::ObservabilityPipelineQuotaProcessorType, ) -> ObservabilityPipelineQuotaProcessor { ObservabilityPipelineQuotaProcessor { - drop_events, + drop_events: None, id, ignore_when_missing_partitions: None, include, @@ -78,12 +85,18 @@ impl ObservabilityPipelineQuotaProcessor { overflow_action: None, overrides: None, partition_fields: None, + too_many_buckets_action: None, type_, additional_properties: std::collections::BTreeMap::new(), _unparsed: false, } } + pub fn drop_events(mut self, value: bool) -> Self { + self.drop_events = Some(value); + self + } + pub fn ignore_when_missing_partitions(mut self, value: bool) -> Self { 
self.ignore_when_missing_partitions = Some(value); self @@ -110,6 +123,14 @@ impl ObservabilityPipelineQuotaProcessor { self } + pub fn too_many_buckets_action( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineQuotaProcessorOverflowAction, + ) -> Self { + self.too_many_buckets_action = Some(value); + self + } + pub fn additional_properties( mut self, value: std::collections::BTreeMap, @@ -152,6 +173,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineQuotaProcessor { Vec, > = None; let mut partition_fields: Option> = None; + let mut too_many_buckets_action: Option< + crate::datadogV2::model::ObservabilityPipelineQuotaProcessorOverflowAction, + > = None; let mut type_: Option< crate::datadogV2::model::ObservabilityPipelineQuotaProcessorType, > = None; @@ -164,6 +188,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineQuotaProcessor { while let Some((k, v)) = map.next_entry::()? { match k.as_str() { "drop_events" => { + if v.is_null() { + continue; + } drop_events = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -217,6 +244,21 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineQuotaProcessor { partition_fields = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + "too_many_buckets_action" => { + if v.is_null() { + continue; + } + too_many_buckets_action = + Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _too_many_buckets_action) = too_many_buckets_action { + match _too_many_buckets_action { + crate::datadogV2::model::ObservabilityPipelineQuotaProcessorOverflowAction::UnparsedObject(_too_many_buckets_action) => { + _unparsed = true; + }, + _ => {} + } + } + } "type" => { type_ = Some(serde_json::from_value(v).map_err(M::Error::custom)?); if let Some(ref _type_) = type_ { @@ -235,8 +277,6 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineQuotaProcessor { } } } - let drop_events = - drop_events.ok_or_else(|| M::Error::missing_field("drop_events"))?; let id = 
id.ok_or_else(|| M::Error::missing_field("id"))?; let include = include.ok_or_else(|| M::Error::missing_field("include"))?; let inputs = inputs.ok_or_else(|| M::Error::missing_field("inputs"))?; @@ -255,6 +295,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineQuotaProcessor { overflow_action, overrides, partition_fields, + too_many_buckets_action, type_, additional_properties, _unparsed, diff --git a/src/datadogV2/model/model_observability_pipeline_rsyslog_destination.rs b/src/datadogV2/model/model_observability_pipeline_rsyslog_destination.rs index 639b30eb5..f41b60e80 100644 --- a/src/datadogV2/model/model_observability_pipeline_rsyslog_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_rsyslog_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineRsyslogDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The unique identifier for this component. 
#[serde(rename = "id")] pub id: String, @@ -40,6 +43,7 @@ impl ObservabilityPipelineRsyslogDestination { type_: crate::datadogV2::model::ObservabilityPipelineRsyslogDestinationType, ) -> ObservabilityPipelineRsyslogDestination { ObservabilityPipelineRsyslogDestination { + buffer: None, id, inputs, keepalive: None, @@ -50,6 +54,14 @@ impl ObservabilityPipelineRsyslogDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn keepalive(mut self, value: i64) -> Self { self.keepalive = Some(value); self @@ -86,6 +98,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineRsyslogDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut keepalive: Option = None; @@ -101,6 +116,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineRsyslogDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -142,6 +171,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineRsyslogDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineRsyslogDestination { + buffer, id, inputs, keepalive, diff --git a/src/datadogV2/model/model_observability_pipeline_sample_processor.rs b/src/datadogV2/model/model_observability_pipeline_sample_processor.rs index d3365d06f..9cf9e6b34 100644 --- a/src/datadogV2/model/model_observability_pipeline_sample_processor.rs +++ b/src/datadogV2/model/model_observability_pipeline_sample_processor.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineSampleProcessor { + /// Optional list of fields to group events by. Each group is sampled independently. + #[serde(rename = "group_by")] + pub group_by: Option>, /// The unique identifier for this component. Used to reference this component in other parts of the pipeline (for example, as the `input` to downstream components). 
#[serde(rename = "id")] pub id: String, @@ -44,6 +47,7 @@ impl ObservabilityPipelineSampleProcessor { type_: crate::datadogV2::model::ObservabilityPipelineSampleProcessorType, ) -> ObservabilityPipelineSampleProcessor { ObservabilityPipelineSampleProcessor { + group_by: None, id, include, inputs, @@ -55,6 +59,11 @@ impl ObservabilityPipelineSampleProcessor { } } + pub fn group_by(mut self, value: Vec) -> Self { + self.group_by = Some(value); + self + } + pub fn percentage(mut self, value: f64) -> Self { self.percentage = Some(value); self @@ -91,6 +100,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSampleProcessor { where M: MapAccess<'a>, { + let mut group_by: Option> = None; let mut id: Option = None; let mut include: Option = None; let mut inputs: Option> = None; @@ -107,6 +117,12 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSampleProcessor { while let Some((k, v)) = map.next_entry::()? { match k.as_str() { + "group_by" => { + if v.is_null() { + continue; + } + group_by = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -152,6 +168,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSampleProcessor { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineSampleProcessor { + group_by, id, include, inputs, diff --git a/src/datadogV2/model/model_observability_pipeline_sentinel_one_destination.rs b/src/datadogV2/model/model_observability_pipeline_sentinel_one_destination.rs index fdc03a0f9..662433672 100644 --- a/src/datadogV2/model/model_observability_pipeline_sentinel_one_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_sentinel_one_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineSentinelOneDestination { + /// Configuration for buffer settings on destination 
components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The unique identifier for this component. #[serde(rename = "id")] pub id: String, @@ -38,6 +41,7 @@ impl ObservabilityPipelineSentinelOneDestination { type_: crate::datadogV2::model::ObservabilityPipelineSentinelOneDestinationType, ) -> ObservabilityPipelineSentinelOneDestination { ObservabilityPipelineSentinelOneDestination { + buffer: None, id, inputs, region, @@ -47,6 +51,14 @@ impl ObservabilityPipelineSentinelOneDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn additional_properties( mut self, value: std::collections::BTreeMap, @@ -73,6 +85,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSentinelOneDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut region: Option< @@ -89,6 +104,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSentinelOneDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -130,6 +159,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSentinelOneDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineSentinelOneDestination { + buffer, id, inputs, region, diff --git a/src/datadogV2/model/model_observability_pipeline_splunk_hec_destination.rs b/src/datadogV2/model/model_observability_pipeline_splunk_hec_destination.rs index be7e14b86..12df8755a 100644 --- a/src/datadogV2/model/model_observability_pipeline_splunk_hec_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_splunk_hec_destination.rs @@ -17,6 +17,9 @@ pub struct ObservabilityPipelineSplunkHecDestination { /// #[serde(rename = "auto_extract_timestamp")] pub auto_extract_timestamp: Option, + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// Encoding format for log events. 
#[serde(rename = "encoding")] pub encoding: @@ -51,6 +54,7 @@ impl ObservabilityPipelineSplunkHecDestination { ) -> ObservabilityPipelineSplunkHecDestination { ObservabilityPipelineSplunkHecDestination { auto_extract_timestamp: None, + buffer: None, encoding: None, id, index: None, @@ -67,6 +71,14 @@ impl ObservabilityPipelineSplunkHecDestination { self } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn encoding( mut self, value: crate::datadogV2::model::ObservabilityPipelineSplunkHecDestinationEncoding, @@ -112,6 +124,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSplunkHecDestination { M: MapAccess<'a>, { let mut auto_extract_timestamp: Option = None; + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut encoding: Option< crate::datadogV2::model::ObservabilityPipelineSplunkHecDestinationEncoding, > = None; @@ -137,6 +152,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSplunkHecDestination { auto_extract_timestamp = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "encoding" => { if v.is_null() { continue; @@ -193,6 +222,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSplunkHecDestination { let content = ObservabilityPipelineSplunkHecDestination { auto_extract_timestamp, + buffer, encoding, id, index, diff --git a/src/datadogV2/model/model_observability_pipeline_sumo_logic_destination.rs b/src/datadogV2/model/model_observability_pipeline_sumo_logic_destination.rs index 2a6758c9d..4712e8631 100644 --- 
a/src/datadogV2/model/model_observability_pipeline_sumo_logic_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_sumo_logic_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineSumoLogicDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The output encoding format. #[serde(rename = "encoding")] pub encoding: Option, @@ -49,6 +52,7 @@ impl ObservabilityPipelineSumoLogicDestination { type_: crate::datadogV2::model::ObservabilityPipelineSumoLogicDestinationType, ) -> ObservabilityPipelineSumoLogicDestination { ObservabilityPipelineSumoLogicDestination { + buffer: None, encoding: None, header_custom_fields: None, header_host_name: None, @@ -62,6 +66,14 @@ impl ObservabilityPipelineSumoLogicDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn encoding( mut self, value: crate::datadogV2::model::ObservabilityPipelineSumoLogicDestinationEncoding, @@ -119,6 +131,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSumoLogicDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut encoding: Option< crate::datadogV2::model::ObservabilityPipelineSumoLogicDestinationEncoding, > = None; @@ -139,6 +154,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSumoLogicDestination { while let Some((k, v)) = map.next_entry::()? 
{ match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "encoding" => { if v.is_null() { continue; @@ -210,6 +239,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSumoLogicDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineSumoLogicDestination { + buffer, encoding, header_custom_fields, header_host_name, diff --git a/src/datadogV2/model/model_observability_pipeline_syslog_ng_destination.rs b/src/datadogV2/model/model_observability_pipeline_syslog_ng_destination.rs index 21eb01357..337500493 100644 --- a/src/datadogV2/model/model_observability_pipeline_syslog_ng_destination.rs +++ b/src/datadogV2/model/model_observability_pipeline_syslog_ng_destination.rs @@ -11,6 +11,9 @@ use std::fmt::{self, Formatter}; #[skip_serializing_none] #[derive(Clone, Debug, PartialEq, Serialize)] pub struct ObservabilityPipelineSyslogNgDestination { + /// Configuration for buffer settings on destination components. + #[serde(rename = "buffer")] + pub buffer: Option, /// The unique identifier for this component. 
#[serde(rename = "id")] pub id: String, @@ -40,6 +43,7 @@ impl ObservabilityPipelineSyslogNgDestination { type_: crate::datadogV2::model::ObservabilityPipelineSyslogNgDestinationType, ) -> ObservabilityPipelineSyslogNgDestination { ObservabilityPipelineSyslogNgDestination { + buffer: None, id, inputs, keepalive: None, @@ -50,6 +54,14 @@ impl ObservabilityPipelineSyslogNgDestination { } } + pub fn buffer( + mut self, + value: crate::datadogV2::model::ObservabilityPipelineBufferOptions, + ) -> Self { + self.buffer = Some(value); + self + } + pub fn keepalive(mut self, value: i64) -> Self { self.keepalive = Some(value); self @@ -86,6 +98,9 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSyslogNgDestination { where M: MapAccess<'a>, { + let mut buffer: Option< + crate::datadogV2::model::ObservabilityPipelineBufferOptions, + > = None; let mut id: Option = None; let mut inputs: Option> = None; let mut keepalive: Option = None; @@ -101,6 +116,20 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSyslogNgDestination { while let Some((k, v)) = map.next_entry::()? { match k.as_str() { + "buffer" => { + if v.is_null() { + continue; + } + buffer = Some(serde_json::from_value(v).map_err(M::Error::custom)?); + if let Some(ref _buffer) = buffer { + match _buffer { + crate::datadogV2::model::ObservabilityPipelineBufferOptions::UnparsedObject(_buffer) => { + _unparsed = true; + }, + _ => {} + } + } + } "id" => { id = Some(serde_json::from_value(v).map_err(M::Error::custom)?); } @@ -142,6 +171,7 @@ impl<'de> Deserialize<'de> for ObservabilityPipelineSyslogNgDestination { let type_ = type_.ok_or_else(|| M::Error::missing_field("type_"))?; let content = ObservabilityPipelineSyslogNgDestination { + buffer, id, inputs, keepalive,