diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 687f8cecffba9..568f49a280032 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -106,7 +106,7 @@ content/en/logs/log_collection/android.md                        @Datadog/rum-mo
 content/en/logs/log_collection/flutter.md                        @Datadog/rum-mobile @DataDog/documentation
 content/en/logs/log_collection/unity.md                          @Datadog/rum-mobile @DataDog/documentation
 content/en/logs/log_collection/ios.md                            @Datadog/rum-mobile @DataDog/documentation
-content/en/logs/log_collection/kotlin-multiplatform.md           @Datadog/rum-mobile @DataDog/documentation
+content/en/logs/log_collection/kotlin_multiplatform.md           @Datadog/rum-mobile @DataDog/documentation
 
 # Traces
 content/en/tracing/trace_collection/dd_libraries/android.md     @Datadog/rum-mobile @DataDog/documentation
@@ -128,7 +128,7 @@ content/en/real_user_monitoring/error_tracking/flutter.md                @Datado
 content/en/real_user_monitoring/error_tracking/mobile/unity.md           @Datadog/rum-mobile @DataDog/documentation
 content/en/real_user_monitoring/error_tracking/ios.md                    @Datadog/rum-mobile @DataDog/documentation
 content/en/real_user_monitoring/error_tracking/reactnative.md            @Datadog/rum-mobile @DataDog/documentation
-content/en/real_user_monitoring/error_tracking/kotlin-multiplatform.md   @Datadog/rum-mobile @DataDog/documentation
+content/en/real_user_monitoring/error_tracking/kotlin_multiplatform.md   @Datadog/rum-mobile @DataDog/documentation
 
 # Browser SDK
 content/en/real_user_monitoring/browser/                        @Datadog/rum-browser @DataDog/documentation
diff --git a/config/_default/menus/main.en.yaml b/config/_default/menus/main.en.yaml
index 3f24103fbe2b5..33c9532b2f60d 100644
--- a/config/_default/menus/main.en.yaml
+++ b/config/_default/menus/main.en.yaml
@@ -3546,11 +3546,6 @@ menu:
       url: tracing/trace_explorer/trace_queries/
       parent: trace_explorer
       weight: 606
-    - name: Request Flow Map
-      identifier: tracing_trace_explorer_request_flow_map
-      url: tracing/trace_explorer/request_flow_map/
-      parent: trace_explorer
-      weight: 607
     - name: Service Observability
       url: tracing/services/
       parent: tracing
@@ -4622,386 +4617,456 @@ menu:
       parent: observability_pipelines_set_up_pipelines
       identifier: observability_pipelines_log_volume_control
       weight: 101
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/log_volume_control/amazon_s3/
+      parent: observability_pipelines_log_volume_control
+      identifier: observability_pipelines_log_volume_control_amazon_s3
+      weight: 1012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/log_volume_control/datadog_agent/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_datadog_agent
-      weight: 1011
+      weight: 1013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/log_volume_control/fluent/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_fluent
-      weight: 1012
+      weight: 1014
     - name: Google Pub/Sub
       url: observability_pipelines/set_up_pipelines/log_volume_control/google_pubsub/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_google_pubsub
-      weight: 1013
+      weight: 1015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/log_volume_control/http_client/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_http_client
-      weight: 1014
+      weight: 1016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/log_volume_control/http_server/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_http_server
-      weight: 1015
+      weight: 1017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/log_volume_control/kafka/
+      parent: observability_pipelines_log_volume_control
+      identifier: observability_pipelines_log_volume_control_kafka
+      weight: 1018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/log_volume_control/logstash/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_logstash
-      weight: 1016
+      weight: 1019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/log_volume_control/splunk_hec/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_splunk_hec
-      weight: 1017
+      weight: 1020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/log_volume_control/splunk_tcp/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_splunk_tcp
-      weight: 1018
+      weight: 1021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/log_volume_control/sumo_logic_hosted_collector/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_sumo_logic_hosted_collector
-      weight: 1019
+      weight: 1022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/log_volume_control/syslog/
       parent: observability_pipelines_log_volume_control
       identifier: observability_pipelines_log_volume_control_syslog
-      weight: 1020
+      weight: 1023
     - name: Dual Ship Logs
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/
       parent: observability_pipelines_set_up_pipelines
       identifier: observability_pipelines_dual_ship_logs
       weight: 102
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_s3/
+      parent: observability_pipelines_dual_ship_logs
+      identifier: observability_pipelines_dual_ship_logs_amazon_s3
+      weight: 2012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/datadog_agent/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_datadog_agent
-      weight: 2011
+      weight: 2013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/fluent/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_fluent
-      weight: 2012
+      weight: 2014
     - name: Google Pub/Sub
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/google_pubsub/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_google_pubsub
-      weight: 2013
+      weight: 2015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/http_client/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_http_client
-      weight: 2014
+      weight: 2016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/http_server/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_http_server
-      weight: 2015
+      weight: 2017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/dual_ship_logs/kafka/
+      parent: observability_pipelines_dual_ship_logs
+      identifier: observability_pipelines_dual_ship_logs_kafka
+      weight: 2018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/logstash/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_logstash
-      weight: 2016
+      weight: 2019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_hec/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_splunk_hec
-      weight: 2017
+      weight: 2020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_tcp/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_splunk_tcp
-      weight: 2018
+      weight: 2021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/sumo_logic_hosted_collector/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_sumo_logic_hosted_collector
-      weight: 2019
+      weight: 2022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/dual_ship_logs/syslog/
       parent: observability_pipelines_dual_ship_logs
       identifier: observability_pipelines_dual_ship_logs_syslog
-      weight: 2020
+      weight: 2023
     - name: Archive Logs
       url: observability_pipelines/set_up_pipelines/archive_logs/
       parent: observability_pipelines_set_up_pipelines
       identifier: observability_pipelines_archive_logs
       weight: 103
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/archive_logs/amazon_s3/
+      parent: observability_pipelines_archive_logs
+      identifier: observability_pipelines_archive_logs_amazon_s3
+      weight: 3012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/archive_logs/datadog_agent/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_datadog_agent
-      weight: 3011
+      weight: 3013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/archive_logs/fluent/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_fluent
-      weight: 3012
+      weight: 3014
     - name: Google Pub/Sub
-      url: observability_pipelines/set_up_pipelines/archive_logs/google_pub_sub/
+      url: observability_pipelines/set_up_pipelines/archive_logs/google_pubsub/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_google_pub_sub
-      weight: 3013
+      weight: 3015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/archive_logs/http_client/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_http_client
-      weight: 3014
+      weight: 3016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/archive_logs/http_server/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_http_server
-      weight: 3015
+      weight: 3017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/archive_logs/kafka/
+      parent: observability_pipelines_archive_logs
+      identifier: observability_pipelines_archive_logs_kafka
+      weight: 3018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/archive_logs/logstash/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_logstash
-      weight: 3016
+      weight: 3019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/archive_logs/splunk_hec/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_splunk_hec
-      weight: 3017
+      weight: 3020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/archive_logs/splunk_tcp/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_splunk_tcp
-      weight: 3018
+      weight: 3021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/archive_logs/sumo_logic_hosted_collector/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_sumo_logic_hosted_collector
-      weight: 3019
+      weight: 3022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/archive_logs/syslog/
       parent: observability_pipelines_archive_logs
       identifier: observability_pipelines_archive_logs_syslog
-      weight: 3020
+      weight: 3023
     - name: Split Logs
       url: observability_pipelines/set_up_pipelines/split_logs/
       parent: observability_pipelines_set_up_pipelines
       identifier: observability_pipelines_split_logs
       weight: 104
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/split_logs/amazon_s3/
+      parent: observability_pipelines_split_logs
+      identifier: observability_pipelines_split_logs_amazon_s3
+      weight: 4012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/split_logs/datadog_agent/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_datadog_agent
-      weight: 4011
+      weight: 4013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/split_logs/fluent/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_fluent
-      weight: 4012
+      weight: 4014
     - name: Google Pub/Sub
       url: observability_pipelines/set_up_pipelines/split_logs/google_pubsub/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_google_pubsub
-      weight: 4013
+      weight: 4015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/split_logs/http_client/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_http_client
-      weight: 4014
+      weight: 4016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/split_logs/http_server/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_http_server
-      weight: 4015
+      weight: 4017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/split_logs/kafka/
+      parent: observability_pipelines_split_logs
+      identifier: observability_pipelines_split_logs_kafka
+      weight: 4018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/split_logs/logstash/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_logstash
-      weight: 4016
+      weight: 4019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/split_logs/splunk_hec/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_splunk_hec
-      weight: 4017
+      weight: 4020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/split_logs/splunk_tcp/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_splunk_tcp
-      weight: 4018
+      weight: 4021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/split_logs/sumo_logic_hosted_collector/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_sumo_logic_hosted_collector
-      weight: 4019
+      weight: 4022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/split_logs/syslog/
       parent: observability_pipelines_split_logs
       identifier: observability_pipelines_split_logs_syslog
-      weight: 4020
+      weight: 4023
     - name: Sensitive Data Redaction
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/
       parent: observability_pipelines_set_up_pipelines
       identifier: observability_pipelines_sensitive_data_redaction
       weight: 105
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_s3/
+      parent: observability_pipelines_sensitive_data_redaction
+      identifier: observability_pipelines_sensitive_data_redaction_amazon_s3
+      weight: 5012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/datadog_agent/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_datadog_agent
-      weight: 5011
+      weight: 5013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/fluent/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_fluent
-      weight: 5012
+      weight: 5014
     - name: Google Pub/Sub
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/google_pubsub/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_google_pubsub
-      weight: 5013
+      weight: 5015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_client/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_http_client
-      weight: 5014
+      weight: 5016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_server/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_http_server
-      weight: 5015
+      weight: 5017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/kafka/
+      parent: observability_pipelines_sensitive_data_redaction
+      identifier: observability_pipelines_sensitive_data_redaction_kafka
+      weight: 5018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/logstash/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_logstash
-      weight: 5016
+      weight: 5019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_hec/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_splunk_hec
-      weight: 5017
+      weight: 5020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_tcp/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_splunk_tcp
-      weight: 5018
+      weight: 5021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_sumo_logic_hosted_collector
-      weight: 5019
+      weight: 5022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/sensitive_data_redaction/syslog/
       parent: observability_pipelines_sensitive_data_redaction
       identifier: observability_pipelines_sensitive_data_redaction_syslog
-      weight: 5020
+      weight: 5023
     - name: Log Enrichment
       url: observability_pipelines/set_up_pipelines/log_enrichment/
       parent: observability_pipelines_set_up_pipelines
       identifier: observability_pipelines_log_enrichment
       weight: 106
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/log_enrichment/amazon_s3/
+      parent: observability_pipelines_log_enrichment
+      identifier: observability_pipelines_log_enrichment_amazon_s3
+      weight: 6012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/log_enrichment/datadog_agent/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_datadog_agent
-      weight: 6011
+      weight: 6013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/log_enrichment/fluent/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_fluent
-      weight: 6012
+      weight: 6014
     - name: Google Pub/Sub
       url: observability_pipelines/set_up_pipelines/log_enrichment/google_pubsub/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_google_pubsub
-      weight: 6013
+      weight: 6015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/log_enrichment/http_client/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_http_client
-      weight: 6014
+      weight: 6016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/log_enrichment/http_server/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_http_server
-      weight: 6015
+      weight: 6017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/log_enrichment/kafka/
+      parent: observability_pipelines_log_enrichment
+      identifier: observability_pipelines_log_enrichment_kafka
+      weight: 6018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/log_enrichment/logstash/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_logstash
-      weight: 6016
+      weight: 6019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/log_enrichment/splunk_hec/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_splunk_hec
-      weight: 6017
+      weight: 6020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/log_enrichment/splunk_tcp/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_splunk_tcp
-      weight: 6018
+      weight: 6021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/log_enrichment/sumo_logic_hosted_collector/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_sumo_logic_hosted_collector
-      weight: 6019
+      weight: 6022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/log_enrichment/syslog/
       parent: observability_pipelines_log_enrichment
       identifier: observability_pipelines_log_enrichment_syslog
-      weight: 6020
+      weight: 6023
     - name: Generate Metrics
       identifier: observability_pipelines_generate_metrics
       url: /observability_pipelines/set_up_pipelines/generate_metrics/
       parent: observability_pipelines_set_up_pipelines
       weight: 107
+    - name: Amazon S3
+      url: observability_pipelines/set_up_pipelines/generate_metrics/amazon_s3/
+      parent: observability_pipelines_generate_metrics
+      identifier: observability_pipelines_generate_metrics_amazon_s3
+      weight: 7012
     - name: Datadog Agent
       url: observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_datadog_agent
-      weight: 7011
+      weight: 7013
     - name: Fluent
       url: observability_pipelines/set_up_pipelines/generate_metrics/fluent/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_fluent
-      weight: 7012
+      weight: 7014
     - name: Google Pub/Sub
       url: observability_pipelines/set_up_pipelines/generate_metrics/google_pubsub/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_google_pubsub
-      weight: 7013
+      weight: 7015
     - name: HTTP Client
       url: observability_pipelines/set_up_pipelines/generate_metrics/http_client/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_http_client
-      weight: 7014
+      weight: 7016
     - name: HTTP Server
       url: observability_pipelines/set_up_pipelines/generate_metrics/http_server/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_http_server
-      weight: 7015
+      weight: 7017
+    - name: Kafka
+      url: observability_pipelines/set_up_pipelines/generate_metrics/kafka/
+      parent: observability_pipelines_generate_metrics
+      identifier: observability_pipelines_generate_metrics_kafka
+      weight: 7018
     - name: Logstash
       url: observability_pipelines/set_up_pipelines/generate_metrics/logstash/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_logstash
-      weight: 7016
+      weight: 7019
     - name: Splunk HTTP Event Collector
       url: observability_pipelines/set_up_pipelines/generate_metrics/splunk_hec/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_splunk_hec
-      weight: 7017
+      weight: 7020
     - name: Splunk Forwarders (TCP)
       url: observability_pipelines/set_up_pipelines/generate_metrics/splunk_tcp/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_splunk_tcp
-      weight: 7018
+      weight: 7021
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/set_up_pipelines/generate_metrics/sumo_logic_hosted_collector/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_sumo_logic_hosted_collector
-      weight: 7019
+      weight: 7022
     - name: Syslog
       url: observability_pipelines/set_up_pipelines/generate_metrics/syslog/
       parent: observability_pipelines_generate_metrics
       identifier: observability_pipelines_generate_metrics_syslog
-      weight: 7020
+      weight: 7023
     - name: Update Existing Pipelines
       url: observability_pipelines/update_existing_pipelines/
       parent: observability_pipelines
@@ -5032,56 +5097,66 @@ menu:
       parent: observability_pipelines
       identifier: observability_pipelines_sources
       weight: 6
+    - name: Amazon S3
+      url: observability_pipelines/sources/amazon_s3/
+      parent: observability_pipelines_sources
+      identifier: observability_pipelines_sources_amazon_s3
+      weight: 602
     - name: Datadog Agent
       url: observability_pipelines/sources/datadog_agent/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_datadog_agent
-      weight: 601
+      weight: 603
     - name: Fluent
       url: observability_pipelines/sources/fluent/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_fluent
-      weight: 602
+      weight: 604
     - name: Google Pub/Sub
       url: observability_pipelines/sources/google_pubsub/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_google_pubsub
-      weight: 603
+      weight: 605
     - name: HTTP Client
       url: observability_pipelines/sources/http_client/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_http_client
-      weight: 604
+      weight: 606
     - name: HTTP Server
       url: observability_pipelines/sources/http_server/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_http_server
-      weight: 605
+      weight: 607
+    - name: Kafka
+      url: observability_pipelines/sources/kafka/
+      parent: observability_pipelines_sources
+      identifier: observability_pipelines_sources_kafka
+      weight: 608
     - name: Logstash
       url: observability_pipelines/sources/logstash/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_logstash
-      weight: 606
+      weight: 609
     - name: Splunk HEC
       url: observability_pipelines/sources/splunk_hec/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_splunk_hec
-      weight: 607
+      weight: 610
     - name: Splunk TCP
       url: observability_pipelines/sources/splunk_tcp/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_splunk_tcp
-      weight: 608
+      weight: 611
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/sources/sumo_logic/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_sumo_logic
-      weight: 609
+      weight: 612
     - name: Syslog
       url: observability_pipelines/sources/syslog/
       parent: observability_pipelines_sources
       identifier: observability_pipelines_sources_syslog
-      weight: 610
+      weight: 613
     - name: Processors
       url: observability_pipelines/processors/
       parent: observability_pipelines
@@ -5142,11 +5217,16 @@ menu:
       parent: observability_pipelines_processors
       identifier: observability_pipelines_processors_reduce
       weight: 711
+    - name: Remap to OCSF
+      url: observability_pipelines/processors/remap_ocsf
+      parent: observability_pipelines_processors
+      identifier: observability_pipelines_processors_remap_ocsf
+      weight: 712
     - name: Sample
       url: observability_pipelines/processors/sample
       parent: observability_pipelines_processors
       identifier: observability_pipelines_processors_sample
-      weight: 712
+      weight: 713
     - name: Sensitive Data Scanner
       url: observability_pipelines/processors/sensitive_data_scanner
       parent: observability_pipelines_processors
@@ -5171,52 +5251,62 @@ menu:
       identifier: observability_pipelines_azure_storage
       url: observability_pipelines/destinations/azure_storage/
       parent: observability_pipelines_destinations
-      weight: 803
+      weight: 804
     - name: Datadog Logs
       url: observability_pipelines/destinations/datadog_logs/
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_datadog_logs
-      weight: 804
+      weight: 805
     - name: Elasticsearch
       url: observability_pipelines/destinations/elasticsearch/
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_elasticsearch
-      weight: 805
+      weight: 806
     - name: Google Chronicle
       url: observability_pipelines/destinations/google_chronicle
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_google_chronicle
-      weight: 806
+      weight: 807
     - name: Google Cloud Storage
       identifier: observability_pipelines_google_cloud_storage
       url: /observability_pipelines/destinations/google_cloud_storage/
       parent: observability_pipelines_destinations
-      weight: 807
+      weight: 808
+    - name: Microsoft Sentinel
+      identifier: observability_pipelines_microsoft_sentinel
+      url: /observability_pipelines/destinations/microsoft_sentinel/
+      parent: observability_pipelines_destinations
+      weight: 809
     - name: New Relic
       identifier: observability_pipelines_new_relic
       url: /observability_pipelines/destinations/new_relic/
       parent: observability_pipelines_destinations
-      weight: 808
+      weight: 810
     - name: OpenSearch
       url: observability_pipelines/destinations/opensearch
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_opensearch
-      weight: 809
+      weight: 811
+    - name: SentinelOne
+      url: observability_pipelines/destinations/sentinelone
+      parent: observability_pipelines_destinations
+      identifier: observability_pipelines_sentinelone
+      weight: 812
     - name: Syslog
       url: observability_pipelines/destinations/syslog
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_syslog
-      weight: 810
+      weight: 813
     - name: Splunk HEC
       url: observability_pipelines/destinations/splunk_hec
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_splunk_hec
-      weight: 811
+      weight: 814
     - name: Sumo Logic Hosted Collector
       url: observability_pipelines/destinations/sumo_logic_hosted_collector
       parent: observability_pipelines_destinations
       identifier: observability_pipelines_sumo_logic_hosted_collector
-      weight: 812
+      weight: 815
     - name: Best Practices for Scaling Observability Pipelines
       url: observability_pipelines/best_practices_for_scaling_observability_pipelines/
       parent: observability_pipelines
@@ -5275,7 +5365,7 @@ menu:
       weight: 106
     - name: Kotlin Multiplatform
       identifier: log_kotlin_multiplatform
-      url: logs/log_collection/kotlin-multiplatform/
+      url: logs/log_collection/kotlin_multiplatform/
       parent: log_collection
       weight: 107
     - name: C#
@@ -7067,6 +7157,16 @@ menu:
       parent: rum
       identifier: rum_feature_flag_tracking
       weight: 7
+    - name: Setup
+      url: real_user_monitoring/feature_flag_tracking/setup
+      parent: rum_feature_flag_tracking
+      identifier: rum_feature_flag_tracking_setup
+      weight: 701
+    - name: Using Feature Flags
+      url: real_user_monitoring/feature_flag_tracking/using_feature_flags
+      parent: rum_feature_flag_tracking
+      identifier: rum_feature_flag_tracking_use
+      weight: 702
     - name: Error Tracking
       url: real_user_monitoring/error_tracking/
       parent: rum
diff --git a/config/_default/menus/main.es.yaml b/config/_default/menus/main.es.yaml
index 3ecfbc027c52e..31828c055a8ab 100644
--- a/config/_default/menus/main.es.yaml
+++ b/config/_default/menus/main.es.yaml
@@ -5177,7 +5177,7 @@ menu:
   - identifier: log_kotlin_multiplatform
     name: Kotlin Multiplatform
     parent: log_collection
-    url: logs/log_collection/kotlin-multiplatform/
+    url: logs/log_collection/kotlin_multiplatform/
     weight: 107
   - identifier: log_collection_csharp
     name: C#
diff --git a/content/en/administrators_guide/plan.md b/content/en/administrators_guide/plan.md
index 08ba120cc2a6b..c692f72a96800 100644
--- a/content/en/administrators_guide/plan.md
+++ b/content/en/administrators_guide/plan.md
@@ -366,7 +366,7 @@ Create a detailed roll-out methodology in the [build][41] phase by focusing on t
 [48]: https://www.datadoghq.com/blog/engineering/husky-deep-dive/
 [49]: /real_user_monitoring/platform/connect_rum_and_traces/?tab=browserrum
 [50]: /integrations/tcp_check/?tab=host#data-collected
-[51]: /tracing/guide/inferred-service-opt-in/?tab=java
+[51]: /tracing/services/inferred_services
 [52]: /integrations/amazon_web_services/
 [53]: /integrations/google_cloud_platform/
 [54]: /integrations/azure/
diff --git a/content/en/error_tracking/frontend/_index.md b/content/en/error_tracking/frontend/_index.md
index b4b0178e11274..406397211eed9 100644
--- a/content/en/error_tracking/frontend/_index.md
+++ b/content/en/error_tracking/frontend/_index.md
@@ -38,7 +38,7 @@ Error Tracking simplifies debugging by grouping thousands of similar errors into
     {{< nextlink href="error_tracking/frontend/mobile/expo" >}}Expo{{< /nextlink >}}
     {{< nextlink href="error_tracking/frontend/mobile/reactnative" >}}React Native{{< /nextlink >}}
     {{< nextlink href="error_tracking/frontend/mobile/flutter" >}}Flutter{{< /nextlink >}}
-    {{< nextlink href="error_tracking/frontend/mobile/kotlin-multiplatform" >}}Kotlin Multiplatform{{< /nextlink >}}
+    {{< nextlink href="error_tracking/frontend/mobile/kotlin_multiplatform" >}}Kotlin Multiplatform{{< /nextlink >}}
     {{< nextlink href="error_tracking/frontend/logs" >}}Logs{{< /nextlink >}}
 {{< /whatsnext >}}
 
diff --git a/content/en/error_tracking/frontend/logs.md b/content/en/error_tracking/frontend/logs.md
index da7b8a8b72a14..b1270db1b3338 100644
--- a/content/en/error_tracking/frontend/logs.md
+++ b/content/en/error_tracking/frontend/logs.md
@@ -123,7 +123,7 @@ If you have not setup the Datadog Kotlin Multiplatform Logs SDK yet, follow the
    ```
 
 [1]: https://app.datadoghq.com/logs/onboarding/client
-[2]: /logs/log_collection/kotlin-multiplatform/#setup
+[2]: /logs/log_collection/kotlin_multiplatform/#setup
 [3]: https://github.com/Datadog/dd-sdk-kotlin-multiplatform
 
 {{% /tab %}}
diff --git a/content/en/error_tracking/frontend/mobile/kotlin-multiplatform.md b/content/en/error_tracking/frontend/mobile/kotlin_multiplatform.md
similarity index 83%
rename from content/en/error_tracking/frontend/mobile/kotlin-multiplatform.md
rename to content/en/error_tracking/frontend/mobile/kotlin_multiplatform.md
index 8008d6fd05ce1..f245548e3a8f1 100644
--- a/content/en/error_tracking/frontend/mobile/kotlin-multiplatform.md
+++ b/content/en/error_tracking/frontend/mobile/kotlin_multiplatform.md
@@ -2,4 +2,4 @@
 title: Kotlin Multiplatform Crash Reporting and Error Tracking
 ---
 
-{{< include-markdown "real_user_monitoring/error_tracking/mobile/kotlin-multiplatform" >}}
\ No newline at end of file
+{{< include-markdown "real_user_monitoring/error_tracking/mobile/kotlin_multiplatform" >}}
\ No newline at end of file
diff --git a/content/en/error_tracking/rum.md b/content/en/error_tracking/rum.md
index e10ff4991f317..3adc50939bfa9 100644
--- a/content/en/error_tracking/rum.md
+++ b/content/en/error_tracking/rum.md
@@ -11,5 +11,5 @@ title: Error Tracking for Web and Mobile Applications
     {{< nextlink href="real_user_monitoring/error_tracking/flutter" >}}Flutter{{< /nextlink >}}
     {{< nextlink href="real_user_monitoring/error_tracking/unity" >}}Unity{{< /nextlink >}}
     {{< nextlink href="real_user_monitoring/error_tracking/roku" >}}Roku{{< /nextlink >}}
-    {{< nextlink href="real_user_monitoring/error_tracking/kotlin-multiplatform" >}}Kotlin Multiplatform{{< /nextlink >}}
+    {{< nextlink href="real_user_monitoring/error_tracking/kotlin_multiplatform" >}}Kotlin Multiplatform{{< /nextlink >}}
 {{< /whatsnext >}}
\ No newline at end of file
diff --git a/content/en/integrations/guide/source-code-integration.md b/content/en/integrations/guide/source-code-integration.md
index 3f6bcaedc3e4b..cba399d3f93ff 100644
--- a/content/en/integrations/guide/source-code-integration.md
+++ b/content/en/integrations/guide/source-code-integration.md
@@ -581,6 +581,30 @@ If you're using the GitHub integration, click **Connect to preview** on error fr
 
 [101]: https://app.datadoghq.com/security/appsec
 
+{{% /tab %}}
+{{% tab "Dynamic Instrumentation" %}}
+
+You can see full source code files in [**Dynamic Instrumentation**][102] when creating or editing an instrumentation (dynamic log, metric, span, or span tags).
+
+#### Create new instrumentation
+
+1. Navigate to [**APM** > **Dynamic Instrumentation**][101].
+2. Select **Create New Instrumentation** and choose a service to instrument.
+3. Search for and select a source code filename or method.
+
+#### View or edit instrumentation
+
+1. Navigate to [**APM** > **Dynamic Instrumentation**][101].
+2. Select an existing instrumentation from the list, then click **View Events**.
+3. Select the instrumentation card to view its location in the source code.
+
+{{< img src="integrations/guide/source_code_integration/dynamic-instrumentation-create-new.png" alt="Source Code File in Dynamic Instrumentation" style="width:100%;">}}
+
+For more information, see the [Dynamic Instrumentation documentation][102].
+
+[101]: https://app.datadoghq.com/dynamic-instrumentation/events
+[102]: /dynamic_instrumentation/
+
 {{% /tab %}}
 {{< /tabs >}}
 
diff --git a/content/en/logs/log_collection/kotlin-multiplatform.md b/content/en/logs/log_collection/kotlin_multiplatform.md
similarity index 100%
rename from content/en/logs/log_collection/kotlin-multiplatform.md
rename to content/en/logs/log_collection/kotlin_multiplatform.md
diff --git a/content/en/monitors/configuration/_index.md b/content/en/monitors/configuration/_index.md
index aa5d9bfb309a0..64dacf52ff118 100644
--- a/content/en/monitors/configuration/_index.md
+++ b/content/en/monitors/configuration/_index.md
@@ -270,6 +270,8 @@ A `Multi Alert` monitor triggers individual notifications for each entity in a m
 
 For example, when setting up a monitor to notify you if the P99 latency, aggregated by service, exceeds a certain threshold, you would receive a **separate** alert for each individual service whose P99 latency exceeded the alert threshold. This can be useful for identifying and addressing specific instances of system or application issues. It allows you to track problems on a more granular level.
 
+##### Notification grouping
+
 When monitoring a large group of entities, multi alerts can lead to noisy monitors. To mitigate this, customize which dimensions trigger alerts. This reduces the noise and allows you to focus on the alerts that matter most. For instance, you are monitoring the average CPU usage of all your hosts. If you group your query by `service` and `host` but only want alerts to be sent once for each `service` attribute meeting the threshold, remove the `host` attribute from your multi alert options and reduce the number of notifications that are sent.
 
 {{< img src="/monitors/create/multi-alert-aggregated.png" alt="Diagram of how notifications are sent when set to specific dimensions in multi alerts" style="width:90%;">}}
diff --git a/content/en/monitors/types/cloud_cost.md b/content/en/monitors/types/cloud_cost.md
index 52f18ae315a86..07835a28a71c5 100644
--- a/content/en/monitors/types/cloud_cost.md
+++ b/content/en/monitors/types/cloud_cost.md
@@ -37,7 +37,7 @@ Cloud Cost monitors are evaluated with a 48 hour delayed evaluation window, beca
 
 To create a Cloud Cost monitor in Datadog, use the main navigation: [**Monitors** --> **New Monitor** --> **Cloud Cost**][4].
 
-You can also create Cloud Cost monitors from the [Cloud Cost Explorer][2]. Click **More...** next to the Options button and select **Create monitor**. 
+You can also create Cloud Cost monitors from the [Cloud Cost Explorer][2]. Click **More...** next to the Options button and select **Create monitor**.
 
 {{< img src="/monitors/monitor_types/cloud_cost/explorer.png" alt="Option to create a monitor from the Cloud Cost Explorer page" style="width:100%;" >}}
 
@@ -73,14 +73,14 @@ You can select from the following monitor types.
 
 | Cost Type | Description | Usage Examples |
 | ---  | ----------- | ----------- |
-| Cost Anomalies | Detect anomalies by comparing current costs to historical data, using a defined lookback period. Incomplete days are excluded from analysis to ensure accuracy. Anomaly monitors require at least 4 months of cloud cost data to evaluate since historical data is required to train the algorithm. | Alert if 3 days from the past 30 days show significant cost anomalies compared to historical data. |
+| Cost Anomalies | Detect anomalies by comparing current costs to historical data, using a defined lookback period. Incomplete days are excluded from analysis to ensure accuracy. Anomaly monitors require at least 1 month of cloud cost data to evaluate, because historical data is required to train the algorithm. | Alert if 3 days from the past 30 days show significant cost anomalies compared to historical data. |
 
 {{% /tab %}}
 {{< /tabs >}}
 
 ## Specify which costs to track
 
-Any cost type or metric reporting to Datadog is available for monitors. You can use custom metrics or observability metrics alongside a cost metric to monitor unit economics. 
+Any cost type or metric reporting to Datadog is available for monitors. You can use custom metrics or observability metrics alongside a cost metric to monitor unit economics.
 
 | Step                              | Required | Default              | Example             |
 |-----------------------------------|----------|----------------------|---------------------|
@@ -89,35 +89,35 @@ Any cost type or metric reporting to Datadog is available for monitors. You can
 | Group by                          | No       | Everything           | `aws_availability_zone` |
 | Add observability metric | No      | `system.cpu.user` | `aws.s3.all_requests` |
 
-Use the editor to define the cost types or exports. 
+Use the editor to define the cost types or exports.
 
 {{< img src="monitors/monitor_types/cloud_cost/ccm_metrics_source.png" alt="Cloud Cost and Metrics data source options for specifying which costs to track" style="width:100%;" >}}
 
-For more information, see the [Cloud Cost Management documentation][1]. 
+For more information, see the [Cloud Cost Management documentation][1].
 
 ## Set alert conditions
 
 {{< tabs >}}
 {{% tab "Changes" %}}
 
-If you are using the **Cost Changes** monitor type, you can trigger an alert when the cost `increases` or `decreases` more than the defined threshold. The threshold can be set to either a **Percentage Change** or set to **Dollar Amount**. 
+If you are using the **Cost Changes** monitor type, you can trigger an alert when the cost `increases` or `decreases` more than the defined threshold. The threshold can be set to either a **Percentage Change** or set to **Dollar Amount**.
 
 If you are using the **Percentage Change**, you can filter out changes that are below a certain dollar threshold. For example, the monitor alerts when there is a cost change above 5% for any change that is above $500.
 
 {{% /tab %}}
 {{% tab "Threshold" %}}
 
-If you are using the **Cost Threshold** monitor type, you can trigger an alert when the cloud cost is `above`, `below`, `above or equal`, or `below or equal to` a threshold.  
+If you are using the **Cost Threshold** monitor type, you can trigger an alert when the cloud cost is `above`, `below`, `above or equal`, or `below or equal to` a threshold.
 
 {{% /tab %}}
 {{% tab "Forecast" %}}
 
-If you are using the **Cost Forecast** monitor type, you can trigger an alert when the cloud cost is `above`, `below`, `above or equal`, `below or equal to`, `equal to`, or `not equal to` a threshold.  
+If you are using the **Cost Forecast** monitor type, you can trigger an alert when the cloud cost is `above`, `below`, `above or equal`, `below or equal to`, `equal to`, or `not equal to` a threshold.
 
 {{% /tab %}}
 {{% tab "Anomalies" %}}
 
-If you are using the **Cost Anomalies** monitor type, you can trigger an alert if the observed cost deviates from historical data by being `above`, `below`, or `above or below` a threshold for any provider and service. 
+If you are using the **Cost Anomalies** monitor type, you can trigger an alert if the observed cost deviates from historical data by being `above`, `below`, or `above or below` a threshold for any provider and service.
 
 The `agile` [anomaly algorithm][101] is used with two bounds and monthly seasonality.
 
diff --git a/content/en/observability_pipelines/destinations/_index.md b/content/en/observability_pipelines/destinations/_index.md
index d15fab1337308..6a2a0e6bdc9a6 100644
--- a/content/en/observability_pipelines/destinations/_index.md
+++ b/content/en/observability_pipelines/destinations/_index.md
@@ -29,12 +29,70 @@ Select and set up your destinations when you [set up a pipeline][1]. This is ste
     {{< nextlink href="observability_pipelines/destinations/google_chronicle" >}}Google Chronicle{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/destinations/google_cloud_storage" >}}Google Cloud Storage{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/destinations/new_relic" >}}New Relic{{< /nextlink >}}
+    {{< nextlink href="observability_pipelines/destinations/microsoft_sentinel" >}}Microsoft Sentinel{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/destinations/opensearch" >}}OpenSearch{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/destinations/syslog" >}}rsyslog or syslog-ng{{< /nextlink >}}
+    {{< nextlink href="observability_pipelines/destinations/sentinelone" >}} SentinelOne {{< /nextlink >}}
     {{< nextlink href="observability_pipelines/destinations/splunk_hec" >}}Splunk HTTP Event Collector (HEC){{< /nextlink >}}
     {{< nextlink href="observability_pipelines/destinations/sumo_logic_hosted_collector" >}}Sumo Logic Hosted Collector{{< /nextlink >}}
 {{< /whatsnext >}}
 
+## Template syntax
+
+Logs are often stored in separate indexes based on log data, such as the service or environment the logs come from, or another log attribute. In Observability Pipelines, you can use template syntax to route your logs to different indexes based on specific log fields.
+
+When the Observability Pipelines Worker cannot resolve the field with the template syntax, the Worker defaults to a specified behavior for that destination. For example, if you are using the template `{{ application_id }}` for the Amazon S3 destination's **Prefix** field, but there isn't an `application_id` field in the log, the Worker creates a folder called `OP_UNRESOLVED_TEMPLATE_LOGS/` and publishes the logs there.
+
+The following table lists the destinations and fields that support template syntax, and what happens when the Worker cannot resolve the field:
+
+| Destination       | Fields that support template syntax  | Behavior when the field cannot be resolved                                                     |
+| ----------------- | -------------------------------------| -----------------------------------------------------------------------------------------------|
+| Amazon OpenSearch | Index                                | The Worker creates an index named `datadog-op` and sends the logs there.                       |
+| Amazon S3         | Prefix                               | The Worker creates a folder named `OP_UNRESOLVED_TEMPLATE_LOGS/` and sends the logs there. |
+| Azure Blob        | Prefix                               | The Worker creates a folder named `OP_UNRESOLVED_TEMPLATE_LOGS/` and sends the logs there. |
+| Elasticsearch     | Source type                          | The Worker creates an index named `datadog-op` and sends the logs there.                       |
+| Google Chronicle  | Log type                             | The Worker defaults to the `vector_dev` log type.                                              |
+| Google Cloud Storage | Prefix                            | The Worker creates a folder named `OP_UNRESOLVED_TEMPLATE_LOGS/` and sends the logs there. |
+| OpenSearch        | Index                                | The Worker creates an index named `datadog-op` and sends the logs there.                       |
+| Splunk HEC        | Index<br>Source type                 | The Worker sends the logs to the default index configured in Splunk.                       |
+
+### Example
+
+If you want to route logs based on the log's application ID field (for example, `application_id`) to the Amazon S3 destination, use the event fields syntax in the **Prefix to apply to all object keys** field.
+
+{{< img src="observability_pipelines/amazon_s3_prefix.png" alt="The Amazon S3 destination showing the prefix field using the event fields syntax /application_id={{ application_id }}/" style="width:40%;" >}}
+
+### Syntax
+
+#### Event fields
+
+Use `{{ <field_name> }}` to access individual log event fields. For example:
+
+```
+{{ application_id }}
+```
+
+#### Strftime specifiers
+
+Use [strftime specifiers][3] for the date and time. For example:
+
+```
+year=%Y/month=%m/day=%d
+```
+
+#### Escape characters
+
+Prefix a character with `\` to escape the character. This example escapes the event field syntax:
+
+```
+\{{ field_name }}
+```
+
+This example escapes the strftime specifiers:
+
+```
+year=\%Y/month=\%m/day=\%d/
+```
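+
+Event fields and strftime specifiers can also be combined in a single template. For example, assuming both forms can be used together in the same field and reusing the `application_id` field from the earlier example, a prefix that partitions objects by application ID and by date could look like this:
+
+```
+/application_id={{ application_id }}/year=%Y/month=%m/day=%d/
+```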
 
 ## Event batching
 
@@ -57,4 +115,5 @@ If the destination receives 3 events within 2 seconds, it flushes a batch with 2
 {{% observability_pipelines/destination_batching %}}
 
 [1]: /observability_pipelines/set_up_pipelines/
-[2]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
+[2]: https://app.datadoghq.com/observability-pipelines
+[3]: https://docs.rs/chrono/0.4.19/chrono/format/strftime/index.html#specifiers
\ No newline at end of file
diff --git a/content/en/observability_pipelines/destinations/amazon_security_lake.md b/content/en/observability_pipelines/destinations/amazon_security_lake.md
new file mode 100644
index 0000000000000..8799d313520c4
--- /dev/null
+++ b/content/en/observability_pipelines/destinations/amazon_security_lake.md
@@ -0,0 +1,47 @@
+---
+title: Amazon Security Lake Destination
+disable_toc: false
+---
+
+Use Observability Pipelines' Amazon Security Lake destination to send logs to Amazon Security Lake.
+
+## Prerequisites
+
+You need to do the following before setting up the Amazon Security Lake destination:
+
+{{% observability_pipelines/prerequisites/amazon_security_lake %}}
+
+## Setup
+
+Set up the Amazon Security Lake destination and its environment variables when you [set up a pipeline][1]. The information below is configured in the pipelines UI.
+
+### Set up the destination
+
+{{% observability_pipelines/destination_settings/amazon_security_lake %}}
+
+### Set the environment variables
+
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/amazon_security_lake %}}
+
+## AWS authentication
+
+{{% observability_pipelines/aws_authentication/amazon_security_lake/intro %}}
+
+{{% observability_pipelines/aws_authentication/instructions %}}
+
+### Permissions
+
+{{% observability_pipelines/aws_authentication/amazon_security_lake/permissions %}}
+
+## How the destination works
+
+### Event batching
+
+A batch of events is flushed when one of these parameters is met. See [event batching][2] for more information.
+
+| Max Events     | Max Bytes       | Timeout (seconds)   |
+|----------------|-----------------|---------------------|
+| TKTK           | TKTK      | TKTK                   |
+
+[1]: https://app.datadoghq.com/observability-pipelines
+[2]: /observability_pipelines/destinations/#event-batching
\ No newline at end of file
diff --git a/content/en/observability_pipelines/destinations/microsoft_sentinel.md b/content/en/observability_pipelines/destinations/microsoft_sentinel.md
new file mode 100644
index 0000000000000..060a86d673687
--- /dev/null
+++ b/content/en/observability_pipelines/destinations/microsoft_sentinel.md
@@ -0,0 +1,31 @@
+---
+title: Microsoft Sentinel Destination
+disable_toc: false
+---
+
+Use Observability Pipelines' Microsoft Sentinel destination to send logs to Microsoft Sentinel.
+
+## Setup
+
+Set up the Microsoft Sentinel destination and its environment variables when you [set up a pipeline][1]. The information below is configured in the pipelines UI.
+
+### Set up the destination
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+### Set the environment variables
+
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+## How the destination works
+
+### Event batching
+
+A batch of events is flushed when one of these parameters is met. See [event batching][2] for more information.
+
+| Max Events     | Max Bytes       | Timeout (seconds)   |
+|----------------|-----------------|---------------------|
+| None           | 10,000,000       | 1                   |
+
+[1]: https://app.datadoghq.com/observability-pipelines
+[2]: /observability_pipelines/destinations/#event-batching
\ No newline at end of file
diff --git a/content/en/observability_pipelines/destinations/sentinelone.md b/content/en/observability_pipelines/destinations/sentinelone.md
new file mode 100644
index 0000000000000..6686938e8c285
--- /dev/null
+++ b/content/en/observability_pipelines/destinations/sentinelone.md
@@ -0,0 +1,41 @@
+---
+title: SentinelOne Destination
+disable_toc: false
+---
+
+Use Observability Pipelines' SentinelOne destination to send logs to SentinelOne.
+
+## Setup
+
+Set up the SentinelOne destination and its environment variables when you [set up a pipeline][1]. The information below is configured in the pipelines UI.
+
+### Set up the destination
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+### Set the environment variables
+
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/sentinelone %}}
+
+## View logs in a SentinelOne cluster
+
+After you've set up the pipeline to send logs to the SentinelOne destination, you can view the logs in a SentinelOne cluster:
+
+1. Log in to the [SentinelOne console][2].
+2. Navigate to the Singularity Data Lake (SDL) **Search** page. To access it from the console, click **Visibility** in the left menu to go to SDL, and make sure you are on the **Search** tab.
+3. Make sure the filter next to the search bar is set to **All Data**.
+4. The page displays the logs you sent from Observability Pipelines to SentinelOne.
+
+## How the destination works
+
+### Event batching
+
+A batch of events is flushed when one of these parameters is met. See [event batching][3] for more information.
+
+| Max Events     | Max Bytes       | Timeout (seconds)   |
+|----------------|-----------------|---------------------|
+| None           | 1,000,000       | 1                   |
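+
+The same "any limit triggers a flush" rule applies here, only with a smaller size threshold. A simplified Python sketch using this table's values (not the Worker's actual implementation):
+
+```python
+# Illustrative only; there is no event-count limit for this destination.
+MAX_BYTES = 1_000_000   # flush once the batch holds ~1 MB of events
+TIMEOUT_S = 1           # or once the batch has been open for 1 second
+
+def should_flush(batch_bytes: int, batch_age_s: float) -> bool:
+    return batch_bytes >= MAX_BYTES or batch_age_s >= TIMEOUT_S
+
+print(should_flush(400_000, 0.5))    # False: under both limits
+print(should_flush(1_200_000, 0.1))  # True: the size limit is reached
+```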
+
+[1]: https://app.datadoghq.com/observability-pipelines
+[2]: https://usea1-partners.sentinelone.net/login
+[3]: /observability_pipelines/destinations/#event-batching
\ No newline at end of file
diff --git a/content/en/observability_pipelines/processors/_index.md b/content/en/observability_pipelines/processors/_index.md
index 174891fdfdf44..d8e1e0e18ecd5 100644
--- a/content/en/observability_pipelines/processors/_index.md
+++ b/content/en/observability_pipelines/processors/_index.md
@@ -38,6 +38,7 @@ Use Observability Pipelines' processors to parse, structure, and enrich your log
     {{< nextlink href="observability_pipelines/processors/quota" >}}Quota{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/processors/reduce" >}}Reduce{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/processors/sample" >}}Sample{{< /nextlink >}}
+    {{< nextlink href="observability_pipelines/processors/remap_ocsf" >}}Remap to OCSF{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/processors/sensitive_data_scanner" >}}Sensitive Data Scanner{{< /nextlink >}}
 {{< /whatsnext >}}
 
diff --git a/content/en/observability_pipelines/processors/remap_ocsf.md b/content/en/observability_pipelines/processors/remap_ocsf.md
new file mode 100644
index 0000000000000..e908df319e015
--- /dev/null
+++ b/content/en/observability_pipelines/processors/remap_ocsf.md
@@ -0,0 +1,8 @@
+---
+title: Remap to OCSF Processor
+disable_toc: false
+---
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
\ No newline at end of file
diff --git a/content/en/observability_pipelines/processors/sensitive_data_scanner.md b/content/en/observability_pipelines/processors/sensitive_data_scanner.md
index e3e77ca4d887b..46a637dbf717a 100644
--- a/content/en/observability_pipelines/processors/sensitive_data_scanner.md
+++ b/content/en/observability_pipelines/processors/sensitive_data_scanner.md
@@ -5,4 +5,15 @@ disable_toc: false
 
 {{% observability_pipelines/processors/sensitive_data_scanner %}}
 
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
 {{% observability_pipelines/processors/filter_syntax %}}
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md
index 7d6c7d99f0525..8bc9073f71d75 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/_index.md
@@ -17,11 +17,14 @@ Use Observability Pipelines to route ingested logs to a cloud storage solution (
 
 Select a source to get started:
 
+<!-- - [Amazon Data Firehose][12] -->
+- [Amazon S3][11]
 - [Datadog Agent][1]
 - [Fluentd or Fluent Bit][2]
 - [Google Pub/Sub][3]
 - [HTTP Client][4]
 - [HTTP Server][5]
+- [Kafka][13]
 - [Logstash][6]
 - [Splunk HTTP Event Collector (HEC)][7]
 - [Splunk Heavy or Universal Forwarders (TCP)][8]
@@ -38,3 +41,6 @@ Select a source to get started:
 [8]: /observability_pipelines/archive_logs/splunk_tcp
 [9]: /observability_pipelines/archive_logs/sumo_logic_hosted_collector
 [10]: /observability_pipelines/archive_logs/syslog
+[11]: /observability_pipelines/set_up_pipelines/archive_logs/amazon_s3
+[12]: /observability_pipelines/set_up_pipelines/archive_logs/amazon_data_firehose
+[13]: /observability_pipelines/set_up_pipelines/archive_logs/kafka
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/amazon_data_firehose.md
new file mode 100644
index 0000000000000..dfd51d20c2622
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/amazon_data_firehose.md
@@ -0,0 +1,394 @@
+---
+title: Archive Logs for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to format your Amazon Data Firehose logs into a Datadog-rehydratable format before routing them to Datadog Log Archives.
+
+{{% observability_pipelines/use_case_images/archive_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Configuring Log Archives](#configure-log-archives)
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Configure Log Archives
+
+If you already have a Datadog Log Archive configured for Observability Pipelines, skip to [Set up Observability Pipelines](#set-up-observability-pipelines).
+
+You need to have the Datadog integration for your cloud provider installed to set up Datadog Log Archives. See the [AWS integration][1], [Google Cloud Platform integration][2], and [Azure integration][3] documentation for more information.
+
+Select the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h4" %}}
+{{% observability_pipelines/configure_log_archive/amazon_s3/instructions %}}
+
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/linux_rpm %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/connect_s3_to_datadog_log_archives %}}
+
+{{% /collapse-content %}}
+
+{{% collapse-content title="Google Cloud Storage" level="h4" %}}
+
+{{% observability_pipelines/configure_log_archive/google_cloud_storage/instructions %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h4" %}}
+
+{{% observability_pipelines/configure_log_archive/azure_storage/instructions %}}
+
+{{% /collapse-content %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][4].
+1. Select the **Archive Logs** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+{{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. The Observability Pipelines Worker listens to this address and port for incoming logs from Amazon Data Firehose. For example, `0.0.0.0:<port_number>`.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Send logs to the Observability Pipelines Worker
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
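+
+If you manage your delivery stream with code, the following is a hedged boto3 sketch of pointing an Amazon Data Firehose HTTP endpoint destination at the Worker's listener. The stream name, endpoint URL, and ARNs are hypothetical placeholders; adapt them to your own setup and treat the instructions above as authoritative.
+
+```python
+import boto3
+
+firehose = boto3.client("firehose")
+
+# Hypothetical names and ARNs; replace with your own values.
+firehose.create_delivery_stream(
+    DeliveryStreamName="observability-pipelines-stream",
+    DeliveryStreamType="DirectPut",
+    HttpEndpointDestinationConfiguration={
+        "EndpointConfiguration": {
+            # The address and port the Observability Pipelines Worker listens on,
+            # exposed over HTTPS (Firehose requires an HTTPS endpoint).
+            "Url": "https://opw.example.com:8088",
+            "Name": "observability-pipelines-worker",
+        },
+        # Firehose requires an S3 bucket for failed (or all) deliveries.
+        "S3BackupMode": "FailedDataOnly",
+        "S3Configuration": {
+            "RoleARN": "arn:aws:iam::123456789012:role/firehose-backup-role",
+            "BucketARN": "arn:aws:s3:::firehose-backup-bucket",
+        },
+    },
+)
+```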
+
+[1]: /integrations/amazon_web_services/#setup
+[2]: /integrations/google_cloud_platform/#setup
+[3]: /integrations/azure/#setup
+[4]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/amazon_s3.md
new file mode 100644
index 0000000000000..89671d12a41e6
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/amazon_s3.md
@@ -0,0 +1,389 @@
+---
+title: Archive Logs for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to format your Amazon S3 logs into a Datadog-rehydratable format before routing them to Datadog Log Archives.
+
+{{% observability_pipelines/use_case_images/archive_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Configuring Log Archives](#configure-log-archives)
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Configure Log Archives
+
+If you already have a Datadog Log Archive configured for Observability Pipelines, skip to [Set up Observability Pipelines](#set-up-observability-pipelines).
+
+You need to have the Datadog integration for your cloud provider installed to set up Datadog Log Archives. See the [AWS integration][1], [Google Cloud Platform integration][2], and [Azure integration][3] documentation for more information.
+
+Select the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h4" %}}
+{{% observability_pipelines/configure_log_archive/amazon_s3/instructions %}}
+
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/linux_rpm %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/connect_s3_to_datadog_log_archives %}}
+
+{{% /collapse-content %}}
+
+{{% collapse-content title="Google Cloud Storage" level="h4" %}}
+
+{{% observability_pipelines/configure_log_archive/google_cloud_storage/instructions %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h4" %}}
+
+{{% observability_pipelines/configure_log_archive/azure_storage/instructions %}}
+
+{{% /collapse-content %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][4].
+1. Select the **Archive Logs** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue to which the S3 bucket sends notification events. For an example of wiring the bucket's notification events to an SQS queue, see the sketch after these installation steps.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
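+
+The **AWS S3 SQS URL** value comes from the SQS queue that receives the bucket's `s3:ObjectCreated:*` notifications. The following is a hedged boto3 sketch of that wiring; the bucket and queue names are hypothetical placeholders, and the SQS queue policy that allows S3 to send messages is omitted.
+
+```python
+import boto3
+
+s3 = boto3.client("s3")
+sqs = boto3.client("sqs")
+
+# Hypothetical names; replace with your own bucket and queue.
+queue_url = sqs.get_queue_url(QueueName="op-worker-s3-notifications")["QueueUrl"]
+queue_arn = sqs.get_queue_attributes(
+    QueueUrl=queue_url, AttributeNames=["QueueArn"]
+)["Attributes"]["QueueArn"]
+
+# Send an event to the queue whenever an object is created in the log bucket.
+s3.put_bucket_notification_configuration(
+    Bucket="my-log-bucket",
+    NotificationConfiguration={
+        "QueueConfigurations": [
+            {"QueueArn": queue_arn, "Events": ["s3:ObjectCreated:*"]}
+        ]
+    },
+)
+
+print(queue_url)  # paste this value into the AWS S3 SQS URL field
+```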
+
+[1]: /integrations/amazon_web_services/#setup
+[2]: /integrations/google_cloud_platform/#setup
+[3]: /integrations/azure/#setup
+[4]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/datadog_agent.md
index bcc27a181e835..540b26d934105 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/datadog_agent.md
@@ -11,7 +11,7 @@ Configure your Datadog Agent so that the Observability Pipelines Worker formats
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -83,9 +83,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -109,52 +124,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -164,9 +178,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 
 {{% observability_pipelines/processors/add_processors %}}
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -174,15 +198,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -194,50 +233,62 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -255,49 +306,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/fluent.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/fluent.md
index 51ddb45e76d1a..d8b31cd0d09fb 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/fluent.md
@@ -12,7 +12,7 @@ Configure Fluentd or Fluent Bit so that the Observability Pipelines Worker forma
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -84,9 +84,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -110,52 +125,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -166,9 +180,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -176,15 +200,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -196,51 +235,63 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Fluent socket address and port. The Observability Pipelines Worker listens on this address for incoming log messages.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -258,49 +309,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/google_pubsub.md
index dccfb4ea9dcaa..e87397f75cca4 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/google_pubsub.md
@@ -11,7 +11,7 @@ Configure Google Pub/Sub so that the Observability Pipelines Worker formats the
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -71,7 +71,7 @@ Select the cloud provider you are using to archive your logs.
 
 ## Set up Observability Pipelines
 
-1. Navigate to [Observability Pipelines][1].
+1. Navigate to [Observability Pipelines][4].
 1. Select the **Archive Logs** template to create a new pipeline.
 1. Select the **Google Pub/Sub** source.
 
@@ -81,9 +81,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -107,52 +122,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -163,9 +177,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -173,15 +197,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -193,49 +232,61 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -253,49 +304,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_client.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_client.md
index ccb06186f1d68..f9477a990989e 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_client.md
@@ -11,7 +11,7 @@ Use the Observability Pipelines Worker to format your HTTP server logs into a Da
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -82,9 +82,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -108,52 +123,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -164,9 +178,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -174,15 +198,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -194,51 +233,63 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the full path of the HTTP/S endpoint URL. For example, `https://127.0.0.8/logs`. The Observability Pipelines Worker collects log events from this endpoint.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -256,49 +307,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_server.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_server.md
index 02a17a2808937..f6e73e0a98d8d 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/http_server.md
@@ -9,7 +9,7 @@ Use the Observability Pipelines Worker to format HTTP client logs into a Datadog
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -69,7 +69,7 @@ Select the cloud provider you are using to archive your logs.
 
 ## Set up Observability Pipelines
 
-1. Navigate to [Observability Pipelines][1].
+1. Navigate to [Observability Pipelines][4].
 1. Select the **Archive Logs** template to create a new pipeline.
 1. Select the **HTTP Server** source.
 
@@ -79,9 +79,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -105,52 +120,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -161,9 +175,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -171,15 +195,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -191,50 +230,62 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens to this socket address for your HTTP client logs.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -252,49 +303,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/kafka.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/kafka.md
new file mode 100644
index 0000000000000..ce7744b7729c5
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/kafka.md
@@ -0,0 +1,393 @@
+---
+title: Archive Logs for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to format your logs from Kafka topics into a Datadog-rehydratable format before routing them to Datadog Log Archives.
+
+{{% observability_pipelines/use_case_images/archive_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Configuring a Log Archive](#configure-a-log-archive)
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Configure a Log Archive
+
+If you already have a Datadog Log Archive configured for Observability Pipelines, skip to [Set up Observability Pipelines](#set-up-observability-pipelines).
+
+You need to have the Datadog integration for your cloud provider installed to set up a Datadog Log Archive. See the [AWS integration][1], [Google Cloud Platform integration][2], and [Azure integration][3] documentation for more information.
+
+Select the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h4" %}}
+{{% observability_pipelines/configure_log_archive/amazon_s3/instructions %}}
+
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/linux_rpm %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+{{% observability_pipelines/configure_log_archive/amazon_s3/connect_s3_to_datadog_log_archives %}}
+
+{{% /collapse-content %}}
+
+{{% collapse-content title="Google Cloud Storage" level="h4" %}}
+
+{{% observability_pipelines/configure_log_archive/google_cloud_storage/instructions %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h4" %}}
+
+{{% observability_pipelines/configure_log_archive/azure_storage/instructions %}}
+
+{{% /collapse-content %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][4].
+1. Select the **Archive Logs** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all the other hosts in the cluster. Enter each server in the format `host:port`, such as `10.14.22.123:9092`. If there is more than one server, separate them with commas (for example, `10.14.22.123:9092,10.14.22.124:9092`). A sample Kafka producer that points at the same bootstrap servers is sketched after these installation steps.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password.
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
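+To sanity-check the bootstrap server values entered above, the following is a minimal, hypothetical sketch of a Kafka producer pointed at the same servers. It assumes the `kafka-python` library, placeholder broker addresses, and a placeholder topic name; it is not part of the Worker setup itself.
+
+```python
+from kafka import KafkaProducer
+
+# Placeholder bootstrap servers: the same host:port values entered in the
+# Worker installation step, expressed as a Python list.
+BOOTSTRAP_SERVERS = ["10.14.22.123:9092", "10.14.22.124:9092"]
+
+producer = KafkaProducer(
+    bootstrap_servers=BOOTSTRAP_SERVERS,
+    # If SASL is enabled on the cluster, the client also needs credentials,
+    # for example:
+    # security_protocol="SASL_PLAINTEXT",
+    # sasl_mechanism="PLAIN",
+    # sasl_plain_username="<KAFKA_SASL_USERNAME>",
+    # sasl_plain_password="<KAFKA_SASL_PASSWORD>",
+)
+
+# Send a test log event to the topic the Worker's Kafka source reads from.
+# "observability-pipelines-logs" is a placeholder topic name.
+producer.send("observability-pipelines-logs", value=b'{"message": "test log event"}')
+producer.flush()
+producer.close()
+```
+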
+[1]: /integrations/amazon_web_services/#setup
+[2]: /integrations/google_cloud_platform/#setup
+[3]: /integrations/azure/#setup
+[4]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/logstash.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/logstash.md
index 5e1e1816c99df..9cc380098c1f7 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/logstash.md
@@ -9,7 +9,7 @@ Configure Logstash so that the Observability Pipelines Worker formats the logs c
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -70,7 +70,7 @@ Select the cloud provider you are using to archive your logs.
 
 ## Set up Observability Pipelines
 
-1. Navigate to [Observability Pipelines][1].
+1. Navigate to [Observability Pipelines][4].
 1. Select the **Archive Logs** template to create a new pipeline.
 1. Select the **Logstash** source.
 
@@ -80,9 +80,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -106,52 +121,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -162,9 +176,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -172,15 +196,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -192,50 +231,62 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -253,49 +304,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_hec.md
index 516e1444f1060..29db65aeb8e18 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_hec.md
@@ -85,6 +85,21 @@ Select the cloud provider you are using to archive your logs.
 Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -108,52 +123,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -164,9 +178,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -174,15 +198,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -194,50 +233,62 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -255,49 +306,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_tcp.md
index 56cd2e3c7d19c..2841dbcf527e0 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/splunk_tcp.md
@@ -11,7 +11,7 @@ Configure your Splunk Heavy and Universal Forwarder so that the Observability Pi
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -82,9 +82,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -108,52 +123,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -164,9 +178,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -174,15 +198,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -194,50 +233,62 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -255,49 +306,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/sumo_logic_hosted_collector.md
index 336d049f990e0..d102af921e5e7 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/sumo_logic_hosted_collector.md
@@ -11,7 +11,7 @@ Configure your Sumo Logic Hosted Collector HTTP Logs source so that the Observab
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -82,9 +82,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -108,52 +123,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -164,9 +178,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -174,15 +198,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -194,48 +233,58 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
@@ -257,49 +306,44 @@ For the Datadog Archives destination, follow the instructions for the cloud prov
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/archive_logs/syslog.md b/content/en/observability_pipelines/set_up_pipelines/archive_logs/syslog.md
index 44aa08f6f4737..1f4c4d28cad0d 100644
--- a/content/en/observability_pipelines/set_up_pipelines/archive_logs/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/archive_logs/syslog.md
@@ -11,7 +11,7 @@ Configure your rsyslog or syslog-ng source so that the Observability Pipelines W
 
 {{% observability_pipelines/use_case_images/archive_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Configuring a Log Archive](#configure-a-log-archive)
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
@@ -83,9 +83,24 @@ Select the cloud provider you are using to archive your logs.
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
 {{% observability_pipelines/destination_settings/datadog_archives_note %}}
@@ -109,52 +124,51 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/elasticsearch %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_settings/opensearch %}}
+{{% observability_pipelines/destination_settings/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+#### Add additional destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_destinations %}}
 
 ### Set up processors
 
@@ -165,9 +179,19 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -175,15 +199,30 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -195,51 +234,63 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
-
-{{% observability_pipelines/processors/add_hostname %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Syslog address. This is a Syslog-compatible endpoint, exposed by the Worker, that your applications send logs to. The Observability Pipelines Worker listens on this address for incoming logs.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
 {{% tab "Datadog Archives" %}}
 
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
 {{% collapse-content title="Amazon S3" level="h5" %}}
 
 {{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
@@ -257,49 +308,44 @@ Follow the instructions for the cloud provider you are using to archive your log
 {{% /collapse-content %}}
 
 {{% /tab %}}
-{{% tab "Datadog" %}}
-
-{{% observability_pipelines/destination_env_vars/datadog %}}
-
-{{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "OpenSearch" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/opensearch %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "Sumo Logic" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Syslog" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md
index cd616e0960374..fb5c15722eb52 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/_index.md
@@ -13,11 +13,14 @@ As your infrastructure and your organization scales, so does your log volume, th
 
 Select a source to get started:
 
+<!-- - [Amazon Data Firehose][12] -->
+- [Amazon S3][11]
 - [Datadog Agent][1]
 - [Fluentd or Fluent Bit][2]
 - [Google Pub/Sub][3]
 - [HTTP Client][4]
 - [HTTP Server][5]
+- [Kafka][13]
 - [Logstash][6]
 - [Splunk HTTP Event Collector (HEC)][7]
 - [Splunk Heavy or Universal Forwarders (TCP)][8]
@@ -34,3 +37,6 @@ Select a source to get started:
 [8]: /observability_pipelines/dual_ship_logs/splunk_tcp
 [9]: /observability_pipelines/dual_ship_logs/sumo_logic_hosted_collector
 [10]: /observability_pipelines/dual_ship_logs/syslog
+[11]: /observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_s3
+[12]: /observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_data_firehose
+[13]: /observability_pipelines/set_up_pipelines/dual_ship_logs/kafka
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_data_firehose.md
new file mode 100644
index 0000000000000..b9c7be5a7b628
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_data_firehose.md
@@ -0,0 +1,342 @@
+---
+title: Dual Ship Logs for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to aggregate and process your Amazon Data Firehose logs before routing them to various applications.
+
+{{% observability_pipelines/use_case_images/dual_ship_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-amazon-data-firehose)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Dual Ship Logs** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. The Observability Pipelines Worker listens to this address and port for incoming logs from Amazon Data Firehose.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
+
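+The configuration steps above cover the supported setup. Purely as an illustrative sketch, a delivery stream can also be created with boto3 and pointed at the Worker's HTTPS endpoint; the stream name, endpoint URL, IAM role, and backup bucket below are placeholder values, not values from this guide:
+
+```python
+import boto3
+
+firehose = boto3.client("firehose", region_name="us-east-1")
+
+# Hypothetical values throughout: the Worker's public HTTPS endpoint, the IAM
+# role that Firehose assumes, and the S3 bucket used to back up failed deliveries.
+firehose.create_delivery_stream(
+    DeliveryStreamName="op-worker-firehose",
+    DeliveryStreamType="DirectPut",
+    HttpEndpointDestinationConfiguration={
+        "EndpointConfiguration": {
+            "Url": "https://opw.example.com",
+            "Name": "observability-pipelines-worker",
+        },
+        # Back up only the records that fail to reach the endpoint.
+        "S3BackupMode": "FailedDataOnly",
+        "S3Configuration": {
+            "RoleARN": "arn:aws:iam::123456789012:role/firehose-delivery-role",
+            "BucketARN": "arn:aws:s3:::firehose-failed-delivery-backup",
+        },
+    },
+)
+```
+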
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_s3.md
new file mode 100644
index 0000000000000..c70b82f229523
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/amazon_s3.md
@@ -0,0 +1,336 @@
+---
+title: Dual Ship Logs for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to aggregate and process your Amazon S3 logs before routing them to various applications.
+
+{{% observability_pipelines/use_case_images/dual_ship_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Dual Ship Logs** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue to which the S3 bucket sends notification events. A hypothetical sketch of this bucket notification setup is shown after the installation steps.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
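+As a supplementary sketch (not part of the official setup steps), the bucket notification that feeds the SQS queue referenced in the **AWS S3 SQS URL** field can be created with boto3. The bucket name and queue ARN below are placeholders, and the queue's access policy must separately allow Amazon S3 to send messages to it:
+
+```python
+import boto3
+
+s3 = boto3.client("s3")
+
+# Hypothetical resources: replace the bucket name and queue ARN with your own.
+s3.put_bucket_notification_configuration(
+    Bucket="my-log-bucket",
+    NotificationConfiguration={
+        "QueueConfigurations": [
+            {
+                "QueueArn": "arn:aws:sqs:us-east-1:123456789012:op-worker-queue",
+                # Notify the queue whenever a new log object lands in the bucket.
+                "Events": ["s3:ObjectCreated:*"],
+            }
+        ]
+    },
+)
+```
+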
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/datadog_agent.md
index 7f3df90cd41c6..d0bed51b8790a 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/datadog_agent.md
@@ -11,7 +11,7 @@ Configure the Datadog Agent and set up Observability Pipelines so that the Obser
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting the Datadog Agent to the Observability Pipelines Worker](#connect-the-datadog-agent-to-the-observability-pipelines-worker)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/fluent.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/fluent.md
index 7cab08033bee3..f4c4833794b2f 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/fluent.md
@@ -11,7 +11,7 @@ Configure Fluentd or Fluent Bit and set up Observability Pipelines so that the O
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-fluent)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Fluent socket address and port. The Observability Pipelines Worker listens on this address for incoming log messages.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
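+
+After the Worker is installed, you can confirm that it is receiving data on the Fluent socket by emitting a test record with a Fluentd-compatible client. This is only a minimal sketch using the `fluent-logger` Python package; the tag `test.app`, the record fields, and the host and port are placeholders, and the address must match the Fluent socket address you entered above.
+
+```python
+from fluent import sender
+
+# Point the client at the Observability Pipelines Worker's Fluent socket address.
+# Replace the host and port with the values configured for your Worker (placeholders shown).
+logger = sender.FluentSender("test.app", host="127.0.0.1", port=24224)
+
+# Emit a single structured test record; the label and fields are placeholders.
+if not logger.emit("heartbeat", {"message": "test log from fluent-logger", "env": "staging"}):
+    print(logger.last_error)
+    logger.clear_last_error()
+
+logger.close()
+```
+
+If the Worker is reachable, the record should flow through your configured processors and show up at the selected destinations.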
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/google_pubsub.md
index c243dab9bd9b5..9fc6a78dd4b29 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/google_pubsub.md
@@ -9,7 +9,7 @@ Configure Google Pub/Sub and set up Observability Pipelines so that the Observab
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,90 +182,116 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_client.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_client.md
index 79a9bf1fb37fd..3b42259d4a683 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_client.md
@@ -11,7 +11,7 @@ Use the Observability Pipelines Worker to aggregate and process your HTTP server
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -31,56 +31,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -90,9 +129,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -100,15 +149,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -120,92 +184,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the full path of the HTTP/S endpoint URL. For example, `https://127.0.0.8/logs`. The Observability Pipelines Worker collects log events from this endpoint. A minimal example endpoint is sketched after the tabs below.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
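+
+If you do not already have an HTTP endpoint to point the Worker at, the sketch below shows a placeholder one built with Flask. It is only an illustration under stated assumptions: the `/logs` route, the response shape, and the port are placeholders, and the response format must match whatever your pipeline is configured to collect.
+
+```python
+from flask import Flask, jsonify
+
+app = Flask(__name__)
+
+# Placeholder route matching the endpoint URL entered above (for example, https://127.0.0.8/logs).
+# The response body is a placeholder as well; serve whatever format your pipeline expects.
+@app.route("/logs")
+def logs():
+    return jsonify([
+        {"message": "test log from HTTP endpoint", "service": "checkout"},
+    ])
+
+if __name__ == "__main__":
+    # Serves plain HTTP for simplicity; terminate TLS in front of it to match an https endpoint URL.
+    app.run(host="0.0.0.0", port=8080)
+```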
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_server.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_server.md
index 95291436529e8..6b634f7d10541 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/http_server.md
@@ -9,7 +9,7 @@ Use the Observability Pipelines Worker to aggregate and processes your HTTP clie
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,91 +182,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this socket address for your HTTP client logs. A minimal client sketch for sending a test payload appears after the tabs below.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
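+
+To verify the HTTP server source without wiring up a full client, you can post a test payload to the address the Worker listens on. The sketch below is a generic example using the Python `requests` library; the path (`/`) and the JSON body are placeholders, so match them to whatever your HTTP clients already send.
+
+```python
+import requests
+
+# The Worker binds to the server address entered above (for example, 0.0.0.0:9997);
+# clients connect using the host's reachable address, shown here as a placeholder.
+WORKER_URL = "http://127.0.0.1:9997/"
+
+# Send one JSON-encoded test log; the payload shape is a placeholder and should
+# mirror the format your existing HTTP clients emit.
+response = requests.post(
+    WORKER_URL,
+    json={"message": "test log from HTTP client", "service": "checkout"},
+    timeout=5,
+)
+print(response.status_code)
+```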
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/kafka.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/kafka.md
new file mode 100644
index 0000000000000..de16740c125a4
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/kafka.md
@@ -0,0 +1,340 @@
+---
+title: Dual Ship Logs for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to aggregate and process your logs from Kafka topics before routing them to various applications.
+
+{{% observability_pipelines/use_case_images/dual_ship_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-kafka)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Dual Ship Logs** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all the other hosts in the cluster. Enter the servers in the `host:port` format, for example `10.14.22.123:9092`. If there is more than one server, separate them with commas. A sample producer for sending a test message is sketched after the installation tabs below.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password.
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
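+To confirm the Worker is consuming from your topic end to end, you can publish a test message with any Kafka client. The sketch below uses the `kafka-python` package; the topic name `observability-pipelines-logs` and the record fields are placeholders, and the commented SASL settings apply only if you enabled SASL above.
+
+```python
+import json
+from kafka import KafkaProducer
+
+# Comma-separated host:port list of the Kafka bootstrap servers, matching the
+# value entered in the installation step (placeholders shown).
+producer = KafkaProducer(
+    bootstrap_servers="10.14.22.123:9092,10.14.22.124:9092",
+    value_serializer=lambda record: json.dumps(record).encode("utf-8"),
+    # If SASL is enabled, uncomment and fill in the values used by the Worker:
+    # security_protocol="SASL_SSL",
+    # sasl_mechanism="PLAIN",
+    # sasl_plain_username="<KAFKA_SASL_USERNAME>",
+    # sasl_plain_password="<KAFKA_SASL_PASSWORD>",
+)
+
+# Publish one structured test log to the topic the Worker is subscribed to
+# (the topic name is a placeholder).
+producer.send("observability-pipelines-logs", {"message": "test log", "service": "checkout"})
+producer.flush()
+producer.close()
+```
+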
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/logstash.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/logstash.md
index dd6d28d8ade70..8f1b42dadf8bf 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/logstash.md
@@ -9,7 +9,7 @@ Configure Logstash and set up Observability Pipelines so that the Observability
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-logstash)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_hec.md
index 3a0c0d23b6731..4546880f738aa 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_hec.md
@@ -43,53 +43,92 @@ This document walks you through the following steps to set up dual shipping:
 Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -99,9 +138,19 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -109,15 +158,30 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -129,91 +193,117 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_tcp.md
index b51dba109f944..3dc65f5b6cc1e 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/splunk_tcp.md
@@ -11,7 +11,7 @@ Configure your Splunk Heavy or Universal Forwarders to send logs to the Observab
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting Splunk Forwarder to the Observability Pipelines Worker](#connect-splunk-forwarder-to-the-observability-pipelines-worker)
@@ -38,56 +38,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -97,9 +136,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -107,15 +156,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -127,91 +191,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/sumo_logic_hosted_collector.md
index 88ce1eac7a29d..162b268da30d4 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/sumo_logic_hosted_collector.md
@@ -11,7 +11,7 @@ Configure Sumo Logic Hosted Collector the HTTP Logs source to send logs to the O
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker over Sumo Logic HTTP Source](#send-logs-to-the-observability-pipelines-worker-over-sumo-logic-http-source)
@@ -38,56 +38,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -97,9 +136,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -107,15 +156,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -127,91 +191,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/syslog.md b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/syslog.md
index 616daafe67448..3a3e6eadec9ac 100644
--- a/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/dual_ship_logs/syslog.md
@@ -11,7 +11,7 @@ Configure rsyslog or syslog-ng and set up Observability Pipelines so that the Ob
 
 {{% observability_pipelines/use_case_images/dual_ship_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-syslog)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Syslog address. This is a Syslog-compatible endpoint, exposed by the Worker, that your applications send logs to. The Observability Pipelines Worker listens on this address for incoming logs.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/amazon_data_firehose.md
new file mode 100644
index 0000000000000..243128e1d56a8
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/amazon_data_firehose.md
@@ -0,0 +1,341 @@
+---
+title: Generate Metrics for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to generate metrics from your Amazon Data Firehose logs.
+
+{{% observability_pipelines/use_case_images/generate_metrics %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker over Amazon Data Firehose](#send-logs-to-the-observability-pipelines-worker-over-amazon-data-firehose)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Generate Metrics** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. The Observability Pipelines Worker listens to this address and port for incoming logs from Amazon Data Firehose.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
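+
+To manually check that the Worker is reachable before pointing a delivery stream at it, you can send a request shaped like Amazon Data Firehose's HTTP endpoint delivery format. This is a hypothetical smoke test, not a documented step: it assumes the Worker's Amazon Data Firehose source accepts Firehose-style POST requests at the address you configured during installation. The header names and body fields follow the AWS HTTP endpoint delivery format; the address, access key, and record data are placeholders.
+
+```shell
+# Placeholder values: WORKER_ADDRESS is the address:port entered during installation;
+# FIREHOSE_ACCESS_KEY is whatever access key (if any) you configured for the delivery stream.
+# The body mimics Firehose HTTP endpoint delivery: each record carries base64-encoded data.
+curl -X POST "http://${WORKER_ADDRESS}" \
+  -H "Content-Type: application/json" \
+  -H "X-Amz-Firehose-Request-Id: smoke-test-request" \
+  -H "X-Amz-Firehose-Access-Key: ${FIREHOSE_ACCESS_KEY}" \
+  -d '{"requestId":"smoke-test-request","timestamp":1700000000000,"records":[{"data":"SGVsbG8gZnJvbSBGaXJlaG9zZQ=="}]}'
+```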
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/amazon_s3.md
new file mode 100644
index 0000000000000..403e0dcd797a6
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/amazon_s3.md
@@ -0,0 +1,336 @@
+---
+title: Generate Metrics for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to generate metrics from your Amazon S3 logs.
+
+{{% observability_pipelines/use_case_images/generate_metrics %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Generate Metrics** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue to which the S3 bucket sends notification events. An example of the queue URL format is shown after the installation steps.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
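+
+The **AWS S3 SQS URL** value entered above follows the standard Amazon SQS queue URL format. A minimal sketch with placeholder region, account ID, and queue name:
+
+```shell
+# Illustrative only: the variable name is a placeholder, not a documented setting.
+# Substitute your own region, account ID, and queue name.
+SQS_QUEUE_URL="https://sqs.us-east-1.amazonaws.com/123456789012/my-s3-notification-queue"
+```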
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent.md
index b923990ec9651..be0ad0198e1dd 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/datadog_agent.md
@@ -9,7 +9,7 @@ Configure the Datadog Agent to send logs to the Observability Pipelines Worker s
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting the Datadog Agent to the Observability Pipelines Worker](#connect-the-datadog-agent-to-the-observability-pipelines-worker)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/fluent.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/fluent.md
index 4c6647a75adc8..21fb5076689d0 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/fluent.md
@@ -9,7 +9,7 @@ Configure Fluentd or Fluent Bit to send logs to the Observability Pipelines Work
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-fluent)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,42 +183,37 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
@@ -162,49 +221,80 @@ Enter the following information based on your selected logs destination.
 
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/google_pubsub.md
index f00b30690697f..223f4d77c91f5 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/google_pubsub.md
@@ -9,7 +9,7 @@ Send Google Pub/Sub logs to the Observability Pipelines Worker so that you can g
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,90 +182,116 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_client.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_client.md
index 754cc93450e55..37eefcb0888f3 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_client.md
@@ -9,7 +9,7 @@ Use the Observability Pipelines Worker to generate metrics from your HTTP server
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,42 +182,37 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
@@ -161,49 +220,80 @@ Enter the following information based on your selected logs destination.
 
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_server.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_server.md
index 92ddb992296e0..2545f58806500 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/http_server.md
@@ -9,7 +9,7 @@ Use the Observability Pipelines Worker to generate metrics from your HTTP client
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,91 +182,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens to this socket address for your HTTP client logs.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/kafka.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/kafka.md
new file mode 100644
index 0000000000000..a45a828ed4cf0
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/kafka.md
@@ -0,0 +1,340 @@
+---
+title: Generate Metrics for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to generate metrics from the logs in your Kafka topics.
+
+{{% observability_pipelines/use_case_images/generate_metrics %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-kafka)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Generate Metrics** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all the other hosts in the cluster. Enter the servers in the `host:port` format, such as `10.14.22.123:9092`. If there is more than one server, separate them with commas, as in the example below.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password.
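+
+    For example, a hypothetical cluster with two bootstrap servers (placeholder addresses, not defaults) could be entered as:
+
+    ```
+    10.14.22.123:9092,10.14.22.124:9092
+    ```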
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/logstash.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/logstash.md
index e4182d3e5e82f..bce870d1d7204 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/logstash.md
@@ -9,7 +9,7 @@ Configure Logstash to send logs to the Observability Pipelines Worker so that yo
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-logstash)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
+1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages. An example Logstash output configuration is shown below.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
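+
+For example, after the Worker is installed, a Logstash pipeline can forward logs to the Worker address configured above with an output block like the following. This is a minimal sketch, assuming the Worker's Logstash source accepts Logstash's `lumberjack` output; the host and certificate path are placeholders.
+
+```
+output {
+  lumberjack {
+    # Address and port of the Observability Pipelines Worker (placeholders)
+    hosts => ["<OPW_HOST>"]
+    port => 9997
+    # The lumberjack output requires TLS; point this at your certificate
+    ssl_certificate => "/path/to/certificate.crt"
+  }
+}
+```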
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_hec.md
index 6309fff873d81..6c3bad50de0e5 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_hec.md
@@ -39,53 +39,92 @@ This document walks you through the following steps to set up dual shipping:
 Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -95,9 +134,19 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -105,15 +154,30 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -125,91 +189,117 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
+1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens on this address for incoming logs. A sample test request is shown below.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
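+
+For example, you can confirm that the Worker is accepting data on the Splunk HEC address configured above by sending a test event with `curl`. This is an illustrative sketch with placeholder host, port, and token values; the endpoint path and authorization header follow the standard Splunk HEC event API.
+
+```
+curl -k "https://<OPW_HOST>:<OPW_PORT>/services/collector/event" \
+  -H "Authorization: Splunk <HEC_TOKEN>" \
+  -d '{"event": "observability pipelines test event", "sourcetype": "manual"}'
+```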
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_tcp.md
index 1f086b6fe821d..950a42b6bf26c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/splunk_tcp.md
@@ -9,7 +9,7 @@ Configure your Splunk Heavy or Universal Forwarders to send logs to the Observab
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting Splunk Forwarder to the Observability Pipelines Worker](#connect-splunk-forwarder-to-the-observability-pipelines-worker)
@@ -36,56 +36,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -95,9 +134,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -105,15 +154,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -125,91 +189,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
+1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens on this address for incoming logs. A sample forwarder `outputs.conf` sketch is shown below.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
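+
+For example, a Splunk Heavy or Universal Forwarder can be pointed at the Worker address configured above through its `outputs.conf`. This is a minimal sketch with placeholder values; whether cooked data must be disabled depends on your setup, so `sendCookedData = false` is shown here only as an assumption.
+
+```
+# outputs.conf on the forwarder (placeholder host and port)
+[tcpout]
+defaultGroup = observability_pipelines
+
+[tcpout:observability_pipelines]
+server = <OPW_HOST>:<OPW_PORT>
+sendCookedData = false
+```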
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/sumo_logic_hosted_collector.md
index 2beae6f10b0e7..266e4ca5a6747 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/sumo_logic_hosted_collector.md
@@ -9,7 +9,7 @@ Configure your Sumo Logic Hosted Collector HTTP Logs source to send logs to the
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker over Sumo Logic HTTP Source](#send-logs-to-the-observability-pipelines-worker-over-sumo-logic-http-source)
@@ -36,56 +36,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -95,9 +134,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -105,15 +154,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -125,91 +189,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
+1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens on this address for incoming logs. A sample test request is shown below.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
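+
+For example, you can verify connectivity by posting a raw log line to the Sumo Logic address configured above. This is an illustrative sketch with placeholder host, port, and path values; the path should mirror the URL your applications previously used for the Sumo Logic Hosted Collector HTTP source.
+
+```
+curl -X POST "http://<OPW_HOST>:<OPW_PORT>/receiver/v1/http/<SOURCE_TOKEN>" \
+  -H "Content-Type: text/plain" \
+  -d 'observability pipelines test log line'
+```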
diff --git a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/syslog.md b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/syslog.md
index 1bb08c279ad86..338d6f4223c73 100644
--- a/content/en/observability_pipelines/set_up_pipelines/generate_metrics/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/generate_metrics/syslog.md
@@ -9,7 +9,7 @@ Configure rsyslog or syslog-ng to send logs to the Observability Pipelines Worke
 
 {{% observability_pipelines/use_case_images/generate_metrics %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-syslog)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,42 +183,37 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
@@ -162,49 +221,80 @@ Enter the following information based on your selected logs destination.
 
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
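+
+For example, an rsyslog forwarding rule that sends all logs to the Worker's syslog listener might look like the following. This is a minimal sketch with placeholder host and port values; adjust the target, port, and protocol to match the syslog address you configured for the Worker.
+
+```
+# /etc/rsyslog.d/observability-pipelines.conf (placeholder host and port)
+*.* action(type="omfwd" target="<OPW_HOST>" port="<OPW_PORT>" protocol="tcp")
+```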
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md
index 40bb9dffe4824..5dda930c30939 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/_index.md
@@ -18,11 +18,14 @@ aliases:
 
 Select a source to get started:
 
+<!-- - [Amazon Data Firehose][12] -->
+- [Amazon S3][11]
 - [Datadog Agent][1]
 - [Fluentd or Fluent Bit][2]
 - [Google Pub/Sub][3]
 - [HTTP Client][4]
 - [HTTP Server][5]
+- [Kafka][13]
 - [Logstash][6]
 - [Splunk HTTP Event Collector (HEC)][7]
 - [Splunk Heavy or Universal Forwarders (TCP)][8]
@@ -39,3 +42,6 @@ Select a source to get started:
 [8]: /observability_pipelines/log_enrichment/splunk_tcp
 [9]: /observability_pipelines/log_enrichment/sumo_logic_hosted_collector
 [10]: /observability_pipelines/log_enrichment/syslog
+[11]: /observability_pipelines/set_up_pipelines/log_enrichment/amazon_s3
+[12]: /observability_pipelines/set_up_pipelines/log_enrichment/amazon_data_firehose
+[13]: /observability_pipelines/set_up_pipelines/log_enrichment/kafka
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/amazon_data_firehose.md
new file mode 100644
index 0000000000000..d298c21fe2846
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/amazon_data_firehose.md
@@ -0,0 +1,341 @@
+---
+title: Log Enrichment for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to enrich and transform your Amazon Data Firehose logs before routing them to their destination.
+
+{{% observability_pipelines/use_case_images/log_enrichment %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-amazon-data-firehose)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Log Enrichment** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. This is the address and port where the Observability Pipelines Worker listens for incoming logs from Amazon Data Firehose.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
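+
+As an illustration only (the exact value depends on your deployment), the listen address is a socket address in the form `<bind_address>:<port_number>`, for example `0.0.0.0:<port_number>`. Amazon Data Firehose delivers records to HTTP endpoints over HTTPS, so the Worker typically sits behind a load balancer or proxy that terminates TLS and forwards traffic to this address.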
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/amazon_s3.md
new file mode 100644
index 0000000000000..103b16f271ce2
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/amazon_s3.md
@@ -0,0 +1,336 @@
+---
+title: Log Enrichment for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to enrich and transform your Amazon S3 logs before routing them to their destination.
+
+{{% observability_pipelines/use_case_images/log_enrichment %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Log Enrichment** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue to which the S3 bucket sends notification events. See the example queue URL after these steps.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
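+
+As a reference for the **AWS S3 SQS URL** field, an SQS queue URL has the following form. All values are placeholders; use the URL shown on your queue's details page in the AWS console.
+
+```
+https://sqs.<region>.amazonaws.com/<account_id>/<queue_name>
+```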
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/datadog_agent.md
index 0820d9407d744..3ba368fec982b 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/datadog_agent.md
@@ -11,7 +11,7 @@ Configure your Datadog Agent to send logs to the Observability Pipelines Worker
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting the Datadog Agent to the Observability Pipelines Worker](#connect-the-datadog-agent-to-the-observability-pipelines-worker)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/fluent.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/fluent.md
index 230c139b14bdb..1ba705efbb368 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/fluent.md
@@ -11,7 +11,7 @@ Configure Fluentd or Fluent Bit to send logs to the Observability Pipelines Work
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-fluent)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Fluent socket address and port. The Observability Pipelines Worker listens on this address for incoming log messages.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/google_pubsub.md
index da8e1e86e70e8..5e9c7636b01a3 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/google_pubsub.md
@@ -9,7 +9,7 @@ Configure your Google Pub/Sub to send logs to the Observability Pipelines Worker
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,90 +182,116 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_client.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_client.md
index 5b7d00eb3d782..43e822a99bd4d 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_client.md
@@ -11,7 +11,7 @@ Use the Observability Pipelines Worker to enrich and transform your HTTP server
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -31,56 +31,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -90,9 +129,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -100,15 +149,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -120,92 +184,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the full path of the HTTP/S endpoint URL. For example, `https://127.0.0.8/logs`. The Observability Pipelines Worker collects logs events from this endpoint.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_server.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_server.md
index e15b694e45264..3dded3221b51c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/http_server.md
@@ -10,7 +10,7 @@ Use the Observability Pipelines Worker to enrich and transform your HTTP client
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens to this socket address for your HTTP client logs.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/kafka.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/kafka.md
new file mode 100644
index 0000000000000..94f388c50f9e9
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/kafka.md
@@ -0,0 +1,340 @@
+---
+title: Log Enrichment for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to enrich and transform logs from your Kafka topics before routing them to their destination.
+
+{{% observability_pipelines/use_case_images/log_enrichment %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-kafka)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Log Enrichment** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all the other hosts in the cluster. Enter each server in the format `host:port`, such as `10.14.22.123:9092`. If there is more than one server, separate them with commas. A minimal producer sketch showing this connection is included at the end of this page.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password.
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
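+To sanity-check the connection described above, the following is a minimal, illustrative producer sketch in Python. It assumes the `kafka-python` client, a broker reachable at `10.14.22.123:9092`, and a topic named `observability-logs`; these are placeholders rather than values defined by this guide, so substitute your own bootstrap servers, topic, and credentials.
+
+```python
+# Illustrative only: publish one JSON log record to a topic the Worker consumes.
+# The broker address, topic name, and credentials below are placeholders.
+import json
+
+from kafka import KafkaProducer
+
+producer = KafkaProducer(
+    bootstrap_servers=["10.14.22.123:9092"],  # list of host:port entries (comma-separated in the UI field above)
+    value_serializer=lambda record: json.dumps(record).encode("utf-8"),
+    # Uncomment these if you enabled SASL on your cluster:
+    # security_protocol="SASL_PLAINTEXT",
+    # sasl_mechanism="PLAIN",
+    # sasl_plain_username="<KAFKA_SASL_USERNAME>",
+    # sasl_plain_password="<KAFKA_SASL_PASSWORD>",
+)
+
+producer.send("observability-logs", {"message": "test log from my app", "service": "demo"})
+producer.flush()  # block until the record has been delivered
+producer.close()
+```
+
+If the record shows up at your configured destination once the Worker is running, the source and destination are wired together correctly.
+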
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/logstash.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/logstash.md
index 10f6a331d8391..832a5c651d6ce 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/logstash.md
@@ -9,7 +9,7 @@ Configure your Logstash to send logs to the Observability Pipelines Worker and e
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-logstash)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_hec.md
index 9bb98c752a9ec..e2657c06a7c8c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_hec.md
@@ -11,7 +11,7 @@ Configure your Splunk HTTP Event Collectors (HEC) to send logs to the Observabil
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Worker over Splunk HEC](#send-logs-to-the-observability-pipelines-worker-over-splunk-hec)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk HEC address. This is the address and port where your applications send their logging data. The Observability Pipelines Worker listens to this address for incoming logs. An example request is sketched after the destination variables below.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
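+
+As a rough illustration of what the Worker receives on this address, the sketch below sends a single event in the standard Splunk HEC JSON format using Python's `requests` library. The Worker URL, token, and payload fields are placeholders rather than values defined by this guide, so adjust them to match your own setup.
+
+```python
+# Illustrative only: send one event in Splunk HEC format to the Worker's listener.
+# The URL, token, and payload below are placeholders.
+import requests
+
+WORKER_HEC_URL = "http://opw.example.internal:8088/services/collector/event"
+
+response = requests.post(
+    WORKER_HEC_URL,
+    headers={"Authorization": "Splunk <YOUR_HEC_TOKEN>"},
+    json={
+        "event": {"message": "test log from my app", "service": "demo"},
+        "sourcetype": "_json",
+    },
+    timeout=5,
+)
+response.raise_for_status()  # a 2xx response means the Worker accepted the event
+```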
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_tcp.md
index 7f45f63400448..89f44f4a07cd9 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/splunk_tcp.md
@@ -11,7 +11,7 @@ Configure your Splunk Heavy or Universal Forwarders to send logs to the Observab
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting Splunk Forwarder to the Observability Pipelines Worker](#connect-splunk-forwarder-to-the-observability-pipelines-worker)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/sumo_logic_hosted_collector.md
index 1c8e5d3ef79e9..a62b68a572999 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/sumo_logic_hosted_collector.md
@@ -11,7 +11,7 @@ Configure your Sumo Logic Hosted Collector the HTTP Logs source to send logs to
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker over Sumo Logic HTTP Source](#send-logs-to-the-observability-pipelines-worker-over-sumo-logic-http-source)
@@ -28,56 +28,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -87,9 +126,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -97,15 +146,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -117,91 +181,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/syslog.md b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/syslog.md
index 5595ba67fb21b..213440de5814e 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_enrichment/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_enrichment/syslog.md
@@ -11,7 +11,7 @@ Configure rsyslog or syslog-ng to send logs to the Observability Pipelines Worke
 
 {{% observability_pipelines/use_case_images/log_enrichment %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-syslog)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Syslog address. This is a Syslog-compatible endpoint, exposed by the Worker, that your applications send logs to. The Observability Pipelines Worker listens on this address for incoming logs.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md
index 01655dd679bd8..2931d06646dca 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/_index.md
@@ -23,11 +23,14 @@ As your infrastructure and applications grow, so does your log volume and the co
 
 Select a log source to get started:
 
+<!-- - [Amazon Data Firehose][12] -->
+- [Amazon S3][11]
 - [Datadog Agent][1]
 - [Fluentd or Fluent Bit][2]
 - [Google Pub/Sub][3]
 - [HTTP Client][4]
 - [HTTP Server][5]
+- [Kafka][13]
 - [Logstash][6]
 - [Splunk HTTP Event Collector (HEC)][7]
 - [Splunk Heavy or Universal Forwarders (TCP)][8]
@@ -48,3 +51,6 @@ Select a log source to get started:
 [8]: /observability_pipelines/log_volume_control/splunk_tcp
 [9]: /observability_pipelines/log_volume_control/sumo_logic_hosted_collector
 [10]: /observability_pipelines/log_volume_control/syslog
+[11]: /observability_pipelines/set_up_pipelines/log_volume_control/amazon_s3
+[12]: /observability_pipelines/set_up_pipelines/log_volume_control/amazon_data_firehose
+[13]: /observability_pipelines/set_up_pipelines/log_volume_control/kafka
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/amazon_data_firehose.md
new file mode 100644
index 0000000000000..101d13116fd9d
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/amazon_data_firehose.md
@@ -0,0 +1,341 @@
+---
+title: Log Volume Control for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to only route useful Amazon Data Firehose logs to their destinations.
+
+{{% observability_pipelines/use_case_images/log_volume_control %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-amazon-data-firehose)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Log Volume Control** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. The Observability Pipelines Worker listens on this address and port for incoming logs from Amazon Data Firehose.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/amazon_s3.md
new file mode 100644
index 0000000000000..ea5e2965da76e
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/amazon_s3.md
@@ -0,0 +1,336 @@
+---
+title: Log Volume Control for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to only route useful Amazon S3 logs to their destinations.
+
+{{% observability_pipelines/use_case_images/log_volume_control %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Log Volume Control** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue to which the S3 bucket sends notification events.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/datadog_agent.md
index 5f781a443bc00..ee40e3abc2ca7 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/datadog_agent.md
@@ -11,7 +11,7 @@ Set up the Observability Pipelines Worker with the Datadog Agent source so that
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting the Datadog Agent to the Observability Pipelines Worker](#connect-the-datadog-agent-to-the-observability-pipelines-worker)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/fluent.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/fluent.md
index 1ccee30d14107..05e8c040fcfe4 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/fluent.md
@@ -11,7 +11,7 @@ Set up the Observability Pipelines Worker with the Fluentd or Fluent Bit source
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-fluent)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Fluent socket address and port. The Observability Pipelines Worker listens on this address for incoming log messages.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/google_pubsub.md
index 6ec643d1939e9..963e3a6515e0a 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/google_pubsub.md
@@ -9,7 +9,7 @@ Set up the Observability Pipelines Worker with the Google Pub/Sub source so that
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,90 +182,116 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_client.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_client.md
index 9f1ae35dd6279..02e2dbd4fc99b 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_client.md
@@ -11,7 +11,7 @@ Use the Observability Pipelines Worker to only route useful HTTP server logs to
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -31,56 +31,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -90,9 +129,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -100,15 +149,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -120,92 +184,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the full path of the HTTP/S endpoint URL. For example, `https://127.0.0.8/logs`. The Observability Pipelines Worker collects logs events from this endpoint.
+1. Enter the full path of the HTTP/S endpoint URL, for example `https://127.0.0.8/logs`. The Observability Pipelines Worker collects log events from this endpoint.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_server.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_server.md
index 7c51b95f23753..7411c5127791f 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/http_server.md
@@ -9,7 +9,7 @@ Set up the Observability Pipelines Worker to only route useful HTTP Client logs
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,91 +182,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens to this socket address for your HTTP client logs.
+1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this socket address for your HTTP client logs. A sketch of a client request to this address is shown after the destination tabs below.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
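+
+The following is an illustrative sketch of a client sending a log to the address configured above, assuming your applications POST JSON over HTTP. The host, path, and payload are placeholders; keep whatever your applications already send, along with any authorization you configured for the source.
+
+```shell
+# Illustrative only: the host, path, and payload are placeholders.
+# Use http or https depending on whether TLS is enabled for the listener.
+curl -X POST "http://<OPW_HOST>:9997/" \
+  -H "Content-Type: application/json" \
+  -d '{"message": "test log from curl", "service": "my-service"}'
+```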
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/kafka.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/kafka.md
new file mode 100644
index 0000000000000..f230cc38b8bf0
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/kafka.md
@@ -0,0 +1,340 @@
+---
+title: Log Volume Control for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to only route useful logs from your Kafka topics to their destinations.
+
+{{% observability_pipelines/use_case_images/log_volume_control %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-kafka)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Log Volume Control** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all the other hosts in the cluster. Enter them in the `host:port` format, such as `10.14.22.123:9092`. If there is more than one server, separate them with commas.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password.
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information. A consolidated sketch of running the Worker with these variables is shown after the installation tabs below.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
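+
+To tie the steps above together, the following is a minimal sketch of running the Worker with Docker for this pipeline. It assumes the standard Worker variables (`DD_API_KEY`, `DD_OP_PIPELINE_ID`, and `DD_SITE`); the source and destination variables are placeholders, because their exact names depend on the Kafka and destination options you selected and are generated on the installation page.
+
+```shell
+# Minimal sketch only: the installation page generates the exact command and
+# variable names for your pipeline; the bracketed values are placeholders.
+docker run -i \
+  -e DD_API_KEY=<DATADOG_API_KEY> \
+  -e DD_OP_PIPELINE_ID=<PIPELINE_ID> \
+  -e DD_SITE=<DATADOG_SITE> \
+  -e <KAFKA_SOURCE_ENV_VARIABLES> \
+  -e <DESTINATION_ENV_VARIABLES> \
+  datadog/observability-pipelines-worker run
+```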
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/logstash.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/logstash.md
index 9410af7421e34..0d3590caa3268 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/logstash.md
@@ -9,7 +9,7 @@ Set up the Observability Pipelines Worker with the Logstash source so that you o
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-logstash)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_hec.md
index 09d7f63d563c5..bf9bbfeebabb5 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_hec.md
@@ -11,7 +11,7 @@ Set up the Observability Pipelines Worker with the Splunk HTTP Event Collector (
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Worker over Splunk HEC](#send-logs-to-the-observability-pipelines-worker-over-splunk-hec)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data to. The Observability Pipelines Worker listens to this address for incoming logs.
+1. Enter the Splunk HEC address. This is the address and port where your applications send their logging data. The Observability Pipelines Worker listens on this address for incoming logs. An example HEC request against this address is shown after the destination tabs below.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
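+
+As a quick way to confirm logs reach the Worker, the following is a sketch of a standard Splunk HEC event request pointed at the address configured above. The host, port, and token are placeholders; reuse whatever your applications already send to Splunk HEC.
+
+```shell
+# Illustrative only: host, port, and token are placeholders.
+# Use http or https depending on whether TLS is enabled for the listener.
+curl -k "https://<OPW_HOST>:8088/services/collector/event" \
+  -H "Authorization: Splunk <HEC_TOKEN>" \
+  -d '{"event": "test log event", "sourcetype": "manual"}'
+```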
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_tcp.md
index ac2b2f17693ec..566f0f219372c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/splunk_tcp.md
@@ -11,7 +11,7 @@ This document walks you through the following steps to set up the Observability
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting Splunk Forwarder to the Observability Pipelines Worker](#connect-splunk-forwarder-to-the-observability-pipelines-worker)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
-1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
+1. Enter the Splunk TCP address. This is the address and port where your applications send their logging data. The Observability Pipelines Worker listens on this address for incoming logs. A sketch of pointing a Splunk forwarder at this address is shown after the destination tabs below.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/sumo_logic_hosted_collector.md
index 5857b259209e4..07b4fc8de6e7c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/sumo_logic_hosted_collector.md
@@ -11,7 +11,7 @@ This document walks you through the following steps to set up the Observability
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker over Sumo Logic HTTP Source](#send-logs-to-the-observability-pipelines-worker-over-sumo-logic-http-source)
@@ -28,56 +28,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -87,9 +126,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -97,15 +146,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -117,91 +181,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/syslog.md b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/syslog.md
index bbb89ec06bb24..aed43197fe61e 100644
--- a/content/en/observability_pipelines/set_up_pipelines/log_volume_control/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/log_volume_control/syslog.md
@@ -11,7 +11,7 @@ Set up the Observability Pipelines Worker with the rsyslog or syslog-ng source s
 
 {{% observability_pipelines/use_case_images/log_volume_control %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-syslog)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Syslog address. This is a Syslog-compatible endpoint, exposed by the Worker, that your applications send logs to. The Observability Pipelines Worker listens on this address for incoming logs.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md
index 6921efc9c409e..c0fc6158605b6 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/_index.md
@@ -15,11 +15,14 @@ Use the Observability Pipelines Worker to identify, tag, and optionally redact o
 
 Select a log source to get started:
 
+<!-- - [Amazon Data Firehose][12] -->
+- [Amazon S3][11]
 - [Datadog Agent][1]
 - [Fluentd or Fluent Bit][2]
 - [Google Pub/Sub][3]
 - [HTTP Client][4]
 - [HTTP Server][5]
+- [Kafka][13]
 - [Logstash][6]
 - [Splunk HTTP Event Collector (HEC)][7]
 - [Splunk Heavy or Universal Forwarders (TCP)][8]
@@ -36,3 +39,6 @@ Select a log source to get started:
 [8]: /observability_pipelines/sensitive_data_redaction/splunk_tcp
 [9]: /observability_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector
 [10]: /observability_pipelines/sensitive_data_redaction/syslog
+[11]: /observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_s3
+[12]: /observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_data_firehose
+[13]: /observability_pipelines/set_up_pipelines/sensitive_data_redaction/kafka
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_data_firehose.md
new file mode 100644
index 0000000000000..d856c5c4dff10
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_data_firehose.md
@@ -0,0 +1,343 @@
+---
+title: Sensitive Data Redaction for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Sensitive data, such as credit card numbers, bank routing numbers, and API keys, can be revealed unintentionally in your logs, which can expose your organization to financial and privacy risks.
+
+Use Observability Pipelines to identify, tag, and optionally redact or hash sensitive information before routing logs to different destinations and outside of your infrastructure. You can use out-of-the-box scanning rules to detect common patterns such as email addresses, credit card numbers, API keys, authorization tokens, and more. You can also create custom scanning rules using regex patterns to match sensitive information.
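+
+As a rough illustration of what a custom regex-based rule can match, here is a hypothetical Python sketch; it only demonstrates the pattern-matching idea, since actual scanning rules are configured in the Observability Pipelines UI, not in code:
+
+```python
+import re
+
+# Simplified example pattern for 16-digit card numbers separated by spaces or dashes.
+CARD_PATTERN = re.compile(r"\b(?:\d[ -]?){15}\d\b")
+
+log_line = "user=alice card=4111 1111 1111 1111 status=ok"
+
+# Replace any match with a redaction marker, similar in spirit to a redact action.
+print(CARD_PATTERN.sub("[REDACTED]", log_line))
+```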
+
+{{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-amazon-data-firehose)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Sensitive Data Redaction** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors_sds %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. The Observability Pipelines Worker listens to this address and port for incoming logs from Amazon Data Firehose.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_s3.md
new file mode 100644
index 0000000000000..bfd5cefd3eddb
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/amazon_s3.md
@@ -0,0 +1,338 @@
+---
+title: Sensitive Data Redaction for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Sensitive data, such as credit card numbers, bank routing numbers, and API keys, can be revealed unintentionally in your logs, which can expose your organization to financial and privacy risks.
+
+Use Observability Pipelines to identify, tag, and optionally redact or hash sensitive information before routing logs to different destinations and outside of your infrastructure. You can use out-of-the-box scanning rules to detect common patterns such as email addresses, credit card numbers, API keys, authorization tokens, and more. You can also create custom scanning rules using regex patterns to match sensitive information.
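+
+For instance, instead of redacting a match outright you can hash it so the value stays correlatable without being readable. The Python sketch below is a hypothetical illustration of that idea only; hashing behavior is configured in the Observability Pipelines UI:
+
+```python
+import hashlib
+import re
+
+# Simplified example pattern for API keys of the form "api_key=<value>".
+API_KEY_PATTERN = re.compile(r"(api_key=)(\S+)")
+
+def hash_value(match: re.Match) -> str:
+    # Keep the field name, replace the secret with a short SHA-256 digest.
+    digest = hashlib.sha256(match.group(2).encode()).hexdigest()[:12]
+    return f"{match.group(1)}{digest}"
+
+log_line = "service=payments api_key=sk_live_abc123 status=ok"
+print(API_KEY_PATTERN.sub(hash_value, log_line))
+```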
+
+{{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Sensitive Data Redaction** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors_sds %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue (for example, `https://sqs.us-east-2.amazonaws.com/123456789012/my-queue`) to which the S3 bucket sends notification events.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/datadog_agent.md
index 5c4a064fcd5f4..3520816bb270a 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/datadog_agent.md
@@ -13,7 +13,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting the Datadog Agent to the Observability Pipelines Worker](#connect-the-datadog-agent-to-the-observability-pipelines-worker)
@@ -34,56 +34,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -93,9 +132,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -103,15 +152,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -123,91 +187,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/fluent.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/fluent.md
index 34ca912a8ed6e..bb05d6c82a5df 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/fluent.md
@@ -13,7 +13,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-fluent)
@@ -34,56 +34,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -93,9 +132,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -103,15 +152,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -123,92 +187,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Fluent socket address and port. The Observability Pipelines Worker listens on this address for incoming log messages.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/google_pubsub.md
index ff8f9aa038509..cfb04a3d5a43c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/google_pubsub.md
@@ -11,7 +11,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -31,56 +31,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -90,9 +129,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -100,15 +149,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -120,90 +184,116 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_client.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_client.md
index a80bad46bd7ba..0f73b6db1fe33 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_client.md
@@ -13,7 +13,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -33,56 +33,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -92,9 +131,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -102,15 +151,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -122,92 +186,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the full path of the HTTP/S endpoint URL. For example, `https://127.0.0.8/logs`. The Observability Pipelines Worker collects log events from this endpoint.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_server.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_server.md
index 5c14a099e076f..688e43a295f9d 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/http_server.md
@@ -11,7 +11,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -31,56 +31,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -90,9 +129,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -100,15 +149,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -120,91 +184,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this socket address for your HTTP client logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/kafka.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/kafka.md
new file mode 100644
index 0000000000000..ef616ae5d5014
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/kafka.md
@@ -0,0 +1,342 @@
+---
+title: Sensitive Data Redaction for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Sensitive data, such as credit card numbers, bank routing numbers, and API keys, can be revealed unintentionally in your logs, which can expose your organization to financial and privacy risks.
+
+Use Observability Pipelines to identify, tag, and optionally redact or hash sensitive information before routing logs to different destinations and outside of your infrastructure. You can use out-of-the-box scanning rules to detect common patterns such as email addresses, credit card numbers, API keys, authorization tokens, and more. You can also create custom scanning rules using regex patterns to match sensitive information.
+
+{{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-kafka)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Sensitive Data Redactions** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors_sds %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all of the other hosts in the cluster. Enter each server in the `host:port` format, such as `10.14.22.123:9092`. If there is more than one server, separate them with commas. For an example of producing a test log event to the cluster, see the sketch after these installation steps.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password.
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
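+Optionally, to confirm that logs are flowing through the pipeline, you can produce a test event to the topic that the Worker's Kafka source consumes from. The following is a minimal sketch using the `kafka-python` library; the broker address, topic name, and SASL credentials are placeholders to replace with your own values, and the commented SASL options apply only if SASL is enabled on your brokers.
+
+```python
+import json
+
+from kafka import KafkaProducer  # pip install kafka-python
+
+# Bootstrap servers use the host:port format; pass multiple servers as a list here,
+# matching the comma-separated list entered in the pipeline setup.
+producer = KafkaProducer(
+    bootstrap_servers=["10.14.22.123:9092"],
+    value_serializer=lambda v: json.dumps(v).encode("utf-8"),
+    # Uncomment if SASL is enabled on your brokers:
+    # security_protocol="SASL_PLAINTEXT",
+    # sasl_mechanism="PLAIN",
+    # sasl_plain_username="<KAFKA_SASL_USERNAME>",
+    # sasl_plain_password="<KAFKA_SASL_PASSWORD>",
+)
+
+# Send a test log event to a placeholder topic that the Worker is configured to consume.
+producer.send("observability-logs", {"message": "user signed in", "service": "auth"})
+producer.flush()
+```
+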
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/logstash.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/logstash.md
index 9d84d6b298310..89deec9887ce0 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/logstash.md
@@ -11,7 +11,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-logstash)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,91 +185,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
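+
+After the Worker is installed and running, you can confirm it is listening on the Logstash address you entered (for example, `0.0.0.0:9997`) before pointing Logstash at it. The following is a minimal sketch; the host and port are placeholders for your own values.
+
+```python
+import socket
+
+# Placeholder values; replace with the Logstash address and port you configured.
+worker_host = "127.0.0.1"
+worker_port = 9997
+
+# Open a TCP connection to verify the Worker is accepting Logstash traffic.
+with socket.create_connection((worker_host, worker_port), timeout=5):
+    print(f"Worker is listening on {worker_host}:{worker_port}")
+```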
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_hec.md
index aff601733d11b..263c1375193e1 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_hec.md
@@ -34,56 +34,95 @@ This document walks through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -93,9 +132,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -103,15 +152,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -123,91 +187,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs. A test-event example follows the environment variable tabs below.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
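+
+After the Worker is installed and running, you can send a test event to its Splunk HEC-compatible endpoint to confirm logs flow through the pipeline. This is a minimal sketch: the URL and token are placeholders, and the request body follows the standard Splunk HEC event format.
+
+```python
+import requests
+
+# Placeholder values; replace with your Worker's Splunk HEC address and your HEC token.
+worker_url = "http://127.0.0.1:8088/services/collector/event"
+hec_token = "YOUR_HEC_TOKEN"
+
+# Send a minimal test event in the standard Splunk HEC JSON format.
+response = requests.post(
+    worker_url,
+    headers={"Authorization": f"Splunk {hec_token}"},
+    json={"event": "Observability Pipelines test event", "sourcetype": "manual"},
+    timeout=10,
+)
+print(response.status_code, response.text)
+```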
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_tcp.md
index 627d1da97dd61..c2d4b4286665d 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/splunk_tcp.md
@@ -13,7 +13,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting Splunk Forwarder to the Observability Pipelines Worker](#connect-splunk-forwarder-to-the-observability-pipelines-worker)
@@ -34,56 +34,95 @@ This document walks you through the following steps:
 
 ### Set up the destination
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -93,9 +132,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -103,15 +152,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -123,91 +187,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs. A reachability check example follows the environment variable tabs below.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
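+
+Splunk forwarders use Splunk's own forwarding protocol, so a hand-written payload is not a meaningful test here, but you can still check that the Worker is reachable on the Splunk TCP address before updating your forwarders' outputs. A minimal sketch, using placeholder host and port values:
+
+```python
+import socket
+
+# Placeholder values; replace with the Splunk TCP address and port you configured.
+worker_host = "127.0.0.1"
+worker_port = 9997
+
+# Confirm the port is open before pointing Heavy or Universal Forwarders at the Worker.
+try:
+    with socket.create_connection((worker_host, worker_port), timeout=5):
+        print(f"Worker is listening on {worker_host}:{worker_port}")
+except OSError as err:
+    print(f"Could not reach the Worker: {err}")
+```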
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector.md
index b7843d1b7796a..fb0a1a41422ee 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/sumo_logic_hosted_collector.md
@@ -15,7 +15,7 @@ Observability Pipelines supports Sumo Logic Collector HTTP Logs source.
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker over Sumo Logic HTTP Source](#send-logs-to-the-observability-pipelines-worker-over-sumo-logic-http-source)
@@ -36,56 +36,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -95,9 +134,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -105,15 +154,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -125,91 +189,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs. A test-request example follows the environment variable tabs below.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
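+
+After the Worker is installed and running, you can post a raw log line to its Sumo Logic-compatible HTTP source to confirm the pipeline is receiving data. This is only a sketch: the URL below, including the path, is a placeholder, so use the address configured for your Worker's Sumo Logic source.
+
+```python
+import requests
+
+# Placeholder URL; replace with the address (and path) configured for the Worker's
+# Sumo Logic HTTP source.
+worker_url = "http://127.0.0.1:80/receiver/v1/http/PLACEHOLDER"
+
+# Sumo Logic HTTP sources accept raw log lines in the request body.
+response = requests.post(
+    worker_url,
+    data="Observability Pipelines test log line",
+    headers={"Content-Type": "text/plain"},
+    timeout=10,
+)
+print(response.status_code)
+```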
diff --git a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/syslog.md b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/syslog.md
index 14d8739722998..912ce83b65c4e 100644
--- a/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/sensitive_data_redaction/syslog.md
@@ -13,7 +13,7 @@ Use Observability Pipelines to identify, tag, and optionally redact or hash sens
 
 {{% observability_pipelines/use_case_images/sensitive_data_redaction %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-syslog)
@@ -34,56 +34,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -93,9 +132,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors_sds %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -103,15 +152,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -123,92 +187,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Syslog address. This is a Syslog-compatible endpoint, exposed by the Worker, that your applications send logs to. The Observability Pipelines Worker listens on this address for incoming logs. A test-message example follows the environment variable tabs below.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
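+
+After the Worker is installed and running, you can send a test message to the Syslog address to confirm logs flow through the pipeline. The sketch below assumes the Worker's syslog source accepts TCP connections and uses placeholder host and port values; the message follows RFC 5424 framing.
+
+```python
+import socket
+from datetime import datetime, timezone
+
+# Placeholder values; replace with the Syslog address and port you configured.
+worker_host = "127.0.0.1"
+worker_port = 514
+
+# Build a minimal RFC 5424 message (PRI 134 = facility local0, severity info).
+timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
+message = f"<134>1 {timestamp} myhost myapp - - - Observability Pipelines test message\n"
+
+with socket.create_connection((worker_host, worker_port), timeout=5) as sock:
+    sock.sendall(message.encode("utf-8"))
+```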
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md
index e41cf1d1f1f5a..1336c3947057e 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/_index.md
@@ -13,11 +13,14 @@ Often, organizations need to send their logs to multiple products for different
 
 Select your log source to get started:
 
+<!-- - [Amazon Data Firehose][12] -->
+- [Amazon S3][11]
 - [Datadog Agent][1]
 - [Fluentd or Fluent Bit][2]
 - [Google Pub/Sub][3]
 - [HTTP Client][4]
 - [HTTP Server][5]
+- [Kafka][13]
 - [Logstash][6]
 - [Splunk HTTP Event Collector (HEC)][7]
 - [Splunk Heavy or Universal Forwarders (TCP)][8]
@@ -34,3 +37,6 @@ Select your log source to get started:
 [8]: /observability_pipelines/split_logs/splunk_tcp
 [9]: /observability_pipelines/split_logs/sumo_logic_hosted_collector
 [10]: /observability_pipelines/split_logs/syslog
+[11]: /observability_pipelines/set_up_pipelines/split_logs/amazon_s3
+[12]: /observability_pipelines/set_up_pipelines/split_logs/amazon_data_firehose
+[13]: /observability_pipelines/set_up_pipelines/split_logs/kafka
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/amazon_data_firehose.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/amazon_data_firehose.md
new file mode 100644
index 0000000000000..22b0a4b55853e
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/amazon_data_firehose.md
@@ -0,0 +1,341 @@
+---
+title: Split Logs for Amazon Data Firehose
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to process and route your Amazon Data Firehose logs to different destinations based on your use case.
+
+{{% observability_pipelines/use_case_images/split_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-amazon-data-firehose)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Split Logs** template to create a new pipeline.
+1. Select the **Amazon Data Firehose** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the Amazon Data Firehose address. The Observability Pipelines Worker listens to this address and port for incoming logs from Amazon Data Firehose.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
+
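+To exercise the endpoint before wiring up a delivery stream, you can mimic the shape of an Amazon Data Firehose HTTP endpoint delivery request, which wraps base64-encoded records in a JSON envelope. This is only a rough local sketch; the URL and access key are placeholders, and in production the requests come from Firehose itself.
+
+```python
+import base64
+import json
+import time
+import uuid
+
+import requests
+
+# Placeholder values; replace with the address the Worker listens on for Firehose traffic
+# and the access key configured for the delivery stream.
+worker_url = "http://127.0.0.1:8088"
+access_key = "PLACEHOLDER_ACCESS_KEY"
+
+# Firehose HTTP endpoint deliveries carry each record as base64-encoded data.
+payload = {
+    "requestId": str(uuid.uuid4()),
+    "timestamp": int(time.time() * 1000),
+    "records": [
+        {"data": base64.b64encode(b'{"message": "Observability Pipelines test log"}').decode("ascii")},
+    ],
+}
+
+response = requests.post(
+    worker_url,
+    headers={
+        "Content-Type": "application/json",
+        "X-Amz-Firehose-Access-Key": access_key,
+    },
+    data=json.dumps(payload),
+    timeout=10,
+)
+print(response.status_code, response.text)
+```
+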
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/amazon_s3.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/amazon_s3.md
new file mode 100644
index 0000000000000..407a3eda928ea
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/amazon_s3.md
@@ -0,0 +1,336 @@
+---
+title: Split Logs for Amazon S3
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to process and route your Amazon S3 logs to different destinations based on your use case.
+
+{{% observability_pipelines/use_case_images/split_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Split Logs** template to create a new pipeline.
+1. Select the **Amazon S3** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. In the **AWS S3 SQS URL** field, enter the URL of the SQS queue to which the S3 bucket sends notification events (an example queue URL format is shown after this list).
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
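+
+For reference, an Amazon SQS queue URL generally follows the pattern below. The Region, account ID, and queue name are placeholders, not values specific to this guide:
+
+```
+https://sqs.<region>.amazonaws.com/<account_id>/<queue_name>
+```
+
+For example, `https://sqs.us-east-1.amazonaws.com/123456789012/s3-log-notifications`.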
+
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/datadog_agent.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/datadog_agent.md
index 9be4a558ba845..c89fbeb390a50 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/datadog_agent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/datadog_agent.md
@@ -11,7 +11,7 @@ Configure your Datadog Agent to send logs to the Observability Pipelines Worker
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting the Datadog Agent to the Observability Pipelines Worker](#connect-the-datadog-agent-to-the-observability-pipelines-worker)
@@ -45,56 +45,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -104,9 +143,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -114,15 +163,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -134,91 +198,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Datadog Agent address. The Observability Pipelines Worker listens to this address and port for incoming logs from the Datadog Agent. For example, `0.0.0.0:<port_number>`.
 1. Provide the environment variables for each of your selected destinations.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/fluent.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/fluent.md
index 4bc93214cbb0d..5d10b0d66482a 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/fluent.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/fluent.md
@@ -11,7 +11,7 @@ Configure Fluentd and Fluent Bit to send logs to the Observability Pipelines Wor
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-fluent)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Fluent socket address and port. The Observability Pipelines Worker listens on this address for incoming log messages.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/google_pubsub.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/google_pubsub.md
index 5257b26b623b7..ce10678e4976d 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/google_pubsub.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/google_pubsub.md
@@ -9,7 +9,7 @@ Configure Google Pub/Sub to send logs to the Observability Pipelines Worker and
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,90 +182,116 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/http_client.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/http_client.md
index 633359dc88fc2..1e1b7bb6ce24c 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/http_client.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/http_client.md
@@ -11,7 +11,7 @@ Use the Observability Pipelines Worker to process and route your HTTP server log
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -31,56 +31,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -90,9 +129,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -100,15 +149,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -120,92 +184,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the full path of the HTTP/S endpoint URL. For example, `https://127.0.0.8/logs`. The Observability Pipelines Worker collects log events from this endpoint.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/http_server.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/http_server.md
index fae8e216f5358..70df50d3e2045 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/http_server.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/http_server.md
@@ -9,7 +9,7 @@ Use the Observability Pipelines Worker to process and route your HTTP client log
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 
@@ -29,56 +29,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -88,9 +127,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -98,15 +147,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -118,91 +182,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the HTTP/S server address, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens to this socket address for your HTTP client logs.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/kafka.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/kafka.md
new file mode 100644
index 0000000000000..2a30332f70c10
--- /dev/null
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/kafka.md
@@ -0,0 +1,340 @@
+---
+title: Split Logs for Kafka
+disable_toc: false
+---
+
+## Overview
+
+Use the Observability Pipelines Worker to process and route logs from your Kafka topics to different destinations based on your use case.
+
+{{% observability_pipelines/use_case_images/split_logs %}}
+
+This document walks you through the following:
+1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
+1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
+1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-kafka)
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up Observability Pipelines
+
+1. Navigate to [Observability Pipelines][1].
+1. Select the **Split Logs** template to create a new pipeline.
+1. Select the **Kafka** source.
+
+### Set up the source
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+### Set up the destinations
+
+Enter the following information based on your selected logs destinations.
+
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_settings/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_settings/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
+
+Follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_settings/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_settings/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_settings/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
+### Set up processors
+
+{{% observability_pipelines/processors/intro %}}
+
+{{% observability_pipelines/processors/filter_syntax %}}
+
+{{% observability_pipelines/processors/add_processors %}}
+
+{{< tabs >}}
+{{% tab "Add env vars" %}}
+
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
+
+{{% /tab %}}
+{{% tab "Edit fields" %}}
+
+{{% observability_pipelines/processors/remap %}}
+
+{{% /tab %}}
+{{% tab "Enrichment table" %}}
+
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
+
+{{% /tab %}}
+{{% tab "Grok Parser" %}}
+
+{{% observability_pipelines/processors/grok_parser %}}
+
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
+{{% /tab %}}
+{{% tab "Quota" %}}
+
+{{% observability_pipelines/processors/quota %}}
+
+{{% /tab %}}
+{{% tab "Reduce" %}}
+
+{{% observability_pipelines/processors/reduce %}}
+
+{{% /tab %}}
+{{% tab "Remap to OCSF" %}}
+
+{{% observability_pipelines/processors/remap_ocsf %}}
+
+{{% /tab %}}
+{{% tab "Sample" %}}
+
+{{% observability_pipelines/processors/sample %}}
+
+{{% /tab %}}
+{{% tab "Sensitive Data Scanner" %}}
+
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
+
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_library_rules %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
+
+{{% observability_pipelines/processors/sds_custom_rules %}}
+
+{{% /collapse-content %}} -->
+
+{{% /tab %}}
+{{< /tabs >}}
+
+#### Add another set of processors and destinations
+
+{{% observability_pipelines/multiple_processors %}}
+
+### Install the Observability Pipelines Worker
+1. Select your platform in the **Choose your installation platform** dropdown menu.
+1. Enter the host and port of the Kafka bootstrap servers, which clients use to connect to the Kafka cluster and discover all the other hosts in the cluster. Enter the servers in `host:port` format, such as `10.14.22.123:9092`. If there is more than one server, separate them with commas, for example `10.14.22.123:9092,10.14.22.124:9092`.
+
+    If you enabled SASL, enter the Kafka SASL username and Kafka SASL password. For an example of these connection settings in a Kafka client, see [Send logs to the Observability Pipelines Worker over Kafka](#send-logs-to-the-observability-pipelines-worker-over-kafka).
+
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
+{{< tabs >}}
+{{% tab "Amazon OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+
+{{% /tab %}}
+{{% tab "Chronicle" %}}
+
+{{% observability_pipelines/destination_env_vars/chronicle %}}
+
+{{% /tab %}}
+{{% tab "Datadog" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog %}}
+
+{{% /tab %}}
+{{% tab "Datadog Archives" %}}
+
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
+
+{{% /tab %}}
+{{% tab "Elasticsearch" %}}
+
+{{% observability_pipelines/destination_env_vars/elasticsearch %}}
+
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
+{{% /tab %}}
+{{% tab "OpenSearch" %}}
+
+{{% observability_pipelines/destination_env_vars/opensearch %}}
+
+{{% /tab %}}
+{{% tab "SentinelOne" %}}
+
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
+
+{{% /tab %}}
+{{% tab "Splunk HEC" %}}
+
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+1. Follow the instructions for your environment to install the Worker.
+{{< tabs >}}
+{{% tab "Docker" %}}
+
+{{% observability_pipelines/install_worker/docker %}}
+
+{{% /tab %}}
+{{% tab "Amazon EKS" %}}
+
+{{% observability_pipelines/install_worker/amazon_eks %}}
+
+{{% /tab %}}
+{{% tab "Azure AKS" %}}
+
+{{% observability_pipelines/install_worker/azure_aks %}}
+
+{{% /tab %}}
+{{% tab "Google GKE" %}}
+
+{{% observability_pipelines/install_worker/google_gke %}}
+
+{{% /tab %}}
+{{% tab "Linux (APT)" %}}
+
+{{% observability_pipelines/install_worker/linux_apt %}}
+
+{{% /tab %}}
+{{% tab "Linux (RPM)" %}}
+
+{{% observability_pipelines/install_worker/linux_rpm %}}
+
+{{% /tab %}}
+{{% tab "CloudFormation" %}}
+
+{{% observability_pipelines/install_worker/cloudformation %}}
+
+{{% /tab %}}
+{{< /tabs >}}
+
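+## Send logs to the Observability Pipelines Worker over Kafka
+
+The Worker consumes logs directly from the Kafka topics you configure in the source settings, so any producer that writes to those topics is picked up automatically. As an illustration only, the following sketch publishes a test log with the `kafka-python` library; the broker addresses, topic name, SASL credentials, and log fields are placeholders, not values required by Observability Pipelines.
+
+```python
+# Hypothetical test producer; match the bootstrap servers, SASL settings, and
+# topic to your own Kafka source configuration.
+import json
+from kafka import KafkaProducer
+
+producer = KafkaProducer(
+    # The same servers entered in the Worker installation step, as a list of host:port pairs.
+    bootstrap_servers=["10.14.22.123:9092", "10.14.22.124:9092"],
+    security_protocol="SASL_PLAINTEXT",   # omit the SASL settings if SASL is not enabled
+    sasl_mechanism="PLAIN",
+    sasl_plain_username="<KAFKA_SASL_USERNAME>",
+    sasl_plain_password="<KAFKA_SASL_PASSWORD>",
+    value_serializer=lambda record: json.dumps(record).encode("utf-8"),
+)
+
+# Publish a test log to a topic the Worker is configured to consume.
+producer.send("observability-logs", {"message": "test log from kafka-python", "service": "demo"})
+producer.flush()
+```
+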
+[1]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/logstash.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/logstash.md
index 7799d3fb11dba..b2034e5eb28c9 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/logstash.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/logstash.md
@@ -9,7 +9,7 @@ Configure Logstash to send logs to the Observability Pipelines Worker and then p
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-logstash)
@@ -30,56 +30,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -89,9 +128,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -99,15 +148,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -119,91 +183,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Logstash address and port, such as `0.0.0.0:9997`. The Observability Pipelines Worker listens on this address for incoming log messages.
 1. Provide the environment variables for each of your selected destinations. See the [prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_hec.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_hec.md
index 89c131cb3a0a4..2133cc5cd7708 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_hec.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_hec.md
@@ -41,53 +41,92 @@ This document walks you through the following setup steps:
 Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -97,9 +136,19 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -107,15 +156,30 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -127,91 +191,117 @@ Enter the following information based on your selected logs destinations.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk HEC address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_tcp.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_tcp.md
index 52259cc165ed7..1a7ea8af86534 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_tcp.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/splunk_tcp.md
@@ -11,7 +11,7 @@ Configure your Splunk Heavy or Universal Forwarders to send logs to the Observab
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Connecting Splunk Forwarder to the Observability Pipelines Worker](#connect-splunk-forwarder-to-the-observability-pipelines-worker)
@@ -38,56 +38,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -97,9 +136,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -107,15 +156,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -127,91 +191,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Splunk TCP address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/sumo_logic_hosted_collector.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/sumo_logic_hosted_collector.md
index 64af666637192..8f091124b2788 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/sumo_logic_hosted_collector.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/sumo_logic_hosted_collector.md
@@ -11,7 +11,7 @@ Configure you Sumo Logic Hosted Collector HTTP Logs source to send logs to the O
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker over Sumo Logic HTTP Source](#send-logs-to-the-observability-pipelines-worker-over-sumo-logic-http-source)
@@ -38,56 +38,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -97,9 +136,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -107,15 +156,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -127,91 +191,117 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Sumo Logic address. This is the address and port where your applications are sending their logging data. The Observability Pipelines Worker listens to this address for incoming logs.
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/set_up_pipelines/split_logs/syslog.md b/content/en/observability_pipelines/set_up_pipelines/split_logs/syslog.md
index a8729f07a2a84..30184122cc683 100644
--- a/content/en/observability_pipelines/set_up_pipelines/split_logs/syslog.md
+++ b/content/en/observability_pipelines/set_up_pipelines/split_logs/syslog.md
@@ -11,7 +11,7 @@ Configure rsyslog or syslog-ng to send logs to the Observability Pipelines Worke
 
 {{% observability_pipelines/use_case_images/split_logs %}}
 
-This document walks you through the following steps:
+This document walks you through the following:
 1. The [prerequisites](#prerequisites) needed to set up Observability Pipelines
 1. [Setting up Observability Pipelines](#set-up-observability-pipelines)
 1. [Sending logs to the Observability Pipelines Worker](#send-logs-to-the-observability-pipelines-worker-over-syslog)
@@ -32,56 +32,95 @@ This document walks you through the following steps:
 
 ### Set up the destinations
 
-Enter the following information based on your selected logs destination.
+Enter the following information based on your selected logs destinations.
 
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_settings/datadog %}}
+{{% observability_pipelines/destination_settings/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_settings/splunk_hec %}}
+{{% observability_pipelines/destination_settings/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_settings/sumo_logic %}}
+{{% observability_pipelines/destination_settings/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_settings/syslog %}}
+{{% observability_pipelines/destination_settings/datadog_archives_note %}}
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% observability_pipelines/destination_settings/datadog_archives_prerequisites %}}
 
-{{% observability_pipelines/destination_settings/chronicle %}}
+To set up the destination, follow the instructions for the cloud provider you are using to archive your logs.
+
+{{% collapse-content title="Amazon S3" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_settings/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_settings/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_settings/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_settings/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_settings/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_settings/amazon_opensearch %}}
+{{% observability_pipelines/destination_settings/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_settings/new_relic %}}
+{{% observability_pipelines/destination_settings/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_settings/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_settings/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
 
+#### Add additional destinations
+
+{{% observability_pipelines/multiple_destinations %}}
+
 ### Set up processors
 
 {{% observability_pipelines/processors/intro %}}
@@ -91,9 +130,19 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/add_processors %}}
 
 {{< tabs >}}
-{{% tab "Filter" %}}
+{{% tab "Add env vars" %}}
 
-{{% observability_pipelines/processors/filter %}}
+{{% observability_pipelines/processors/add_env_vars %}}
+
+{{% /tab %}}
+{{% tab "Add hostname" %}}
+
+{{% observability_pipelines/processors/add_hostname %}}
+
+{{% /tab %}}
+{{% tab "Dedupe" %}}
+
+{{% observability_pipelines/processors/dedupe %}}
 
 {{% /tab %}}
 {{% tab "Edit fields" %}}
@@ -101,15 +150,30 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/remap %}}
 
 {{% /tab %}}
-{{% tab "Sample" %}}
+{{% tab "Enrichment table" %}}
 
-{{% observability_pipelines/processors/sample %}}
+{{% observability_pipelines/processors/enrichment_table %}}
+
+{{% /tab %}}
+{{% tab "Filter" %}}
+
+{{% observability_pipelines/processors/filter %}}
+
+{{% /tab %}}
+{{% tab "Generate metrics" %}}
+
+{{% observability_pipelines/processors/generate_metrics %}}
 
 {{% /tab %}}
 {{% tab "Grok Parser" %}}
 
 {{% observability_pipelines/processors/grok_parser %}}
 
+{{% /tab %}}
+{{% tab "Parse JSON" %}}
+
+{{% observability_pipelines/processors/parse_json %}}
+
 {{% /tab %}}
 {{% tab "Quota" %}}
 
@@ -121,92 +185,118 @@ Enter the following information based on your selected logs destination.
 {{% observability_pipelines/processors/reduce %}}
 
 {{% /tab %}}
-{{% tab "Dedupe" %}}
+{{% tab "Remap to OCSF" %}}
 
-{{% observability_pipelines/processors/dedupe %}}
+{{% observability_pipelines/processors/remap_ocsf %}}
 
 {{% /tab %}}
-{{% tab "Sensitive Data Scanner" %}}
+{{% tab "Sample" %}}
 
-{{% observability_pipelines/processors/sensitive_data_scanner %}}
+{{% observability_pipelines/processors/sample %}}
 
 {{% /tab %}}
-{{% tab "Add hostname" %}}
+{{% tab "Sensitive Data Scanner" %}}
 
-{{% observability_pipelines/processors/add_hostname %}}
+{{% observability_pipelines/processors/sensitive_data_scanner %}}
 
-{{% /tab %}}
-{{% tab "Parse JSON" %}}
+<!-- {{% collapse-content title="Add rules from the library" level="h5" %}}
 
-{{% observability_pipelines/processors/parse_json %}}
+{{% observability_pipelines/processors/sds_library_rules %}}
 
-{{% /tab %}}
-{{% tab "Enrichment table" %}}
-
-{{% observability_pipelines/processors/enrichment_table %}}
+{{% /collapse-content %}}
+{{% collapse-content title="Add a custom rule" level="h5" %}}
 
-{{% /tab %}}
-{{% tab "Generate metrics" %}}
+{{% observability_pipelines/processors/sds_custom_rules %}}
 
-{{% observability_pipelines/processors/generate_metrics %}}
+{{% /collapse-content %}} -->
 
 {{% /tab %}}
-{{% tab "Add env vars" %}}
+{{< /tabs >}}
 
-{{% observability_pipelines/processors/add_env_vars %}}
+#### Add another set of processors and destinations
 
-{{% /tab %}}
-{{< /tabs >}}
+{{% observability_pipelines/multiple_processors %}}
 
 ### Install the Observability Pipelines Worker
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. Enter the Syslog address. This is a Syslog-compatible endpoint, exposed by the Worker, that your applications send logs to. The Observability Pipelines Worker listens on this address for incoming logs.
 
-1. Provide the environment variables for each of your selected destinations. See [prerequisites](#prerequisites) for more information.
+1. Provide the environment variables for each of your selected destinations. See [Prerequisites](#prerequisites) for more information.
 {{< tabs >}}
-{{% tab "Datadog" %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/destination_env_vars/datadog %}}
+{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Chronicle" %}}
 
-{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% tab "Datadog" %}}
 
-{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/destination_env_vars/syslog %}}
+For the Datadog Archives destination, follow the instructions for the cloud provider you are using to archive your logs.
 
-{{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% collapse-content title="Amazon S3" level="h5" %}}
 
-{{% observability_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Google Cloud Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+
+{{% /collapse-content %}}
+{{% collapse-content title="Azure Storage" level="h5" %}}
+
+{{% observability_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+
+{{% /collapse-content %}}
 
 {{% /tab %}}
 {{% tab "Elasticsearch" %}}
 
 {{% observability_pipelines/destination_env_vars/elasticsearch %}}
 
+{{% /tab %}}
+{{% tab "Microsoft Sentinel" %}}
+
+{{% observability_pipelines/destination_env_vars/microsoft_sentinel %}}
+
+{{% /tab %}}
+{{% tab "New Relic" %}}
+
+{{% observability_pipelines/destination_env_vars/new_relic %}}
+
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
 
 {{% observability_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/observability_pipelines/sources/_index.md b/content/en/observability_pipelines/sources/_index.md
index bf58f1f9fce0d..836db73c91403 100644
--- a/content/en/observability_pipelines/sources/_index.md
+++ b/content/en/observability_pipelines/sources/_index.md
@@ -29,11 +29,14 @@ Select and set up your source when you build a pipeline in the UI. This is step
 Sources have different prerequisites and settings. Some sources also need to be configured to send logs to the Observability Pipelines Worker.
 
 {{< whatsnext desc="Select a source for more information:" >}}
+    <!-- {{< nextlink href="observability_pipelines/sources/amazon_data_firehose/" >}}Amazon Data Firehose{{< /nextlink >}} -->
+    {{< nextlink href="observability_pipelines/sources/amazon_s3/" >}}Amazon S3{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/datadog_agent/" >}}Datadog Agent{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/fluent/" >}}Fluentd and Fluent Bit{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/google_pubsub/" >}}Google Pub/Sub{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/http_client/" >}}HTTP/S Client{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/http_server/" >}}HTTP/S Server{{< /nextlink >}}
+    {{< nextlink href="observability_pipelines/sources/kafka/" >}}Kafka{{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/logstash/" >}}Logstash (includes Filebeat){{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/splunk_hec/" >}}Splunk HTTP Event Collector (HEC){{< /nextlink >}}
     {{< nextlink href="observability_pipelines/sources/splunk_tcp/" >}}Splunk Heavy or Universal Forwarders (TCP){{< /nextlink >}}
diff --git a/content/en/observability_pipelines/sources/amazon_data_firehose.md b/content/en/observability_pipelines/sources/amazon_data_firehose.md
new file mode 100644
index 0000000000000..7b4331a0fee10
--- /dev/null
+++ b/content/en/observability_pipelines/sources/amazon_data_firehose.md
@@ -0,0 +1,22 @@
+---
+title: Amazon Data Firehose Source
+disable_toc: false
+---
+
+Use Observability Pipelines' Amazon Data Firehose source to receive logs from Amazon Data Firehose. Select and set up this source when you [set up a pipeline][1].
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_data_firehose %}}
+
+## Set up the source in the pipeline UI
+
+Select and set up this source when you [set up a pipeline][1]. The information below is for the source settings in the pipeline UI.
+
+{{% observability_pipelines/source_settings/amazon_data_firehose %}}
+
+## Send logs to the Observability Pipelines Worker over Amazon Data Firehose
+
+{{% observability_pipelines/log_source_configuration/amazon_data_firehose %}}
+
+[1]: /observability_pipelines/set_up_pipelines/
\ No newline at end of file
diff --git a/content/en/observability_pipelines/sources/amazon_s3.md b/content/en/observability_pipelines/sources/amazon_s3.md
new file mode 100644
index 0000000000000..adbdbff7eae66
--- /dev/null
+++ b/content/en/observability_pipelines/sources/amazon_s3.md
@@ -0,0 +1,29 @@
+---
+title: Amazon S3 Source
+disable_toc: false
+---
+
+Use Observability Pipelines' Amazon S3 source to receive logs from Amazon S3. Select and set up this source when you [set up a pipeline][1].
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/amazon_s3 %}}
+
+## Set up the source in the pipeline UI
+
+Select and set up this source when you [set up a pipeline][1]. The information below is for the source settings in the pipeline UI.
+
+{{% observability_pipelines/source_settings/amazon_s3 %}}
+
+## AWS authentication
+
+{{% observability_pipelines/aws_authentication/amazon_s3_source/intro %}}
+
+{{% observability_pipelines/aws_authentication/instructions %}}
+
+### Permissions
+
+{{% observability_pipelines/aws_authentication/amazon_s3_source/permissions %}}
+
+
+[1]: /observability_pipelines/set_up_pipelines/
\ No newline at end of file
diff --git a/content/en/observability_pipelines/sources/kafka.md b/content/en/observability_pipelines/sources/kafka.md
new file mode 100644
index 0000000000000..07cc5732cf306
--- /dev/null
+++ b/content/en/observability_pipelines/sources/kafka.md
@@ -0,0 +1,19 @@
+---
+title: Kafka Source
+disable_toc: false
+---
+
+Use Observability Pipelines' Kafka source to receive logs from your Kafka topics. Select and set up this source when you [set up a pipeline][1]. The Kafka source uses [librdkafka][2].
+
+## Prerequisites
+
+{{% observability_pipelines/prerequisites/kafka %}}
+
+## Set up the source in the pipeline UI
+
+Select and set up this source when you [set up a pipeline][1]. The information below is for the source settings in the pipeline UI.
+
+{{% observability_pipelines/source_settings/kafka %}}
+
+[1]: /observability_pipelines/set_up_pipelines/
+[2]: https://github.com/confluentinc/librdkafka/tree/master
\ No newline at end of file
diff --git a/content/en/observability_pipelines/update_existing_pipelines.md b/content/en/observability_pipelines/update_existing_pipelines.md
index d28209fa80964..93d007e7a6767 100644
--- a/content/en/observability_pipelines/update_existing_pipelines.md
+++ b/content/en/observability_pipelines/update_existing_pipelines.md
@@ -22,6 +22,16 @@ On the Worker installation page:
 1. Select your platform in the **Choose your installation platform** dropdown menu.
 1. If you want to update source environment variables, update the information for your log source.
 {{< tabs >}}
+<!-- {{% tab "Amazon Data Firehose" %}}
+
+{{% observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_data_firehose %}}
+
+{{% /tab %}} -->
+{{% tab "Amazon S3" %}}
+
+{{% observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_s3 %}}
+
+{{% /tab %}}
 {{% tab "Datadog Agent" %}}
 
 {{% observability_pipelines/configure_existing_pipelines/source_env_vars/datadog_agent %}}
@@ -46,6 +56,11 @@ On the Worker installation page:
 
 {{% observability_pipelines/configure_existing_pipelines/source_env_vars/http_server %}}
 
+{{% /tab %}}
+{{% tab "Kafka" %}}
+
+{{% observability_pipelines/configure_existing_pipelines/source_env_vars/kafka %}}
+
 {{% /tab %}}
 {{% tab "Logstash" %}}
 
@@ -75,14 +90,14 @@ On the Worker installation page:
 {{< /tabs >}}
 1. If you want to update destination environment variables, update the information for your log destination.
 {{< tabs >}}
-{{% tab "Datadog Archives" %}}
-
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
+{{% tab "Amazon OpenSearch" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/amazon_opensearch %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
+{{% /tab %}}
+{{% tab "Chronicle" %}}
 
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/chronicle %}}
 
 {{% /tab %}}
 {{% tab "Datadog" %}}
@@ -90,29 +105,28 @@ On the Worker installation page:
 {{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog %}}
 
 {{% /tab %}}
-{{% tab "Splunk HEC" %}}
+{{% tab "Datadog Archives" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/splunk_hec %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog_archives_amazon_s3 %}}
 
-{{% /tab %}}
-{{% tab "Sumo Logic" %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog_archives_google_cloud_storage %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/sumo_logic %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/datadog_archives_azure_storage %}}
 
 {{% /tab %}}
-{{% tab "Syslog" %}}
+{{% tab "Elasticsearch" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/syslog %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/elasticsearch %}}
 
 {{% /tab %}}
-{{% tab "Chronicle" %}}
+{{% tab "Microsoft Sentinel" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/chronicle %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/microsoft_sentinel %}}
 
 {{% /tab %}}
-{{% tab "Elasticsearch" %}}
+{{% tab "New Relic" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/elasticsearch %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/new_relic %}}
 
 {{% /tab %}}
 {{% tab "OpenSearch" %}}
@@ -120,17 +134,28 @@ On the Worker installation page:
 {{% observability_pipelines/configure_existing_pipelines/destination_env_vars/opensearch %}}
 
 {{% /tab %}}
-{{% tab "Amazon OpenSearch" %}}
+{{% tab "SentinelOne" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/amazon_opensearch %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/sentinelone %}}
 
 {{% /tab %}}
-{{% tab "New Relic" %}}
+{{% tab "Splunk HEC" %}}
 
-{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/new_relic %}}
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/splunk_hec %}}
+
+{{% /tab %}}
+{{% tab "Sumo Logic" %}}
+
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/sumo_logic %}}
+
+{{% /tab %}}
+{{% tab "Syslog" %}}
+
+{{% observability_pipelines/configure_existing_pipelines/destination_env_vars/syslog %}}
 
 {{% /tab %}}
 {{< /tabs >}}
+
 1. Follow the instructions for your environment to update the worker:
 {{< tabs >}}
 {{% tab "Docker" %}}
diff --git a/content/en/opentelemetry/interoperability/otlp_ingest_in_the_agent.md b/content/en/opentelemetry/interoperability/otlp_ingest_in_the_agent.md
index 488e2a90e0506..e4be7b12db1d5 100644
--- a/content/en/opentelemetry/interoperability/otlp_ingest_in_the_agent.md
+++ b/content/en/opentelemetry/interoperability/otlp_ingest_in_the_agent.md
@@ -98,6 +98,14 @@ OTLP logs ingestion on the Datadog Agent is disabled by default so that you don'
    - Set `DD_LOGS_ENABLED` to true.
    - Set `DD_OTLP_CONFIG_LOGS_ENABLED` to true.
 
+<div class="alert alert-warning">
+<strong>Known Issue</strong>: Agent versions 7.61.0 through 7.63.0 have an issue where OTLP ingestion pipelines may fail to start in Docker environments, showing the error: <code>Error running the OTLP ingest pipeline: failed to register process metrics: process does not exist</code>.<br>
+If you are using an affected version, you can use one of these workarounds:<br>
+1. Set the environment variable <code>HOST_PROC</code> to <code>/proc</code> in your Agent Docker container.<br>
+2. Remove <code>/proc/:/host/proc/:ro</code> from <code>volumes</code> in your Agent Docker container.<br>
+3. Set <code>pid</code> to <code>host</code> in your Agent Docker container.<br>
+You can apply these configurations through either the <code>docker</code> command or a Docker Compose file.</div>
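+
+For example, with Docker Compose, the first workaround might look like the following. This is a minimal sketch rather than a complete Agent configuration: the service name, image reference, and the environment variables other than `HOST_PROC` are assumptions, so adapt them to your existing setup.
+
+```yaml
+# Minimal sketch: apply workaround 1 by setting HOST_PROC in the Agent container
+services:
+  datadog-agent:
+    image: gcr.io/datadoghq/agent:7   # assumed image reference; keep your existing one
+    environment:
+      - DD_API_KEY=<DATADOG_API_KEY>
+      - DD_LOGS_ENABLED=true
+      - DD_OTLP_CONFIG_LOGS_ENABLED=true
+      - HOST_PROC=/proc               # workaround 1 from the list above
+```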
+
 [1]: /agent/docker/
 {{% /tab %}}
 {{% tab "Kubernetes (Daemonset)" %}}
diff --git a/content/en/real_user_monitoring/browser/setup/client.md b/content/en/real_user_monitoring/browser/setup/client.md
index d271d76ee9c5c..9b66c189a092d 100644
--- a/content/en/real_user_monitoring/browser/setup/client.md
+++ b/content/en/real_user_monitoring/browser/setup/client.md
@@ -257,8 +257,7 @@ datadogRum.init({
    // site: '<SITE>',
    // version: '1.0.0',
    trackUserInteractions: true,
-   trackResources: true,
-   defaultPrivacyLevel: {default to the selection in form}
+   trackResources: true
 });
 
 ```
@@ -1326,8 +1325,7 @@ Early RUM API calls must be wrapped in the `window.DD_RUM.onReady()` callback. T
       // site: '<SITE>',
       service: '<APP_ID>',
       env: '<ENV_NAME>',
-      // version: '1.0.0',
-      defaultPrivacyLevel: {default to the selection in form}
+      // version: '1.0.0'
     });
   })
 </script>
@@ -2239,8 +2237,7 @@ The `window.DD_RUM` check is used to prevent issues if a loading failure occurs
       // site: '<SITE>',
       service: '<APP_ID>',
       env: '<ENV_NAME>',
-      // version: '1.0.0',
-      defaultPrivacyLevel: {default to the selection in form}
+      // version: '1.0.0'
     });
 </script>
 
diff --git a/content/en/real_user_monitoring/error_tracking/_index.md b/content/en/real_user_monitoring/error_tracking/_index.md
index 6d657bacf868e..4703975a6313c 100644
--- a/content/en/real_user_monitoring/error_tracking/_index.md
+++ b/content/en/real_user_monitoring/error_tracking/_index.md
@@ -44,7 +44,7 @@ Take a tour of key Error Tracking features in the [Error Tracking Explorer][3] d
     {{< nextlink href="real_user_monitoring/error_tracking/reactnative" >}}React Native{{< /nextlink >}}
     {{< nextlink href="real_user_monitoring/error_tracking/flutter" >}}Flutter{{< /nextlink >}}
     {{< nextlink href="real_user_monitoring/error_tracking/roku" >}}Roku{{< /nextlink >}}
-    {{< nextlink href="real_user_monitoring/error_tracking/kotlin-multiplatform" >}}Kotlin Multiplatform{{< /nextlink >}}
+    {{< nextlink href="real_user_monitoring/error_tracking/kotlin_multiplatform" >}}Kotlin Multiplatform{{< /nextlink >}}
 {{< /whatsnext >}}
 
 ## Further Reading
diff --git a/content/en/real_user_monitoring/error_tracking/mobile/kotlin-multiplatform.md b/content/en/real_user_monitoring/error_tracking/mobile/kotlin_multiplatform.md
similarity index 99%
rename from content/en/real_user_monitoring/error_tracking/mobile/kotlin-multiplatform.md
rename to content/en/real_user_monitoring/error_tracking/mobile/kotlin_multiplatform.md
index 768c17e94d038..b8af9cf0ed068 100644
--- a/content/en/real_user_monitoring/error_tracking/mobile/kotlin-multiplatform.md
+++ b/content/en/real_user_monitoring/error_tracking/mobile/kotlin_multiplatform.md
@@ -3,6 +3,7 @@ title: Kotlin Multiplatform Crash Reporting and Error Tracking
 description: Set up Error Tracking for your Kotlin Multiplatform applications.
 aliases:
     - /real_user_monitoring/error_tracking/kotlin-multiplatform
+    - /real_user_monitoring/error_tracking/kotlin_multiplatform
 type: multi-code-lang
 code_lang: kotlin-multiplatform
 code_lang_weight: 10
diff --git a/content/en/real_user_monitoring/feature_flag_tracking/_index.md b/content/en/real_user_monitoring/feature_flag_tracking/_index.md
index 427baea295a2e..3d0073d377f61 100644
--- a/content/en/real_user_monitoring/feature_flag_tracking/_index.md
+++ b/content/en/real_user_monitoring/feature_flag_tracking/_index.md
@@ -15,16 +15,15 @@ further_reading:
 
 ## Overview
 
-Feature flag data gives you greater visibility into your user experience and performance monitoring by allowing you to determine which users are being shown a specific feature and if any change you introduce is impacting your user experience or negatively affecting performance. 
+Feature flag data provides greater visibility into user experience and performance monitoring. It allows you to determine which users are shown a specific feature and assess whether any changes you introduce impact user experience or degrade performance. You can use this information to decide whether to roll back the feature.
 
-By enriching your RUM data with feature flag data, you can: 
-- Be confident that your feature will successfully launch without unintentionally causing a bug or performance regression
+By enriching your RUM data with feature flag data, you can:
+
+- Be confident that your feature successfully launches without unintentionally causing a bug or performance regression
 - Correlate feature releases with performance, pinpoint issues to specific releases, and troubleshoot faster
 - Streamline data collection and analysis and focus on troubleshooting
 
-## Set up feature flag data collection
-
-To see detailed set up instructions, see our guide to [get started with feature flag data collection][1]
+## Supported frameworks
 
 Feature flag tracking is available in the RUM Browser SDK. To start, set up [RUM browser monitoring][2]. You need the Browser RUM SDK version >= 4.25.0.
 
@@ -36,61 +35,22 @@ We support integrations with:
 
 </br>
 
-Feature flags will show up in the context of events where they are evaluated, meaning they should show up on the views that the feature flag code logic is run on.
-
-## View your Feature Flags
-
-Once you have set up your feature flag data collection, navigate to the [**Feature Flags**][4] tab within RUM.
-
-From this view, you can investigate any questions you have about your feature flag's health and usage.
-- Monitor the number of users experiencing each variant and see summary statistics of your feature flag
-- Check the status of your feature flag to see if there are any that can be removed for code clean up
-- View which pages your feature flags are being evaluated on
-
-{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-list-2.png" alt="View a list of your feature flags to investigate any questions you have about your feature flag's health and usage" style="width:90%;" >}}
-
-
-### Search and filter
-Search and filter your feature flags by typing in the search bar. You can also use the faceted search to narrow down, broaden, or shift your focus on subsets of feature flags you are interested in.
-
-{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-list-search-filter.png" alt="Feature Flag list search bar and filtering" style="width:90%;" >}}
-
-### Feature Flag Status
-There are three possible feature flag statuses:
-- **Active**: The feature flag has evaluated different variants for the past 2 weeks
-- **Inactive**: For the past 2 weeks, there have only been feature flag evaluations for your control variant
-- **Out to 100%**: For the past 2 weeks, there have only been feature flag evaluations for one of your non-control variants
-
-## Analyze your feature flags
-To get more details about the health and performance of your feature flag, you can click the flag in the list to navigate to a dedicated feature flag analysis dashboard. The Feature Flag analysis dashboard provides an overview of the performance of your feature flag, displaying information about user sessions, changes in your Core Web Vitals, and error rates. 
-
-These out-of-the-box graphs are aggregated across your flag variants, making it easy to spot problems in your feature releases before they turn into serious issues. This dashboard provides an easy way to monitor your feature releases and allows you to quickly roll back as soon as you spot an issue so you can avoid negative user experiences. 
-
-{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-details-page.mp4" alt="Feature Flag details page - Users overview" video=true width=90% >}}
-
-
-The **Users** tab provides some high level summary statistics of your feature flag and allows you to further analyze the users viewing each of your feature flag variants by any attribute. If you want to understand what it looks like for someone who experienced a certain variant versus another, you can watch a [Session Replay][5] for each case.
-
-The **Issues** tab gives you a view of the errors that are occurring in your application for user sessions that have your feature flag. See if any issues detected by [Error Tracking][6] occurred for a specific variant of your feature flag and may be related to your changes.
-
-The **Performance** tab allows you to understand if one of your feature flag variants have caused poor performance. You can view your Core Web Vitals and loading time for each variant to determine if one of your variants may be causing a negative impact on your application's performance.
-
-### Build custom views from Feature Flag data using the RUM Explorer
-Search through all the data collected by RUM in the [RUM Explorer][7] to surface trends on feature flags, analyze patterns with greater context, or export them into [dashboards][8] and [monitors][9]. 
-
-You can search your Sessions, Views, or Errors in the RUM Explorer, with the `@feature_flags.{flag_name}` attribute to scope down and focus on events where users were shown a specific user experience.
+## Start using feature flags
 
-You can compare important metrics to you and your teams by grouping your query by `@feature_flags.{flag_name}`. For example, if you want to understand how your new checkout flow is affecting the conversion rate from the checkout page to users making a purchase, you can add a "Group by" on the conversion rate graph.
+To get started with feature flags, set up feature flag tracking for the Browser SDK or one of the mobile SDKs, then start collecting data using one of Datadog's integration partners or a custom feature flag management solution.
 
-{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-rum-explorer.png" alt="Feature Flag list search bar and filtering" style="width:90%;" >}}
+{{< whatsnext desc="This section includes the following topics:">}}
+  {{< nextlink href="/real_user_monitoring/feature_flag_tracking/setup">}}<u>Setup</u>: Learn how to set up RUM to capture feature flag data and analyze performance in Datadog.{{< /nextlink >}}
+  {{< nextlink href="/real_user_monitoring/feature_flag_tracking/using_feature_flags">}}<u>Use your feature flags</u>: Learn how to view and analyze your feature flag's health and usage.{{< /nextlink >}}
+{{< /whatsnext >}}
 
 ## Further reading
 
 {{< partial name="whats-next/whats-next.html" >}}
 
-[1]: /real_user_monitoring/guide/setup-feature-flag-data-collection/
+[1]: /real_user_monitoring/setup/
 [2]: /real_user_monitoring/browser#setup
-[3]: /real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#custom-feature-flag-management
+[3]: /real_user_monitoring/setup/?tab=npm#custom-feature-flag-management
 [4]: https://app.datadoghq.com/rum/feature-flags
 [5]: /real_user_monitoring/session_replay/browser/
 [6]: /real_user_monitoring/error_tracking/explorer/#explore-your-issues
diff --git a/content/en/real_user_monitoring/feature_flag_tracking/setup.md b/content/en/real_user_monitoring/feature_flag_tracking/setup.md
new file mode 100644
index 0000000000000..8213262991108
--- /dev/null
+++ b/content/en/real_user_monitoring/feature_flag_tracking/setup.md
@@ -0,0 +1,849 @@
+---
+title: Set Up Feature Flag Tracking
+beta: true
+description: Learn how to set up RUM to capture feature flag data and analyze performance in Datadog
+aliases:
+- /real_user_monitoring/guide/getting-started-feature-flags/
+- /real_user_monitoring/guide/setup-feature-flag-data-collection/
+disable_toc: false
+further_reading:
+- link: "/real_user_monitoring/guide/setup-feature-flag-data-collection/"
+  tag: "Documentation"
+  text: "Set up Feature Flag data collection"
+- link: "/real_user_monitoring/explorer/"
+  tag: "Documentation"
+  text: "Learn about the RUM Explorer"
+- link: "https://www.datadoghq.com/blog/feature-flag-tracking/"
+  tag: "Blog"
+  text: "Ensure release safety with feature flag tracking in Datadog RUM"
+---
+
+Feature flag data provides greater visibility into user experience and performance monitoring. It allows you to determine which users are shown a specific feature and assess whether any changes you introduce impact user experience or degrade performance.
+
+By enriching your RUM data with feature flag data, you can be confident that your feature successfully launches without unintentionally causing a bug or performance regression. With this additional layer of insight, you can correlate feature releases with performance, pinpoint issues to specific releases, and troubleshoot faster.
+
+## Set up RUM monitoring
+
+Feature flag tracking is available in the RUM Browser, iOS, Android, Flutter, and React Native SDKs.
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+To enable feature flag data collection for the Browser SDK:
+
+1. Set up [RUM browser monitoring][1]. You need the Browser RUM SDK version >= 4.25.0.
+
+2. Initialize the RUM SDK and configure the `enableExperimentalFeatures` initialization parameter with `["feature_flags"]`.
+
+   <details open>
+     <summary>npm</summary>
+
+   ```javascript
+     import { datadogRum } from '@datadog/browser-rum';
+
+     // Initialize Datadog Browser SDK
+     datadogRum.init({
+       ...
+       enableExperimentalFeatures: ["feature_flags"],
+       ...
+     });
+   ```
+
+   </details>
+
+   <details>
+     <summary>CDN async</summary>
+
+   ```javascript
+   window.DD_RUM.onReady(function() {
+       window.DD_RUM.init({
+         ...
+         enableExperimentalFeatures: ["feature_flags"],
+         ...
+       })
+   })
+   ```
+   </details>
+
+   <details>
+     <summary>CDN sync</summary>
+
+   ```javascript
+   window.DD_RUM &&
+       window.DD_RUM.init({
+         ...
+         enableExperimentalFeatures: ["feature_flags"],
+         ...
+       })
+   ```
+   </details>
+   <br/>
+
+[1]: /real_user_monitoring/browser#setup
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+To enable feature flag data collection for your iOS application:
+
+1. Set up [RUM iOS monitoring][1]. You need the iOS RUM SDK version >= 1.16.0.
+
+[1]: https://docs.datadoghq.com/real_user_monitoring/ios/?tab=swift
+{{% /tab %}}
+{{% tab "Android" %}}
+
+To enable feature flag data collection for your Android application:
+
+1. Set up [RUM Android monitoring][1]. You need the Android RUM SDK version >= 1.18.0.
+
+[1]: https://docs.datadoghq.com/real_user_monitoring/android/?tab=kotlin
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+To enable feature flag data collection for your Flutter application:
+
+1. Set up [RUM Flutter monitoring][1]. You need the Flutter Plugin version >= 1.3.2.
+
+[1]: https://docs.datadoghq.com/real_user_monitoring/mobile_and_tv_monitoring/setup/flutter/
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+To enable feature flag data collection for your React Native application:
+
+1. Set up [RUM React Native monitoring][1]. You need the React Native RUM SDK version >= 1.7.0.
+
+[1]: https://docs.datadoghq.com/real_user_monitoring/reactnative/
+{{% /tab %}}
+{{< /tabs >}}
+
+## Set up a feature flag integration
+
+You can start collecting feature flag data using [a custom feature flag management solution](#custom-feature-flag-management) or one of Datadog's integration partners listed below.
+
+<div class="alert alert-warning">
+
+**Note**: The following special characters are not supported for Feature Flag Tracking: `.`, `:`, `+`, `-`, `=`, `&&`, `||`, `>`, `<`, `!`, `(`, `)`, `{`, `}`, `[`, `]`, `^`, `"`, `“`, `”`, `~`, `*`, `?`, `\`. Datadog recommends avoiding these characters when possible in your feature flag names. If you are required to use one of these characters, replace the character before sending the data to Datadog. For example:
+
+  ```javascript
+  datadogRum.addFeatureFlagEvaluation(key.replace(':', '_'), value);
+  ```
+
+</div>
+
+{{< partial name="rum/rum-feature-flag-tracking.html" >}}
+
+</br>
+
+### Amplitude integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize Amplitude's SDK and create an exposure listener that reports feature flag evaluations to Datadog, as shown in the following snippet.
+
+For more information about initializing Amplitude's SDK, see Amplitude's [JavaScript SDK documentation][1].
+
+```javascript
+  const experiment = Experiment.initialize("CLIENT_DEPLOYMENT_KEY", {
+    exposureTrackingProvider: {
+      track(exposure: Exposure)  {
+        // Send the feature flag when Amplitude reports the exposure
+        datadogRum.addFeatureFlagEvaluation(exposure.flag_key, exposure.variant);
+      }
+    }
+  })
+```
+
+
+[1]: https://www.docs.developers.amplitude.com/experiment/sdks/javascript-sdk/
+
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+Initialize Amplitude's SDK and create an inspector reporting feature flag evaluations to Datadog using the snippet of code below.
+
+For more information about initializing Amplitude's SDK, see Amplitude's [iOS SDK documentation][1].
+
+```swift
+  class DatadogExposureTrackingProvider : ExposureTrackingProvider {
+    func track(exposure: Exposure) {
+      // Send the feature flag when Amplitude reports the exposure
+      if let variant = exposure.variant {
+        RUMMonitor.shared().addFeatureFlagEvaluation(name: exposure.flagKey, value: variant)
+      }
+    }
+  }
+
+  // In initialization:
+  ExperimentConfig config = ExperimentConfigBuilder()
+    .exposureTrackingProvider(DatadogExposureTrackingProvider(analytics))
+    .build()
+```
+
+[1]: https://www.docs.developers.amplitude.com/experiment/sdks/ios-sdk/
+
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+Initialize Amplitude's SDK and create an inspector reporting feature flag evaluations to Datadog using the snippet of code below.
+
+For more information about initializing Amplitude's SDK, see Amplitude's [Android SDK documentation][1].
+
+```kotlin
+  internal class DatadogExposureTrackingProvider : ExposureTrackingProvider {
+    override fun track(exposure: Exposure) {
+        // Send the feature flag when Amplitude reports the exposure
+        GlobalRumMonitor.get().addFeatureFlagEvaluation(
+            exposure.flagKey,
+            exposure.variant.orEmpty()
+        )
+    }
+  }
+
+  // In initialization:
+  val config = ExperimentConfig.Builder()
+      .exposureTrackingProvider(DatadogExposureTrackingProvider())
+      .build()
+```
+
+[1]: https://www.docs.developers.amplitude.com/experiment/sdks/android-sdk/
+
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+Amplitude does not support this integration. Create a ticket with Amplitude to request this feature.
+
+
+{{% /tab %}}
+{{< /tabs >}}
+
+### ConfigCat integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+When initializing the ConfigCat JavaScript SDK, subscribe to the `flagEvaluated` event and report feature flag evaluations to Datadog:
+
+```javascript
+const configCatClient = configcat.getClient(
+  '#YOUR-SDK-KEY#',
+  configcat.PollingMode.AutoPoll,
+  {
+    setupHooks: (hooks) =>
+      hooks.on('flagEvaluated', (details) => {
+        datadogRum.addFeatureFlagEvaluation(details.key, details.value);
+      })
+  }
+);
+```
+
+For more information about initializing the ConfigCat Javascript SDK, see ConfigCat's [JavaScript SDK documentation][1].
+
+[1]: https://configcat.com/docs/sdk-reference/js
+
+
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+When initializing the ConfigCat Swift iOS SDK, subscribe to the `flagEvaluated` event and report feature flag evaluations to Datadog:
+
+```swift
+  let client = ConfigCatClient.get(sdkKey: "#YOUR-SDK-KEY#") { options in
+    options.hooks.addOnFlagEvaluated { details in
+        RUMMonitor.shared().addFeatureFlagEvaluation(featureFlag: details.key, variation: details.value)
+    }
+  }
+```
+
+For more information about initializing the ConfigCat Swift (iOS) SDK, see ConfigCat's [Swift (iOS) SDK documentation][1].
+
+[1]: https://configcat.com/docs/sdk-reference/ios
+
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+When initializing the ConfigCat Android SDK, subscribe to the `flagEvaluated` event and report feature flag evaluations to Datadog:
+
+```java
+  ConfigCatClient client = ConfigCatClient.get("#YOUR-SDK-KEY#", options -> {
+    options.hooks().addOnFlagEvaluated(details -> {
+        GlobalRumMonitor.get().addFeatureFlagEvaluation(details.key, details.value);
+    });
+  });
+```
+
+For more information about initializing the ConfigCat Android SDK, see ConfigCat's [Android SDK documentation][1].
+
+[1]: https://configcat.com/docs/sdk-reference/android
+
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+When initializing the ConfigCat Dart SDK, subscribe to the `flagEvaluated` event and report feature flag evaluations to Datadog:
+
+```dart
+  final client = ConfigCatClient.get(
+    sdkKey: '#YOUR-SDK-KEY#',
+    options: ConfigCatOptions(
+        pollingMode: PollingMode.autoPoll(),
+        hooks: Hooks(
+            onFlagEvaluated: (details) => {
+              DatadogSdk.instance.rum?.addFeatureFlagEvaluation(details.key, details.value);
+            }
+        )
+    )
+  );
+```
+
+For more information about initializing the ConfigCat Dart (Flutter) SDK, see ConfigCat's [Dart SDK documentation][1].
+
+[1]: https://configcat.com/docs/sdk-reference/dart
+
+
+{{% /tab %}}
+
+
+{{% tab "React Native" %}}
+
+When initializing the ConfigCat React SDK, subscribe to the `flagEvaluated` event and report feature flag evaluations to Datadog:
+
+```typescript
+<ConfigCatProvider
+  sdkKey="YOUR_SDK_KEY"
+  pollingMode={PollingMode.AutoPoll}
+  options={{
+    setupHooks: (hooks) =>
+      hooks.on('flagEvaluated', (details) => {
+        DdRum.addFeatureFlagEvaluation(details.key, details.value);
+      }),
+  }}
+>
+  ...
+</ConfigCatProvider>
+```
+
+For more information about initializing the ConfigCat React SDK, see ConfigCat's [React SDK documentation][1].
+
+[1]: https://configcat.com/docs/sdk-reference/react
+
+{{% /tab %}}
+{{< /tabs >}}
+
+### Custom feature flag management
+
+Before you initialize a custom feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Each time a feature flag is evaluated, add the following function to send the feature flag information to RUM:
+
+```javascript
+datadogRum.addFeatureFlagEvaluation(key, value);
+```
+
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+Each time a feature flag is evaluated, add the following function to send the feature flag information to RUM:
+
+   ```swift
+   RUMMonitor.shared().addFeatureFlagEvaluation(key, value);
+   ```
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+Each time a feature flag is evaluated, add the following function to send the feature flag information to RUM:
+
+   ```kotlin
+   GlobalRumMonitor.get().addFeatureFlagEvaluation(key, value);
+   ```
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+Each time a feature flag is evaluated, add the following function to send the feature flag information to RUM:
+
+   ```dart
+   DatadogSdk.instance.rum?.addFeatureFlagEvaluation(key, value);
+   ```
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+Each time a feature flag is evaluated, add the following function to send the feature flag information to RUM:
+
+   ```javascript
+   DdRum.addFeatureFlagEvaluation(key, value);
+   ```
+
+{{% /tab %}}
+{{< /tabs >}}
+
+### DevCycle integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize DevCycle's SDK and subscribe to the `variableEvaluated` event, choosing to subscribe to all variable evaluations `variableEvaluated:*` or particular variable evaluations `variableEvaluated:my-variable-key`.
+
+For more information about initializing DevCycle's SDK, see [DevCycle's JavaScript SDK documentation][5]. For more information about DevCycle's event system, see [DevCycle's SDK event documentation][6].
+
+```javascript
+const user = { user_id: "<USER_ID>" };
+const dvcOptions = { ... };
+const dvcClient = initialize("<DVC_CLIENT_SDK_KEY>", user, dvcOptions);
+...
+dvcClient.subscribe(
+    "variableEvaluated:*",
+    (key, variable) => {
+        // track all variable evaluations
+        datadogRum.addFeatureFlagEvaluation(key, variable.value);
+    }
+)
+...
+dvcClient.subscribe(
+    "variableEvaluated:my-variable-key",
+    (key, variable) => {
+        // track a particular variable evaluation
+        datadogRum.addFeatureFlagEvaluation(key, variable.value);
+    }
+)
+```
+
+
+[5]: https://docs.devcycle.com/sdk/client-side-sdks/javascript/javascript-install
+[6]: https://docs.devcycle.com/sdk/client-side-sdks/javascript/javascript-usage#subscribing-to-sdk-events
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+DevCycle does not support this integration. Create a ticket with [DevCycle][1] to request this feature.
+
+[1]: https://devcycle.com/contact/request-support
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+DevCycle does not support this integration. Create a ticket with [DevCycle][1] to request this feature.
+
+[1]: https://devcycle.com/contact/request-support
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+DevCycle does not support this integration. Create a ticket with [DevCycle][1] to request this feature.
+
+[1]: https://devcycle.com/contact/request-support
+
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+DevCycle does not support this integration. Create a ticket with [DevCycle][1] to request this feature.
+
+[1]: https://devcycle.com/contact/request-support
+
+{{% /tab %}}
+{{< /tabs >}}
+
+### Eppo integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize Eppo's SDK and create an assignment logger that additionally reports feature flag evaluations to Datadog using the snippet of code shown below.
+
+For more information about initializing Eppo's SDK, see [Eppo's JavaScript SDK documentation][1].
+
+```typescript
+const assignmentLogger: IAssignmentLogger = {
+  logAssignment(assignment) {
+    datadogRum.addFeatureFlagEvaluation(assignment.featureFlag, assignment.variation);
+  },
+};
+
+await eppoInit({
+  apiKey: "<API_KEY>",
+  assignmentLogger,
+});
+```
+
+[1]: https://docs.geteppo.com/sdks/client-sdks/javascript
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+Initialize Eppo's SDK and create an assignment logger that additionally reports feature flag evaluations to Datadog using the snippet of code shown below.
+
+For more information about initializing Eppo's SDK, see [Eppo's iOS SDK documentation][1].
+
+```swift
+func IAssignmentLogger(assignment: Assignment) {
+  RUMMonitor.shared().addFeatureFlagEvaluation(featureFlag: assignment.featureFlag, variation: assignment.variation)
+}
+
+let eppoClient = EppoClient(apiKey: "mock-api-key", assignmentLogger: IAssignmentLogger)
+```
+
+[1]: https://docs.geteppo.com/sdks/client-sdks/ios
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+Initialize Eppo's SDK and create an assignment logger that additionally reports feature flag evaluations to Datadog using the snippet of code shown below.
+
+For more information about initializing Eppo's SDK, see [Eppo's Android SDK documentation][1].
+
+```java
+AssignmentLogger logger = new AssignmentLogger() {
+    @Override
+    public void logAssignment(Assignment assignment) {
+      GlobalRumMonitor.get().addFeatureFlagEvaluation(assignment.getFeatureFlag(), assignment.getVariation());
+    }
+};
+
+EppoClient eppoClient = new EppoClient.Builder()
+    .apiKey("YOUR_API_KEY")
+    .assignmentLogger(logger)
+    .application(application)
+    .buildAndInit();
+```
+
+
+[1]: https://docs.geteppo.com/sdks/client-sdks/android
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+Eppo does not support this integration. [Contact Eppo][1] to request this feature.
+
+[1]: mailto:support@geteppo.com
+
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+Initialize Eppo's SDK and create an assignment logger that additionally reports feature flag evaluations to Datadog using the snippet of code shown below.
+
+For more information about initializing Eppo's SDK, see [Eppo's React native SDK documentation][1].
+
+```typescript
+const assignmentLogger: IAssignmentLogger = {
+  logAssignment(assignment) {
+    DdRum.addFeatureFlagEvaluation(assignment.featureFlag, assignment.variation);
+  },
+};
+
+await eppoInit({
+  apiKey: "<API_KEY>",
+  assignmentLogger,
+});
+```
+
+[1]: https://docs.geteppo.com/sdks/client-sdks/react-native
+
+{{% /tab %}}
+{{< /tabs >}}
+
+### Flagsmith integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize Flagsmith's SDK with the `datadogRum` option, which reports feature flag evaluations to Datadog, as shown in the snippet below.
+
+Optionally, you can configure the client so that Flagsmith traits are sent to Datadog with `datadogRum.setUser()`. For more information about initializing Flagsmith's SDK, see [Flagsmith's JavaScript SDK documentation][1].
+
+   ```javascript
+    // Initialize the Flagsmith SDK
+    flagsmith.init({
+        datadogRum: {
+            client: datadogRum,
+            trackTraits: true,
+        },
+        ...
+    })
+   ```
+
+
+[1]: https://docs.flagsmith.com/clients/javascript
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+Flagsmith does not support this integration. Create a ticket with Flagsmith to request this feature.
+
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+Flagsmith does not support this integration. Create a ticket with Flagsmith to request this feature.
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+Flagsmith does not support this integration. Create a ticket with Flagsmith to request this feature.
+
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+Flagsmith does not currently support this integration. Create a ticket with Flagsmith to request this feature.
+
+{{% /tab %}}
+{{< /tabs >}}
+
+### LaunchDarkly integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize LaunchDarkly's SDK and create an inspector that reports feature flag evaluations to Datadog, as shown in the snippet below.
+
+For more information about initializing LaunchDarkly's SDK, see [LaunchDarkly's JavaScript SDK documentation][1].
+
+```javascript
+const client = LDClient.initialize("<CLIENT_SIDE_ID>", "<CONTEXT>", {
+  inspectors: [
+    {
+      type: "flag-used",
+      name: "dd-inspector",
+      method: (key: string, detail: LDClient.LDEvaluationDetail) => {
+        datadogRum.addFeatureFlagEvaluation(key, detail.value);
+      },
+    },
+  ],
+});
+```
+
+
+[1]: https://docs.launchdarkly.com/sdk/client-side/javascript#initializing-the-client
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+LaunchDarkly does not support this integration. Create a ticket with LaunchDarkly to request this feature.
+
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+LaunchDarkly does not support this integration. Create a ticket with LaunchDarkly to request this feature.
+
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+LaunchDarkly does not support this integration. Create a ticket with LaunchDarkly to request this feature.
+
+
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+LaunchDarkly does not currently support this integration. Create a ticket with LaunchDarkly to request this feature.
+
+
+{{% /tab %}}
+{{< /tabs >}}
+
+
+### Split integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize Split's SDK and create an impression listener that reports feature flag evaluations to Datadog, as shown in the following snippet.
+
+For more information about initializing Split's SDK, see Split's [JavaScript SDK documentation][1].
+
+```javascript
+const factory = SplitFactory({
+    core: {
+      authorizationKey: "<APP_KEY>",
+      key: "<USER_ID>",
+    },
+    impressionListener: {
+      logImpression(impressionData) {
+          datadogRum
+              .addFeatureFlagEvaluation(
+                  impressionData.impression.feature,
+                  impressionData.impression.treatment
+              );
+    },
+  },
+});
+
+const client = factory.client();
+```
+
+
+[1]: https://help.split.io/hc/en-us/articles/360020448791-JavaScript-SDK#2-instantiate-the-sdk-and-create-a-new-split-client
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+Initialize Split's SDK and create an inspector reporting feature flag evaluations to Datadog using the snippet of code below.
+
+For more information about initializing Split's SDK, see Split's [iOS SDK documentation][1].
+
+```swift
+  let config = SplitClientConfig()
+  // Send the feature flag when Split reports the impression
+  config.impressionListener = { impression in
+      if let feature = impression.feature,
+          let treatment = impression.treatment {
+          RUMMonitor.shared().addFeatureFlagEvaluation(name: feature, value: treatment)
+      }
+  }
+```
+
+
+[1]: https://help.split.io/hc/en-us/articles/360020401491-iOS-SDK
+{{% /tab %}}
+{{% tab "Android" %}}
+
+Initialize Split's SDK and create an inspector reporting feature flag evaluations to Datadog using the snippet of code below.
+
+For more information about initializing Split's SDK, see Split's [Android SDK documentation][1].
+
+```kotlin
+  internal class DatadogSplitImpressionListener : ImpressionListener {
+    override fun log(impression: Impression) {
+        // Send the feature flag when Split reports the impression
+        GlobalRumMonitor.get().addFeatureFlagEvaluation(
+            impression.split(),
+            impression.treatment()
+        )
+    }
+    override fun close() {
+    }
+  }
+
+  // In initialization:
+  val apikey = BuildConfig.SPLIT_API_KEY
+  val config = SplitClientConfig.builder()
+      .impressionListener(DatadogSplitImpressionListener())
+      .build()
+```
+
+
+[1]: https://help.split.io/hc/en-us/articles/360020343291-Android-SDK
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+Initialize Split's SDK and create an inspector reporting feature flag evaluations to Datadog using the snippet of code below.
+
+For more information about initializing Split's SDK, see Split's [Flutter plugin documentation][1].
+
+```dart
+  StreamSubscription<Impression> impressionsStream = _split.impressionsStream().listen((impression) {
+    // Send the feature flag when Split reports the impression
+    final split = impression.split;
+    final treatment = impression.treatment;
+    if (split != null && treatment != null) {
+      DatadogSdk.instance.rum?.addFeatureFlagEvaluation(split, treatment);
+    }
+  });
+```
+
+
+[1]: https://help.split.io/hc/en-us/articles/8096158017165-Flutter-plugin
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+Initialize Split's SDK and create an impression listener that reports feature flag evaluations to Datadog, as shown in the following snippet.
+
+For more information about initializing Split's SDK, see Split's [React Native SDK documentation][1].
+
+```javascript
+const factory = SplitFactory({
+    core: {
+      authorizationKey: "<APP_KEY>",
+      key: "<USER_ID>",
+    },
+    impressionListener: {
+      logImpression(impressionData) {
+          DdRum
+              .addFeatureFlagEvaluation(
+                  impressionData.impression.feature,
+                  impressionData.impression.treatment
+              );
+    },
+  },
+});
+
+const client = factory.client();
+```
+
+
+[1]: https://help.split.io/hc/en-us/articles/4406066357901-React-Native-SDK#2-instantiate-the-sdk-and-create-a-new-split-client
+{{% /tab %}}
+{{< /tabs >}}
+
+### Statsig integration
+
+Before you initialize this feature flag integration, make sure you've [set up RUM monitoring](#set-up-rum-monitoring).
+
+{{< tabs >}}
+{{% tab "Browser" %}}
+
+Initialize Statsig's SDK with `statsig.initialize`.
+
+1. Update your Browser RUM SDK to version 4.25.0 or above.
+2. Initialize the RUM SDK and configure the `enableExperimentalFeatures` initialization parameter with `["feature_flags"]`.
+3. Initialize [Statsig's SDK][1] (`>= v4.34.0`) and implement the `gateEvaluationCallback` option as shown below:
+
+   ```javascript
+    await statsig.initialize('client-<STATSIG CLIENT KEY>',
+    {userID: '<USER ID>'},
+    {
+        gateEvaluationCallback: (key, value) => {
+            datadogRum.addFeatureFlagEvaluation(key, value);
+        }
+    }
+    );
+   ```
+
+[1]: https://docs.statsig.com/client/jsClientSDK
+{{% /tab %}}
+{{% tab "iOS" %}}
+
+Statsig does not support this integration. Contact support@statsig.com to request this feature.
+
+{{% /tab %}}
+{{% tab "Android" %}}
+
+Statsig does not support this integration. Contact support@statsig.com to request this feature.
+
+{{% /tab %}}
+{{% tab "Flutter" %}}
+
+Statsig does not support this integration. Contact support@statsig.com to request this feature.
+
+{{% /tab %}}
+{{% tab "React Native" %}}
+
+Statsig does not currently support this integration. Contact support@statsig.com to request this feature.
+
+{{% /tab %}}
+{{< /tabs >}}
+
+## Next steps
+
+[View and analyze][1] your feature flags.
+
+## Further reading
+
+{{< partial name="whats-next/whats-next.html" >}}
+
+[1]: /real_user_monitoring/feature_flag_tracking/using_feature_flags
diff --git a/content/en/real_user_monitoring/feature_flag_tracking/using_feature_flags.md b/content/en/real_user_monitoring/feature_flag_tracking/using_feature_flags.md
new file mode 100644
index 0000000000000..3331f441544b1
--- /dev/null
+++ b/content/en/real_user_monitoring/feature_flag_tracking/using_feature_flags.md
@@ -0,0 +1,81 @@
+---
+title: Using Feature Flags
+beta: true
+description: View and understand your feature flag's health and usage.
+aliases:
+- /real_user_monitoring/guide/getting-started-feature-flags/
+- /real_user_monitoring/guide/setup-feature-flag-data-collection/
+disable_toc: false
+further_reading:
+- link: "/real_user_monitoring/guide/setup-feature-flag-data-collection/"
+  tag: "Documentation"
+  text: "Set up Feature Flag data collection"
+- link: "/real_user_monitoring/explorer/"
+  tag: "Documentation"
+  text: "Learn about the RUM Explorer"
+- link: "https://www.datadoghq.com/blog/feature-flag-tracking/"
+  tag: "Blog"
+  text: "Ensure release safety with feature flag tracking in Datadog RUM"
+---
+
+Once you have set up your feature flag data collection, navigate to the [**Feature Flags**][1] tab within RUM.
+
+From this view, you can investigate any questions you have about your feature flag's health and usage.
+- Monitor the number of users experiencing each variant and see summary statistics for your feature flag.
+- Check the [status](#feature-flag-status) of your feature flags to see if any can be removed for code cleanup.
+- View which pages your feature flags are evaluated on.
+
+Feature flags appear in the context of the events where they are evaluated, which means they appear on the views where the feature flag code logic runs.
+
+{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-list-2.png" alt="View a list of your feature flags to investigate any questions you have about your feature flag's health and usage" style="width:90%;" >}}
+
+## Search and filter
+Search and filter your feature flags by typing in the search bar. You can also use the faceted search to narrow down, broaden, or shift your focus on subsets of feature flags you are interested in.
+
+{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-list-search-filter.png" alt="Feature Flag list search bar and filtering" style="width:90%;" >}}
+
+## Feature flag status
+There are three possible feature flag statuses:
+
+Active
+: The feature flag has evaluated different variants for the past 2 weeks.
+
+Inactive
+: For the past 2 weeks, there have only been feature flag evaluations for your control variant.
+
+Out to 100%
+: For the past 2 weeks, there have only been feature flag evaluations for one of your _non-control_ variants.
+
+
+## Analyze your feature flags
+To get more details about the health and performance of your feature flag, click the flag in the list to open a dedicated Feature Flag analysis dashboard. This dashboard provides an overview of your feature flag's performance, displaying information about user sessions, changes in your Core Web Vitals, and error rates.
+
+These out-of-the-box graphs are aggregated across your flag variants, making it easy to spot problems in your feature releases before they turn into serious issues. Use this dashboard to monitor your feature releases and roll back quickly as soon as you spot an issue, so you can avoid negative user experiences.
+
+{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-details-page.mp4" alt="Feature Flag details page - Users overview" video=true width=90% >}}
+
+- The **Users** tab provides high-level summary statistics for your feature flag and allows you to further analyze the users viewing each of your feature flag variants by any attribute. If you want to understand what it looks like for someone who experienced a certain variant versus another, you can watch a [Session Replay][2] for each case.
+
+- The **Issues** tab gives you a view of the errors that are occurring in your application for user sessions that have your feature flag. Check if any issues detected by [Error Tracking][3] occurred for a specific variant of your feature flag and might be related to your changes.
+
+- The **Performance** tab allows you to understand if one of your feature flag variants has caused poor performance. You can view your Core Web Vitals and loading time for each variant to determine whether one of your variants is negatively impacting your application's performance.
+
+## Build custom views from Feature Flag data using the RUM Explorer
+Search through all the data collected by RUM in the [RUM Explorer][4] to surface trends on feature flags, analyze patterns with greater context, or export them into [dashboards][5] and [monitors][6]. 
+
+You can search your Sessions, Views, or Errors in the RUM Explorer using the `@feature_flags.{flag_name}` attribute to scope down and focus on events where users were shown a specific experience.
+
+You can compare metrics that are important to you and your teams by grouping your query by `@feature_flags.{flag_name}`. For example, if you want to understand how your new checkout flow is affecting the conversion rate from the checkout page to users making a purchase, you can add a "Group by" on the conversion rate graph.
+
+{{< img src="real_user_monitoring/feature_flag_tracking/feature-flag-rum-explorer.png" alt="Feature flag attributes in the RUM Explorer" style="width:90%;" >}}
+
+## Further reading
+
+{{< partial name="whats-next/whats-next.html" >}}
+
+[1]: https://app.datadoghq.com/rum/feature-flags
+[2]: /real_user_monitoring/session_replay/browser/
+[3]: /real_user_monitoring/error_tracking/explorer/#explore-your-issues
+[4]: https://app.datadoghq.com/rum/explorer
+[5]: /dashboards/
+[6]: /monitors/#create-monitors
\ No newline at end of file
diff --git a/content/en/real_user_monitoring/guide/setup-feature-flag-data-collection.md b/content/en/real_user_monitoring/guide/setup-feature-flag-data-collection.md
index e062d0a93fc45..ee25526d9f08c 100644
--- a/content/en/real_user_monitoring/guide/setup-feature-flag-data-collection.md
+++ b/content/en/real_user_monitoring/guide/setup-feature-flag-data-collection.md
@@ -885,7 +885,7 @@ Filtering your **Errors** with the `@feature_flags.{flag_name}` attribute, you c
 
 ## Troubleshooting
 
-### Feature flag data is not reflecting the expected information
+### My feature flag data doesn't reflect what I expect to see
 Feature flags show up in the context of events where they are evaluated, meaning they should show up on the views that the feature flag code logic is run on.
 
 Depending on how you've structured your code and set up your feature flags, you may see unexpected feature flags appear in the context of some events.
@@ -901,14 +901,6 @@ Here are a few examples of reasons why your feature flag is being evaluated on u
 
 When performing your investigations, you can also scope your data for `View Name`'s that are relevant to your feature flag.
 
-### Feature flag naming
-
-The following special characters are not supported for [Feature Flag Tracking][5]: `.`, `:`, `+`, `-`, `=`, `&&`, `||`, `>`, `<`, `!`, `(`, `)`, `{`, `}`, `[`, `]`, `^`, `"`, `“`, `”`, `~`, `*`, `?`, `\`. Datadog recommends avoiding these characters when possible in your feature flag names. If you are required to use one of these characters, replace the character before sending the data to Datadog. For example:
-
-```javascript
-datadogRum.addFeatureFlagEvaluation(key.replace(':', '_'), value);
-```
-
 
 ## Further Reading
 {{< partial name="whats-next/whats-next.html" >}}
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/advanced_configuration.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/advanced_configuration.md
index a42fe42f2d989..c4a1cd1c7386e 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/advanced_configuration.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/advanced_configuration.md
@@ -2,7 +2,9 @@
 title: Kotlin Multiplatform Advanced Configuration
 aliases:
     - /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin-multiplatform
+    - /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin_multiplatform
     - /real_user_monitoring/kotlin-multiplatform
+    - /real_user_monitoring/kotlin_multiplatform
 further_reading:
 - link: https://github.com/DataDog/dd-sdk-kotlin-multiplatform
   tag: "Source Code"
@@ -444,12 +446,12 @@ GlobalRumMonitor.get().getCurrentSessionId { sessionId ->
 {{< partial name="whats-next/whats-next.html" >}}
 
 [1]: https://app.datadoghq.com/rum/application/create
-[2]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform
-[3]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/data_collected
-[4]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/advanced_configuration/#automatically-track-views
-[5]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/advanced_configuration/#initialization-parameters
-[6]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/#initialize-rum-ktor-plugin-to-track-network-events-made-with-ktor
-[7]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/data_collected
+[2]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform
+[3]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/data_collected
+[4]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/advanced_configuration/#automatically-track-views
+[5]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/advanced_configuration/#initialization-parameters
+[6]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/#initialize-rum-ktor-plugin-to-track-network-events-made-with-ktor
+[7]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/data_collected
 [8]: /real_user_monitoring/explorer/search/#setup-facets-and-measures
-[9]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/#sending-data-when-device-is-offline
+[9]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/#sending-data-when-device-is-offline
 [10]: /real_user_monitoring/error_tracking/mobile/ios/#add-app-hang-reporting
\ No newline at end of file
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/data_collected.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/data_collected.md
index 9ea3ca5e950e4..90b1e285a011d 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/data_collected.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/data_collected.md
@@ -3,6 +3,7 @@ title: Kotlin Multiplatform Data Collected
 description: Learn about the data collected by Kotlin Multiplatform Monitoring.
 aliases:
 - /real_user_monitoring/kotlin-multiplatform/data_collected/
+- /real_user_monitoring/kotlin_multiplatform/data_collected/
 - /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/data_collected/
 further_reading:
 - link: https://github.com/DataDog/dd-sdk-kotlin-multiplatform
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/error_tracking.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/error_tracking.md
index 712bc8477e94e..aa4de500b0a3f 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/error_tracking.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/error_tracking.md
@@ -3,6 +3,7 @@ title: Kotlin Multiplatform Crash Reporting and Error Tracking
 description: Set up Error Tracking for your Kotlin Multiplatform applications.
 aliases:
     - /real_user_monitoring/error_tracking/kotlin-multiplatform
+    - /real_user_monitoring/error_tracking/kotlin_multiplatform
 further_reading:
 - link: '/real_user_monitoring/error_tracking/'
   tag: 'Documentation'
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/integrated_libraries.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/integrated_libraries.md
index 93bec59ea4617..ffea5cb065b90 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/integrated_libraries.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/integrated_libraries.md
@@ -2,6 +2,7 @@
 title: Kotlin Multiplatform Libraries for RUM
 aliases:
 - /real_user_monitoring/kotlin-multiplatform/integrated_libraries/
+- /real_user_monitoring/kotlin_multiplatform/integrated_libraries/
 - /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/integrated_libraries/
 further_reading:
 - link: https://github.com/DataDog/dd-sdk-kotlin-multiplatform
@@ -49,4 +50,4 @@ val ktorClient = HttpClient {
 
 {{< partial name="whats-next/whats-next.html" >}}
 
-[1]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/
+[1]: /real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/setup.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/setup.md
index a0751b02f9934..fcf61ab534a8d 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/setup.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/setup.md
@@ -5,8 +5,11 @@ private: true
 description: Collect RUM and Error Tracking data from your Kotlin Multiplatform projects.
 aliases:
     - /real_user_monitoring/kotlin-multiplatform/
+    - /real_user_monitoring/kotlin_multiplatform/
     - /real_user_monitoring/kotlin-multiplatform/setup
+    - /real_user_monitoring/kotlin_multiplatform/setup
     - /real_user_monitoring/mobile_and_tv_monitoring/setup/kotlin-multiplatform
+    - /real_user_monitoring/mobile_and_tv_monitoring/setup/kotlin_multiplatform
 further_reading:
 - link: https://github.com/DataDog/dd-sdk-kotlin-multiplatform
   tag: "Source Code"
@@ -115,7 +118,7 @@ If you are integrating Kotlin Multiplatform library as a framework with an `embe
 3. To disable automatic user data collection for either client IP or geolocation data, uncheck the boxes for those settings. For more information, see [RUM Kotlin Multiplatform Data Collected][2].
 
 [1]: https://app.datadoghq.com/rum/application/create
-[2]: /real_user_monitoring/kotlin-multiplatform/data_collected/
+[2]: /real_user_monitoring/kotlin_multiplatform/data_collected/
 
 {{% /tab %}}
 {{% tab "Error Tracking" %}}
@@ -125,7 +128,7 @@ If you are integrating Kotlin Multiplatform library as a framework with an `embe
 3. To disable automatic user data collection for either client IP or geolocation data, uncheck the boxes for those settings. For more information, see [RUM Kotlin Multiplatform Data Collected][2].
 
 [1]: https://app.datadoghq.com/error-tracking/settings/setup/client
-[2]: /real_user_monitoring/kotlin-multiplatform/data_collected/
+[2]: /real_user_monitoring/kotlin_multiplatform/data_collected/
 
 {{% /tab %}}
 {{< /tabs >}}
@@ -400,10 +403,10 @@ This means that even if users open your application while offline, no data is lo
 [4]: /account_management/api-app-keys/#api-keys
 [5]: /account_management/api-app-keys/#client-tokens
 [6]: /getting_started/tagging/using_tags/
-[7]: /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin-multiplatform/#initialization-parameters
+[7]: /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin_multiplatform/#initialization-parameters
 [8]: https://app.datadoghq.com/rum/application/create
-[9]: /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin-multiplatform/#automatically-track-views
+[9]: /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin_multiplatform/#automatically-track-views
 [10]: https://github.com/DataDog/dd-sdk-kotlin-multiplatform/tree/develop/integrations/ktor
-[11]: /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin-multiplatform/#custom-views
-[12]: /real_user_monitoring/error_tracking/kotlin-multiplatform/
+[11]: /real_user_monitoring/mobile_and_tv_monitoring/advanced_configuration/kotlin_multiplatform/#custom-views
+[12]: /real_user_monitoring/error_tracking/kotlin_multiplatform/
 [13]: /real_user_monitoring/explorer/
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/troubleshooting.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/troubleshooting.md
index f59e30de58526..54b9c4fe21607 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/troubleshooting.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/troubleshooting.md
@@ -3,6 +3,7 @@ title: Troubleshooting Kotlin Multiplatform SDK issues
 description: Learn how to troubleshoot issues with Kotlin Multiplatform Monitoring.
 aliases:
 - /real_user_monitoring/mobile_and_tv_monitoring/troubleshooting/kotlin-multiplatform
+- /real_user_monitoring/mobile_and_tv_monitoring/troubleshooting/kotlin_multiplatform
 further_reading:
 - link: https://github.com/DataDog/dd-sdk-kotlin-multiplatform
   tag: "Source Code"
diff --git a/content/en/real_user_monitoring/mobile_and_tv_monitoring/web_view_tracking/_index.md b/content/en/real_user_monitoring/mobile_and_tv_monitoring/web_view_tracking/_index.md
index 7fc390cfa1090..86498cba33252 100644
--- a/content/en/real_user_monitoring/mobile_and_tv_monitoring/web_view_tracking/_index.md
+++ b/content/en/real_user_monitoring/mobile_and_tv_monitoring/web_view_tracking/_index.md
@@ -6,6 +6,7 @@ aliases:
   - /real_user_monitoring/flutter/web_view_tracking
   - /real_user_monitoring/reactnative/web_view_tracking
   - /real_user_monitoring/kotlin-multiplatform/web_view_tracking
+  - /real_user_monitoring/kotlin_multiplatform/web_view_tracking
   - /real_user_monitoring/mobile_and_tv_monitoring/unity/web_view_tracking
 further_reading:
   - link: https://github.com/DataDog/dd-sdk-android
@@ -134,7 +135,7 @@ Set up the RUM Browser SDK on the web page you want rendered on your mobile appl
 
 Add `DatadogWebViewTracking` library to your application by following the guide [here][1].
 
-[1]: /real_user_monitoring/mobile_and_tv_monitoring/setup/kotlin-multiplatform/#add-native-dependencies-for-ios
+[1]: /real_user_monitoring/mobile_and_tv_monitoring/setup/kotlin_multiplatform/#add-native-dependencies-for-ios
 
 {{% /tab %}}
 {{< /tabs >}}
@@ -347,9 +348,9 @@ The `allowedHosts` parameter of `DatadogInAppWebViewUserScript` matches the give
 `allowedHosts` matches the given hosts and their subdomain. No regular expressions are allowed.
 
 [1]: https://search.maven.org/artifact/com.datadoghq/dd-sdk-kotlin-multiplatform-rum
-[2]: /real_user_monitoring/kotlin-multiplatform/#setup
+[2]: /real_user_monitoring/kotlin_multiplatform/#setup
 [3]: https://search.maven.org/artifact/com.datadoghq/dd-sdk-kotlin-multiplatform-logs
-[4]: /logs/log_collection/kotlin-multiplatform/#setup
+[4]: /logs/log_collection/kotlin_multiplatform/#setup
 
 {{% /tab %}}
 {{< /tabs >}}
diff --git a/content/en/real_user_monitoring/session_replay/mobile/setup_and_configuration.md b/content/en/real_user_monitoring/session_replay/mobile/setup_and_configuration.md
index 079322c4fe33f..66da7d62a8c26 100644
--- a/content/en/real_user_monitoring/session_replay/mobile/setup_and_configuration.md
+++ b/content/en/real_user_monitoring/session_replay/mobile/setup_and_configuration.md
@@ -133,8 +133,8 @@ To set up Mobile Session Replay for Kotlin Multiplatform:
 4. In case you need Material support on Android, call the `SessionReplayConfiguration.Builder.addExtensionSupport(MaterialExtensionSupport())` method, available in the Android source set.
 
 [1]: https://central.sonatype.com/artifact/com.datadoghq/dd-sdk-kotlin-multiplatform-session-replay/versions
-[2]: /real_user_monitoring/kotlin-multiplatform/
-[3]: /real_user_monitoring/kotlin-multiplatform/#add-native-dependencies-for-ios
+[2]: /real_user_monitoring/kotlin_multiplatform/
+[3]: /real_user_monitoring/kotlin_multiplatform/#add-native-dependencies-for-ios
 
 {{% /tab %}}
 
diff --git a/content/en/service_catalog/customize/_index.md b/content/en/service_catalog/customize/_index.md
index dbd2d298f0f49..9ab4065e47974 100644
--- a/content/en/service_catalog/customize/_index.md
+++ b/content/en/service_catalog/customize/_index.md
@@ -142,7 +142,7 @@ With [Service Catalog metadata schema 2.2][5], you can specify the type and lang
 [4]: https://forms.gle/zbLfnJYhD5Ab4Wr18
 [5]: https://github.com/DataDog/schema/tree/main/service-catalog/v2.2
 [6]: /actions/actions_catalog/
-[7]: /tracing/guide/inferred-service-opt-in/?tab=java#global-default-service-naming-migration
+[7]: /tracing/services/inferred_services
 [8]: /tracing/guide/service_overrides/#remove-service-overrides
 [9]: /tracing/guide/service_overrides/
 [10]: /service_catalog/service_definitions/#add-metadata-with-automation
diff --git a/content/en/tests/flaky_test_management/auto_test_retries.md b/content/en/tests/flaky_test_management/auto_test_retries.md
index cc19024b6b52f..d75fe68280eb5 100644
--- a/content/en/tests/flaky_test_management/auto_test_retries.md
+++ b/content/en/tests/flaky_test_management/auto_test_retries.md
@@ -33,6 +33,8 @@ Ensure [Test Optimization][1] is configured for your test runs.
 
 `dd-trace-java >= 1.34.0`
 
+The test framework compatibility is the same as [Test Optimization Compatibility][3], with the exception of `Scala Weaver`.
+
 ### Configuration
 After you have set up Test Optimization, you can configure Auto Test Retries from the [Test Service Settings page][1].
 
@@ -46,6 +48,7 @@ This behavior can be fine-tuned with the following environment variables:
 
 [1]: https://app.datadoghq.com/ci/settings/test-optimization
 [2]: /tests/flaky_test_management/
+[3]: /tests/setup/java/#compatibility
 {{% /tab %}}
 
 {{% tab "Javascript" %}}
diff --git a/content/en/tests/flaky_test_management/early_flake_detection.md b/content/en/tests/flaky_test_management/early_flake_detection.md
index 6182bd7d9108f..37a396a2427ec 100644
--- a/content/en/tests/flaky_test_management/early_flake_detection.md
+++ b/content/en/tests/flaky_test_management/early_flake_detection.md
@@ -79,6 +79,9 @@ The test framework compatibility is the same as [Test Optimization Compatibility
 
 `dd-trace-java>=1.34.0`
 
+The test framework compatibility is the same as [Test Optimization Compatibility][2], with the exception of `Scala Weaver`.
+
+[2]: /tests/setup/java/#compatibility
 {{% /tab %}}
 
 {{% tab ".NET" %}}
diff --git a/content/en/tracing/guide/inferred-service-opt-in.md b/content/en/tracing/guide/inferred-service-opt-in.md
deleted file mode 100644
index 3813fb1822122..0000000000000
--- a/content/en/tracing/guide/inferred-service-opt-in.md
+++ /dev/null
@@ -1,277 +0,0 @@
----
-title: Inferred Service dependencies
-disable_toc: false
-further_reading:
-- link: "/tracing/services/"
-  tag: "Documentation"
-  text: "Service Observability"
-- link: "/tracing/trace_collection/"
-  tag: "Documentation"
-  text: "Sending Traces to Datadog"
-- link: "/tracing/trace_collection/dd_libraries/"
-  tag: "Documentation"
-  text: "Add the Datadog Tracing Library"
-- link: "/tracing/guide/service_overrides"
-  tag: "Documentation"
-  text: "Service Overrides"
----
-
-## Overview
-
-Datadog can automatically discover the dependencies for an instrumented service, such as a database, a queue, or a third-party API, even if that dependency hasn't been instrumented yet. By analyzing outbound requests from your instrumented services, Datadog infers the presence of these dependencies and collects associated performance metrics.
-
-With the new inferred entities experience, you can filter [Service Catalog][3] entries by entity type, such as database, queue, or third-party API. This allows you to better visualize service dependencies using the [Service Page dependency map](#service-page-dependency-map) and APM features.
-
-To determine the names and types of the inferred service dependencies, Datadog uses standard span attributes and maps them to `peer.*` attributes. For the full list of `peer.*` attributes, see [Inferred service dependencies nomenclature](#inferred-service-dependencies-nomemclature). Inferred external APIs use the default naming scheme `net.peer.name`. For example, `api.stripe.com`, `api.twilio.com`, `us6.api.mailchimp.com`. Inferred databases use the default naming scheme `db.instance`.
-
-If you're using the Go, Java, Node.js, PHP, .NET, or Ruby tracer, you can customize the default names for inferred entities.
-
-### Service page Dependency map
-
-Use the dependency map to visualize service-to-service communication and gain insight into system components such as databases, queues, and third-party dependencies. You can group dependencies by type and filter by Requests, Latency, or Errors to identify slow or failing connections.
-
-{{< img src="tracing/services/service_page/dependencies.png" alt="Service page service dependency map" style="width:100%;">}}
-
-## Opt in
-
-<div class="alert alert-warning">Only go through migration steps once Datadog support confirmed the feature is enabled for you on the Datadog side.</div>
-
-To opt in, Datadog recommends you adjust your:
-- [Datadog Agent](#datadog-agent-configuration) (or [OpenTelemetry collector](#opentelemetry-collector)) configuration
-- [APM tracing libraries](#apm-tracing-library-configuration) configuration
-
-### Datadog Agent configuration
-
-#### Datadog Agent 7.55.1 and higher
-
-From Datadog Agent version >= [7.55.1][9], update your `datadog.yaml` configuration file with the following:
-
-{{< code-block lang="yaml" filename="datadog.yaml" collapsible="true" >}}
-
-apm_config:
-  compute_stats_by_span_kind: true
-  peer_tags_aggregation: true
-
-{{< /code-block >}}
-
-Alternatively, configure this by setting the following environment variables in your Datadog Agent launch configuration:
-
-{{< code-block collapsible="true" lang="yaml" >}}
-
-DD_APM_COMPUTE_STATS_BY_SPAN_KIND=true 
-DD_APM_PEER_TAGS_AGGREGATION=true
-
-{{< /code-block >}}
-
-#### Datadog Agent version between 7.50.3 and 7.54.1
-
-If you use a Datadog Agent version >= [7.50.3][4] and <= 7.54.1, update your `datadog.yaml` configuration file with the following:
-
-{{< code-block lang="yaml" filename="datadog.yaml" collapsible="true" >}}
-
-apm_config:
-  compute_stats_by_span_kind: true
-  peer_tags_aggregation: true
-  peer_tags: ["_dd.base_service","amqp.destination","amqp.exchange","amqp.queue","aws.queue.name","aws.s3.bucket","bucketname","cassandra.keyspace","db.cassandra.contact.points","db.couchbase.seed.nodes","db.hostname","db.instance","db.name","db.namespace","db.system","grpc.host","hostname","http.host","http.server_name","messaging.destination","messaging.destination.name","messaging.kafka.bootstrap.servers","messaging.rabbitmq.exchange","messaging.system","mongodb.db","msmq.queue.path","net.peer.name","network.destination.name","peer.hostname","peer.service","queuename","rpc.service","rpc.system","server.address","streamname","tablename","topicname"]
-
-{{< /code-block >}}
-
-Alternatively, configure this by setting the following environment variables in your Datadog Agent launch configuration:
-
-{{< code-block collapsible="true" lang="yaml" >}}
-
-DD_APM_COMPUTE_STATS_BY_SPAN_KIND=true 
-DD_APM_PEER_TAGS_AGGREGATION=true
-DD_APM_PEER_TAGS='["_dd.base_service","amqp.destination","amqp.exchange","amqp.queue","aws.queue.name","aws.s3.bucket","bucketname","cassandra.keyspace","db.cassandra.contact.points","db.couchbase.seed.nodes","db.hostname","db.instance","db.name","db.namespace","db.system","grpc.host","hostname","http.host","http.server_name","messaging.destination","messaging.destination.name","messaging.kafka.bootstrap.servers","messaging.rabbitmq.exchange","messaging.system","mongodb.db","msmq.queue.path","net.peer.name","network.destination.name","peer.hostname","peer.service","queuename","rpc.service","rpc.system","server.address","streamname","tablename","topicname"]'
-
-{{< /code-block >}}
-
-
-#### Helm 
-Include the same set of environment variables in your `values.yaml` [file][8].
-
-
-### OpenTelemetry Collector 
-
-Minimum version recommended: opentelemetry-collector-contrib >= [v0.95.0][7].
-
-Example [collector.yaml][6].
-
-{{< code-block lang="yaml"  collapsible="true" >}}
-
-connectors:
-  datadog/connector:
-    traces:
-      compute_stats_by_span_kind: true
-      peer_tags_aggregation: true
-      peer_tags: ["_dd.base_service","amqp.destination","amqp.exchange","amqp.queue","aws.queue.name","aws.s3.bucket","bucketname","db.cassandra.contact.points","db.couchbase.seed.nodes","db.hostname","db.instance","db.name","db.namespace","db.system","grpc.host","hostname","http.host","http.server_name","messaging.destination","messaging.destination.name","messaging.kafka.bootstrap.servers","messaging.rabbitmq.exchange","messaging.system","mongodb.db","msmq.queue.path","net.peer.name","network.destination.name","peer.hostname","peer.service","queuename","rpc.service","rpc.system","server.address","streamname","tablename","topicname"]
-
-{{< /code-block >}}
-
-If your collector version is below [v0.95.0][7], use an exporter configuration with the following `peer_tags`:
-
-
-{{< code-block lang="yaml" collapsible="true" >}}
-
-exporters:
-  datadog:
-    traces:
-      compute_stats_by_span_kind: true
-      peer_tags_aggregation: true
-      peer_tags: ["_dd.base_service","amqp.destination","amqp.exchange","amqp.queue","aws.queue.name","aws.s3.bucket","bucketname","db.cassandra.contact.points","db.couchbase.seed.nodes","db.hostname","db.instance","db.name","db.namespace","db.system","grpc.host","hostname","http.host","http.server_name","messaging.destination","messaging.destination.name","messaging.kafka.bootstrap.servers","messaging.rabbitmq.exchange","messaging.system","mongodb.db","msmq.queue.path","net.peer.name","network.destination.name","peer.hostname","peer.service","queuename","rpc.service","rpc.system","server.address","streamname","tablename","topicname"]   
-
-{{< /code-block >}}
-
-
-### APM tracing library configuration
-
-<div class="alert alert-warning">The following steps introduce a <b>breaking change</b>: Datadog will change the way service names are captured by default. Refer to <a href="#global-default-service-naming-migration">Global default service naming migration</a>, to determine if you need to take any migration actions.</div>
-
-{{< tabs >}}
-{{% tab "Java" %}}
-
-The minimum Java tracer version required is 1.16.0. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-[Download the latest Java tracer version][1].
-
-To opt in, add the following environment variables or system properties to your tracer settings:
-
-| Environment variable | System property |
-| ---  | ----------- |
-| `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true` | `-Ddd.trace.remove.integration-service-names.enabled=true` |
-
-Remove the following settings from your configuration:
-
-| Environment variable | Reason for removal |
-| ---  | ----------- |
-| `DD_SERVICE_MAPPING` | All service names default to `DD_SERVICE`. |
-| `DD_TRACE_SPLIT_BY_TAGS` | Inferred services are automatically displayed with the introduction of the `peer.service` tag. |
-| `DD_TRACE_DB_CLIENT_SPLIT_BY_INSTANCE` | DB instances are inferred based on the on the `peer.service` tag. |
-
-[1]: https://dtdg.co/latest-java-tracer
-
-{{% /tab %}}
-
-{{% tab "Go" %}}
-
-The minimum Go tracer version required is [v1.52.0][1]. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-To opt in, add the following environment variables or system properties to your tracer settings:
-
-| Environment variable | System property |
-| ---  | ----------- |
-| `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true` | `WithGlobalServiceName(true)` |
-
-[1]: https://github.com/DataDog/dd-trace-go/releases/tag/v1.52.0
-
-{{% /tab %}}
-
-{{% tab "Node.js" %}}
-
-The minimum Node.js tracer versions required are [2.44.0][1], [3.31.0][2], or [4.10.0][3]. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-To opt in, add the following environment variables or system properties to your tracer settings:
-
-| Environment variable | System property |
-| ---  | ----------- |
-| `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true` | `spanRemoveIntegrationFromService=true` |
-
-[1]: https://github.com/DataDog/dd-trace-js/releases/tag/v2.44.0
-[2]: https://github.com/DataDog/dd-trace-js/releases/tag/v3.31.0
-[3]: https://github.com/DataDog/dd-trace-js/releases/tag/v4.10.0
-
-{{% /tab %}}
-
-{{% tab "PHP" %}}
-The minimum PHP tracer version required is [0.90.0][1]. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-To opt in, add the following environment variables or system properties to your tracer settings:
-
-| Environment variable | System property |
-| ---  | ----------- |
-| `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true` | `datadog.trace.remove_integration_service_names_enabled=true` |
-
-[1]: https://github.com/DataDog/dd-trace-php/releases/tag/0.90.0
-{{% /tab %}}
-
-{{% tab ".NET" %}}
-
-The minimum .NET tracer version required is [v2.35.0][1]. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-To opt in, add the following environment variable to your tracer settings or system properties:
-- `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true`
-
-[1]: https://github.com/DataDog/dd-trace-dotnet/releases/tag/v2.35.0
-
-{{% /tab %}}
-
-{{% tab "Python" %}}
-
-The minimum Python tracer version required is [v1.16.0][1]. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-To opt in, add the following environment variables to your tracer settings or system properties:
-
-Add the following environment variables to your tracer settings or system properties:
-- `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true`
-
-As of tracer version `v1.16.0` all libraries are supported except for Boto2.
-
-[1]: https://github.com/DataDog/dd-trace-py/releases/tag/v1.16.0
-
-{{% /tab %}}
-
-{{% tab "Ruby" %}}
-The minimum Ruby tracer version required is [v1.13.0][1]. Regular updates to the latest version are recommended to access changes and bug fixes.
-
-To opt in, add the following environment variables to your tracer settings or system properties:
-- `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED=true`
-
-[1]: https://github.com/DataDog/dd-trace-rb/releases/tag/v1.13.0
-{{% /tab %}}
-
-{{< /tabs >}}
-
-
-
-## The new nomenclature: What is changing
-
-### List of newly introduced peer.* tags 
-
-`peer.*` dimensions | Remapped from ...
---------------------|-------------------
-`peer.aws.dynamodb.table` | `tablename`
-`peer.aws.kinesis.stream` | `streamname`
-`peer.aws.s3.bucket` | `bucketname`, `aws.s3.bucket`
-`peer.aws.sqs.queue` | `queuename`
-`peer.cassandra.contact.points` | `db.cassandra.contact.points`
-`peer.couchbase.seed.nodes` | `db.couchbase.seed.nodes`
-`peer.db.name` | `db.name`, `mongodb.db`, `db.instance`, `cassandra.keyspace`, `db.namespace`
-`peer.db.system` | `db.system`
-`peer.hostname` | `peer.hostname`, `hostname`, `net.peer.name`, `db.hostname`, `network.destination.name`, `grpc.host`, `http.host`, `server.address`, `http.server_name`
-`peer.kafka.bootstrap.servers` | `messaging.kafka.bootstrap.servers`
-`peer.messaging.destination` | `topicname`, `messaging.destination`, `messaging.destination.name`, `messaging.rabbitmq.exchange`, `amqp.destination`, `amqp.queue`, `amqp.exchange`, `msmq.queue.path`, `aws.queue.name`
-`peer.messaging.system` | `messaging.system`
-`peer.rpc.service` | `rpc.service`
-`peer.rpc.system` | `rpc.system`
-`peer.service` | `peer.service`
-
-### Global default service naming migration
-
-With inferred services, service names are automatically detected from span attributes. When you enable the `DD_TRACE_REMOVE_INTEGRATION_SERVICE_NAMES_ENABLED` environment variable, it improves how service-to-service connections and inferred services are represented in Datadog visualizations, across all supported tracing library languages and integrations.
-
-<div class="alert alert-warning">Enabling this option may impact existing APM metrics, custom span metrics, trace analytics, retention filters, sensitive data scans, monitors, dashboards, or notebooks that reference the old service names. Update these assets to use the global default service tag (<code>service:&lt;DD_SERVICE&gt;</code>).</div>
-
-For instructions on how to remove service overrides and migrate to inferred services, see the [Service Overrides guide][10].
-
-## Further reading
-
-{{< partial name="whats-next/whats-next.html" >}}
-
-[3]: /tracing/service_catalog/
-[4]: https://github.com/DataDog/datadog-agent/releases/tag/7.50.3
-[5]: /agent/guide/agent-configuration-files/?tab=agentv6v7
-[6]: https://github.com/open-telemetry/opentelemetry-collector-contrib/blob/main/exporter/datadogexporter/examples/collector.yaml#L335-L357
-[7]: https://github.com/open-telemetry/opentelemetry-collector-contrib/releases
-[8]: https://github.com/DataDog/helm-charts/blob/main/charts/datadog/values.yaml#L517-L538 
-[9]: https://github.com/DataDog/datadog-agent/releases/tag/7.55.1
-[10]: /tracing/guide/service_overrides
diff --git a/content/en/tracing/guide/service_overrides.md b/content/en/tracing/guide/service_overrides.md
index af8e49b374f67..9d22e70f83dde 100644
--- a/content/en/tracing/guide/service_overrides.md
+++ b/content/en/tracing/guide/service_overrides.md
@@ -2,9 +2,9 @@
 title: Service Overrides
 disable_toc: false
 further_reading:
-- link: "/tracing/guide/inferred-service-opt-in"
+- link: "/tracing/services/inferred_services"
   tag: "Documentation"
-  text: "Opting-in to the new service representation"
+  text: "Inferred services"
 ---
 
 ## Overview
@@ -140,9 +140,9 @@ The default `DD_SERVICE` name.
 
 {{< partial name="whats-next/whats-next.html" >}}
 
-[1]: /tracing/guide/inferred-service-opt-in
+[1]: /tracing/services/inferred_services
 [2]: /tracing/trace_pipeline/generate_metrics
 [3]: /monitors/types/apm/?tab=traceanalytics
 [4]: /tracing/trace_pipeline/trace_retention/#retention-filters
 [5]: /tracing/metrics/metrics_namespace/
-[6]: https://docs.datadoghq.com/tracing/guide/inferred-service-opt-in/#list-of-newly-introduced-peer-tags
+[6]: /tracing/services/inferred_services/#peer-tags
diff --git a/content/en/tracing/services/inferred_services.md b/content/en/tracing/services/inferred_services.md
index f28c626f4f77c..c0eeab41e143a 100644
--- a/content/en/tracing/services/inferred_services.md
+++ b/content/en/tracing/services/inferred_services.md
@@ -1,5 +1,7 @@
 ---
 title: Inferred services
+aliases:
+  - /tracing/guide/inferred-service-opt-in
 further_reading:
 - link: "/tracing/services/service_page/"
   tag: "Documentation"
diff --git a/content/en/tracing/trace_collection/automatic_instrumentation/dd_libraries/go.md b/content/en/tracing/trace_collection/automatic_instrumentation/dd_libraries/go.md
index 3760d47e4b600..aa7972ce7000d 100644
--- a/content/en/tracing/trace_collection/automatic_instrumentation/dd_libraries/go.md
+++ b/content/en/tracing/trace_collection/automatic_instrumentation/dd_libraries/go.md
@@ -231,7 +231,7 @@ Some of the instrumentation performed by `orchestrion` is done callee-side (or l
 
 #### Use the tracing library
 
-You can use the [tracing library][4] ([or the v2 tracing library][5]) in your Orchestrion-built application. This is useful for instrumenting frameworks not yet supported by Orchestrion. However, be aware that this may result in duplicated trace spans in the future as Orchestrion support expands. Review the [release notes][11] when updating your `orchestrion` dependency to stay informed about new features and adjust your manual instrumentation as necessary.
+You can use the [tracing library][4] in your Orchestrion-built application. This is useful for instrumenting frameworks not yet supported by Orchestrion. However, be aware that this may result in duplicated trace spans in the future as Orchestrion support expands. Review the [release notes][11] when updating your `orchestrion` dependency to stay informed about new features and adjust your manual instrumentation as necessary.
 
 #### Use the continuous profiler
 
@@ -243,7 +243,6 @@ To enable the profiler, set the environment variable `DD_PROFILING_ENABLED=true`
 To troubleshoot builds that `orchestrion` manages, see [Troubleshooting Go Compile-Time Instrumentation][13].
 
 [4]: https://pkg.go.dev/gopkg.in/DataDog/dd-trace-go.v1/ddtrace
-[5]: https://pkg.go.dev/github.com/DataDog/dd-trace-go/v2/ddtrace
 [6]: https://github.com/DataDog/orchestrion
 [7]: /security/application_security/threats/exploit-prevention
 [8]: https://go.dev/doc/devel/release#policy
diff --git a/content/en/tracing/trace_explorer/request_flow_map.md b/content/en/tracing/trace_explorer/request_flow_map.md
deleted file mode 100644
index f09b5d414758e..0000000000000
--- a/content/en/tracing/trace_explorer/request_flow_map.md
+++ /dev/null
@@ -1,57 +0,0 @@
----
-title: Request Flow Map
-description: "Trace Search and Analytics"
-aliases:
- - /tracing/trace_search_and_analytics/request_flow_map
-further_reading:
-    - link: 'https://www.datadoghq.com/blog/apm-request-flow-map-datadog'
-      tag: 'Blog'
-      text: 'Learn more about Request Flow Maps'
----
-
-{{< img src="tracing/live_search_and_analytics/request_flow_map/Overview.png" style="width:100%; background:none; border:none; box-shadow:none;" alt="Request Flow Map" >}}
-
-_Request flow maps_ combine two key features of Datadog APM: the [service map][1] and [live exploring][2], to help you understand and track request paths through your stack. Quickly identify noisy services and choke points, or how many database calls are generated by a request to a specific endpoint.
-
-No additional configuration is required to use these flow maps, and they are powered by your [ingested spans][3]. Scope your LIVE (last 15 minutes) traces to any combination of tags and generate a dynamic map that represents the flow of requests between every service. The map is automatically generated based on your search criteria, and will regenerate live after any changes.
-
-## Navigating the request flow map
-
-- Hover over an edge that connects two services to see metrics for requests, errors, and latency for requests between those two services that match the query parameters.
-
-- The highest throughput connections are highlighted to show the most common path.
-
-- Click **Export** to save a PNG image of the current request flow map. This is a great way to generate a live architecture diagram, or one scoped to a specific user flow.
-
-{{< img src="tracing/live_search_and_analytics/request_flow_map/ServicePanel.png" style="width:100%; background:none; border:none; box-shadow:none;" alt="Flow map side panel for service information" >}}
-
-- Click any service on the map to view overall health and performance information for that service (throughput, latency, error rates, monitor status), along with infrastructure and runtime metrics.
-
-- The map automatically selects an appropriate layout based on the number of services present, and you can click **Cluster** or **Flow** to switch between the two available layouts.
-
-- RUM Applications are represented on the request flow map if you have [connected RUM and Traces][4].
-
-{{< img src="tracing/live_search_and_analytics/request_flow_map/RUMService.mp4" alt="RUM service link from flow map" video=true style="width:100%;">}}
-
-Try the [request flow map in the app][5]. To get started, scope a simple query such as a single service or endpoint.
-
-### Examples
-
-Use the request flow map to investigate your application's behavior:
-
-- Search for a [resource][6] that corresponds to a particular HTTP request.
-
-- If you use [shadow deployments][7] or feature flags set as custom span tags, use the map to compare request latency between requests. This is a great pre-production complement to [deployment tracking][9] to observe how potential code changes will impact latency of deployed versions.
-
-## Further Reading
-
-{{< partial name="whats-next/whats-next.html" >}}
-
-[1]: /tracing/services/services_map/
-[2]: /tracing/trace_explorer/
-[3]: /tracing/trace_pipeline/ingestion_controls
-[4]: /real_user_monitoring/platform/connect_rum_and_traces?tab=browserrum
-[5]: https://app.datadoghq.com/apm/flow-map
-[6]: /tracing/glossary/#resources
-[7]: /tracing/services/deployment_tracking/#shadow-deploys
-[9]: /tracing/services/deployment_tracking/
diff --git a/content/en/tracing/trace_explorer/visualize.md b/content/en/tracing/trace_explorer/visualize.md
index 2d4e6f0ae1fbc..638fdd1885327 100644
--- a/content/en/tracing/trace_explorer/visualize.md
+++ b/content/en/tracing/trace_explorer/visualize.md
@@ -1,10 +1,16 @@
 ---
 title: Span Visualizations
 description: 'View spans in a list, or aggregate spans into timeseries, top lists and more.'
+aliases:
+ - /tracing/trace_search_and_analytics/request_flow_map
+ - /tracing/trace_explorer/request_flow_map/
 further_reading:
     - link: 'tracing/trace_explorer/'
       tag: 'Documentation'
       text: 'Trace Explorer'
+    - link: 'https://www.datadoghq.com/blog/apm-request-flow-map-datadog'
+      tag: 'Blog'
+      text: 'Learn more about Request Flow Maps'
 ---
 
 ## Overview
@@ -89,6 +95,31 @@ The following table shows the error spans count by `Env`, `Service`, and `Error
 
 {{< img src="tracing/trace_explorer/visualize/table_view.png" alt="Table view" style="width:100%;">}}
 
+## Request Flow Map
+
+[Request flow maps][6] combine APM's [service map][7] and [live exploring][8] features to illustrate request paths through your stack. Scope your traces to any combination of tags and generate a dynamic map that represents the flow of requests between every service.
+
+{{< img src="tracing/live_search_and_analytics/request_flow_map/Overview.png" style="width:100%; background:none; border:none; box-shadow:none;" alt="Request flow map showing the flow of requests between services, as well as request times and error rates" >}}
+
+For example, you can use request flow maps to identify high-traffic services or track the number of database calls generated by a request to a specific endpoint. If you use [shadow deployments][9] or feature flags set as custom span tags, you can use request flow maps to compare latencies between requests and anticipate how code changes will impact performance.
+
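+For instance, here is a minimal sketch of tagging the active trace span with a feature flag value, assuming the Node.js tracer (`dd-trace`) and a hypothetical `checkout_v2` flag; once the tag is present on your spans, you can scope the flow map query to it:
+
+```javascript
+// Initialize the tracer before any instrumented frameworks are loaded.
+const tracer = require('dd-trace').init();
+
+function recordFlagOnTrace(flagValue) {
+  // Attach the flag value to the active span so requests can be filtered or
+  // compared by this custom tag when scoping the request flow map.
+  const span = tracer.scope().active();
+  if (span) {
+    span.setTag('feature_flag.checkout_v2', flagValue);
+  }
+}
+
+recordFlagOnTrace('treatment');
+```
+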
+### Navigating the request flow map
+
+- Hover over the edge that connects two services to see metrics for requests, errors, and latencies between those services. **Note**: Highlighted edges represent the highest throughput connections, or the most common paths.
+
+- Click **Export** to save a PNG image of the current request flow map. Use this feature to generate a live architecture diagram or one scoped to a specific user flow.
+
+- Click any service on the map to view health, performance, infrastructure, and runtime metrics for that service.
+
+{{< img src="tracing/live_search_and_analytics/request_flow_map/ServicePanel.png" style="width:100%; background:none; border:none; box-shadow:none;" alt="Request flow map side panel with metrics and metadata for the selected service" >}}
+
+- The map automatically selects an appropriate layout based on the number of services present. Click **Cluster** or **Flow** to switch between the layouts.
+
+- RUM Applications are represented on the request flow map if you have [connected RUM and Traces][10].
+
+{{< img src="tracing/live_search_and_analytics/request_flow_map/RUMService.mp4" alt="A video showing how to navigate into the service details of a RUM application in the request flow map" video=true style="width:100%;">}}
+
 ## Further Reading
 
 {{< partial name="whats-next/whats-next.html" >}}
@@ -98,3 +129,8 @@ The following table shows the error spans count by `Env`, `Service`, and `Error
 [3]: /tracing/trace_explorer/facets/#quantitative-facets-measures
 [4]: /tracing/trace_explorer/?tab=timeseriesview#live-search-for-15-minutes
 [5]: /tracing/error_tracking/
+[6]: https://app.datadoghq.com/apm/flow-map
+[7]: /tracing/services/services_map/
+[8]: /tracing/trace_explorer/
+[9]: /tracing/services/deployment_tracking/#shadow-deploys
+[10]: /real_user_monitoring/platform/connect_rum_and_traces?tab=browserrum
diff --git a/content/en/tracing/troubleshooting/_index.md b/content/en/tracing/troubleshooting/_index.md
index cd7bf7dee5b1d..a5814d2163e3d 100644
--- a/content/en/tracing/troubleshooting/_index.md
+++ b/content/en/tracing/troubleshooting/_index.md
@@ -30,9 +30,9 @@ further_reading:
 - link: '/integrations/'
   tag: 'Documentation'
   text: "Datadog's full list of integrations"
-- link: '/tracing/guide/inferred-service-opt-in/'
+- link: '/tracing/services/inferred_services'
   tag: 'Documentation'
-  text: 'Inferred Service dependencies (Preview)'
+  text: 'Inferred Service dependencies'
 ---
 
 If you experience unexpected behavior while using Datadog APM, read the information on this page to help resolve the issue. Datadog recommends regularly updating to the latest version of the Datadog tracing libraries you use, as each release contains improvements and fixes. If you continue to experience issues, reach out to [Datadog support][1].
@@ -372,7 +372,7 @@ When you open a [support ticket][1], the Datadog support team may ask for the fo
 [27]: /tracing/trace_collection/library_config/
 [28]: https://app.datadoghq.com/dash/integration/apm_estimated_usage
 [29]: /tracing/troubleshooting/#data-volume-guidelines
-[30]: /tracing/guide/inferred-service-opt-in/?tab=java
+[30]: /tracing/services/inferred_services
 [31]: /tracing/trace_pipeline/metrics/#apm-traces-estimated-usage-dashboard
 [32]: https://app.datadoghq.com/services
 
diff --git a/layouts/partials/error_tracking/error-tracking-mobile.html b/layouts/partials/error_tracking/error-tracking-mobile.html
index 565506343259e..6c4a42fdd61a1 100644
--- a/layouts/partials/error_tracking/error-tracking-mobile.html
+++ b/layouts/partials/error_tracking/error-tracking-mobile.html
@@ -46,7 +46,7 @@
         </a>
       </div>
       <div class="col">
-        <a class="card h-100" href="/error_tracking/frontend/mobile/kotlin-multiplatform/">
+        <a class="card h-100" href="/error_tracking/frontend/mobile/kotlin_multiplatform/">
           <div class="card-body text-center py-2 px-1">
             {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class" "img-fluid" "alt" "Kotlin Multiplatform" "width" "200") }}
           </div>
diff --git a/layouts/partials/logs/logs-languages.html b/layouts/partials/logs/logs-languages.html
index 20ea01542268d..6da304af336bf 100644
--- a/layouts/partials/logs/logs-languages.html
+++ b/layouts/partials/logs/logs-languages.html
@@ -94,7 +94,7 @@
 </a>
 </div>
 <div class="col">
-<a class="card h-100" href="/logs/log_collection/kotlin-multiplatform">
+<a class="card h-100" href="/logs/log_collection/kotlin_multiplatform">
 <div class="card-body text-center py-2 px-1">
 {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class" "img-fluid" "alt" "Kotlin Multiplatform" "width" "400") }}
 </div>
diff --git a/layouts/partials/rum/rum-error-tracking-mobile.html b/layouts/partials/rum/rum-error-tracking-mobile.html
index 85833178490df..592ee2e8f9c01 100644
--- a/layouts/partials/rum/rum-error-tracking-mobile.html
+++ b/layouts/partials/rum/rum-error-tracking-mobile.html
@@ -53,7 +53,7 @@
         </a>
       </div>
       <div class="col">
-        <a class="card h-100" href="/real_user_monitoring/error_tracking/mobile/kotlin-multiplatform/">
+        <a class="card h-100" href="/real_user_monitoring/error_tracking/mobile/kotlin_multiplatform/">
           <div class="card-body text-center py-2 px-1">
             {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class" "img-fluid" "alt" "Kotlin Multiplatform" "width" "200") }}
           </div>
diff --git a/layouts/partials/rum/rum-feature-flag-tracking.html b/layouts/partials/rum/rum-feature-flag-tracking.html
index 9aa6ce0f5bbaa..45db145c68258 100644
--- a/layouts/partials/rum/rum-feature-flag-tracking.html
+++ b/layouts/partials/rum/rum-feature-flag-tracking.html
@@ -5,7 +5,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=browser#amplitude-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=browser#amplitude-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/amplitude_large.svg" "class"
@@ -16,7 +16,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=browser#configcat-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=browser#configcat-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/configcat_large.svg" "class"
@@ -27,7 +27,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=browser#custom-feature-flag-management"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=browser#custom-feature-flag-management"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src"
@@ -39,7 +39,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#devcycle-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=npm#devcycle-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/devcycle_large.svg" "class"
@@ -50,7 +50,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=browser#eppo-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=browser#eppo-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/eppo_large.svg" "class"
@@ -61,7 +61,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#flagsmith-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=npm#flagsmith-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/flagsmith_large.svg" "class"
@@ -72,7 +72,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#kameleoon-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=npm#kameleoon-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/kameleoon.png" "class"
@@ -83,7 +83,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#launchdarkly-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=npm#launchdarkly-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/launchdarkly_large.svg" "class"
@@ -94,7 +94,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#split-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=npm#split-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/split_large.svg" "class"
@@ -105,7 +105,7 @@
             <div class="col">
                 <a
                     class="card h-100"
-                    href="/real_user_monitoring/guide/setup-feature-flag-data-collection/?tab=npm#statsig-integration"
+                    href="/real_user_monitoring/feature_flag_tracking/setup/?tab=npm#statsig-integration"
                 >
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/statsig_large.svg" "class"
diff --git a/layouts/partials/rum/rum-getting-started-mobile-advanced.html b/layouts/partials/rum/rum-getting-started-mobile-advanced.html
index 7cb0400325621..a54a9c076fbf5 100644
--- a/layouts/partials/rum/rum-getting-started-mobile-advanced.html
+++ b/layouts/partials/rum/rum-getting-started-mobile-advanced.html
@@ -68,7 +68,7 @@
               </a>
             </div>
             <div class="col">
-                <a class="card h-100" href="kotlin-multiplatform">
+                <a class="card h-100" href="kotlin_multiplatform">
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class"
                         "img-fluid" "alt" "Kotlin Multiplatform" "width" "200") }}
diff --git a/layouts/partials/rum/rum-getting-started-mobile-data-collected.html b/layouts/partials/rum/rum-getting-started-mobile-data-collected.html
index 40dc0ed0238e1..88a484e54b6d0 100644
--- a/layouts/partials/rum/rum-getting-started-mobile-data-collected.html
+++ b/layouts/partials/rum/rum-getting-started-mobile-data-collected.html
@@ -46,7 +46,7 @@
         </a>
       </div>
       <div class="col">
-        <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/data_collected/kotlin-multiplatform">
+        <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/data_collected/kotlin_multiplatform">
           <div class="card-body text-center py-2 px-1">
             {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class" "img-fluid" "alt" "Kotlin Multiplatform" "width" "200") }}
           </div>
diff --git a/layouts/partials/rum/rum-getting-started.html b/layouts/partials/rum/rum-getting-started.html
index 3632d481ef3ab..f6604bcf37e9a 100644
--- a/layouts/partials/rum/rum-getting-started.html
+++ b/layouts/partials/rum/rum-getting-started.html
@@ -67,7 +67,7 @@
         </a>
       </div>
       <div class="col">
-        <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/setup/kotlin-multiplatform/">
+        <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/setup/kotlin_multiplatform/">
           <div class="card-body text-center py-2 px-1">
             {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class" "img-fluid" "alt" "Kotlin Multiplatform" "width" "200") }}
           </div>
diff --git a/layouts/partials/rum/rum-mobile-integrated-libraries.html b/layouts/partials/rum/rum-mobile-integrated-libraries.html
index dca9545b0bdd3..7c0e90970dfbd 100644
--- a/layouts/partials/rum/rum-mobile-integrated-libraries.html
+++ b/layouts/partials/rum/rum-mobile-integrated-libraries.html
@@ -32,7 +32,7 @@
         </a>
       </div>
       <div class="col">
-        <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/integrated_libraries">
+        <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/integrated_libraries">
           <div class="card-body text-center py-2 px-1">
             {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class" "img-fluid" "alt" "kotlin-multiplatform" "width" "200") }}
           </div>
diff --git a/layouts/partials/rum/rum-troubleshooting-mobile.html b/layouts/partials/rum/rum-troubleshooting-mobile.html
index 06d91c43e348e..7d3206190f534 100644
--- a/layouts/partials/rum/rum-troubleshooting-mobile.html
+++ b/layouts/partials/rum/rum-troubleshooting-mobile.html
@@ -48,7 +48,7 @@
                 </a>
             </div>
             <div class="col">
-                <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/kotlin-multiplatform/troubleshooting">
+                <a class="card h-100" href="/real_user_monitoring/mobile_and_tv_monitoring/kotlin_multiplatform/troubleshooting">
                     <div class="card-body text-center py-2 px-1">
                         {{ partial "img.html" (dict "root" . "src" "integrations_logos/kotlin-multiplatform_large.svg" "class"
                         "img-fluid" "alt" "kotlin-multiplatform" "width" "200") }}
diff --git a/layouts/shortcodes/latest-lambda-layer-version.html b/layouts/shortcodes/latest-lambda-layer-version.html
index 7c4c67de04270..780dad0dfb33b 100644
--- a/layouts/shortcodes/latest-lambda-layer-version.html
+++ b/layouts/shortcodes/latest-lambda-layer-version.html
@@ -31,7 +31,7 @@
 
 <!-- dd-trace-dotnet Layer -->
 {{- if eq (.Get "layer") "dd-trace-dotnet" -}}
-    16
+    18
 {{- end -}}
 
 <!-- Python Versions -->
diff --git a/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_s3_source/intro.md b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_s3_source/intro.md
new file mode 100644
index 0000000000000..a8520683c6eb2
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_s3_source/intro.md
@@ -0,0 +1 @@
+To use the Amazon S3 source, you need to set up AWS credential files and environment variables. Observability Pipelines uses those credentials to collect logs from Amazon S3. Datadog recommends setting up a specific AWS profile that can be used by Observability Pipelines.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_s3_source/permissions.md b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_s3_source/permissions.md
new file mode 100644
index 0000000000000..5ccb5817eb2be
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_s3_source/permissions.md
@@ -0,0 +1,5 @@
+For Observability Pipelines to collect logs from Amazon S3, the following policy permissions are required: 
+
+- `s3:GetObject`
+- `sqs:ReceiveMessage`
+- `sqs:DeleteMessage`
diff --git a/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_security_lake/intro.md b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_security_lake/intro.md
new file mode 100644
index 0000000000000..6878d930d87fb
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_security_lake/intro.md
@@ -0,0 +1 @@
+To use the Amazon Security Lake destination, you need to set up AWS credential files and environment variables. Observability Pipelines uses those credentials to send logs to Amazon Security Lake. Datadog recommends setting up a specific AWS profile that can be used by Observability Pipelines.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_security_lake/permissions.md b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_security_lake/permissions.md
new file mode 100644
index 0000000000000..efd193a549577
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/aws_authentication/amazon_security_lake/permissions.md
@@ -0,0 +1,4 @@
+For Observability Pipelines to send logs to Amazon Security Lake, the following policy permissions are required:
+
+- `s3:ListBucket`
+- `s3:PutObject`
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/aws_authentication/instructions.md b/layouts/shortcodes/observability_pipelines/aws_authentication/instructions.md
new file mode 100644
index 0000000000000..3bccc6848577f
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/aws_authentication/instructions.md
@@ -0,0 +1,15 @@
+To set up AWS authentication:
+
+1. Create an IAM role if you don't have one already. The role needs, at a minimum, these [permissions](#permissions) to interact with the component. See [Create a role to delegate permissions to an IAM user][10101] for more information.
+1. In your AWS configuration file, create a new profile using the `role_arn` from the role you created in step 1.
+1. When installing the Observability Pipelines Worker, ensure you set the `AWS_PROFILE` and `AWS_CONFIG_FILE` environment variables. The `AWS_CONFIG_FILE` variable is the path to your AWS configuration file. Set `AWS_PROFILE` to the name of the profile you created in step 2. See [Configuration and credential file settings in the AWS CLI][10102] for more information. This is an example of a profile configuration:
+    ```
+    [profile profile_name]
+    region = us-east-1
+    output = json
+    role_arn = arn:aws:iam::123456789:role/MyRole
+    source_profile = default
+    ```
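+    With that profile in place, the Worker environment might include settings like the following (paths are illustrative):
+    ```
+    AWS_CONFIG_FILE=/home/op-worker/.aws/config
+    AWS_PROFILE=profile_name
+    ```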
+
+[10101]: https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles_create_for-user.html
+[10102]: https://docs.aws.amazon.com/cli/v1/userguide/cli-configure-files.html
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/amazon_security_lake.md b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/amazon_security_lake.md
new file mode 100644
index 0000000000000..4b7bd343e33fb
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/amazon_security_lake.md
@@ -0,0 +1 @@
+There are no environment variables to configure for the Amazon Security Lake destination.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/microsoft_sentinel.md b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/microsoft_sentinel.md
new file mode 100644
index 0000000000000..a36852bf50f07
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/microsoft_sentinel.md
@@ -0,0 +1,4 @@
+- Data collection endpoint (DCE)
+    - Stored as the environment variable: `DD_OP_DESTINATION_MICROSOFT_SENTINEL_DCE_URI`
+- Client secret
+    - Stored as the environment variable: `DD_OP_DESTINATION_MICROSOFT_SENTINEL_CLIENT_SECRET`
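+
+For example, these variables might be set as follows (values are illustrative):
+```
+DD_OP_DESTINATION_MICROSOFT_SENTINEL_DCE_URI=https://my-dce-a1b2.eastus-1.ingest.monitor.azure.com
+DD_OP_DESTINATION_MICROSOFT_SENTINEL_CLIENT_SECRET=<CLIENT_SECRET>
+```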
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/sentinelone.md b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/sentinelone.md
new file mode 100644
index 0000000000000..0ccf7480f1233
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/destination_env_vars/sentinelone.md
@@ -0,0 +1,2 @@
+- SentinelOne write access token:
+    - Stored as the environment variable: `DD_OP_DESTINATION_SENTINEL_ONE_TOKEN`
diff --git a/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_data_firehose.md b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_data_firehose.md
new file mode 100644
index 0000000000000..15ba01188a6c0
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_data_firehose.md
@@ -0,0 +1,3 @@
+- Amazon Data Firehose address
+    - The Observability Pipelines Worker listens on this socket address to receive logs from Amazon Data Firehose.
+    - The address is stored in the environment variable `AWS_DATA_FIREHOSE_ADDRESS`.
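+
+For example, to listen on port 8088 on all interfaces (illustrative value):
+```
+AWS_DATA_FIREHOSE_ADDRESS=0.0.0.0:8088
+```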
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_s3.md b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_s3.md
new file mode 100644
index 0000000000000..13a5c52d25e10
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/amazon_s3.md
@@ -0,0 +1,9 @@
+- Amazon S3 SQS URL
+    - The URL of the SQS queue to which the S3 bucket sends the notification events.
+    - Stored as the environment variable: `DD_OP_SOURCE_AWS_S3_SQS_URL`
+- AWS_CONFIG_FILE path
+    - The path to the AWS configuration file local to this node.
+    - Stored as the environment variable: `AWS_CONFIG_FILE`.
+- AWS_PROFILE name
+    - The name of the profile to use within the AWS configuration file.
+    - Stored as the environment variable: `AWS_PROFILE`.
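+
+A hypothetical set of values for these variables:
+```
+DD_OP_SOURCE_AWS_S3_SQS_URL=https://sqs.us-east-1.amazonaws.com/123456789012/op-s3-notifications
+AWS_CONFIG_FILE=/home/op-worker/.aws/config
+AWS_PROFILE=op-worker
+```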
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/kafka.md b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/kafka.md
new file mode 100644
index 0000000000000..929c1ab1842ff
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/configure_existing_pipelines/source_env_vars/kafka.md
@@ -0,0 +1,8 @@
+- The host and port of the Kafka bootstrap servers.
+    - The bootstrap server that the client uses to connect to the Kafka cluster and discover all the other hosts in the cluster. The host and port must be entered in the format of `host:port`, such as `10.14.22.123:9092`. If there is more than one server, use commas to separate them.
+    - Stored as the environment variable: `DD_OP_SOURCE_KAFKA_BOOTSTRAP_SERVERS`.
+- If you enabled SASL:
+    - Kafka SASL username
+        - Stored as the environment variable: `DD_OP_SOURCE_KAFKA_SASL_USERNAME`.
+    - Kafka SASL password
+        - Stored as the environment variable: `DD_OP_SOURCE_KAFKA_SASL_PASSWORD`.
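+
+A hypothetical set of values, assuming SASL is enabled:
+```
+DD_OP_SOURCE_KAFKA_BOOTSTRAP_SERVERS=10.14.22.123:9092,10.14.22.124:9092
+DD_OP_SOURCE_KAFKA_SASL_USERNAME=op-worker
+DD_OP_SOURCE_KAFKA_SASL_PASSWORD=<SASL_PASSWORD>
+```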
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_env_vars/amazon_security_lake.md b/layouts/shortcodes/observability_pipelines/destination_env_vars/amazon_security_lake.md
new file mode 100644
index 0000000000000..4b7bd343e33fb
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_env_vars/amazon_security_lake.md
@@ -0,0 +1 @@
+There are no environment variables to configure for the Amazon Security Lake destination.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_env_vars/microsoft_sentinel.md b/layouts/shortcodes/observability_pipelines/destination_env_vars/microsoft_sentinel.md
new file mode 100644
index 0000000000000..e46c293b97456
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_env_vars/microsoft_sentinel.md
@@ -0,0 +1,2 @@
+1. Enter the data collection endpoint (DCE).
+1. Enter the client secret.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_env_vars/sentinelone.md b/layouts/shortcodes/observability_pipelines/destination_env_vars/sentinelone.md
new file mode 100644
index 0000000000000..590a54d8da39b
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_env_vars/sentinelone.md
@@ -0,0 +1,10 @@
+Enter your SentinelOne write access token. To find your write access token:
+
+1. Log in to the [SentinelOne console][10061].
+1. Navigate to the Singularity Data Lake (SDL) API Keys page. To access it from the console, click **Visibility** in the left menu to go to SDL, then click your username and select **API Keys**.
+1. Copy the **Logs Access** write key and paste it into the **SentinelOne Write Access Token** field on the **Install Observability Pipelines Worker** page.
+
+After you've installed the Observability Pipelines Worker and finished setting up the pipeline, see [View logs in a SentinelOne cluster][10062] for instructions on how to see the logs you sent from Observability Pipelines to the SentinelOne destination.
+
+[10061]: https://usea1-partners.sentinelone.net/login
+[10062]: /observability_pipelines/destinations/sentinelone#view-logs-in-a-sentinelone-cluster
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/amazon_opensearch.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/amazon_opensearch.en.md
index d0931b924da23..f108fd5373bd4 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/amazon_opensearch.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/amazon_opensearch.en.md
@@ -1,2 +1,4 @@
-1. Optionally, enter the name of the Amazon OpenSearch index.
-1. Select an authentication strategy, **Basic** or **AWS**. For **AWS**, enter the AWS region.
\ No newline at end of file
+1. Optionally, enter the name of the Amazon OpenSearch index. See [template syntax][10051] if you want to route logs to different indexes based on specific fields in your logs.
+1. Select an authentication strategy, **Basic** or **AWS**. For **AWS**, enter the AWS region.
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/amazon_security_lake.md b/layouts/shortcodes/observability_pipelines/destination_settings/amazon_security_lake.md
new file mode 100644
index 0000000000000..04751cf366ddf
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/amazon_security_lake.md
@@ -0,0 +1,12 @@
+1. Enter your S3 bucket name.
+1. Enter the AWS region.
+1. Optionally, toggle the switch to enable TLS. If you enable TLS, the following certificate and key files are required:
+    - `Server Certificate Path`: The path to the certificate file that has been signed by your Certificate Authority (CA) Root File in DER or PEM (X.509).
+    - `CA Certificate Path`: The path to the certificate file that is your Certificate Authority (CA) Root File in DER or PEM (X.509).
+    - `Private Key Path`: The path to the `.key` private key file that belongs to your Server Certificate Path in DER or PEM (PKCS#8) format.
+
+**Notes**:
+- When you add the Amazon Security Lake destination, the OCSF processor is automatically added so that you can convert your logs to Parquet before they are sent to Amazon Security Lake. See [Remap to OCSF documentation][10081] for setup instructions.
+- Only logs formatted by the OCSF processor are converted to Parquet.
+
+[10081]: /observability_pipelines/processors/remap_ocsf
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/chronicle.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/chronicle.en.md
index f0f281d5652e2..546c2c064861d 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/chronicle.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/chronicle.en.md
@@ -5,9 +5,10 @@ To set up the Worker's Google Chronicle destination:
 1. Enter the customer ID for your Google Chronicle instance.
 1. Enter the path to the credentials JSON file you downloaded earlier.
 1. Select **JSON** or **Raw** encoding in the dropdown menu.
-1. Select the appropriate **Log Type** in the dropdown menu.
+1. Enter the log type. See [template syntax][10002] if you want to route logs to different log types based on specific fields in your logs.
 
 **Note**: Logs sent to the Google Chronicle destination must have ingestion labels. For example, if the logs are from an A10 load balancer, they must have the ingestion label `A10_LOAD_BALANCER`. See Google Cloud's [Support log types with a default parser][10003] for a list of available log types and their respective ingestion labels.
 
 [10001]: https://cloud.google.com/chronicle/docs/reference/ingestion-api#getting_api_authentication_credentials
+[10002]: /observability_pipelines/destinations/#template-syntax
 [10003]: https://cloud.google.com/chronicle/docs/ingestion/parser-list/supported-default-parsers#with-default-parser
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_amazon_s3.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_amazon_s3.en.md
index 34001b2572204..d0fa10940f28a 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_amazon_s3.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_amazon_s3.en.md
@@ -1,6 +1,10 @@
 1. Enter the S3 bucket name for the S3 bucket you created earlier.
 1. Enter the AWS region the S3 bucket is in.
-1. Enter the key prefix. Prefixes are useful for partitioning objects. For example, you can use a prefix as an object key to store objects under a particular directory. If using a prefix for this purpose, it must end in `/` to act as a directory path; a trailing `/` is not automatically added.
+1. Enter the key prefix.
+    - Prefixes are useful for partitioning objects. For example, you can use a prefix as an object key to store objects under a particular directory. If using a prefix for this purpose, it must end in `/` to act as a directory path; a trailing `/` is not automatically added.
+    - See [template syntax][10051] if you want to route logs to different object keys based on specific fields in your logs.
 1. Select the storage class for your S3 bucket in the **Storage Class** dropdown menu.
 
-Your AWS access key ID and AWS secret access key are set as environment variables when you install the Worker later.
\ No newline at end of file
+Your AWS access key ID and AWS secret access key are set as environment variables when you install the Worker later.
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_azure_storage.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_azure_storage.en.md
index 581711b8e3b0f..d9b9b65bd9395 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_azure_storage.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_azure_storage.en.md
@@ -1,2 +1,6 @@
 1. Enter the name of the Azure container you created earlier.
-2. Optionally, enter a prefix. Prefixes are useful for partitioning objects. For example, you can use a prefix as an object key to store objects under a particular directory. If using a prefix for this purpose, it must end in `/` to act as a directory path; a trailing `/` is not automatically added.
+2. Optionally, enter a prefix.
+    - Prefixes are useful for partitioning objects. For example, you can use a prefix as an object key to store objects under a particular directory. If using a prefix for this purpose, it must end in `/` to act as a directory path; a trailing `/` is not automatically added.
+    - See [template syntax][10051] if you want to route logs to different object keys based on specific fields in your logs.
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_google_cloud_storage.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_google_cloud_storage.en.md
index c4034a751864e..77603f9315952 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_google_cloud_storage.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_google_cloud_storage.en.md
@@ -2,5 +2,9 @@
 1. Enter the path to the credentials JSON file you downloaded [earlier](#create-a-service-account-to-allow-workers-to-write-to-the-bucket).
 1. Select the storage class for the created objects.
 1. Select the access level of the created objects.
-1. Optionally, enter in the prefix. Prefixes are useful for partitioning objects. For example, you can use a prefix as an object key to store objects under a particular directory. If using a prefix for this purpose, it must end in `/` to act as a directory path; a trailing `/` is not automatically added.
-1. Optionally, click **Add Header** to add metadata.
\ No newline at end of file
+1. Optionally, enter the prefix.
+    - Prefixes are useful for partitioning objects. For example, you can use a prefix as an object key to store objects under a particular directory. If using a prefix for this purpose, it must end in `/` to act as a directory path; a trailing `/` is not automatically added.
+    - See [template syntax][10051] if you want to route logs to different object keys based on specific fields in your logs.
+1. Optionally, click **Add Header** to add metadata.
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_prerequisites.md b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_prerequisites.md
new file mode 100644
index 0000000000000..4b2058322475b
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/datadog_archives_prerequisites.md
@@ -0,0 +1,10 @@
+If you do not have a Datadog Log Archive configured for Observability Pipelines, configure a Log Archive for your cloud provider ([Amazon S3][10041], [Google Cloud Storage][10042], or [Azure Storage][10043]).
+
+**Note**: You need to have the Datadog integration for your cloud provider installed to set up Datadog Log Archives. See the [AWS integration][10044], [Google Cloud Platform integration][10045], and [Azure integration][10046] documentation for more information.
+
+[10041]: /observability_pipelines/destinations/amazon_s3/?tab=docker#configure-log-archives
+[10042]: /observability_pipelines/destinations/google_cloud_storage/
+[10043]: /observability_pipelines/destinations/azure_storage/#configure-log-archives
+[10044]: /integrations/amazon_web_services/#setup
+[10045]: /integrations/google_cloud_platform/#setup
+[10046]: /integrations/azure/#setup
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/elasticsearch.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/elasticsearch.en.md
index ae25b0c26c41e..f4adb00c7f029 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/elasticsearch.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/elasticsearch.en.md
@@ -1,4 +1,6 @@
 The following fields are optional:
 
-1. Enter the name for the Elasticsearch index.
-2. Enter the Elasticsearch version.
\ No newline at end of file
+1. Enter the name for the Elasticsearch index. See [template syntax][10051] if you want to route logs to different indexes based on specific fields in your logs.
+2. Enter the Elasticsearch version.
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/microsoft_sentinel.md b/layouts/shortcodes/observability_pipelines/destination_settings/microsoft_sentinel.md
new file mode 100644
index 0000000000000..6d9828428a0cb
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/microsoft_sentinel.md
@@ -0,0 +1,4 @@
+1. Enter the client ID for your application.
+1. Enter the directory ID for your tenant.
+1. Enter the name of the table to which you are sending the logs.
+1. Enter the Data Collection Rule (DCR) immutable ID.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/opensearch.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/opensearch.en.md
index 4ce84c9767cd4..956acd7e2dd41 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/opensearch.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/opensearch.en.md
@@ -1 +1,3 @@
-Optionally, enter the name of the OpenSearch index.
\ No newline at end of file
+Optionally, enter the name of the OpenSearch index. See [template syntax][10051] if you want to route logs to different indexes based on specific fields in your logs.
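+
+For example, assuming the double-curly `{{ <field> }}` form described in the template syntax documentation, an index name like the following routes each log to an index named after its `service` attribute (illustrative value):
+```
+logs-{{service}}
+```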
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/sentinelone.md b/layouts/shortcodes/observability_pipelines/destination_settings/sentinelone.md
new file mode 100644
index 0000000000000..80337a958a046
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/sentinelone.md
@@ -0,0 +1 @@
+Select your SentinelOne logs environment in the dropdown menu.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/destination_settings/splunk_hec.en.md b/layouts/shortcodes/observability_pipelines/destination_settings/splunk_hec.en.md
index e08293b6a6b6a..d0edc30822781 100644
--- a/layouts/shortcodes/observability_pipelines/destination_settings/splunk_hec.en.md
+++ b/layouts/shortcodes/observability_pipelines/destination_settings/splunk_hec.en.md
@@ -1,4 +1,6 @@
 The following fields are optional:
-1. Enter the name of the Splunk index you want your data in. This has to be an allowed index for your HEC.
+1. Enter the name of the Splunk index you want your data in. This has to be an allowed index for your HEC. See [template syntax][10051] if you want to route logs to different indexes based on specific fields in your logs.
 1.  Select whether the timestamp should be auto-extracted. If set to `true`, Splunk extracts the timestamp from the message with the expected format of `yyyy-mm-dd hh:mm:ss`.
-1. Set the `sourcetype` to override Splunk's default value, which is `httpevent` for HEC data.
+1. Optionally, set the `sourcetype` to override Splunk's default value, which is `httpevent` for HEC data. See [template syntax][10051] if you want to route logs to different source types based on specific fields in your logs.
+
+[10051]: /observability_pipelines/destinations/#template-syntax
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/log_source_configuration/amazon_data_firehose.md b/layouts/shortcodes/observability_pipelines/log_source_configuration/amazon_data_firehose.md
new file mode 100644
index 0000000000000..0051b5f908838
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/log_source_configuration/amazon_data_firehose.md
@@ -0,0 +1,6 @@
+Since Amazon Data Firehose can only deliver data over HTTP to an HTTPS URL, when you deploy the Observability Pipelines Worker (OPW), you need to deploy it with a publicly exposed endpoint and handle TLS termination. To handle TLS termination, you can front OPW with a load balancer or configure TLS options. See [Understand HTTP endpoint delivery request and response specifications][10122] for more information.
+
+To send logs to the Observability Pipelines Worker, set up an Amazon Data Firehose stream with an [HTTP endpoint destination][10121] in the region where your logs are. Configure the endpoint URL to the endpoint where OPW is deployed.
+
+[10121]: https://docs.aws.amazon.com/firehose/latest/dev/create-destination.html?icmpid=docs_console_unmapped#create-destination-http
+[10122]: https://docs.aws.amazon.com/firehose/latest/dev/httpdeliveryrequestresponse.html#requestformat
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/multiple_destinations.md b/layouts/shortcodes/observability_pipelines/multiple_destinations.md
new file mode 100644
index 0000000000000..1403bb0e12f4d
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/multiple_destinations.md
@@ -0,0 +1,8 @@
+Click the plus sign (**+**) to the left of the destinations to add additional destinations to the same set of processors.
+
+To delete a destination, click the pencil icon at the top right of the destination and select **Delete destination**. If you delete a destination from a processor group that has multiple destinations, only the deleted destination is removed. If you delete a destination from a processor group that only has one destination, both the destination and the processor group are removed.
+
+**Notes**:
+- A pipeline must have at least one destination. If a processor group only has one destination, that destination cannot be deleted.
+- You can add up to three destinations for a pipeline.
+- A specific destination can only be added once. For example, you cannot add multiple Splunk HEC destinations.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/multiple_processors.md b/layouts/shortcodes/observability_pipelines/multiple_processors.md
new file mode 100644
index 0000000000000..01a70feb9f3b3
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/multiple_processors.md
@@ -0,0 +1,3 @@
+Click the plus sign (**+**) to the left of the processors to add another set of processors and destinations to the source. See [Add additional destinations](#add-additional-destinations) for details on adding destinations to a processor group.
+
+To delete a processor group, you need to delete all destinations linked to that processor group. When the last destination is deleted, the processor group is removed with it.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/amazon_data_firehose.md b/layouts/shortcodes/observability_pipelines/prerequisites/amazon_data_firehose.md
new file mode 100644
index 0000000000000..b8726033bc606
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/amazon_data_firehose.md
@@ -0,0 +1,5 @@
+To use Observability Pipelines' Amazon Data Firehose source:
+- Since Amazon Data Firehose can only deliver data over HTTP to an HTTPS URL, when you deploy the Observability Pipelines Worker (OPW), you need to deploy it with a publicly exposed endpoint and handle TLS termination. To handle TLS termination, you can front OPW with a load balancer or configure TLS options. See [Understand HTTP endpoint delivery request and response specifications][10111] for more information.
+- If your forwarders are globally configured to enable SSL, you need the appropriate TLS certificates and the password you used to create your private key.
+
+[10111]: https://docs.aws.amazon.com/firehose/latest/dev/httpdeliveryrequestresponse.html#requestformat
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/amazon_s3.md b/layouts/shortcodes/observability_pipelines/prerequisites/amazon_s3.md
new file mode 100644
index 0000000000000..17e997cc60508
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/amazon_s3.md
@@ -0,0 +1,5 @@
+To use Observability Pipelines' Amazon S3 source, you need to:
+- Configure an SQS queue to receive your S3 bucket notifications; the queue is required to use the Amazon S3 source (see the sketch after this list).
+- Set up AWS authentication using the `AWS_PROFILE` and `AWS_CONFIG_FILE` environment variables. Observability Pipelines uses the credentials associated with those environment variables to collect logs from Amazon S3. See [AWS Authentication][10091] for more information.
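+
+As a sketch of the SQS notification setup (queue name, account ID, and region are hypothetical), the S3 bucket's notification configuration might look like this:
+```json
+{
+    "QueueConfigurations": [
+        {
+            "QueueArn": "arn:aws:sqs:us-east-1:123456789012:op-s3-notifications",
+            "Events": ["s3:ObjectCreated:*"]
+        }
+    ]
+}
+```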
+
+[10091]: /observability_pipelines/sources/amazon_s3/#aws-authentication
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/amazon_security_lake.md b/layouts/shortcodes/observability_pipelines/prerequisites/amazon_security_lake.md
new file mode 100644
index 0000000000000..d6f2ee7cf7f4e
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/amazon_security_lake.md
@@ -0,0 +1,21 @@
+<div class="alert alert-warning">The Amazon Security Lake destination is in Preview. Complete the <a href="https://www.datadoghq.com/product-preview/route-logs-to-amazon-security-lake/">form</a> to request access.
+</div>
+
+1. Follow the [Getting Started with Amazon Security Lake][10071] guide to set up Amazon Security Lake, and make sure to:
+    - Enable Amazon Security Lake for the AWS account.
+    - Select the AWS regions where S3 buckets will be created for OCSF data.
+    - Take note of the Amazon Security Lake S3 bucket name. The bucket name is used when you set up the Amazon Security Lake destination in Observability Pipelines.
+1. Follow [Collecting data from custom sources in Security Lake][10072] to create a custom source in Amazon Security Lake.
+    - When you [configure a custom log source in Security Lake in the AWS console][10073]:
+        - Enter a source name.
+        - Select the OCSF event class for the log source and type.
+        - Enter the account details for the AWS account that will write logs to Amazon Security Lake:
+            - AWS account ID
+            - External ID
+        - Select **Create and use a new service** for service access.
+1. Set up AWS authentication using the `AWS_PROFILE` and `AWS_CONFIG_FILE` environment variables. Observability Pipelines uses the credentials associated with those environment variables to send logs to Amazon Security Lake. See [AWS Authentication][10074] for more information.
+
+[10071]: https://docs.aws.amazon.com/security-lake/latest/userguide/getting-started.html
+[10072]: https://docs.aws.amazon.com/security-lake/latest/userguide/custom-sources.html
+[10073]: https://docs.aws.amazon.com/security-lake/latest/userguide/get-started-console.html
+[10074]: /observability_pipelines/destinations/amazon_security_lake/#aws-authentication
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/fluent.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/fluent.en.md
index a3525e432b7a3..51ae2f7f37b5b 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/fluent.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/fluent.en.md
@@ -1,4 +1,4 @@
-To use Observability Pipelines's Fluentd or Fluent Bit source, you need the following information available:
+To use Observability Pipelines' Fluentd or Fluent Bit source, you need the following information available:
 
 1. The Observability Pipelines Worker listens on this bind address to receive logs from your applications. For example, `0.0.0.0:8088`. Later on, you configure your applications to send logs to this address.
 2. The appropriate TLS certificates and the password you used to create your private key if your forwarders are globally configured to enable SSL.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/http_client.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/http_client.en.md
index ef9850664b383..76a87946043ea 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/http_client.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/http_client.en.md
@@ -1,4 +1,4 @@
-To use Observability Pipelines's HTTP/S Client source, you need the following information available:
+To use Observability Pipelines' HTTP/S Client source, you need the following information available:
 
 1. The full path of the HTTP Server endpoint that the Observability Pipelines Worker collects log events from. For example, `https://127.0.0.8/logs`.
 2. The HTTP authentication token or password.
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/kafka.md b/layouts/shortcodes/observability_pipelines/prerequisites/kafka.md
new file mode 100644
index 0000000000000..b38488e3714d5
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/kafka.md
@@ -0,0 +1,4 @@
+To use Observability Pipelines' Kafka source, you need the following information available:
+
+- The hosts and ports of the Kafka bootstrap servers, which clients should use to connect to the Kafka cluster and discover all the other hosts in the cluster.
+- The appropriate TLS certificates and the password you used to create your private key, if your forwarders are globally configured to enable SSL.
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec.en.md
index a0f2b30141547..732d17fbd6013 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec.en.md
@@ -1,6 +1,6 @@
-To use Observability Pipelines's Splunk HTTP Event Collector (HEC) source, you have applications sending data to Splunk in the [expected HEC format][3001].
+To use Observability Pipelines' Splunk HTTP Event Collector (HEC) source, you have applications sending data to Splunk in the [expected HEC format][3001].
 
-To use Observability Pipelines's Splunk HEC destination, you have a Splunk Enterprise or Cloud instance configured with an HTTP Event Collector (HEC) input. You also have the following information available:
+To use Observability Pipelines' Splunk HEC destination, you have a Splunk Enterprise or Cloud instance configured with an HTTP Event Collector (HEC) input. You also have the following information available:
 
 - The Splunk HEC token.
 - The bind address that your Observability Pipelines Worker will listen on to receive logs from your applications. For example, `0.0.0.0:8080`. Later on, you [configure your applications](#send-logs-to-the-observability-pipelines-worker-over-splunk-hec) to send logs to this address.
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec_destination_only.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec_destination_only.en.md
index 52765f5fb411b..298bf00ce8926 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec_destination_only.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/splunk_hec_destination_only.en.md
@@ -1,4 +1,4 @@
-To use Observability Pipelines's Splunk HEC destination, you have a Splunk Enterprise or Cloud instance configured with an HTTP Event Collector (HEC) input. You also have the following information available:
+To use Observability Pipelines' Splunk HEC destination, you have a Splunk Enterprise or Cloud instance configured with an HTTP Event Collector (HEC) input. You also have the following information available:
 
 - The Splunk HEC token.
 - The bind address that your Observability Pipelines Worker will listen on to receive logs from your applications. For example, `0.0.0.0:8080`. Later on, you [configure your applications](#send-logs-to-the-observability-pipelines-worker-over-splunk-hec) to send logs to this address.
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/splunk_tcp.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/splunk_tcp.en.md
index b233639dce232..4138776f914ec 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/splunk_tcp.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/splunk_tcp.en.md
@@ -1,4 +1,4 @@
-To use Observability Pipelines's Splunk TCP source, you have a Splunk Enterprise or Cloud Instance alongside either a Splunk Universal Forwarder or a Splunk Heavy Forwarder routing data to your Splunk instance. You also have the following information available:
+To use Observability Pipelines' Splunk TCP source, you have a Splunk Enterprise or Cloud Instance alongside either a Splunk Universal Forwarder or a Splunk Heavy Forwarder routing data to your Splunk instance. You also have the following information available:
 - The bind address that your Observability Pipelines Worker will listen on to receive logs from your applications. For example, `0.0.0.0:8088`. Later on, you [configure your applications](#connect-splunk-forwarder-to-the-observability-pipelines-worker) to send logs to this address.
 - The appropriate [TLS certificates][101] and the password you used to create your private key if your forwarders are globally configured to enable SSL.
 
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic.en.md
index 4168fd635bb2f..f1618d34ad382 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic.en.md
@@ -1,6 +1,6 @@
-To use Observability Pipelines's Sumo Logic source, you have applications sending data to Sumo Logic in the [expected format][101].
+To use Observability Pipelines' Sumo Logic source, you have applications sending data to Sumo Logic in the [expected format][101].
 
-To use Observability Pipelines's Sumo Logic destination, you have a Hosted Sumo Logic Collector with a HTTP Logs source, and the following information available:
+To use Observability Pipelines' Sumo Logic destination, you have a Hosted Sumo Logic Collector with a HTTP Logs source, and the following information available:
 - The bind address that your Observability Pipelines Worker will listen on to receive logs. For example, `0.0.0.0:80`.
 - The URL of the Sumo Logic HTTP Logs Source that the Worker will send processed logs to. This URL is provided by Sumo Logic once you configure your hosted collector and set up an HTTP Logs and Metrics source.
 
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic_destination_only.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic_destination_only.en.md
index e708fb79a3145..9ffc172760dc3 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic_destination_only.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/sumo_logic_destination_only.en.md
@@ -1,4 +1,4 @@
-To use Observability Pipelines's Sumo Logic destination, you have a Hosted Sumo Logic Collector with a HTTP Logs source, and the following information available:
+To use Observability Pipelines' Sumo Logic destination, you have a Hosted Sumo Logic Collector with a HTTP Logs source, and the following information available:
 - The bind address that your Observability Pipelines Worker will listen on to receive logs. For example, `0.0.0.0:80`.
 - The URL of the Sumo Logic HTTP Logs Source that the Worker will send processed logs to. This URL is provided by Sumo Logic once you configure your hosted collector and set up an HTTP Logs and Metrics source.
 
diff --git a/layouts/shortcodes/observability_pipelines/prerequisites/syslog.en.md b/layouts/shortcodes/observability_pipelines/prerequisites/syslog.en.md
index e00725ce81eaa..84d5cbd3a6eb7 100644
--- a/layouts/shortcodes/observability_pipelines/prerequisites/syslog.en.md
+++ b/layouts/shortcodes/observability_pipelines/prerequisites/syslog.en.md
@@ -1,4 +1,4 @@
-To use Observability Pipelines's Syslog source, your applications must be sending data in one of the following formats: [RFC 6587][9071], [RFC 5424][9072], [RFC 3164][9073]. You also need to have the following information available:
+To use Observability Pipelines' Syslog source, your applications must be sending data in one of the following formats: [RFC 6587][9071], [RFC 5424][9072], [RFC 3164][9073]. You also need to have the following information available:
 
 1. The bind address that your Observability Pipelines Worker (OPW) will listen on to receive logs from your applications. For example, `0.0.0.0:8088`. Later on, you configure your applications to send logs to this address.
 2. The appropriate TLS certificates and the password you used to create your private key if your forwarders are globally configured to enable SSL.
diff --git a/layouts/shortcodes/observability_pipelines/processors/dedupe.en.md b/layouts/shortcodes/observability_pipelines/processors/dedupe.en.md
index 606842ce6d755..4d7cd35510adb 100644
--- a/layouts/shortcodes/observability_pipelines/processors/dedupe.en.md
+++ b/layouts/shortcodes/observability_pipelines/processors/dedupe.en.md
@@ -16,12 +16,12 @@ To set up the deduplicate processor:
 {
     "outer_key": {
         "inner_key": "inner_value",
-            "a": {
-                    "double_inner_key": "double_inner_value",
-                    "b": "b value"
-                },
-            "c": "c value"
+        "a": {
+            "double_inner_key": "double_inner_value",
+            "b": "b value"
         },
-        "d": "d value"
-    }
+        "c": "c value"
+    },
+    "d": "d value"
+}
 ```
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/processors/remap.en.md b/layouts/shortcodes/observability_pipelines/processors/remap.en.md
index 53143640a214d..00b6504afd1b9 100644
--- a/layouts/shortcodes/observability_pipelines/processors/remap.en.md
+++ b/layouts/shortcodes/observability_pipelines/processors/remap.en.md
@@ -34,12 +34,12 @@ To set up the rename field processor:
 {
     "outer_key": {
         "inner_key": "inner_value",
-            "a": {
-                    "double_inner_key": "double_inner_value",
-                    "b": "b value"
-                },
-            "c": "c value"
+        "a": {
+            "double_inner_key": "double_inner_value",
+            "b": "b value"
         },
-        "d": "d value"
-    }
+        "c": "c value"
+    },
+    "d": "d value"
+}
 ```
diff --git a/layouts/shortcodes/observability_pipelines/processors/remap_ocsf.md b/layouts/shortcodes/observability_pipelines/processors/remap_ocsf.md
new file mode 100644
index 0000000000000..54418470dbff8
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/processors/remap_ocsf.md
@@ -0,0 +1,28 @@
+<div class="alert alert-warning">The Remap to OCSF processor is in Preview. Complete this <a href="https://www.datadoghq.com/product-preview/remap-logs-to-the-ocsf-format/">form</a> to request access.
+</div>
+
+Use this processor to remap logs to Open Cybersecurity Schema Framework (OCSF) events.  OCSF schema event classes are set for a specific log source and type. You can add multiple mappings to one processor. **Note**: Datadog recommends that the OCSF processor be the last processor in your pipeline, so that remapping is done after the logs have been processed by all the other processors.
+
+To set up this processor:
+
+Click **Manage mappings**. This opens a side panel:
+
+- If you have not added any mappings yet, enter the mapping parameters as described in [Add a mapping](#add-a-mapping).
+- If you have already added mappings, click on a mapping in the list to edit or delete it. Use the search bar to find a mapping by its name. Click **Add Mapping** to add another mapping.
+
+#### Add a mapping
+
+1. Select the log type in the dropdown menu.
+1. Define a filter query. Only logs that match the specified filter query are remapped. All logs, whether or not they match the filter query, are sent to the next step in the pipeline.
+1. Click **Add Mapping**.
+
+#### Mappings
+
+These are the mappings available:
+
+| Log Source             | Log Type                                      | OCSF Category                 |
+|------------------------|-----------------------------------------------|-------------------------------|
+| AWS CloudTrail         | Type: Management<br>EventName: ChangePassword | Account Change (3001)         |
+| Okta                   | User session start                            | Authentication (3002)         |
+| Palo Alto Networks     | Traffic                                       | Network Activity (4001)       |
+| Google Workspace Admin | addPrivilege                                  | User Account Management (3005)|
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/processors/sds_custom_rules.md b/layouts/shortcodes/observability_pipelines/processors/sds_custom_rules.md
new file mode 100644
index 0000000000000..3d94b8a14143a
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/processors/sds_custom_rules.md
@@ -0,0 +1,41 @@
+1. In the **Define match conditions** section, specify the regex pattern to use for matching against events in the **Define the regex** field. Enter sample data in the **Add sample data** field to verify that your regex pattern is valid.
+    Sensitive Data Scanner supports Perl Compatible Regular Expressions (PCRE), but the following patterns are not supported:
+    - Backreferences and capturing sub-expressions (lookarounds)
+    - Arbitrary zero-width assertions
+    - Subroutine references and recursive patterns
+    - Conditional patterns
+    - Backtracking control verbs
+    - The `\C` "single-byte" directive (which breaks UTF-8 sequences)
+    - The `\R` newline match
+    - The `\K` start of match reset directive
+    - Callouts and embedded code
+    - Atomic grouping and possessive quantifiers
+1. For **Create keyword dictionary**, add keywords to refine detection accuracy when matching regex conditions. For example, if you are scanning for a sixteen-digit Visa credit card number, you can add keywords like `visa`, `credit`, and `card` (a sample pattern and keyword combination for this case is sketched after these steps). You can also require that these keywords be within a specified number of characters of a match. By default, keywords must be within 30 characters before a matched value.
+1. In the **Define rule target and action** section, select if you want to scan the **Entire Event**, **Specific Attributes**, or **Exclude Attributes** in the dropdown menu.
+    - If you are scanning the entire event, you can optionally exclude specific attributes from getting scanned. Use [path notation](#path-notation-example-custom) (`outer_key.inner_key`) to access nested keys. For specified attributes with nested data, all nested data is excluded.
+    - If you are scanning specific attributes, specify which attributes you want to scan. Use [path notation](#path-notation-example-custom) (`outer_key.inner_key`) to access nested keys. For specified attributes with nested data, all nested data is scanned.
+1. For **Define actions on match**, select the action you want to take for the matched information. **Note**: Redaction, partial redaction, and hashing are all irreversible actions.
+    - **Redact**: Replaces all matching values with the text you specify in the **Replacement text** field.
+    - **Partially Redact**: Replaces a specified portion of all matched data. In the **Redact** section, specify the number of characters you want to redact and which part of the matched data to redact.
+    - **Hash**: Replaces all matched data with a unique identifier. The UTF-8 bytes of the match are hashed with the 64-bit fingerprint of FarmHash.
+1. Optionally, add tags you want to associate with the matched events.
+1. Add a name for the scanning rule.
+1. Optionally, add a description for the rule.
+1. Click **Add Rule**.
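+
+As an illustration of the match conditions above, a hypothetical rule for sixteen-digit Visa card numbers might pair the keywords `visa`, `credit`, and `card` with a pattern like the following (a sketch only; adjust it to your data):
+```
+4\d{3}[- ]?\d{4}[- ]?\d{4}[- ]?\d{4}
+```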
+
+##### Path notation example {#path-notation-example-custom}
+
+ For the following message structure, use `outer_key.inner_key.double_inner_key` to refer to the key with the value `double_inner_value`.
+```json
+{
+    "outer_key": {
+        "inner_key": "inner_value",
+        "a": {
+            "double_inner_key": "double_inner_value",
+            "b": "b value"
+        },
+        "c": "c value"
+    },
+    "d": "d value"
+}
+```
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/processors/sds_library_rules.md b/layouts/shortcodes/observability_pipelines/processors/sds_library_rules.md
new file mode 100644
index 0000000000000..8172c99f18ee1
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/processors/sds_library_rules.md
@@ -0,0 +1,39 @@
+1. Toggle the industry classifications you want to select rules for.
+1. Select the library rules you want to use.
+1. In the **Define rule target and action** section, select if you want to scan the **Entire Event**, **Specific Attributes**, or **Exclude Attributes** in the dropdown menu.
+    - If you are scanning the entire event, you can optionally exclude specific attributes from getting scanned. Use [path notation](#path-notation-example-lib) (`outer_key.inner_key`) to access nested keys. For specified attributes with nested data, all nested data is excluded.
+    - If you are scanning specific attributes, specify which attributes you want to scan. Use [path notation](#path-notation-example-lib) (`outer_key.inner_key`) to access nested keys. For specified attributes with nested data, all nested data is scanned.
+1. For **Define actions on match**, select the action you want to take for the matched information. **Note**: Redaction, partial redaction, and hashing are all irreversible actions.
+    - **Redact**: Replaces all matching values with the text you specify in the **Replacement text** field.
+    - **Partially Redact**: Replaces a specified portion of all matched data. In the **Redact** section, specify the number of characters you want to redact and which part of the matched data to redact.
+    - **Hash**: Replaces all matched data with a unique identifier. The UTF-8 bytes of the match are hashed with the 64-bit fingerprint of FarmHash.
+1. Optionally, add tags you want to associate with the matched events.
+1. Click **Add Rules**.
+
+##### Path notation example {#path-notation-example-lib}
+
+ For the following message structure, use `outer_key.inner_key.double_inner_key` to refer to the key with the value `double_inner_value`.
+```json
+{
+    "outer_key": {
+        "inner_key": "inner_value",
+        "a": {
+            "double_inner_key": "double_inner_value",
+            "b": "b value"
+        },
+        "c": "c value"
+    },
+    "d": "d value"
+}
+```
+
+##### Add additional keywords
+
+After adding scanning rules from the library, you can edit each rule separately and add additional keywords to the keyword dictionary.
+
+1. Navigate to your [pipeline][10141].
+1. In the Sensitive Data Scanner processor with the rule you want to edit, click **Manage Scanning Rules**.
+1. Toggle **Use recommended keywords** if you want the rule to use them. Otherwise, add your own keywords to the **Create keyword dictionary** field. You can also require that these keywords be within a specified number of characters of a match. By default, keywords must be within 30 characters before a matched value.
+1. Click **Update**.
+
+[10141]: https://app.datadoghq.com/observability-pipelines
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/processors/sensitive_data_scanner_update.md b/layouts/shortcodes/observability_pipelines/processors/sensitive_data_scanner_update.md
new file mode 100644
index 0000000000000..84b6f4b1885ad
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/processors/sensitive_data_scanner_update.md
@@ -0,0 +1,7 @@
+The Sensitive Data Scanner processor scans logs to detect and redact or hash sensitive information such as PII, PCI, and custom sensitive data. You can pick from Datadog's library of predefined rules, or input custom Regex rules to scan for sensitive data.
+
+To set up the processor:
+
+1. Define a filter query. Only logs that match the specified filter query are scanned and processed. All logs are sent to the next step in the pipeline, regardless of whether they match the filter query.
+1. Click **Add Scanning Rule**.
+1. Select one of the following:
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/source_settings/amazon_data_firehose.md b/layouts/shortcodes/observability_pipelines/source_settings/amazon_data_firehose.md
new file mode 100644
index 0000000000000..f87fb0f52736c
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/source_settings/amazon_data_firehose.md
@@ -0,0 +1,4 @@
+Optionally, toggle the switch to enable TLS. If you enable TLS, the following certificate and key files are required:
+- `Server Certificate Path`: The path to the certificate file that has been signed by your Certificate Authority (CA) Root File in DER or PEM (X.509).
+- `CA Certificate Path`: The path to the certificate file that is your Certificate Authority (CA) Root File in DER or PEM (X.509).
+- `Private Key Path`: The path to the `.key` private key file that belongs to your Server Certificate Path in DER or PEM (PKCS#8) format.
\ No newline at end of file
diff --git a/layouts/shortcodes/observability_pipelines/source_settings/amazon_s3.md b/layouts/shortcodes/observability_pipelines/source_settings/amazon_s3.md
new file mode 100644
index 0000000000000..ed50770bdde77
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/source_settings/amazon_s3.md
@@ -0,0 +1,5 @@
+1. Enter the AWS region.
+1. Optionally, toggle the switch to enable TLS. If you enable TLS, the following certificate and key files are required:
+    - `Server Certificate Path`: The path to the certificate file that has been signed by your Certificate Authority (CA) Root File in DER or PEM (X.509).
+    - `CA Certificate Path`: The path to the certificate file that is your Certificate Authority (CA) Root File in DER or PEM (X.509).
+    - `Private Key Path`: The path to the `.key` private key file that belongs to your Server Certificate Path in DER or PEM (PKCS#8) format.
diff --git a/layouts/shortcodes/observability_pipelines/source_settings/kafka.md b/layouts/shortcodes/observability_pipelines/source_settings/kafka.md
new file mode 100644
index 0000000000000..15103bbbdddaf
--- /dev/null
+++ b/layouts/shortcodes/observability_pipelines/source_settings/kafka.md
@@ -0,0 +1,14 @@
+1. Enter the group ID.
+1. Enter the topic name. If there is more than one, click **Add Field** to add additional topics.
+1. Optionally, toggle the switch to enable SASL Authentication and select the mechanism (**PLAIN**, **SCRAM-SHA-256**, or **SCRAM-SHA-512**) in the dropdown menu.
+1. Optionally, toggle the switch to enable TLS. If you enable TLS, the following certificate and key files are required:
+    - `Server Certificate Path`: The path to the certificate file that has been signed by your Certificate Authority (CA) Root File in DER or PEM (X.509).
+    - `CA Certificate Path`: The path to the certificate file that is your Certificate Authority (CA) Root File in DER or PEM (X.509).
+    - `Private Key Path`: The path to the `.key` private key file that belongs to your Server Certificate Path in DER or PEM (PKCS#8) format.
+1. Optionally, click **Advanced** and click **Add Option** to add additional librdkafka options.
+    1. Select an option in the dropdown menu.
+    1. Enter a value for that option.
+    1. Check your values against the [librdkafka documentation][10131] to make sure they have the correct type and are within the set range.
+    1. Click **Add Option** to add another librdkafka option.
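+
+For example, a hypothetical advanced option entry (check the type and range against the librdkafka documentation):
+```
+fetch.message.max.bytes = 2097152
+```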
+
+[10131]: https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md
\ No newline at end of file
diff --git a/static/images/integrations/guide/source_code_integration/dynamic-instrumentation-create-new.png b/static/images/integrations/guide/source_code_integration/dynamic-instrumentation-create-new.png
new file mode 100644
index 0000000000000..5ce79de2ffe67
Binary files /dev/null and b/static/images/integrations/guide/source_code_integration/dynamic-instrumentation-create-new.png differ
diff --git a/static/images/observability_pipelines/amazon_s3_prefix.png b/static/images/observability_pipelines/amazon_s3_prefix.png
new file mode 100644
index 0000000000000..c2e1de997b832
Binary files /dev/null and b/static/images/observability_pipelines/amazon_s3_prefix.png differ