add support for 5.9 and 6.0
Jeevan-Darapu authored and Jeevan-Darapu committed Feb 4, 2025
1 parent a4886eb commit 110cd73
Showing 12 changed files with 1,033 additions and 459 deletions.
856 changes: 431 additions & 425 deletions examples/all.yaml

Large diffs are not rendered by default.

4 changes: 3 additions & 1 deletion examples/ocp_cluster_logging_vars.yml
@@ -3,14 +3,16 @@ ocp_cluster_logging: false
cluster_log_forwarder: false
cluster_logging_channel: ""
elastic_search_channel: ""
loki_channel: ""
elasticsearch_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:111110
clusterlogging_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:11111
loki_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:111112
log_label: ""
elasticsearch_url: ""
syslog_url: ""
fluentd_url: ""
kafka_url: ""
kafka_path: "" # Location of kafka on external vm ex. /root/kafka/kafka_2.13-2.7.0/bin
kafka_path: "" # Location of kafka on external vm ex. /usr/local/kafka/bin
loki_url: ""
cloudwatch_secret: ""
aws_region: ""
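For reference, a filled-in variant of this vars file for a 6.0 run forwarding to Kafka and Loki might look like the sketch below. Every value is an illustrative placeholder chosen for this example, not taken from the commit.

ocp_cluster_logging: true
cluster_log_forwarder: true
cluster_logging_channel: "stable-6.0"
loki_channel: "stable-6.0"
log_label: "clf-test"
kafka_url: "tls://kafka.example.com:9092"
kafka_path: "/usr/local/kafka/bin"
loki_url: "http://loki.example.com:3100"
aws_region: "us-east-1"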
26 changes: 26 additions & 0 deletions playbooks/roles/ocp-cluster-logging/files/clf-cleanup.yml
@@ -49,6 +49,15 @@
shell: oc delete ClusterLogging instance -n openshift-logging
when: check_cl.stdout|int != 0

# Check Lokistack instance and delete if it exists
- name: Check if the lokistack instance exists
shell: oc get lokistack -n openshift-logging | grep lokistack | awk 'NR==1{print $1}'
register: check_lokistack

- name: Delete lokistack instance if it exists
  shell: oc delete lokistack {{ check_lokistack.stdout }} -n openshift-logging
  when: check_lokistack.stdout|length > 0

# Check and delete Elasticsearch subscription if it exists
- name: Check if the Elasticsearch subscription exists
shell: oc get subscription -n openshift-operators-redhat | grep elasticsearch-operator | wc -l
@@ -85,3 +94,20 @@
shell: oc delete clusterserviceversion {{ cluster_logging_csv.stdout }} -n openshift-logging
when: cluster_logging_csv.stdout|length > 0

# Check and delete lokistack subscription if it exists
- name: Check if the Loki subscription exists
shell: oc get subscription -n openshift-operators-redhat | grep loki-operator | wc -l
register: loki_subs

- name: Delete Loki subscription if it exists
shell: oc delete subscription loki-operator -n openshift-operators-redhat
when: loki_subs.stdout|int != 0

# Check and delete Loki operator if it exists
- name: Check if the Loki operator exists
shell: oc get csv -n openshift-operators-redhat | grep loki-operator | awk 'NR==1{print $1}'
register: loki_csv

- name: Delete Loki operator if it exists
shell: oc delete clusterserviceversion {{ loki_csv.stdout }} -n openshift-operators-redhat
when: loki_csv.stdout|length > 0
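Not part of this commit, but worth noting: the same check-and-delete pattern could be done idempotently with the k8s module instead of parsing oc output, along the lines of the sketch below (resource name and namespace assumed to match the ones this role creates).

- name: Delete LokiStack instance if present (alternative sketch, not in this commit)
  k8s:
    state: absent
    api_version: loki.grafana.com/v1
    kind: LokiStack
    name: lokistack-sample
    namespace: openshift-logging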
59 changes: 48 additions & 11 deletions playbooks/roles/ocp-cluster-logging/files/clusterlogforwarder.yml
@@ -23,9 +23,21 @@
src: "{{ role_path }}/templates/clf-instance.yml.j2"
dest: "{{ role_path }}/files/clf-instance.yml"
delegate_to: localhost
when: clo_version | float < 6.0

- name: Generating ClusterLogForwarder file
template:
src: "{{ role_path }}/templates/new-clf-instance.yml.j2"
dest: "{{ role_path }}/files/new-clf-instance.yml"
delegate_to: localhost
when: clo_version | float >= 6.0

# Creating ClusterLogForwarder custom resource
- include_tasks: "{{ role_path }}/files/clf-instance.yml"
when: clo_version | float < 6.0

- include_tasks: "{{ role_path }}/files/new-clf-instance.yml"
when: clo_version | float >= 6.0

# Check if the pods are in good state
- name: Check the logging pods are in good state
@@ -65,23 +77,28 @@
- "loki"
- "cloudwatch"
- "kibana-ldap"
- "lokistack"

- set_fact:
syslog_server_logfile: "/var/log/messages"
external_server_logs_path: "/root/clf_logs"

- name: Pause for 2 minutes to get new logs
pause:
minutes: 2

- name: Fetch the logs from external instances
block:
# Save the logs on external Kafka system and fetch on bastion
- block:
- name: Save the logs on Kafka server
shell: |
mkdir -p {{ external_server_logs_path }}/kafka
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ log_labels }}-audit --max-messages 10 > {{ external_server_logs_path }}/kafka/audit.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ log_labels }}-infrastructure --max-messages 10 > {{ external_server_logs_path }}/kafka/infrastructure.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ log_labels }}-application --max-messages 10 > {{ external_server_logs_path }}/kafka/application.txt
async: 30
poll: 5
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ app_log_label }} --max-messages 10 > {{ external_server_logs_path }}/kafka/application.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ audit_log_label }} --max-messages 10 > {{ external_server_logs_path }}/kafka/audit.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ infra_log_label }} --max-messages 10 > {{ external_server_logs_path }}/kafka/infrastructure.txt
async: 120
poll: 10

- name: Copy the logs file from Kafka to bastion
fetch:
@@ -95,6 +112,19 @@
delegate_to: kafka
when: kafka_server_url is defined
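A small pre-check could make the Kafka step easier to debug. The hedged sketch below (not in this commit) lists the topics on the external Kafka host with the stock kafka-topics.sh tool, reusing the kafka_path and kafka_host values already used above.

# Hypothetical pre-check: list topics on the external Kafka server before consuming.
- name: List Kafka topics on the external server (illustrative only)
  shell: "{{ kafka_path }}/kafka-topics.sh --list --bootstrap-server {{ kafka_host }}:9092"
  register: kafka_topics
  delegate_to: kafka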

# Make lokistack.sh file executable
- name: Give executable permissions for lokistack.sh file
file:
path: /root/ocp4-playbooks-extras/playbooks/roles/ocp-cluster-logging/files/lokistack.sh
mode: '0555'
state: file

# Save the logs on external lokistack and fetch on bastion
- name: Save the logs for lokistack instance
shell: |
mkdir -p {{ cl_log_dir }}/lokistack
/root/ocp4-playbooks-extras/playbooks/roles/ocp-cluster-logging/files/lokistack.sh
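The body of lokistack.sh itself is not shown in this diff. As a rough illustration only, a query against the in-cluster LokiStack gateway could look like the snippet below; the route name, token handling, and tenant path are assumptions, not the script's actual contents.

# Illustrative only, not the repository's lokistack.sh.
- name: Query application logs from the LokiStack gateway (assumed approach)
  shell: |
    GATEWAY="https://$(oc get route lokistack-sample -n openshift-logging -o jsonpath='{.spec.host}')"
    TOKEN="$(oc whoami -t)"
    curl -G -k -s -H "Authorization: Bearer ${TOKEN}" \
      "${GATEWAY}/api/logs/v1/application/loki/api/v1/query_range" \
      --data-urlencode 'query={log_type="application"}' > {{ cl_log_dir }}/lokistack/application.txt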
# Save the logs on external Syslog system and fetch on bastion
- block:
- name: Save the logs on external Syslog instance
Expand Down Expand Up @@ -123,9 +153,9 @@
# Fetch logs from Elasticsearch
- name: Fetch Logs from Elasticsearch
shell: |
curl -XGET "{{ elasticsearch_server_url }}/infra*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.logs":"{{ log_labels }}-infrastructure"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/infrastructure.txt
curl -XGET "{{ elasticsearch_server_url }}/audit*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.logs":"{{ log_labels }}-audit"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/audit.txt
curl -XGET "{{ elasticsearch_server_url }}/app*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.logs":"{{ log_labels }}-application"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/application.txt
curl -XGET "{{ elasticsearch_server_url }}/infra*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.label":"{{ infra_log_label }}"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/infrastructure.txt
curl -XGET "{{ elasticsearch_server_url }}/audit*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.label":"{{ audit_log_label }}"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/audit.txt
curl -XGET "{{ elasticsearch_server_url }}/app*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.label":"{{ app_log_label }}"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/application.txt
when: elasticsearch_server_url is defined

# Fetch logs from Loki
@@ -134,11 +164,18 @@
curl -G -s "{{ loki_server_url }}/api/prom/query" --data-urlencode 'query={log_type="infrastructure"}' > {{ cl_log_dir }}/loki/infrastructure.txt
curl -G -s "{{ loki_server_url }}/api/prom/query" --data-urlencode 'query={log_type="audit"}' > {{ cl_log_dir }}/loki/audit.txt
curl -G -s "{{ loki_server_url }}/api/prom/query" --data-urlencode 'query={log_type="application"}' > {{ cl_log_dir }}/loki/application.txt
async: 120
poll: 10
when: loki_server_url is defined

# Deleting CLF Custom Resource instance because Fluentd and CloudWatch stores the logs on their system
- name: Delete ClusterLogForwarder
shell: oc delete ClusterLogForwarder instance -n openshift-logging
when: clo_version | float <= 5.9

- name: Delete ClusterLogForwarder
shell: oc delete obsclf collector -n openshift-logging
  when: clo_version | float >= 6.0

- name: Check the logging pods are restarting
shell: oc get pods -n openshift-logging --no-headers | awk '{if ($3 == "Terminating" ) print $1}' | wc -l
@@ -216,8 +253,8 @@

- name: Elasticsearch clean up
shell: |
curl -X POST "{{ elasticsearch_server_url }}/audit*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.logs":"{{ log_labels }}-audit" } }}'
curl -X POST "{{ elasticsearch_server_url }}/app*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.logs":"{{ log_labels }}-application" } }}'
curl -X POST "{{ elasticsearch_server_url }}/infra*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.logs":"{{ log_labels }}-infrastructure" } }}'
curl -X POST "{{ elasticsearch_server_url }}/audit*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.label":"{{ audit_log_label }}" } }}'
curl -X POST "{{ elasticsearch_server_url }}/app*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.label":"{{ app_log_label }}" } }}'
curl -X POST "{{ elasticsearch_server_url }}/infra*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.label":"{{ infra_log_label }}" } }}'
when: elasticsearch_server_url is defined
ignore_errors: yes
209 changes: 208 additions & 1 deletion playbooks/roles/ocp-cluster-logging/files/clusterlogging.yml
@@ -2,7 +2,7 @@
# Set the default collector type
- set_fact:
collector_type: "{{ log_collector_type | default('fluentd', true) }}"

clo_version: "{{ cluster_logging_channel | regex_search('\\d+\\.\\d+') | float }}"
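For clarity, clo_version is just the numeric part of the subscription channel, for example (illustrative values):

# cluster_logging_channel: "stable-5.9" -> clo_version: 5.9
# cluster_logging_channel: "stable-6.0" -> clo_version: 6.0
- debug:
    msg: "Cluster Logging Operator version detected: {{ clo_version }}"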
# Check if the ClusterLogging CR exists
- name: Check if the ClusterLogging instance exists
shell: oc get ClusterLogging -n openshift-logging | wc -l
@@ -60,6 +60,213 @@
type: {{ collector_type }}
{{ collector_type }}: {}
register: cl_cr
when: clo_version | float <= 5.8

- name: Create an instance for logging operator
k8s:
state: present
definition:
apiVersion: "logging.openshift.io/v1"
kind: "ClusterLogging"
metadata:
name: "instance"
namespace: "openshift-logging"
annotations:
logging.openshift.io/preview-vector-collector: "{{ (collector_type == 'vector') | ternary('enabled', omit) }}"
spec:
managementState: "Managed"
logStore:
type: "lokistack"
lokistack:
name: lokistack-sample
collection:
logs: "{{ collector_spec | from_yaml }}"
vars:
collector_spec: |
type: {{ collector_type }}
{{ collector_type }}: {}
register: cl_cr
when: clo_version | float == 5.9

# Create a ConfigMap for lokistack
- name: Create ConfigMap for lokistack
k8s:
state: present
definition:
apiVersion: v1
kind: ConfigMap
metadata:
name: lokistack-ca
namespace: openshift-logging
annotations:
service.beta.openshift.io/inject-cabundle: "true"
when: clo_version | float >= 5.9

# Create a ClusterRole and ClusterRoleBinding for lokistack
- name: Create Cluster Role for lokistack
k8s:
state: present
definition:
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: lokistack-instance-tenant-logs
rules:
- apiGroups:
- 'loki.grafana.com'
resources:
- application
- infrastructure
- audit
resourceNames:
- logs
verbs:
- 'get'
- 'create'
when: clo_version | float >= 5.9
- name: Create Role binding for lokistack
k8s:
state: present
definition:
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: lokistack-instance-tenant-logs
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: lokistack-instance-tenant-logs
subjects:
- kind: ServiceAccount
name: logcollector
namespace: openshift-logging
when: clo_version | float >= 5.9
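A quick way to sanity-check this binding (not part of the commit, and the exact invocation is an assumption) is to ask the API server whether the collector's ServiceAccount may write to the tenant:

# Hypothetical verification of the lokistack RBAC set up above.
- name: Verify logcollector can write application logs (illustrative only)
  shell: oc auth can-i create application.loki.grafana.com/logs --as=system:serviceaccount:openshift-logging:logcollector
  register: loki_rbac_check
  changed_when: false
  failed_when: "'yes' not in loki_rbac_check.stdout"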

# Create lokistack
- name: Create lokistack
k8s:
state: present
definition:
apiVersion: loki.grafana.com/v1
kind: LokiStack
metadata:
name: lokistack-sample
namespace: openshift-logging
spec:
size: 1x.small
replicationFactor: 1
storage:
secret:
name: lokicred-secret
type: s3
storageClassName: nfs-storage-provisioner
tenants:
mode: openshift-logging
rules:
enabled: true
selector:
matchLabels:
openshift.io/cluster-monitoring: 'true'
namespaceSelector:
matchLabels:
openshift.io/cluster-monitoring: 'true'
when: clo_version | float == 5.9

# Create lokistack for 6.0 and above
- name: Create lokistack
k8s:
state: present
definition:
apiVersion: loki.grafana.com/v1
kind: LokiStack
metadata:
name: lokistack-sample
namespace: openshift-logging
spec:
size: 1x.demo
replicationFactor: 1
storage:
secret:
name: lokicred-secret
type: s3
storageClassName: nfs-storage-provisioner
tenants:
mode: openshift-logging
rules:
enabled: true
selector:
matchLabels:
openshift.io/cluster-monitoring: 'true'
namespaceSelector:
matchLabels:
openshift.io/cluster-monitoring: 'true'
when: clo_version | float >= 6.0


# Create a loki log forwarder
- name: Create loki logforwarder
k8s:
state: present
definition:
apiVersion: logging.openshift.io/v1
kind: ClusterLogForwarder
metadata:
name: instance
namespace: openshift-logging
spec:
pipelines:
- name: all-to-default
inputRefs:
- infrastructure
- application
- audit
outputRefs:
- default
when: clo_version | float == 5.9

- name: Create loki logforwarder
k8s:
state: present
definition:
apiVersion: observability.openshift.io/v1
kind: ClusterLogForwarder
metadata:
name: collector
namespace: openshift-logging
spec:
managementState: Managed
outputs:
- lokiStack:
authentication:
token:
from: serviceAccount
target:
name: lokistack-sample
namespace: openshift-logging
name: lokistack
tls:
ca:
configMapName: lokistack-sample-gateway-ca-bundle
key: service-ca.crt
type: lokiStack
pipelines:
- inputRefs:
- infrastructure
- audit
- application
name: forward-to-lokistack
outputRefs:
- lokistack
serviceAccount:
name: logcollector
when: clo_version | float >= 6.0
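One assumption worth calling out: with the observability.openshift.io API the logcollector ServiceAccount needs the collector cluster roles before this forwarder will run. If the role does not grant them elsewhere, a step along these lines (hedged sketch, not in this commit) would be needed:

# Hypothetical prerequisite: bind the collector cluster roles to the logcollector ServiceAccount.
- name: Bind collector roles to logcollector (illustrative only)
  shell: |
    oc adm policy add-cluster-role-to-user collect-application-logs -z logcollector -n openshift-logging
    oc adm policy add-cluster-role-to-user collect-infrastructure-logs -z logcollector -n openshift-logging
    oc adm policy add-cluster-role-to-user collect-audit-logs -z logcollector -n openshift-logging
  when: clo_version | float >= 6.0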


- name: Check lokistack secret exists
shell: oc get secret -n openshift-logging | grep "lokicred-secret" | wc -l
register: lokisecret
failed_when: lokisecret.stdout|int == 0
when: clo_version | float >= 5.9
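The role assumes lokicred-secret already exists. For reference, a secret for an S3 backend is usually created along these lines; all values below are placeholders, and the exact keys should be checked against the Loki operator documentation in use.

# Illustrative only: create the object-storage secret the LokiStack above refers to.
- name: Create S3 credentials secret for LokiStack (example, not in this commit)
  shell: |
    oc create secret generic lokicred-secret -n openshift-logging \
      --from-literal=access_key_id=<ACCESS_KEY_ID> \
      --from-literal=access_key_secret=<SECRET_ACCESS_KEY> \
      --from-literal=bucketnames=<BUCKET_NAME> \
      --from-literal=endpoint=https://s3.<REGION>.amazonaws.com \
      --from-literal=region=<REGION>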

# Check if deployment is successful
- name: Check the deployment