Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

add support for logging 5.9 and above versions #101

Open
wants to merge 1 commit into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions examples/all.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -159,8 +159,10 @@ ocp_cluster_logging: false
cluster_log_forwarder: false
cluster_logging_channel: ""
elastic_search_channel: ""
loki_channel: ""
elasticsearch_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:111110
clusterlogging_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:11111
loki_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:111112
log_label: ""
elasticsearch_url: ""
syslog_url: ""
Expand Down
4 changes: 3 additions & 1 deletion examples/ocp_cluster_logging_vars.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,16 @@ ocp_cluster_logging: false
cluster_log_forwarder: false
cluster_logging_channel: ""
elastic_search_channel: ""
loki_channel: ""
elasticsearch_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:111110
clusterlogging_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:11111
loki_clf_cs: "" # brew.registry.redhat.io/rh-osbs/iib:111112
log_label: ""
elasticsearch_url: ""
syslog_url: ""
fluentd_url: ""
kafka_url: ""
kafka_path: "" # Location of kafka on external vm ex. /root/kafka/kafka_2.13-2.7.0/bin
kafka_path: "" # Location of kafka on external vm ex. /usr/local/kafka/bin
loki_url: ""
cloudwatch_secret: ""
aws_region: ""
Expand Down
26 changes: 26 additions & 0 deletions playbooks/roles/ocp-cluster-logging/files/clf-cleanup.yml
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,15 @@
shell: oc delete ClusterLogging instance -n openshift-logging
when: check_cl.stdout|int != 0

# Check for a Lokistack instance and delete it if it exists.
# The first task captures the name of the first lokistack resource (if any)
# in the openshift-logging namespace; the second deletes it by that name.
- name: Check if the lokistack instance exists
  shell: oc get lokistack -n openshift-logging | grep lokistack | awk 'NR==1{print $1}'
  register: check_lokistack

# Fixes vs. original:
#  - interpolate the registered result's stdout (the instance name), not the
#    whole result object ({{ check_lokistack }} would expand to a dict repr
#    and break the oc command);
#  - the stdout here is a resource *name*, not a count, so `|int != 0`
#    always evaluates to false (non-numeric string -> 0); test for a
#    non-empty name instead, matching the loki_csv cleanup pattern below.
- name: Delete lokistack instance if it exists
  shell: oc delete lokistack {{ check_lokistack.stdout }} -n openshift-logging
  when: check_lokistack.stdout | length > 0

# Check and delete Elasticsearch subscription if it exists
- name: Check if the Elasticsearch subscription exists
shell: oc get subscription -n openshift-operators-redhat | grep elasticsearch-operator | wc -l
Expand Down Expand Up @@ -85,3 +94,20 @@
shell: oc delete clusterserviceversion {{ cluster_logging_csv.stdout }} -n openshift-logging
when: cluster_logging_csv.stdout|length > 0

# Check and delete lokistack subscription if it exists
- name: Check if the Loki subscription exists
shell: oc get subscription -n openshift-operators-redhat | grep loki-operator | wc -l
register: loki_subs

- name: Delete Loki subscription if it exists
shell: oc delete subscription loki-operator -n openshift-operators-redhat
when: loki_subs.stdout|int != 0

# Check and delete Loki operator if it exists
- name: Check if the Loki operator exists
shell: oc get csv -n openshift-operators-redhat | grep loki-operator | awk 'NR==1{print $1}'
register: loki_csv

- name: Delete Loki operator if it exists
shell: oc delete clusterserviceversion {{ loki_csv.stdout }} -n openshift-operators-redhat
when: loki_csv.stdout|length > 0
59 changes: 48 additions & 11 deletions playbooks/roles/ocp-cluster-logging/files/clusterlogforwarder.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,9 +23,21 @@
src: "{{ role_path }}/templates/clf-instance.yml.j2"
dest: "{{ role_path }}/files/clf-instance.yml"
delegate_to: localhost
when: clo_version | float < 6.0

- name: Generating ClusterLogForwarder file
template:
src: "{{ role_path }}/templates/new-clf-instance.yml.j2"
dest: "{{ role_path }}/files/new-clf-instance.yml"
delegate_to: localhost
when: clo_version | float >= 6.0

# Creating ClusterLogForwarder custom resource
- include_tasks: "{{ role_path }}/files/clf-instance.yml"
when: clo_version | float < 6.0

- include_tasks: "{{ role_path }}/files/new-clf-instance.yml"
when: clo_version | float >= 6.0

# Check if the pods are in good state
- name: Check the logging pods are in good state
Expand Down Expand Up @@ -65,23 +77,28 @@
- "loki"
- "cloudwatch"
- "kibana-ldap"
- "lokistack"

- set_fact:
syslog_server_logfile: "/var/log/messages"
external_server_logs_path: "/root/clf_logs"

- name: Pause for 2 minutes to get new logs
pause:
minutes: 2

- name: Fetch the logs from external instances
block:
# Save the logs on external Kafka system and fetch on bastion
- block:
- name: Save the logs on Kafka server
shell: |
mkdir -p {{ external_server_logs_path }}/kafka
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ log_labels }}-audit --max-messages 10 > {{ external_server_logs_path }}/kafka/audit.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ log_labels }}-infrastructure --max-messages 10 > {{ external_server_logs_path }}/kafka/infrastructure.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ log_labels }}-application --max-messages 10 > {{ external_server_logs_path }}/kafka/application.txt
async: 30
poll: 5
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ app_log_label }} --max-messages 10 > {{ external_server_logs_path }}/kafka/application.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ audit_log_label }} --max-messages 10 > {{ external_server_logs_path }}/kafka/audit.txt
{{ kafka_path }}/kafka-console-consumer.sh --bootstrap-server {{ kafka_host }}:9092 --topic {{ infra_log_label }} --max-messages 10 > {{ external_server_logs_path }}/kafka/infrastructure.txt
async: 120
poll: 10

- name: Copy the logs file from Kafka to bastion
fetch:
Expand All @@ -95,6 +112,19 @@
delegate_to: kafka
when: kafka_server_url is defined

#Make lokistack.sh file executable
- name: Give executable permissions for lokistack.sh file
file:
path: /root/ocp4-playbooks-extras/playbooks/roles/ocp-cluster-logging/files/lokistack.sh
mode: '0555'
state: file

# Save the logs on external lokistack and fetch on bastion
- name: Save the logs for lokistack instance
shell: |
mkdir -p {{ cl_log_dir }}/lokistack
/root/ocp4-playbooks-extras/playbooks/roles/ocp-cluster-logging/files/lokistack.sh

# Save the logs on external Syslog system and fetch on bastion
- block:
- name: Save the logs on external Syslog instance
Expand Down Expand Up @@ -123,9 +153,9 @@
# Fetch logs from Elasticsearch
- name: Fetch Logs from Elasticsearch
shell: |
curl -XGET "{{ elasticsearch_server_url }}/infra*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.logs":"{{ log_labels }}-infrastructure"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/infrastructure.txt
curl -XGET "{{ elasticsearch_server_url }}/audit*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.logs":"{{ log_labels }}-audit"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/audit.txt
curl -XGET "{{ elasticsearch_server_url }}/app*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.logs":"{{ log_labels }}-application"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/application.txt
curl -XGET "{{ elasticsearch_server_url }}/infra*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.label":"{{ infra_log_label }}"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/infrastructure.txt
curl -XGET "{{ elasticsearch_server_url }}/audit*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.label":"{{ audit_log_label }}"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/audit.txt
curl -XGET "{{ elasticsearch_server_url }}/app*/_search" -H 'Content-Type: application/json' -d '{ "query": { "bool": { "must": [ { "match":{"openshift.labels.label":"{{ app_log_label }}"} } ] } } }' > {{ cl_log_dir }}/elasticsearch/application.txt
when: elasticsearch_server_url is defined

# Fetch logs from Loki
Expand All @@ -134,11 +164,18 @@
curl -G -s "{{ loki_server_url }}/api/prom/query" --data-urlencode 'query={log_type="infrastructure"}' > {{ cl_log_dir }}/loki/infrastructure.txt
curl -G -s "{{ loki_server_url }}/api/prom/query" --data-urlencode 'query={log_type="audit"}' > {{ cl_log_dir }}/loki/audit.txt
curl -G -s "{{ loki_server_url }}/api/prom/query" --data-urlencode 'query={log_type="application"}' > {{ cl_log_dir }}/loki/application.txt
async: 120
poll: 10
when: loki_server_url is defined

# Deleting CLF Custom Resource instance because Fluentd and CloudWatch store the logs on their system
# NOTE: the two conditions must be mutually exclusive; with `<= 5.9` / `<= 6.0`
# both deletes ran on 5.x clusters where the obsclf (observability) API does not exist.
# Use the same `< 6.0` / `>= 6.0` split applied to the CLF creation tasks above.
- name: Delete ClusterLogForwarder
shell: oc delete ClusterLogForwarder instance -n openshift-logging
when: clo_version | float < 6.0

- name: Delete ClusterLogForwarder
shell: oc delete obsclf collector -n openshift-logging
when: clo_version | float >= 6.0

- name: Check the logging pods are restarting
shell: oc get pods -n openshift-logging --no-headers | awk '{if ($3 == "Terminating" ) print $1}' | wc -l
Expand Down Expand Up @@ -216,8 +253,8 @@

- name: Elasticsearch clean up
shell: |
curl -X POST "{{ elasticsearch_server_url }}/audit*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.logs":"{{ log_labels }}-audit" } }}'
curl -X POST "{{ elasticsearch_server_url }}/app*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.logs":"{{ log_labels }}-application" } }}'
curl -X POST "{{ elasticsearch_server_url }}/infra*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.logs":"{{ log_labels }}-infrastructure" } }}'
curl -X POST "{{ elasticsearch_server_url }}/audit*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.label":"{{ audit_log_label }}" } }}'
curl -X POST "{{ elasticsearch_server_url }}/app*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.label":"{{ app_log_label }}" } }}'
curl -X POST "{{ elasticsearch_server_url }}/infra*/_delete_by_query?pretty" -H 'Content-Type: application/json' -d '{ "query": { "match": { "openshift.labels.label":"{{ infra_log_label }}" } }}'
when: elasticsearch_server_url is defined
ignore_errors: yes
Loading