diff --git a/.github/workflows/atex.yaml b/.github/workflows/atex.yaml
new file mode 100644
index 000000000000..4a0c918b40e0
--- /dev/null
+++ b/.github/workflows/atex.yaml
@@ -0,0 +1,100 @@
+name: TMT Tests with Artifact Upload
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+
+permissions:
+  contents: read
+
+jobs:
+  test-and-upload:
+    runs-on: ubuntu-latest
+    container:
+      image: fedora:latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+
+      # TODO: Add TMT+FMF test steps here
+      - name: Run TMT+FMF tests
+        run: |
+          echo "TODO: Add TMT+FMF test commands here"
+          # Example:
+          # dnf -y install tmt
+          # tmt run
+
+      - name: Install dependencies
+        if: always()  # Run even if previous steps fail
+        run: |
+          dnf -y install git-core python3-pip
+          pip install fmf atex==0.10
+
+      - name: Checkout RHSecurityCompliance repository to submit test results to Testing Farm Infrastructure
+        if: always()
+        uses: actions/checkout@v4
+        with:
+          repository: RHSecurityCompliance/atex-results-testing-farm
+          ref: main
+          path: atex-results-testing-farm
+          # GITHUB_TOKEN is scoped to the current repository only; the push
+          # steps below need a token with write access to the external repo.
+          token: ${{ secrets.ATEX_RESULTS_TF_REPO_TOKEN }}
+
+      - name: Initialize FMF metadata
+        if: always()
+        run: |
+          fmf init
+
+      - name: Create TMT dummy plan for artifact transport
+        if: always()
+        run: |
+          cat > main.fmf <<'EOF'
+          /dummy_plan:
+            discover:
+              how: shell
+              tests:
+                - name: /dummy_test
+                  test: mv * "$TMT_TEST_DATA/."
+            execute:
+              how: tmt
+          EOF
+
+      - name: Push artifacts as tag to Testing Farm repository
+        if: always()
+        working-directory: atex-results-testing-farm
+        env:
+          GH_TOKEN: ${{ secrets.ATEX_RESULTS_TF_REPO_TOKEN }}
+        run: |
+          git config user.name "openscap-ci[bot]"
+          git config user.email "openscap.ci@gmail.com"
+
+          # Copy artifacts from the main workspace into the TF repo, skipping
+          # the TF checkout itself so the repo is not copied into itself.
+          find .. -maxdepth 1 -mindepth 1 ! -name atex-results-testing-farm \
+            -exec cp -r {} . \; 2>/dev/null || true
+
+          # Commit and push as a tag; force-update the tag so re-runs of the
+          # same PR (synchronize events) do not fail on an existing tag.
+          git add .
+          git commit -m "Test outputs from PR #${{ github.event.pull_request.number }}" || true
+          git tag -f PR${{ github.event.pull_request.number }}
+          git push -f origin PR${{ github.event.pull_request.number }}
+
+      - name: Submit test to Testing Farm
+        if: always()
+        env:
+          TESTING_FARM_API_TOKEN: ${{ secrets.TESTING_FARM_API_TOKEN }}
+        run: |
+          python tests/submit_results_to_testing_farm.py \
+            --repo-url "https://github.com/RHSecurityCompliance/atex-results-testing-farm" \
+            --pr-number "${{ github.event.pull_request.number }}"
+
+      - name: Cleanup temporary tag
+        if: always()
+        working-directory: atex-results-testing-farm
+        env:
+          GH_TOKEN: ${{ secrets.ATEX_RESULTS_TF_REPO_TOKEN }}
+        run: |
+          git push --delete origin PR${{ github.event.pull_request.number }} || true
diff --git a/.github/workflows/automatus-cs9.yaml b/.github/workflows/automatus-cs9.yaml
deleted file mode 100644
index 68cd20e56bb3..000000000000
--- a/.github/workflows/automatus-cs9.yaml
+++ /dev/null
@@ -1,179 +0,0 @@
-name: Automatus CS9
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-cs9-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-jinja2 git python3-deepdiff python3-requests jq python3-pip
- - name: Install deps python
- run: pip install gitpython xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product rhel9 --derivatives
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install cmake ninja-build libopenscap8 libxml2-utils xsltproc python3-jinja2 python3-yaml ansible-lint podman
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-cs9
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product rhel9"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product rhel9"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus-debian12.yaml b/.github/workflows/automatus-debian12.yaml
deleted file mode 100644
index 7f51a63b6ad3..000000000000
--- a/.github/workflows/automatus-debian12.yaml
+++ /dev/null
@@ -1,195 +0,0 @@
-name: Automatus Debian 12
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-debian12-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install -y cmake ninja-build python3-yaml python3-jinja2 git python3-deepdiff python3-requests jq python3-pip libxml2-utils xsltproc ansible-lint wget libdbus-1-dev libdbus-glib-1-dev libcurl4-openssl-dev libgcrypt20-dev libselinux1-dev libxslt1-dev libgconf2-dev libacl1-dev libblkid-dev libcap-dev libxml2-dev libldap2-dev libpcre3-dev python3 swig libxml-parser-perl libxml-xpath-perl libperl-dev libbz2-dev librpm-dev g++ libyaml-dev libxmlsec1-dev libxmlsec1-openssl
- - name: Install deps python
- run: pip3 install gitpython xmldiff compliance-trestle==2.4.0 lxml lxml-stubs requests
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Download OpenSCAP
- run: wget https://github.com/OpenSCAP/openscap/releases/download/1.3.10/openscap-1.3.10.tar.gz
- - name: Extract OpenSCAP
- run: tar xf openscap-1.3.10.tar.gz
- - name: Build OpenSCAP
- run: |
- cd openscap-1.3.10
- cmake -Bbuild -DCMAKE_INSTALL_PREFIX=/usr .
- sudo cmake --build build --target install
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product debian12
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt update && sudo apt install -y cmake ninja-build libxml2-utils xsltproc python3-jinja2 python3-yaml ansible-lint podman wget libdbus-1-dev libdbus-glib-1-dev libcurl4-openssl-dev libgcrypt20-dev libselinux1-dev libxslt1-dev libgconf2-dev libacl1-dev libblkid-dev libcap-dev libxml2-dev libldap2-dev libpcre3-dev python3 swig libxml-parser-perl libxml-xpath-perl libperl-dev libbz2-dev librpm-dev g++ libyaml-dev libxmlsec1-dev libxmlsec1-openssl
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Download OpenSCAP
- run: wget https://github.com/OpenSCAP/openscap/releases/download/1.3.10/openscap-1.3.10.tar.gz
- - name: Extract OpenSCAP
- run: tar xf openscap-1.3.10.tar.gz
- - name: Build OpenSCAP
- run: |
- cd openscap-1.3.10
- cmake -Bbuild -DCMAKE_INSTALL_PREFIX=/usr .
- sudo cmake --build build --target install
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-debian12
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product debian12"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus-sanity.yaml b/.github/workflows/automatus-sanity.yaml
deleted file mode 100644
index 23b4b9f584be..000000000000
--- a/.github/workflows/automatus-sanity.yaml
+++ /dev/null
@@ -1,82 +0,0 @@
-name: Automatus Sanity
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-fedora-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-jinja2 git python3-pip python3-setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Build product
- run: ./build_product fedora --debug
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
-
- validate-automatus-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install cmake ninja-build libopenscap8 libxml2-utils xsltproc python3-jinja2 python3-yaml ansible-lint podman
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Generate id_rsa key
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-fedora
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get Datastream
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- with:
- name: ${{ env.DATASTREAM }}
- - name: Check One Rule
- run: ./tests/automatus.py rule --remove-platforms --make-applicable-in-containers --logdir log_rule --datastream ssg-fedora-ds.xml --container ssg_test_suite package_sudo_installed
- - name: Check One Rule - Ansible
- run: ./tests/automatus.py rule --remove-platforms --make-applicable-in-containers --logdir log_rule_ansible --remediate-using ansible --datastream ssg-fedora-ds.xml --container ssg_test_suite file_owner_etc_passwd
- - name: Check Profile Mode
- run: ./tests/automatus.py profile --remove-platforms --make-applicable-in-containers --logdir log_profile --datastream ssg-fedora-ds.xml --container ssg_test_suite test
- - name: Check Combined Mode
- run: ./tests/automatus.py combined --remove-platforms --make-applicable-in-containers --logdir log_combined --datastream ssg-fedora-ds.xml --container ssg_test_suite test
- - name: Check Template Mode
- run: ./tests/automatus.py template --logdir log_template --datastream ssg-fedora-ds.xml --container ssg_test_suite --slice 1 15 file_owner
- - name: Check for ERROR in logs
- run: grep -q "^ERROR" log_rule/test_suite.log log_rule_ansible/test_suite.log log_profile/test_suite.log log_combined/test_suite.log log_template/test_suite.log
- id: check_results
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Fail in case of ERROR present in logs
- if: ${{ steps.check_results.outcome == 'success' }}
- run: |
- [[ -f log_rule/test_suite.log ]] && echo "---------Rule Remediation Logs---------" && cat log_rule/test_suite.log | grep -v "DEBUG - "
- [[ -f log_rule_ansible/test_suite.log ]] && echo "---------Rule Ansible Remediation Logs---------" && cat log_rule_ansible/test_suite.log | grep -v "DEBUG - "
- [[ -f log_profile/test_suite.log ]] && echo "---------Profile Remediation Logs---------" && cat log_profile/test_suite.log | grep -v "DEBUG - "
- [[ -f log_combined/test_suite.log ]] && echo "---------Combined Remediation Logs---------" && cat log_combined/test_suite.log | grep -v "DEBUG - "
- [[ -f log_template/test_suite.log ]] && echo "---------Template Remediation Logs---------" && cat log_template/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus-sle15.yaml b/.github/workflows/automatus-sle15.yaml
deleted file mode 100644
index 6697823d4e6b..000000000000
--- a/.github/workflows/automatus-sle15.yaml
+++ /dev/null
@@ -1,187 +0,0 @@
-name: Automatus SLE15
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-sle15-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-latest
- container:
- image: registry.suse.com/bci/bci-base:latest
- steps:
- - name: Update CA certificates
- run: update-ca-certificates
- - name: Zypper refs
- run: zypper refs
- - name: Zypper refresh
- run: zypper refresh
- - name: Install deps
- run: zypper --non-interactive in cmake ninja expat openssh-clients openssh-server openscap-utils tar gzip git python3 python3-rpm python3-pip python3-devel libxml2-tools libxslt-tools python3-PyYAML
- - name: Upgrade pip python
- run: pip install pip --upgrade
- - name: Install deps python
- run: pip install json2html sphinxcontrib.jinjadomain GitPython deepdiff Jinja2 xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product sle15
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install cmake ninja-build libopenscap8 libxml2-utils xsltproc python3-jinja2 python3-yaml ansible-lint podman
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-sle15
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product sle15"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product sle15"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus-ubi8.yaml b/.github/workflows/automatus-ubi8.yaml
deleted file mode 100644
index 1d779992c31c..000000000000
--- a/.github/workflows/automatus-ubi8.yaml
+++ /dev/null
@@ -1,179 +0,0 @@
-name: Automatus UBI8
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-rhel8-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-jinja2 git python3-deepdiff python3-requests jq python3-pip
- - name: Install deps python
- run: pip install gitpython xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product rhel8 --derivatives
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install cmake ninja-build libopenscap8 libxml2-utils xsltproc python3-jinja2 python3-yaml ansible-lint podman
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-ubi8
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product rhel8"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product rhel8"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus-ubuntu2204.yaml b/.github/workflows/automatus-ubuntu2204.yaml
deleted file mode 100644
index d3ec2b1ac718..000000000000
--- a/.github/workflows/automatus-ubuntu2204.yaml
+++ /dev/null
@@ -1,195 +0,0 @@
-name: Automatus Ubuntu 22.04
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-ubuntu2204-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install -y cmake ninja-build python3-yaml python3-jinja2 git python3-deepdiff python3-requests jq python3-pip libxml2-utils xsltproc ansible-lint wget libdbus-1-dev libdbus-glib-1-dev libcurl4-openssl-dev libgcrypt20-dev libselinux1-dev libxslt1-dev libgconf2-dev libacl1-dev libblkid-dev libcap-dev libxml2-dev libldap2-dev libpcre3-dev python3 swig libxml-parser-perl libxml-xpath-perl libperl-dev libbz2-dev librpm-dev g++ libyaml-dev libxmlsec1-dev libxmlsec1-openssl
- - name: Install deps python
- run: pip3 install gitpython xmldiff compliance-trestle==2.4.0 lxml lxml-stubs requests
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Download OpenSCAP
- run: wget https://github.com/OpenSCAP/openscap/releases/download/1.3.10/openscap-1.3.10.tar.gz
- - name: Extract OpenSCAP
- run: tar xf openscap-1.3.10.tar.gz
- - name: Build OpenSCAP
- run: |
- cd openscap-1.3.10
- cmake -Bbuild -DCMAKE_INSTALL_PREFIX=/usr .
- sudo cmake --build build --target install
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product ubuntu2204
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt update && sudo apt install -y cmake ninja-build libxml2-utils xsltproc python3-jinja2 python3-yaml ansible-lint podman wget libdbus-1-dev libdbus-glib-1-dev libcurl4-openssl-dev libgcrypt20-dev libselinux1-dev libxslt1-dev libgconf2-dev libacl1-dev libblkid-dev libcap-dev libxml2-dev libldap2-dev libpcre3-dev python3 swig libxml-parser-perl libxml-xpath-perl libperl-dev libbz2-dev librpm-dev g++ libyaml-dev libxmlsec1-dev libxmlsec1-openssl
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Download OpenSCAP
- run: wget https://github.com/OpenSCAP/openscap/releases/download/1.3.10/openscap-1.3.10.tar.gz
- - name: Extract OpenSCAP
- run: tar xf openscap-1.3.10.tar.gz
- - name: Build OpenSCAP
- run: |
- cd openscap-1.3.10
- cmake -Bbuild -DCMAKE_INSTALL_PREFIX=/usr .
- sudo cmake --build build --target install
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-ubuntu2204
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream $DATASTREAM ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified --product ubuntu2204"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus-ubuntu2404.yml b/.github/workflows/automatus-ubuntu2404.yml
deleted file mode 100644
index fd60bb914b14..000000000000
--- a/.github/workflows/automatus-ubuntu2404.yml
+++ /dev/null
@@ -1,171 +0,0 @@
-name: Automatus Ubuntu 24.04
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-env:
- DATASTREAM: ssg-ubuntu2404-ds.xml
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-24.04
- steps:
- - name: Install build deps
- run: sudo apt-get update && sudo apt-get install -y cmake ninja-build xsltproc libxml2-utils python3-yaml python3-jinja2 openscap-utils
- - name: Install workflow deps
- run: sudo apt install -y git python3-deepdiff python3-requests jq python3-pip
- - name: Install deps python
- run: pip3 install gitpython xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product ubuntu2404 --datastream-only
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- path: build/${{ env.DATASTREAM }}
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-24.04
- steps:
- - name: Install test deps
- run: sudo apt-get update && sudo apt-get install -y cmake ninja-build xsltproc libxml2-utils python3-yaml python3-jinja2 openscap-utils podman
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-ubuntu2404
- working-directory: ./Dockerfiles
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ${{ env.DATASTREAM }}
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream ${{ env.DATASTREAM }} ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream ${{ env.DATASTREAM }} ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/automatus.yaml b/.github/workflows/automatus.yaml
deleted file mode 100644
index 2d317aa12764..000000000000
--- a/.github/workflows/automatus.yaml
+++ /dev/null
@@ -1,179 +0,0 @@
-name: Automatus Fedora
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- build-content:
- name: Build Content
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-jinja2 git python3-deepdiff python3-requests jq python3-pip
- - name: Install deps python
- run: pip install gitpython xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: output.json
- path: output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product ${{steps.product.outputs.prop}} --datastream-only
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ssg-${{steps.product.outputs.prop}}-ds.xml
- path: build/ssg-${{steps.product.outputs.prop}}-ds.xml
- validate-ubuntu:
- name: Run Tests
- needs: build-content
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install cmake ninja-build libopenscap8 libxml2-utils xsltproc python3-jinja2 python3-pip python3-yaml podman
- - name: Install deps python
- run: pip install ansible setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Get cached CTF output
- uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- id: get_ctf_output
- with:
- name: output.json
- # continue even if the file is unavailable that
- # means there are no changes detected by CTF in the previous job
- continue-on-error: true
- - name: Test if there are no content changes
- if: ${{ steps.get_ctf_output.outcome == 'success' }}
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Generate id_rsa key
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ssh-keygen -N '' -t rsa -f ~/.ssh/id_rsa
- - name: Build test suite container
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: podman build --build-arg "CLIENT_PUBLIC_KEY=$(cat ~/.ssh/id_rsa.pub)" -t ssg_test_suite -f test_suite-fedora
- working-directory: ./Dockerfiles
- - name: Get oscap-ssh
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- wget https://raw.githubusercontent.com/OpenSCAP/openscap/maint-1.3/utils/oscap-ssh
- sudo chmod 755 oscap-ssh
- sudo mv -v oscap-ssh /usr/local/bin
- sudo chown root:root /usr/local/bin/oscap-ssh
- rm -f oscap-ssh
- - name: Get rule ids to be tested
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: rules
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'rules'
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Get bash attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: bash
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'bash'
- - name: Get ansible attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: ansible
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'ansible'
- - uses: actions/download-artifact@018cc2cf5baa6db3ef3c5f8a56943fffe632ef53 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ssg-${{steps.product.outputs.prop}}-ds.xml
- - name: Run tests in a container - Bash
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_bash --remediate-using bash --name ssg_test_suite --datastream ssg-${{steps.product.outputs.prop}}-ds.xml ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
- - name: Check for ERROR in logs
- if: ${{steps.bash.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_bash/test_suite.log
- id: check_results_bash
- # when grep returns 1 means it didn't find the ^ERROR string in the test_suite.log file
- # and this means tests finished successfully without errors. So the job needs to keep going.
- # By using continue-on-error: true the "conclusion" parameter is set to true so it's not possible to use
- # it to determine whether the task has failed or succeed. The "outcome" parameter has to be used instead.
- # See the step below
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{steps.bash.outputs.prop == 'True' && steps.check_results_bash.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_bash
- path: logs_bash/
- - name: Run tests in a container - Ansible
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tests/test_rule_in_container.sh --no-make-applicable-in-containers --dontclean --logdir logs_ansible --remediate-using ansible --name ssg_test_suite --datastream ssg-${{steps.product.outputs.prop}}-ds.xml ${{join(fromJSON(steps.rules.outputs.prop))}}
- env:
- ADDITIONAL_TEST_OPTIONS: "--duplicate-templates --remove-fips-certified"
- - name: Check for ERROR in logs
- if: ${{steps.ansible.outputs.prop == 'True' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: grep -q "^ERROR" logs_ansible/test_suite.log
- id: check_results_ansible
- continue-on-error: true
- - name: Upload logs in case of failure
- if: ${{ steps.ansible.outputs.prop == 'True' && steps.check_results_ansible.outcome == 'success' && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: logs_ansible
- path: logs_ansible/
- - name: Fail in case of ERROR present in logs_bash/test_suite.log or logs_ansible/test_suite.log
- if: ${{ (steps.check_results_bash.outcome == 'success' || steps.check_results_ansible.outcome == 'success') && steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |
- [[ -f logs_bash/test_suite.log ]] && echo "---------Bash Remediation Logs---------" && cat logs_bash/test_suite.log | grep -v "DEBUG - "
- [[ -f logs_ansible/test_suite.log ]] && echo "---------Ansible Remediation Logs---------" && cat logs_ansible/test_suite.log | grep -v "DEBUG - "
- exit 1
diff --git a/.github/workflows/ci_lint.yml b/.github/workflows/ci_lint.yml
deleted file mode 100644
index 73e8f279c7e4..000000000000
--- a/.github/workflows/ci_lint.yml
+++ /dev/null
@@ -1,63 +0,0 @@
-name: CI Lint
-on:
- pull_request:
- branches: [master, 'stabilization*']
-permissions:
- contents: read
-jobs:
- yamllint:
- name: Yaml Lint on Changed Controls and Profiles Files
- runs-on: ubuntu-latest
- steps:
- - name: Install Git
- run: sudo apt-get update && sudo apt-get install -y git
-
- - name: Checkout Repository
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- with:
- repository: ${{ github.repository }}
- fetch-depth: 0
-
- - name: Detect Files Changed by PR
- id: changed_files
- run: |
- repo=${{ github.repository }}
- pr_number=${{ github.event.pull_request.number }}
- # Fetch all pages of the files for the pull request
- url="repos/$repo/pulls/$pr_number/files"
- response=$(gh api "$url" --paginate)
- echo "$response" | jq -r '.[].filename' > filenames.txt
- cat filenames.txt
-
- if grep -q "controls/" filenames.txt; then
- echo "CONTROLS_CHANGES=true" >> $GITHUB_ENV
- else
- echo "CONTROLS_CHANGES=false" >> $GITHUB_ENV
- fi
- if grep -q "\.profile" filenames.txt; then
- echo "PROFILES_CHANGES=true" >> $GITHUB_ENV
- else
- echo "PROFILES_CHANGES=false" >> $GITHUB_ENV
- fi
- env:
- GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
- - name: Install yamllint
- if: ${{ env.CONTROLS_CHANGES == 'true' || env.PROFILES_CHANGES == 'true' }}
- run: pip install yamllint
-
- - name: Run yamllint in Control Files Modified by PR
- if: ${{ env.CONTROLS_CHANGES == 'true' }}
- run: |
- for control_file in $(cat filenames.txt | grep "controls/"); do
- echo "Running yamllint on $control_file..."
- yamllint "$control_file"
- done
-
- - name: Run yamllint in Profile Files Modified by PR
- if: ${{ env.PROFILES_CHANGES == 'true' }}
- run: |
- for profile_file in $(cat filenames.txt | grep "\.profile"); do
- echo "Running yamllint on $profile_file..."
- yamllint "$profile_file"
- done
diff --git a/.github/workflows/compare-ds-build.yml b/.github/workflows/compare-ds-build.yml
deleted file mode 100644
index a5e8829dc6b9..000000000000
--- a/.github/workflows/compare-ds-build.yml
+++ /dev/null
@@ -1,68 +0,0 @@
-name: Compare DS Build
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-permissions:
- contents: read
-jobs:
- compare-ds-build-content:
- name: Compare DS Build Content
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-setuptools
- python3-jinja2 git python3-deepdiff python3-requests jq python3-pip
- python3-setuptools
- - name: Install deps python
- run: pip install gitpython xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd
- with:
- ref: ${{ github.event.pull_request.head.sha }}
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Build product ${{ github.base_ref }} (${{ steps.fork_point.outputs.FORK_POINT }})
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product ${{steps.product.outputs.prop}} --datastream-only
- - name: Create Artifact
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: |-
- mkdir artifacts
- cp build/ssg-${{steps.product.outputs.prop}}-ds.xml artifacts
- cp output.json artifacts
- tar -czvf artifacts.tar.gz artifacts
- - name: Upload artifacts
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: pr-artifacts-${{ github.event.pull_request.head.sha }}
- retention-days: 1
- path: artifacts.tar.gz
diff --git a/.github/workflows/compare-ds.yaml b/.github/workflows/compare-ds.yaml
deleted file mode 100644
index 47496a8c500d..000000000000
--- a/.github/workflows/compare-ds.yaml
+++ /dev/null
@@ -1,171 +0,0 @@
-name: Compare DS
-on:
- workflow_run:
- workflows: ["Compare DS Build"]
- types:
- - completed
- branches: [master, 'stabilization*']
-permissions:
- pull-requests: write
- contents: read
-jobs:
- build-content:
- name: Generate Diff
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-setuptools python3-jinja2 git python3-deepdiff python3-requests jq python3-pip python3-setuptools
- - name: Install deps python
- run: pip install gitpython xmldiff
- - name: Checkout master
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repo: ComplianceAsCode/content
- ref: master
- fetch-depth: 0
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
- - name: Download built product ${{ github.base_ref }} (${{ steps.fork_point.outputs.FORK_POINT }})
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- uses: actions/download-artifact@v6
- with:
- name: pr-artifacts-${{ github.event.workflow_run.head_sha }}
- path: pr_artifacts
- - name: Unpack built artifacts
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: tar -xvzf pr_artifacts/artifacts.tar.gz -C pr_artifacts/unpacked_artifacts
- - name: Build product
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: ./build_product ${{steps.product.outputs.prop}} --datastream-only
- - name: Compare datastreams
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: utils/compare_ds.py pr_artifacts/unpacked_artifacts/ssg-${{steps.product.outputs.prop}}-ds.xml build/ssg-${{steps.product.outputs.prop}}-ds.xml | tee diff.log
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Test if there are datastream changes
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: echo "COMPARE_DS_OUTPUT_SIZE=$(stat --printf="%s" diff.log)" >> $GITHUB_OUTPUT
- id: compare_ds
- - name: Print datastream changes if any
- if: ${{ steps.compare_ds.outputs.COMPARE_DS_OUTPUT_SIZE != '0'}}
- run: cat diff.log
- - name: Get diff.log
- if: ${{ steps.compare_ds.outputs.COMPARE_DS_OUTPUT_SIZE != '0'}}
- id: diff
- run: |
- body=$(cat diff.log)
- EOF=$(dd if=/dev/urandom bs=15 count=1 status=none | base64)
- echo "log<<$EOF" >> "$GITHUB_OUTPUT"
- echo "${body:0:65000}" >> "$GITHUB_OUTPUT"
- echo "$EOF" >> "$GITHUB_OUTPUT"
- - name: Find Comment
- uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
- id: fc
- with:
- issue-number: ${{ github.event.pull_request.number }}
- comment-author: 'github-actions[bot]'
- body-includes: This datastream diff is auto generated by the check
- - name: Create or update comment
- if: ${{ steps.compare_ds.outputs.COMPARE_DS_OUTPUT_SIZE != '0' && steps.compare_ds.outputs.COMPARE_DS_OUTPUT_SIZE <= 65000 }}
- uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v4
- with:
- comment-id: ${{ steps.fc.outputs.comment-id }}
- issue-number: ${{ github.event.pull_request.number }}
- body: |
- This datastream diff is auto generated by the check `Compare DS/Generate Diff`
-
- Click here to see the full diff
-
- ```diff
- ${{ steps.diff.outputs.log }}
- ```
-
-
- edit-mode: replace
- - name: Create or update a trimmed comment
- if: ${{ steps.compare_ds.outputs.COMPARE_DS_OUTPUT_SIZE > 65000 }}
- uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v4
- with:
- comment-id: ${{ steps.fc.outputs.comment-id }}
- issue-number: ${{ github.event.pull_request.number }}
- body: |
- This datastream diff is auto generated by the check `Compare DS/Generate Diff`.
- Due to the excessive size of the diff, it has been trimmed to fit the 65535-character limit.
-
- Click here to see the trimmed diff
-
- ```diff
- ${{ steps.diff.outputs.log }}
-
- ... The diff is trimmed here ...
- ```
-
-
- edit-mode: replace
- - name: Delete existing comment in case new commits trigger no changes in Compare DS tool
- if: ${{ (steps.compare_ds.outputs.COMPARE_DS_OUTPUT_SIZE == '0' || steps.ctf.outputs.CTF_OUTPUT_SIZE == '0') && steps.fc.outputs.comment-id != 0 }}
- uses: jungwinter/comment@fda92dbcb5e7e79cccd55ecb107a8a3d7802a469 # v1
- with:
- type: delete
- comment_id: ${{ steps.fc.outputs.comment-id }}
- token: ${{ secrets.GITHUB_TOKEN }}
- - name: Compare Ansible playbook shell commands
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: utils/ansible_shell_diff.py ssg-${{steps.product.outputs.prop}}-ds.xml build/ssg-${{steps.product.outputs.prop}}-ds.xml | tee diff.log
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Test if there are Ansible shell module changes
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: echo "SHELL_DIFF_OUTPUT_SIZE=$(stat --printf="%s" diff.log)" >> $GITHUB_OUTPUT
- id: ansible_shell_diff
- - name: Find Comment
- uses: peter-evans/find-comment@3eae4d37986fb5a8592848f6a574fdf654e61f9e # v3
- id: shell_diff
- with:
- issue-number: ${{ github.event.pull_request.number }}
- comment-author: 'github-actions[bot]'
- body-includes: Change in Ansible 'shell' module found.
- - name: Create comment
- if: ${{ steps.ansible_shell_diff.outputs.SHELL_DIFF_OUTPUT_SIZE != '0' && steps.shell_diff.outputs.comment-id == 0 }}
- uses: peter-evans/create-or-update-comment@e8674b075228eee787fea43ef493e45ece1004c9 # v4
- with:
- issue-number: ${{ github.event.pull_request.number }}
- body: |
- Change in Ansible `shell` module found.
-
- Please consider using more suitable Ansible module than `shell` if possible.
- - name: Delete existing comment in case new commits trigger no changes in Ansible shell module
- if: ${{ (steps.ansible_shell_diff.outputs.SHELL_DIFF_OUTPUT_SIZE == '0' || steps.ctf.outputs.CTF_OUTPUT_SIZE == '0') && steps.shell_diff.outputs.comment-id != 0 }}
- uses: jungwinter/comment@fda92dbcb5e7e79cccd55ecb107a8a3d7802a469 # v1
- with:
- type: delete
- comment_id: ${{ steps.shell_diff.outputs.comment-id }}
- token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ctf.yaml b/.github/workflows/ctf.yaml
deleted file mode 100644
index eb017decef0b..000000000000
--- a/.github/workflows/ctf.yaml
+++ /dev/null
@@ -1,49 +0,0 @@
-name: Gating
-on:
- pull_request_target:
- branches: [ master, 'stabilization*' ]
-jobs:
- content-test-filtering:
- name: Content Test Filtering on Ubuntu Latest
- runs-on: ubuntu-latest
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install git python3-jinja2 python3-yaml python3-setuptools python3-deepdiff python3-git python3-github python3-requests xmldiff python3-setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- ref: ${{ github.event.pull_request.head.sha }}
- fetch-depth: 0
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Checkout fork point
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- ref: ${{ steps.fork_point.outputs.FORK_POINT }}
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'output.json'
- prop_path: 'product'
diff --git a/.github/workflows/gate-lint-ansible-roles.yaml b/.github/workflows/gate-lint-ansible-roles.yaml
deleted file mode 100644
index d31c0cb968c5..000000000000
--- a/.github/workflows/gate-lint-ansible-roles.yaml
+++ /dev/null
@@ -1,31 +0,0 @@
-name: Gate (AR / RHEL)
-on:
- pull_request:
- branches: [ 'master' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- validate-fedora-ar:
- name: Build, Lint Ansible Roles on Fedora Latest (Container)
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make ninja-build openscap-utils python3-pyyaml python3-setuptools python3-jinja2 python3-pygithub ansible ansible-lint libxslt git python3-setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Configure
- run: cmake -DSSG_PRODUCT_RHEL8=ON -DSSG_PRODUCT_RHEL9=ON -DSSG_PRODUCT_RHEL10=ON -G Ninja ..
- working-directory: ./build
- - name: Build
- run: ninja -j2 rhel10-profile-playbooks rhel9-profile-playbooks rhel8-profile-playbooks
- working-directory: ./build
- - name: Build Ansible Roles
- run: PYTHONPATH=. python3 utils/ansible_playbook_to_role.py --build-playbooks-dir ./build/ansible/ --dry-run ./build/ansible_roles
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Lint Ansible Roles
- run: ansible-lint -x 204 -x experimental -x command-instead-of-module ./build/ansible_roles/*
diff --git a/.github/workflows/gate.yaml b/.github/workflows/gate.yaml
deleted file mode 100644
index 44b056d3a8d1..000000000000
--- a/.github/workflows/gate.yaml
+++ /dev/null
@@ -1,145 +0,0 @@
-name: Gate
-on:
- merge_group:
- branches: [ 'master' ]
- push:
- branches: ['*', '!stabilization*', '!stable*', '!master' ]
- pull_request:
- branches: [ 'master', 'stabilization*', 'oscal-update-*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- validate-sle:
- name: Build, Test on SLE 15 (Container)
- runs-on: ubuntu-latest
- container:
- image: registry.suse.com/bci/bci-base:latest
- steps:
- - name: Update CA certificates
- run: update-ca-certificates
- - name: Zypper add factory repo - to install bats and ShellCheck
- run: zypper --non-interactive ar https://download.opensuse.org/repositories/openSUSE:/Backports:/SLE-15-SP5/standard/openSUSE:Backports:SLE-15-SP5.repo
- - name: Zypper auto import keys
- run: zypper --gpg-auto-import-keys --non-interactive ref
- - name: Zypper refs
- run: zypper refs
- - name: Zypper refresh
- run: zypper refresh
- - name: Install Deps
- run: zypper install -y git cmake make bats openscap-utils python3 python3-rpm python3-pip python3-devel python3-PyYAML python3-Jinja2 python3-setuptools libxslt-tools libxml2-tools ShellCheck
- - name: Upgrade pip python
- run: pip install pip --upgrade
- - name: Install deps python
- run: pip install pytest pytest-cov
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Build
- run: ./build_product sle12 sle15
- - name: Test
- run: ctest -j2 --output-on-failure -E unique-stigids
- working-directory: ./build
-
- validate-suse:
- name: Build, Test on OpenSUSE Leap 15 (Container)
- runs-on: ubuntu-latest
- container:
- image: opensuse/leap:15
- steps:
- - name: Install Deps
- run: zypper install -y git cmake make openscap-utils python3-PyYAML bats python3-pytest python3-pytest-cov python3-Jinja2 python3-setuptools libxslt-tools libxml2-tools ShellCheck
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Build
- run: ./build_product opensuse
- env:
- ADDITIONAL_CMAKE_OPTIONS: "-DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF"
- - name: Test
- run: ctest -j2 --output-on-failure -E unique-stigids
- working-directory: ./build
-
- validate-debian:
- name: Build, Test on Debian 12 (Container)
- runs-on: ubuntu-latest
- container:
- image: debian:bookworm
- steps:
- - name: Update the package repository
- run: apt-get update
- - name: Install Deps
- run: apt-get install -y ansible-lint bats check cmake openscap-scanner openscap-utils libxml2-utils ninja-build python3-pip xsltproc libxslt1-dev libxml2-dev zlib1g-dev python3.11-venv
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Upgrade pip python
- run: pip install --upgrade pip --break-system-packages
- - name: Install deps python
- run: pip3 install -r requirements.txt -r test-requirements.txt --ignore-installed PyYAML PyGithub --break-system-packages
- - name: Build
- env:
- ADDITIONAL_CMAKE_OPTIONS: "-DSSG_ANSIBLE_PLAYBOOKS_PER_RULE_ENABLED=ON -DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF"
- run: |-
- ./build_product debian11 debian12 debian13
- - name: Test
- working-directory: ./build
- run: ctest -j2 --output-on-failure -E unique-stigids
-
- validate-ubuntu-22-04:
- name: Build, Test on Ubuntu 22.04
- runs-on: ubuntu-22.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install -y cmake ninja-build libopenscap8 libxml2-utils xsltproc ansible-lint bats python3-github python3-jinja2 python3-pip python3-pytest python3-pytest-cov python3-setuptools python3-yaml shellcheck
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install deps python
- run: pip3 install -r requirements.txt -r test-requirements.txt
- - name: Build
- env:
- ADDITIONAL_CMAKE_OPTIONS: "-DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF"
- run: |-
- ./build_product ubuntu2204
- - name: Test
- run: ctest -j2 --output-on-failure -E unique-stigids
- working-directory: ./build
-
- validate-ubuntu-24-04:
- name: Build, Test on Ubuntu 24.04
- runs-on: ubuntu-24.04
- steps:
- - name: Install Deps
- run: sudo apt-get update && sudo apt-get install -y cmake ninja-build openscap-utils libxml2-utils xsltproc ansible-lint bats python3-github python3-jinja2 python3-pip python3-pytest python3-pytest-cov python3-setuptools python3-yaml shellcheck
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install deps python
- run: pip3 install -r requirements.txt -r test-requirements.txt
- - name: Build
- env:
- ADDITIONAL_CMAKE_OPTIONS: "-DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF"
- run: |-
- ./build_product ubuntu2404
- - name: Test
- run: ctest -j2 --output-on-failure -E unique-stigids
- working-directory: ./build
-
- validate-fedora-rawhide:
- name: Build, Test on Fedora Rawhide (Container)
- runs-on: ubuntu-latest
- container:
- image: registry.fedoraproject.org/fedora:rawhide
- steps:
- - name: Run Updates
- run: dnf update -y
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils bats ansible python3-pip ShellCheck git python3-devel gcc-c++ libxml2-devel libxslt-devel python3-setuptools gawk
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install deps python
- run: pip install -r requirements-base.txt -r test-requirements.txt
- - name: Build
- run: |-
- ./build_product al2023 alinux2 alinux3 anolis23 anolis8 fedora firefox ocp4 rhcos4 rhel8 rhel9 rhel10
- env:
- ADDITIONAL_CMAKE_OPTIONS: "-DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF"
- - name: Test
- run: ctest -j2 --output-on-failure -E unique-stigids
- working-directory: ./build
diff --git a/.github/workflows/gate_fedora.yml b/.github/workflows/gate_fedora.yml
deleted file mode 100644
index 9c32cca23dbe..000000000000
--- a/.github/workflows/gate_fedora.yml
+++ /dev/null
@@ -1,84 +0,0 @@
-name: Gate Fedora
-on:
- merge_group:
- branches: [ 'master' ]
- push:
- branches: ['*', '!stabilization*', '!stable*', 'master' ]
- pull_request:
- branches: [ 'master', 'stabilization*', 'oscal-update-*' ]
-concurrency:
- group: ${{ github.workflow }}-fedora-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- validate-fedora:
- name: Build, Test on Fedora Latest (Container)
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml bats ansible python3-pip ShellCheck git gcc gcc-c++ python3-devel libxml2-devel libxslt-devel python3-setuptools gawk
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install deps python
- run: pip install pcre2==0.4.0 -r requirements.txt -r test-requirements.txt
- - name: Build
- run: |-
- ./build_product -j2 \
- al2023 \
- alinux2 \
- alinux3 \
- almalinux9 \
- anolis23 \
- anolis8 \
- eks \
- example \
- fedora \
- firefox \
- ocp4 \
- ol7 \
- ol8 \
- ol9 \
- openembedded \
- openeuler2203 \
- rhcos4 \
- rhel8 \
- rhel9 \
- rhel10 \
- rhv4 \
- env:
- ADDITIONAL_CMAKE_OPTIONS: "-DSSG_ANSIBLE_PLAYBOOKS_PER_RULE_ENABLED:BOOL=ON -DSSG_SCAP_VALIDATION_ENABLED:BOOL=OFF -DENABLE_CHECK_RULE_REMOVAL:BOOL=ON -DOLD_RELEASE_DIR=/__w/content/content/old_release -DENABLE_PYTHON_COVERAGE:BOOL=ON"
- - name: Get Latest Release
- uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd # v8.0.0
- with:
- script: |
- const fs = require('fs');
- const release = await github.rest.repos.getLatestRelease({owner: 'ComplianceAsCode', repo: 'content'})
- const tag = release.data.tag_name;
- const version = tag.substring(1)
- const builtUrl = `https://github.com/ComplianceAsCode/content/releases/download/${tag}/scap-security-guide-${version}.zip`
- const downloadedResponse = await fetch(builtUrl);
- if (!downloadedResponse.ok) {
- throw new Error(`Failed to download: ${downloadedResponse.statusText}`);
- }
- const buffer = await downloadedResponse.arrayBuffer();
- const artifactName = "/__w/content/content/old_release.zip"
- fs.writeFileSync(artifactName, Buffer.from(buffer));
- - name: Extract old release
- run: |-
- unzip /__w/content/content/old_release.zip -d /__w/content/content/old_release
- mv /__w/content/content/old_release/*/* /__w/content/content/old_release/
- - name: Test
- run: ctest -j2 --output-on-failure -E unique-stigids
- working-directory: ./build
- - name: "Set git safe directory, ref: https://github.com/actions/checkout/issues/760"
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Upload coverage to Qlty # Requires: git package
- if: ${{ github.repository == 'ComplianceAsCode/content' }}
- uses: qltysh/qlty-action/coverage@a19242102d17e497f437d7466aa01b528537e899 # v2.2.0
- with:
- token: qltcp_kdIPsqNZzW5rYoxq
- files: build/tests/coverage.xml
- strip-prefix: /__w/content/content
- - name: Validate gitmailmap
- run: grep -E "\S" .mailmap | grep -Ev '^#' | git check-mailmap --stdin
diff --git a/.github/workflows/gate_thin_ds.yml b/.github/workflows/gate_thin_ds.yml
deleted file mode 100644
index f53cc1be0fad..000000000000
--- a/.github/workflows/gate_thin_ds.yml
+++ /dev/null
@@ -1,29 +0,0 @@
-name: Gate Thin DS
-on:
- merge_group:
- branches: [ 'master' ]
- push:
- branches: ['*', '!stabilization*', '!stable*', 'master' ]
- pull_request:
- branches: [ 'master', 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-fedora-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- build-and-test-thin-ds:
- name: Build, Test on Fedora Latest (Container)
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml bats ansible python3-pip ShellCheck git gcc gcc-c++ python3-devel python3-lxml python3-pytest python3-setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install deps python
- # pytest-xdist is used for parallel execution of thin ds test
- run: pip install pcre2==0.4.0 pytest-xdist -r requirements.txt -r test-requirements.txt
- - name: Build
- run: ./build_product rhel9 --thin
- - name: Test
- run: python3 -m pytest -n auto tests/test_thin_ds.py
diff --git a/.github/workflows/gh-pages.yaml b/.github/workflows/gh-pages.yaml
deleted file mode 100644
index 8b07f4b81a41..000000000000
--- a/.github/workflows/gh-pages.yaml
+++ /dev/null
@@ -1,68 +0,0 @@
-name: Github Pages
-on:
- push:
- branches: [ 'master' ]
- pull_request:
- branches: [ 'master', 'oscal-update-*' ]
- merge_group:
- branches: [ 'master' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- publish:
- name: Publish stats, tables and guides
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- env:
- PAGES_DIR: __pages
- steps:
- - name: Install Deps
- run: dnf install -y cmake git ninja-build openscap-utils python3-pyyaml python3-jinja2 python3-pytest ansible-lint libxslt python3-pip rsync python3-lxml python3-setuptools
- - name: Install deps python
- run: pip3 install json2html prometheus_client
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Build
- run: cmake .. -G Ninja -DCMAKE_BUILD_TYPE=Debug
- working-directory: ./build
- - name: Build Guides and Mapping Tables
- run: ninja -j2
- working-directory: ./build
- - name: Build Statistics
- run: ninja html-stats html-profile-stats -j2
- working-directory: ./build
- - name: Render Policies (Using control files)
- run: ninja render-policies -j2
- working-directory: ./build
- - name: Generate Prometheus Metrics
- run: utils/controleval_metrics.py prometheus -p fedora ocp4 rhcos4 rhel10 rhel9 rhel8 sle12 sle15 -f ./build/policies_metrics
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Generate HTML pages
- run: utils/generate_html_pages.sh $PAGES_DIR
- shell: bash
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Configure git to trust the workspace despite the different owner
- run:
- git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Deploy
- if: ${{ github.event_name == 'push' && github.repository == 'ComplianceAsCode/content' && github.ref == 'refs/heads/master' }}
- uses: JamesIves/github-pages-deploy-action@4a3abc783e1a24aeb44c16e869ad83caf6b4cc23 # v4.7.4
- with:
- branch: main # The branch the action should deploy to.
- folder: ${{ env.PAGES_DIR }} # The folder the action should deploy.
- clean-exclude: srg_mapping/*
- repository-name: ComplianceAsCode/content-pages
- single-commit: true
- token: ${{ secrets.CONTENT_PAGES_TOKEN }}
- git-config-name: openscap-ci
- git-config-email: openscap-ci@gmail.com
- - name: Upload artifact if the event is pull request
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: built-content
- path: ${{ env.PAGES_DIR }}
diff --git a/.github/workflows/nightly_build.yml b/.github/workflows/nightly_build.yml
deleted file mode 100644
index c8471f6fb070..000000000000
--- a/.github/workflows/nightly_build.yml
+++ /dev/null
@@ -1,40 +0,0 @@
-name: Nightly builds with OVAL 5.11
-on:
- schedule:
- # Run daily at 03:00
- - cron: "0 3 * * *"
-jobs:
- nightly-fedora:
- name: Nightly build on Fedora Latest (Container)
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Dependencies
- run: dnf install -y cmake ninja-build openscap-utils python3-pip python3-devel gcc-c++ ansible-lint libxslt ansible python3-setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install python deps
- run: pip install -r requirements-base.txt -r test-requirements.txt
- - name: Configure
- run: cmake -G Ninja ..
- working-directory: ./build
- - name: Build All
- run: ninja -j2 all
- working-directory: ./build
- - name: Build ZIP
- run: ninja -j2 zipfile
- working-directory: ./build
- - name: Test
- run: ctest -j2 --output-on-failure -E linkchecker
- working-directory: ./build
- - name: Build Package Source
- run: ninja -j2 package_source
- working-directory: ./build
- - name: 'Upload Artifact'
- uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- with:
- name: Nightly Build
- path: |
- build/zipfile/scap-security-guide-*.zip
- build/zipfile/scap-security-guide-*.zip.sha512
diff --git a/.github/workflows/no_merge_commits.yaml b/.github/workflows/no_merge_commits.yaml
deleted file mode 100644
index 1a9e4af7858a..000000000000
--- a/.github/workflows/no_merge_commits.yaml
+++ /dev/null
@@ -1,24 +0,0 @@
-name: Merge Commit Check
-
-on:
- pull_request:
-
-concurrency:
- group: ${{ github.workflow }}-no-merges-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-
-permissions:
- contents: read
- pull-requests: read
-
-jobs:
- validate-merge-commits:
- name: Ensure No Merge Commits
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Check for Merge Commits
- uses: NexusPHP/no-merge-commits@8c8c0fc273903ab75038323e3959179e89db480b # v2.2.1
- with:
- token: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ocp-test-profiles.yaml b/.github/workflows/ocp-test-profiles.yaml
deleted file mode 100644
index 6111ecc18ab6..000000000000
--- a/.github/workflows/ocp-test-profiles.yaml
+++ /dev/null
@@ -1,140 +0,0 @@
-name: Trigger OCP Tests When Relevant
-on:
- pull_request:
- branches: [ master, 'stabilization*' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- check-and-trigger-ocp-prow-tests:
- name: Identify rules changed in PR and test them in OCP Prow
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- permissions:
- pull-requests: write
- steps:
- - name: Install Deps
- run: dnf install -y cmake make openscap-utils python3-pyyaml python3-jinja2 git python3-deepdiff python3-requests jq python3-pip nodejs
- - name: Install deps python
- run: pip install gitpython xmldiff
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- fetch-depth: 0
- - name: Checkout (CTF)
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- with:
- repository: ComplianceAsCode/content-test-filtering
- path: ctf
- # https://github.com/actions/checkout/issues/766
- - name: Set git safe directory
- run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Find forking point
- env:
- BASE_BRANCH: ${{ github.base_ref }}
- run: echo "FORK_POINT=$(git merge-base origin/$BASE_BRANCH ${{ github.event.pull_request.head.sha }})" >> $GITHUB_OUTPUT
- id: fork_point
- - name: Detect content changes in the PR
- run: python3 ./ctf/content_test_filtering.py pr --base ${{ steps.fork_point.outputs.FORK_POINT }} --remote_repo ${{ github.server_url }}/${{ github.repository }} --verbose --rule --output json ${{ github.event.pull_request.number }} > ctf-output.json
- - name: Test if there are no content changes
- run: echo "CTF_OUTPUT_SIZE=$(stat --printf="%s" ctf-output.json)" >> $GITHUB_OUTPUT
- id: ctf
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- with:
- name: ctf-output
- path: ctf-output.json
- - name: Print changes to content detected if any
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- run: cat ctf-output.json
- - name: Get product attribute
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' }}
- id: product
- uses: notiz-dev/github-action-json-property@a5a9c668b16513c737c3e1f8956772c99c73f6e8 # v0.2.0
- with:
- path: 'ctf-output.json'
- prop_path: 'product'
-
- - name: Build product OCP and RHCOS content
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' && (contains(steps.product.outputs.prop, 'ocp4') || contains(steps.product.outputs.prop, 'rhcos4')) }}
- run: ./build_product -d ocp4 rhcos4
-
- - name: Process list of rules into a list of product-profiles to test
- if: ${{ steps.ctf.outputs.CTF_OUTPUT_SIZE != '0' && (contains(steps.product.outputs.prop, 'ocp4') || contains(steps.product.outputs.prop, 'rhcos4')) }}
- id: profiles_to_test
- run: |
- # Let's grab the profiles for which we have a CI job configured
- PROW_CONFIG=https://raw.githubusercontent.com/openshift/release/refs/heads/master/ci-operator/config/ComplianceAsCode/content/ComplianceAsCode-content-master.yaml
- curl -o prow_config.yaml ${PROW_CONFIG}
- readarray -t TESTED_PROFILES <<< $(grep -r PROFILE= ./prow_config.yaml | sort -u | sed 's/.*export PROFILE=\(.*\)/\1/')
-
- RULES=$(cat ctf-output.json | jq -r '.rules[]')
-
- # Let's grab one profile for each changed rule
- PROFILES=()
- ALL_PROFILES=()
-
- # Let's consistently grab a random profile for each rule, in order to do that we use the
- # PR number as the seed
- RANDOM=${{ github.event.pull_request.number }}
- for rule in $RULES; do
- readarray -t TEMP <<< $(grep -lr -e "- ${rule}\$" build/*/profiles | sort)
-
- ELIGIBLE_PROFILES=()
- for index in "${!TEMP[@]}"; do
- for tp in ${TESTED_PROFILES[@]}; do
- if [[ ${TEMP[$index]} =~ build\/.*\/profiles\/${tp}\.profile ]]; then
- ELIGIBLE_PROFILES+=(${TEMP[$index]});
- fi
- done
- done
-
- ALL_PROFILES+=(${ELIGIBLE_PROFILES[@]})
- PROFILES+=(${ELIGIBLE_PROFILES[$(($RANDOM%(${#ELIGIBLE_PROFILES[@]})))]})
- done
-
- # Sort and ensure that the profiles are unique
- readarray -t UNIQUE_PROFILES <<< $(echo ${PROFILES[@]} | tr ' ' '\n' | sort -u | tr '\n' ' ')
- readarray -t ALL_UNIQUE_PROFILES <<< $(echo ${ALL_PROFILES[@]} | tr ' ' '\n' | sort -u | tr '\n' ' ')
-
- # Craft a command to trigger tests
- COMMAND=$(for profile in ${UNIQUE_PROFILES[@]}; do
- echo ${profile} | sed 's/build\/\(.*\)\/profiles\/\(.*\)\.profile/\/test e2e-aws-\1-\2/'
- done)
-
- # COMMAND is a multiline string, so we need to set it this way
- {
- echo 'TEST_PROFILES_COMMAND<> $GITHUB_OUTPUT
-
- # Format all identified profiles for display
- ALL_PROFILES_FORMATTED=$(for profile in ${ALL_UNIQUE_PROFILES[@]}; do
- echo ${profile} | sed 's/build\/\(.*\)\/profiles\/\(.*\)\.profile/- `-e2e-aws-\1-\2`/'
- done)
- {
- echo 'ALL_PROFILES_COMMENT<> $GITHUB_OUTPUT
- - uses: thollander/actions-comment-pull-request@24bffb9b452ba05a4f3f77933840a6a841d1b32b # v2
- if: ${{ steps.profiles_to_test.outputs.TEST_PROFILES_COMMAND != '' }}
- with:
- message: |
- :robot: Trigger prow tests based on changed rules
-
- ${{ steps.profiles_to_test.outputs.TEST_PROFILES_COMMAND }}
-
- Note: if a test is not started it could be that a CI Job is not configure for that particular profile or product.
-
-
- Click here to see all the relevant profiles
-
- ${{ steps.profiles_to_test.outputs.ALL_PROFILES_COMMENT}}
-
-
- comment-tag: kubernetes_start_prow_tests
- pr-number: ${{ github.event.pull_request.number }}
- mode: recreate
diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml
deleted file mode 100644
index 8ffa13bc76b5..000000000000
--- a/.github/workflows/release.yaml
+++ /dev/null
@@ -1,62 +0,0 @@
-name: Release
-on:
- push:
- tags: [ 'v*.*.*' ]
-jobs:
- release-fedora:
- name: Release on Fedora Latest (Container)
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake ninja-build openscap-utils python3-pip python3-devel gcc-c++ ansible ansible-lint libxslt
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install python deps
- run: pip install -r requirements-base.txt -r test-requirements.txt
- - name: Configure
- run: cmake ..
- working-directory: ./build
- - name: Build All
- run: make -j2 all
- working-directory: ./build
- - name: Build ZIP
- run: make -j2 zipfile
- working-directory: ./build
- - name: Test
- run: ctest -j2 --output-on-failure -E linkchecker
- working-directory: ./build
- - name: Build Package Source
- run: make -j2 package_source
- working-directory: ./build
- - name: Set Version
- id: set_version
- run: |-
- echo "tag=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_OUTPUT
- echo "ver=${GITHUB_REF/refs\/tags\/v/}" >> $GITHUB_OUTPUT
- env:
- GITHUB_REF: ${{ github.ref }}
- - name: Build Changelog
- id: build_changelog
- uses: mikepenz/release-changelog-builder-action@439f79b5b5428107c7688c1d2b0e8bacc9b8792c # v4
- with:
- configuration: .github/workflows/release-changelog.json
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- - name: Release
- uses: softprops/action-gh-release@5be0e66d93ac7ed76da52eca8bb058f665c3a5fe # v2.4.2
- with:
- draft: True
- name: Content ${{ steps.set_version.outputs.ver }}
- tag_name: ${{ steps.set_version.outputs.tag }}
- body: ${{ steps.build_changelog.outputs.changelog }}
- files: |
- build/scap-security-guide-*.tar.bz2
- build/scap-security-guide-*.tar.bz2.sha512
- build/zipfile/scap-security-guide-*.zip
- build/zipfile/scap-security-guide-*.zip.sha512
- build/zipfile/scap-security-guide-*.tar.gz
- build/zipfile/scap-security-guide-*.tar.gz.sha512
- env:
- GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
diff --git a/.github/workflows/ruff.yaml b/.github/workflows/ruff.yaml
deleted file mode 100644
index 7a6f8c654806..000000000000
--- a/.github/workflows/ruff.yaml
+++ /dev/null
@@ -1,23 +0,0 @@
-name: Ruff Gate
-on:
- merge_group:
- branches: [ 'master' ]
- push:
- branches: [ '*', '!stabilization*', '!stable*', '!master' ]
- pull_request:
- branches: [ 'master', 'stabilization*', 'oscal-update-*' ]
-
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- ruff:
- name: Run ruff
- runs-on: ubuntu-latest
- steps:
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd #v5.0.1
- - name: Install ruff
- run: python3 -m pip install ruff
- - name: Run ruff check
- run: ruff check
diff --git a/.github/workflows/srg-mapping-table.yaml b/.github/workflows/srg-mapping-table.yaml
deleted file mode 100644
index c959d25502ca..000000000000
--- a/.github/workflows/srg-mapping-table.yaml
+++ /dev/null
@@ -1,113 +0,0 @@
-name: SRG Mapping Table
-on:
- push:
- branches: [ 'master' ]
- pull_request:
- branches: [ 'master', 'stabilization*', 'oscal-update-*' ]
- merge_group:
- branches: [ 'master' ]
-concurrency:
- group: ${{ github.workflow }}-${{ github.event.number || github.run_id }}
- cancel-in-progress: true
-jobs:
- generate-data:
- name: SRG Mapping Table
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- env:
- PAGES_DIR: __srg_mapping
- steps:
- - name: Install Deps
- run: dnf install -y cmake git ninja-build openscap-utils python3-pyyaml python3-jinja2 python3-pytest ansible-lint libxslt python3-pip rsync python3-setuptools
- - name: Install deps python
- run: pip3 install pandas openpyxl
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Setup Build
- run: cmake .. -G Ninja
- working-directory: ./build
- - name: Build
- run: ninja -j2 rhel10 rhel9 ocp4
- working-directory: ./build
- - name: Build rule dir json
- run: python3 utils/rule_dir_json.py
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Create data directory
- run: mkdir -p $PAGES_DIR
- - name: Generate XLSX for OCP4
- run: python3 utils/create_srg_export.py -c controls/srg_ctr.yml -p ocp4 -m shared/references/disa-ctr-srg-v1r3.xml --out-format xlsx --output $PAGES_DIR/srg-mapping-ocp4.xlsx --prefer-controls
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Generate HTML for OCP4
- run: python3 utils/create_srg_export.py -c controls/srg_ctr.yml -p ocp4 -m shared/references/disa-ctr-srg-v1r3.xml --out-format html --output $PAGES_DIR/srg-mapping-ocp4.html --prefer-controls
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Generate XLSX for RHEL9
- run: python3 utils/create_srg_export.py -c controls/srg_gpos.yml -p rhel9 -m shared/references/disa-os-srg-v3r2.xml --out-format xlsx --output $PAGES_DIR/srg-mapping-rhel9.xlsx
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Generate HTML for RHEL9
- run: python3 utils/create_srg_export.py -c controls/srg_gpos.yml -p rhel9 -m shared/references/disa-os-srg-v3r2.xml --out-format html --output $PAGES_DIR/srg-mapping-rhel9.html
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Generate XLSX for RHEL10
- run: python3 utils/create_srg_export.py -c controls/srg_gpos.yml -p rhel10 -m shared/references/disa-os-srg-v3r2.xml --out-format xlsx --output $PAGES_DIR/srg-mapping-rhel10.xlsx
- env:
- PYTHONPATH: ${{ github.workspace }}
- - name: Generate HTML for RHEL10
- run: python3 utils/create_srg_export.py -c controls/srg_gpos.yml -p rhel10 -m shared/references/disa-os-srg-v3r2.xml --out-format html --output $PAGES_DIR/srg-mapping-rhel10.html
- env:
- PYTHONPATH: ${{ github.workspace }}
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: srg-mapping-rhel9.xlsx
- path: ${{ env.PAGES_DIR }}/srg-mapping-rhel9.xlsx
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: srg-mapping-rhel9.html
- path: ${{ env.PAGES_DIR }}/srg-mapping-rhel9.html
-
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: srg-mapping-rhel10.xlsx
- path: ${{ env.PAGES_DIR }}/srg-mapping-rhel10.xlsx
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: srg-mapping-rhel10.html
- path: ${{ env.PAGES_DIR }}/srg-mapping-rhel10.html
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: srg-mapping-ocp4.xlsx
- path: ${{ env.PAGES_DIR }}/srg-mapping-ocp4.xlsx
- - uses: actions/upload-artifact@330a01c490aca151604b8cf639adc76d48f6c5d4 # v4
- if: ${{ github.event_name == 'pull_request' }}
- with:
- name: srg-mapping-ocp4.html
- path: ${{ env.PAGES_DIR }}/srg-mapping-ocp4.html
- - name: Generate HTML pages
- run: utils/generate_html_index_srg_mapping.sh $PAGES_DIR "rhel10 rhel9 ocp4" # add more products to this list to generate their index
- shell: bash
- - name: Configure git to trust the workspace despite the different owner
- run:
- git config --global --add safe.directory "$GITHUB_WORKSPACE"
- - name: Deploy
- if: ${{ github.event_name == 'push' && github.repository == 'ComplianceAsCode/content' }}
- uses: JamesIves/github-pages-deploy-action@4a3abc783e1a24aeb44c16e869ad83caf6b4cc23 # v4.7.4
- with:
- branch: main # The branch the action should deploy to.
- folder: ${{ env.PAGES_DIR }} # The folder the action should deploy.
- target-folder: srg_mapping
- clean: false
- repository-name: ComplianceAsCode/content-pages
- single-commit: true
- token: ${{ secrets.CONTENT_PAGES_TOKEN }}
- git-config-name: openscap-ci
- git-config-email: openscap-ci@gmail.com
-
diff --git a/.github/workflows/stabilize.yaml b/.github/workflows/stabilize.yaml
deleted file mode 100644
index f512208ee19e..000000000000
--- a/.github/workflows/stabilize.yaml
+++ /dev/null
@@ -1,43 +0,0 @@
-name: Stabilize
-on:
- push:
- branches: [ 'stabilization*' ]
- schedule:
- # Run weekly at 05:00 on Sunday
- - cron: "0 5 * * 0"
-env:
- SCAPVAL_JAR: scapval-1.3.5.jar
- SCAPVAL_FILENAME: SCAP-Content-Validation-Tool-1.3.5
- SCAPVAL_URL: https://csrc.nist.gov/CSRC/media/Projects/Security-Content-Automation-Protocol/tools/scap/1.3/
-jobs:
- stabilize-fedora:
- name: Build and Stabilization Tests on Fedora Latest (Container)
- runs-on: ubuntu-latest
- container:
- image: fedora:latest
- steps:
- - name: Install Deps
- run: dnf install -y cmake ninja-build openscap-utils python3-pyyaml python3-jinja2 python3-pytest ansible libxslt python3-ansible-lint linkchecker java-latest-openjdk unar wget python-unversioned-command git-core python3-setuptools
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Configure
- run: cmake -DSSG_OVAL_SCHEMATRON_VALIDATION_ENABLED=OFF -DANSIBLE_CHECKS=ON -DENABLE_SCAPVAL13=ON -DSCAPVAL_PATH='/opt/scapval/SCAP-Content-Validation-Tool-1.3.5/scapval-1.3.5.jar' ..
- working-directory: ./build
- - name: Build All
- run: make -j2 all
- working-directory: ./build
- - name: Get SCAPVAL
- run: wget $SCAPVAL_URL/$SCAPVAL_FILENAME.zip
- - name: Unpack SCAPVAL
- run: mkdir -p /opt/scapval/ && unar $SCAPVAL_FILENAME.zip -o /opt/scapval/
- - name: Run SCAPVal
- # Runs SCAPVal on all built datastream
- run: ctest -j2 -R scapval --output-on-failure
- - name: Lint Check
- # Performs ansible-lint and yamllint checks on generated ansible playbooks
- run: ctest -j2 -R ansible-playbook --output-on-failure
- working-directory: ./build
- - name: Link Check
- # Performs linkcheck across all build tables and html guides to ensure there are no broken references.
- run: ctest -j2 -R linkchecker --output-on-failure
- working-directory: ./build
diff --git a/.github/workflows/sync-cac-oscal.yml b/.github/workflows/sync-cac-oscal.yml
deleted file mode 100644
index c6e544810c2e..000000000000
--- a/.github/workflows/sync-cac-oscal.yml
+++ /dev/null
@@ -1,388 +0,0 @@
-name: Sync CaC content to OSCAL content
-permissions:
- contents: write
- pull-requests: read
-on:
- push:
- branches:
- - master
-jobs:
- sync-cac-updates-to-oscal-content:
- runs-on: ubuntu-latest
- steps:
- # Step 1: Set up Python 3
- - name: Set up Python 3
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v6.0.0
- with:
- python-version: '3.9'
- # Step 2: Install Git
- - name: Install Git
- run: sudo apt-get update && sudo apt-get install -y git
- # Step 3: Checkout the CaC repo
- - name: Checkout CaC repo
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- with:
- repository: ${{ github.repository }}
- path: cac-content
- - name: Get the commit message and PR number
- run: |
- cd cac-content
- # Get the latest commit message
- COMMIT_MSG=$(git log -1 --pretty=%B)
- # Extract the PR number from the commit message (if it's a merge commit)
- PR_NUMBER=$(echo "$COMMIT_MSG" | grep -oP '#\K\d+')
- if [ -n "$PR_NUMBER" ]; then
- echo "Found PR number: $PR_NUMBER"
- echo "PR_NUMBER=$PR_NUMBER" >> $GITHUB_ENV
- echo "SKIP=false" >> $GITHUB_ENV
- PR_INFO=$(curl -s "https://api.github.com/repos/${{ github.repository }}/pulls/${PR_NUMBER}")
- # Extract PR title from the response
- PR_TITLE=$(echo "$PR_INFO" | jq -r .title)
- echo "PR Title: $PR_TITLE"
- if [[ "$PR_TITLE" == *"Auto-generated PR from OSCAL"* ]]; then
- echo "The PR comes from OSCAL content. The task of Sync CaC content to OSCAL will exit."
- echo "Skipping further checks."
- echo "SKIP=true" >> $GITHUB_ENV
- fi
- fi
- # Step 4: Get the access token for content write permission to OSCAL content
- - name: Get GitHub app token
- if: ${{ env.SKIP == 'false' }}
- uses: actions/create-github-app-token@67018539274d69449ef7c02e8e71183d1719ab42 # v2.1.4
- id: app-token
- with:
- app-id: ${{ secrets.APP_ID }}
- private-key: ${{ secrets.PRIVATE_KEY }}
- owner: ${{ github.repository_owner }}
- repositories: |
- content
- oscal-content
- # Step 5: Checkout complyscribe and setup the environment
- - name: Checkout complyscribe repo
- if: ${{ env.SKIP == 'false' }}
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- with:
- repository: complytime/complyscribe
- path: complyscribe
- - name: Setup complyscribe
- if: ${{ env.SKIP == 'false' }}
- run: |
- cd complyscribe && python3 -m venv venv && source venv/bin/activate
- python3 -m pip install --no-cache-dir "poetry==1.7.1"
- poetry install
- # Step 6: Detect the updates of CAC content
- - name: Detect files changed by PR
- if: ${{ env.SKIP == 'false' }}
- id: changed-files
- run: |
- OWNER="ComplianceAsCode"
- REPO="content"
- # Fetch all pages of the files for the pull request
- url="repos/$OWNER/$REPO/pulls/${{ env.PR_NUMBER }}/files"
- response=$(gh api "$url" --paginate)
- echo "$response" | jq -r '.[].filename' > filenames.txt
- echo "CHANGE_FOUND=false" >> $GITHUB_ENV
- source complyscribe/venv/bin/activate
- cd cac-content
- has_change() {
- local file="$1"
- local key="$2"
- ! python utils/compare_rule_var.py \
- --owner "$OWNER" \
- --repo "$REPO" \
- "${{ env.PR_NUMBER }}" \
- "$file" \
- "$key"
- }
- while IFS= read -r line; do
- # Exclude lines containing 'tests/data/'
- if [[ "$line" == *tests/data/* ]]; then
- continue
- fi
- case "$line" in
- *controls/*|*.profile*)
- echo "$line" >> updated_filenames.txt
- ;;
- *rule.yml)
- if has_change "$line" "title"; then
- echo "Change detected: The title in '$line' was updated."
- echo "$line" >> updated_filenames.txt
- fi
- ;;
- *.var)
- if has_change "$line" "description" || has_change "$line" "options"; then
- echo "Change detected: The description or options in '$line' were updated."
- echo "$line" >> updated_filenames.txt
- fi
- ;;
- esac
- done < ../filenames.txt
- if [[ -f updated_filenames.txt ]]; then
- echo "Shows updated_filenames:"
- cat updated_filenames.txt
- echo "CHANGE_FOUND=true" >> $GITHUB_ENV
- fi
- env:
- GH_TOKEN: ${{ steps.app-token.outputs.token }}
- - name: Checkout OSCAL content repo
- if: ${{ env.CHANGE_FOUND == 'true' }}
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v5.0.1
- with:
- repository: ComplianceAsCode/oscal-content
- path: oscal-content
- token: ${{ steps.app-token.outputs.token }}
- fetch-depth: 0
- - name: Set RH_PRODUCTS
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- echo "RH_PRODUCTS=(rhel8 rhel9 rhel10 ocp4 fedora)" >> $GITHUB_ENV
- # Step 7: Get profiles, controls and level mapping
- - name: Get profiles, controls and level mapping
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd complyscribe && source venv/bin/activate
- RH_PRODUCTS=${{ env.RH_PRODUCTS }}
- for product in "${RH_PRODUCTS[@]}"; do
- echo "The map for $product..."
- map_file=$product"_map.json"
- python -W ignore scripts/get_mappings_profile_control_levels.py $product "$GITHUB_WORKSPACE/cac-content" > $map_file 2>&1
- cat $map_file
- done
- # Step 8: Get product available controls
- - name: Get product controls
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd complyscribe && source venv/bin/activate
- RH_PRODUCTS=${{ env.RH_PRODUCTS }}
- for product in "${RH_PRODUCTS[@]}"; do
- echo "All available controls of $product..."
- controls_file=$product"_controls"
- python -W ignore scripts/get_product_controls.py $product "$GITHUB_WORKSPACE/cac-content" > $controls_file 2>&1
- cat $controls_file
- done
- # Step 9: Handle the detected updates
- # 1. Get the updated controls
- # 2. Get the updated profiles
- # 3. Get the controls and profiles are impacted by rules and vars
- - name: Handle the detected updates
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- python cac-content/utils/handle_detected_updates.py 'cac-content/updated_filenames.txt' > updates 2>&1
- cd complyscribe && source venv/bin/activate
- RH_PRODUCTS=${{ env.RH_PRODUCTS }}
- i=0
- while IFS= read -r line; do
- i=$((i + 1))
- if [[ $i -eq 1 ]]; then
- # 1. Get the updated controls
- echo $line > updated_controls
- elif [[ $i -eq 2 ]]; then
- # 2. Get the updated profiles
- profiles=($(echo "$line"| sed "s/{'/{\"/g; s/': '/\":\"/g; s/', '/\",\"/g; s/'}/\"}/g;"))
- for profile in "${profiles[@]}"; do
- product=$(echo "$profile" | jq -r '.product')
- profile_name=$(echo "$profile" | jq -r '.profile_name')
- echo $profile_name >> $product"_updated_profiles"
- done
- elif [[ $i -eq 3 ]]; then
- # 3. Get the updated rule and variables,
- # then convert them to impacted controls and profiles
- rules=($line)
- for rule in "${rules[@]}"; do
- python -W ignore scripts/get_rule_impacted_files.py rh-products "$GITHUB_WORKSPACE/cac-content" $rule control >> rule_impacted_controls 2>&1
- for product in "${RH_PRODUCTS[@]}"; do
- python -W ignore scripts/get_rule_impacted_files.py $product "$GITHUB_WORKSPACE/cac-content" $rule profile >> $product"_rule_impacted_profiles" 2>&1
- done
- done
- fi
- done < "$GITHUB_WORKSPACE"/updates
- # Step 10: Check if there is any existing open PR
- - name: Check if there is any existing open PR
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd oscal-content
- # Use the GitHub CLI to search for an open PR.
- # The 'jq' query filters for PRs where the branch name contains "sync_cac_pr".
- # We take the first result found.
- PR_BRANCH=$(gh pr list --state open --json headRefName --jq '.[] | select(.headRefName | contains("sync_cac_pr")) | .headRefName' | head -n 1)
- if [[ -n "$PR_BRANCH" ]]; then
- echo "Found matching PR branch: $PR_BRANCH"
- # Set the branch name as the PR_BRANCH.
- echo "BRANCH_NAME=$PR_BRANCH" >> $GITHUB_ENV
- else
- echo "No open PR found with 'sync_cac_pr' in the branch name."
- BRANCH_NAME="sync_cac_pr${{ env.PR_NUMBER }}"
- echo "BRANCH_NAME=$BRANCH_NAME" >> $GITHUB_ENV
- fi
- env:
- GH_TOKEN: ${{ steps.app-token.outputs.token }}
- # Step 11: Check if the OSCAL content branch exists
- - name: Check if the OSCAL content branch exists
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd oscal-content
- git fetch --all
- if git show-ref --verify --quiet refs/remotes/origin/"${{ env.BRANCH_NAME }}"; then
- git checkout -b "${{ env.BRANCH_NAME }}" origin/${{ env.BRANCH_NAME }}
- else
- echo "OSCAL content branch $BRANCH_NAME doesn't exist"
- fi
- # Get the base commit HASH
- base_commit=$(git log -1 --format=%H)
- echo "base_commit=$base_commit" >> $GITHUB_ENV
-
- # Step 12: Sync updated controls to OSCAL content
- - name: Sync updated controls to OSCAL content
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd complyscribe && source venv/bin/activate
- RH_PRODUCTS=${{ env.RH_PRODUCTS }}
- # 1.1 Get the updated controls to array
- file="updated_controls"
- if [ -f "$file" ] && [ -s "$file" ]; then
- updated_controls=($(cat "$file"))
- # Output all the updated controls in the PR
- echo "The updated controls: ${updated_controls[@]}"
- fi
- # 1.2 Sync the updated controls to OSCAL content
- for product in "${RH_PRODUCTS[@]}"; do
- for policy_id in "${updated_controls[@]}"; do
- # This sync depends on the specific product available controls
- available_controls=($(cat $product"_controls")) # Get all the available controls of the product
- for pc in "${available_controls[@]}"; do
- if [[ "$pc" == "$policy_id" ]]; then
- # 1.2.1 Sync the updated controls to OSCAL catalog
- poetry run complyscribe sync-cac-content catalog --repo-path ../oscal-content --committer-email "openscap-ci@gmail.com" --committer-name "openscap-ci" --branch "${{ env.BRANCH_NAME }}" --cac-content-root "$GITHUB_WORKSPACE/cac-content" --cac-policy-id "$policy_id" --oscal-catalog "$policy_id"
- # 1.2.2 Sync the updated controls to OSCAL profile
- poetry run complyscribe sync-cac-content profile --repo-path ../oscal-content --committer-email "openscap-ci@gmail.com" --committer-name "openscap-ci" --branch "${{ env.BRANCH_NAME }}" --cac-content-root "$GITHUB_WORKSPACE/cac-content" --product "$product" --cac-policy-id "$policy_id" --oscal-catalog "$policy_id"
- fi
- done
- # 1.2.3 Sync the updated controls to OSCAL component-definition
- # This sync depends on the control assoicated profile and levels
- sh ../cac-content/utils/complyscribe-cli-compd.sh true $policy_id $product ${{ env.BRANCH_NAME }} $GITHUB_WORKSPACE $product"_map.json"
- done
- done
- # Step 13: Sync updated profiles to OSCAL content
- - name: Sync updated profiles to OSCAL content
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd complyscribe && source venv/bin/activate
- RH_PRODUCTS=${{ env.RH_PRODUCTS }}
- pr_number="${{ env.PR_NUMBER }}"
- # 1. Get the updated profiles
- # 2. Sync the updated profiles to OSCAL profile and component-definition
- for product in "${RH_PRODUCTS[@]}"; do
- file=$product"_updated_profiles"
- if [ -f "$file" ] && [ -s "$file" ]; then
- updated_profiles=($(cat "$file" | tr ' ' '\n' | sort | uniq | tr '\n' ' ' | sed 's/ $//'))
- echo "The updated profiles for product $product: ${updated_profiles[@]}"
- for profile in "${updated_profiles[@]}"; do
- # 2.1 Sync CaC profile to OSCAL prifile
- while IFS= read -r line; do
- map=${line//\'/\"}
- policy_id=$(echo "$map" | jq -r '.policy_id')
- profile_name=$(echo "$map" | jq -r '.profile_name')
- if [[ "$profile" == "$profile_name" ]]; then
- poetry run complyscribe sync-cac-content profile --repo-path ../oscal-content --committer-email "openscap-ci@gmail.com" --committer-name "openscap-ci" --branch "${{ env.BRANCH_NAME }}" --cac-content-root "$GITHUB_WORKSPACE/cac-content" --product "$product" --cac-policy-id "$policy_id" --oscal-catalog "$policy_id"
- fi
- done < $product"_map.json"
- # 2.2 Sync CaC profile to OSCAL component-definition
- sh ../cac-content/utils/complyscribe-cli-compd.sh false $profile $product ${{ env.BRANCH_NAME }} $GITHUB_WORKSPACE $product"_map.json"
- done
- fi
- done
- # Step 14: Sync rule updates to OSCAL component-definition
- - name: Sync rule updates to OSCAL component-definition
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd complyscribe && source venv/bin/activate
- RH_PRODUCTS=${{ env.RH_PRODUCTS }}
- pr_number="${{ env.PR_NUMBER }}"
- # 1. Get the rule impacted controls
- file="rule_impacted_controls"
- if [ -f "$file" ] && [ -s "$file" ]; then
- rule_impacted_controls=($(cat "$file" | tr ' ' '\n' | sort | uniq | tr '\n' ' ' | sed 's/ $//'))
- echo "The rule impacted controls: ${rule_impacted_controls[@]}"
- # 2. Sync the rule impacted controls to OSCAL component-definition
- for product in "${RH_PRODUCTS[@]}"; do
- # Sync CAC controls' updates to OSCAL content
- for policy_id in "${rule_impacted_controls[@]}"; do
- sh ../cac-content/utils/complyscribe-cli-compd.sh true $policy_id $product ${{ env.BRANCH_NAME }} $GITHUB_WORKSPACE $product"_map.json"
- done
- done
- fi
- # 3. Get the rule impacted profiles
- for product in "${RH_PRODUCTS[@]}"; do
- file=$product"_rule_impacted_profiles"
- if [ -f "$file" ] && [ -s "$file" ]; then
- rule_impacted_profiles=($(cat $file | tr ' ' '\n' | sort | uniq | tr '\n' ' ' | sed 's/ $//'))
- echo "The rule impacted profiles for $product: ${rule_impacted_profiles[@]}"
- # 4. Sync the rule impacted profiles to OSCAL component-definition
- for profile in "${rule_impacted_profiles[@]}"; do
- sh ../cac-content/utils/complyscribe-cli-compd.sh false $profile $product ${{ env.BRANCH_NAME }} $GITHUB_WORKSPACE $product"_map.json"
- done
- fi
- done
- # Step 15: Squash multiple commits for each run
- - name: Squash multiple commits
- if: ${{ env.CHANGE_FOUND == 'true' }}
- run: |
- cd oscal-content
- git config user.name "openscap-ci"
- git config user.email "openscap-ci@gmail.com"
- echo "PR_SKIP=false" >> $GITHUB_ENV
- if [ "$(git branch --show-current)" == "${{ env.BRANCH_NAME }}" ]; then
- SQUASH_COUNT=$(git rev-list --count ${{ env.base_commit }}..HEAD)
- echo "SQUASH_COUNT=$SQUASH_COUNT" >> $GITHUB_ENV
- if [[ "$SQUASH_COUNT" -eq 0 ]]; then
- echo "No commit from the CAC PR ${{ env.PR_NUMBER }}."
- echo "PR_SKIP=true" >> $GITHUB_ENV
- elif [[ "$SQUASH_COUNT" -eq 1 ]]; then
- echo "::notice::Branch has 1 commit. No squashing needed."
- else
- # Call the squash script using the commit count
- $GITHUB_WORKSPACE/complyscribe/scripts/squash.sh "$SQUASH_COUNT"
- fi
- else
- echo "PR_SKIP=true" >> $GITHUB_ENV
- echo "No branch ${{ env.BRANCH_NAME }}. Skipping squash and create PR."
- fi
- shell: bash
- env:
- GH_TOKEN: ${{ env.INSTALLATION_TOKEN }}
- # Step 16: Create PR or update PR in OSCAL content
- - name: Create a Pull Request in OSCAL content
- if: ${{ env.PR_SKIP == 'false' }}
- run: |
- cd oscal-content
- OWNER="ComplianceAsCode"
- REPO="oscal-content"
- CAC_PR_URL="https://github.com/$OWNER/content/pull/${{ env.PR_NUMBER }}"
- commit=$(git log -1 --format=%H)
- PR_BODY="This is an auto-generated commit $commit from CAC PR [${{ env.PR_NUMBER }}]("$CAC_PR_URL")"
- if [[ "$(git branch --show-current)" == "${{ env.BRANCH_NAME }}" ]]; then
- if [ "${{ env.SQUASH_COUNT }}" -eq 0 ]; then
- echo "No commits from the CAC PR ${{ env.PR_NUMBER }}. Skipping PR creation."
- else
- # Check if the PR exists
- PR_EXISTS=$(gh pr list --repo $OWNER/$REPO \
- --head $BRANCH_NAME --state open --json id \
- | jq length)
- if [ "$PR_EXISTS" -gt 0 ]; then
- echo "PR ${{ env.BRANCH_NAME }} already exists. Skipping PR creation."
- echo "Add a comment for the CAC PR ${{ env.PR_NUMBER }}."
- gh pr comment ${{ env.BRANCH_NAME }} --body "${PR_BODY}"
- else
- echo "Creating PR for new branch: ${{ env.BRANCH_NAME }}"
- gh pr create --repo $OWNER/$REPO \
- --title "Auto-generated PR from CAC ${{ env.PR_NUMBER }}" \
- --head "${{ env.BRANCH_NAME }}" \
- --base "main" \
- --body "${PR_BODY}"
- fi
- fi
- else
- echo "No branch ${{ env.BRANCH_NAME }}. Skipping PR creation."
- fi
- env:
- GH_TOKEN: ${{ steps.app-token.outputs.token }}
diff --git a/.github/workflows/update-oscal.yml b/.github/workflows/update-oscal.yml
deleted file mode 100644
index 5ea018f27b54..000000000000
--- a/.github/workflows/update-oscal.yml
+++ /dev/null
@@ -1,62 +0,0 @@
-name: Update vendored OSCAL content
-
-on:
- workflow_dispatch:
- schedule:
- # Run weekly at 05:00 on Sunday
- - cron: "0 5 * * 0"
-
-jobs:
- update-oscal:
- name: Update content
- runs-on: ubuntu-latest
- permissions:
- contents: write
- pull-requests: write
- strategy:
- matrix:
- variables:
- - catalog-source: "https://raw.githubusercontent.com/usnistgov/oscal-content/690f517daaf3a6cbb4056d3cde6eae2756765620/nist.gov/SP800-53/rev5/json/NIST_SP-800-53_rev5_catalog.json"
- profile-source: "https://raw.githubusercontent.com/GSA/fedramp-automation/master/dist/content/rev5/baselines/json/FedRAMP_rev5_HIGH-baseline_profile.json"
- profile-name: "fedramp_rev5_high"
- catalog-name: "nist_rev5_800_53"
- - catalog-source: "https://raw.githubusercontent.com/usnistgov/oscal-content/690f517daaf3a6cbb4056d3cde6eae2756765620/nist.gov/SP800-53/rev4/json/NIST_SP-800-53_rev4_catalog.json"
- profile-source: "https://raw.githubusercontent.com/GSA/fedramp-automation/master/dist/content/rev4/baselines/json/FedRAMP_rev4_HIGH-baseline_profile.json"
- profile-name: "fedramp_rev4_high"
- catalog-name: "nist_rev4_800_53"
- steps:
- - name: Checkout
- uses: actions/checkout@93cb6efe18208431cddfb8368fd83d5badbf9bfd # v4
- - name: Install Python
- uses: actions/setup-python@e797f83bcb11b83ae66e0230d6156d7c80228e7c # v5
- with:
- python-version: '3.9'
- - name: Install python deps
- run: pip3 install requests compliance-trestle==2.4.0
- - name: Update catalogs
- run: |
- rm -rf "catalogs/${{ matrix.variables.catalog-name }}"
- trestle import -f "${{ matrix.variables.catalog-source }}" -o "${{ matrix.variables.catalog-name }}"
- working-directory: ./shared/references/oscal
- - name: Update profiles
- run: |
- rm -rf "profiles/${{ matrix.variables.profile-name }}"
- trestle import -f "${{ matrix.variables.profile-source }}" -o "${{ matrix.variables.profile-name }}"
- trestle href --name "${{ matrix.variables.profile-name }}" -hr "trestle://catalogs/${{ matrix.variables.catalog-name }}/catalog.json"
- working-directory: ./shared/references/oscal
- - name: Update content
- uses: peter-evans/create-pull-request@271a8d0340265f705b14b6d32b9829c1cb33d45e # v7.0.8
- with:
- base: master
- branch: "oscal-update-${{ github.run_id }}"
- delete-branch: true
- commit-message: "Update OSCAL content in shared/references/oscal"
- title: "Update upstream OSCAL content from usnistogv and GSA"
- body: |
- Updates upstream OSCAL content
- - usnistgov NIST 800-53 from "${{ matrix.variables.catalog-source }}"
- - GSA FedRAMP OSCAL profiles from "${{ matrix.variables.profile-source }}"
-
- Auto-generated by the [update-oscal](https://github.com/ComplianceAsCode/content/blob/master/.github/workflows/update-oscal.yml) workflow.
- add-paths: |
- shared/references/oscal/
diff --git a/tests/submit_results_to_testing_farm.py b/tests/submit_results_to_testing_farm.py
new file mode 100644
index 000000000000..cf968efd61b3
--- /dev/null
+++ b/tests/submit_results_to_testing_farm.py
@@ -0,0 +1,88 @@
+#!/usr/bin/python3
+
+import sys
+import time
+import atexit
+import logging
+import argparse
+import xml.etree.ElementTree as ET
+
+from atex.provisioner.testingfarm import api
+
+
+# reuse urllib3 PoolManager configured for heavy Retry attempts
+# (because of TestingFarm API reboots, and other transient issues)
+http = api._http
+
+logging.basicConfig(
+ level=logging.INFO, # use DEBUG to see HTTP queries
+ stream=sys.stderr,
+ format="%(asctime)s %(name)s: %(message)s",
+ datefmt="%Y-%m-%d %H:%M:%S",
+)
+
+# Parse command-line arguments
+parser = argparse.ArgumentParser(description="Submit TMT test to Testing Farm")
+parser.add_argument("--repo-url", required=True, help="GitHub repository URL")
+parser.add_argument("--pr-number", required=True, help="Pull request number")
+parser.add_argument("--plan-name", default="/dummy_plan", help="TMT plan name to run")
+parser.add_argument("--os", default=None, help="OS to test on (e.g., rhel-9)")
+parser.add_argument("--arch", default="x86_64", help="Architecture to test on")
+args = parser.parse_args()
+
+request_json = {
+ "test": {
+ "fmf": {
+ "url": args.repo_url,
+ "ref": f"PR{args.pr_number}",
+ "name": args.plan_name,
+ },
+ },
+ "environments": [{"arch": args.arch, "os": args.os}],
+}
+
+# do faster queries than the default 30 secs, because we don't track
+# many dozens of requests, just one
+class FastRequest(api.Request):
+ api_query_limit = 5
+
+req = FastRequest()
+req.submit(request_json)
+atexit.register(req.cancel) # just in case we traceback
+
+req.wait_for_state("running")
+
+# artifacts URL doesn't appear instantly, wait for it
+while "run" not in req:
+ time.sleep(FastRequest.api_query_limit)
+while "artifacts" not in req["run"]:
+ time.sleep(FastRequest.api_query_limit)
+
+artifacts_url = req["run"]["artifacts"]
+logging.info(f"artifacts: {artifacts_url}")
+
+# results.xml appears only after completion
+req.wait_for_state("complete")
+atexit.unregister(req.cancel)
+
+# get results.xml for those artifacts, which is an XML representation of the
+# HTML artifacts view and contains links to logs and workdir
+reply = http.request("GET", f"{artifacts_url}/results.xml")
+if reply.status != 200:
+ raise RuntimeError("could not get results.xml")
+
+# find which log is the workdir and get its URL
+results_xml = ET.fromstring(reply.data)
+for log in results_xml.find("testsuite").find("logs"):
+ if log.get("name") == "workdir":
+ workdir_url = log.get("href")
+ break
+else:
+ raise RuntimeError("could not find workdir")
+
+# TODO: a more reliable way would be to read
+# {workdir_url}/testing-farm/sanity/execute/results.yaml
+# as YAML and look for the test name and get its 'data-path'
+# relative to the /execute/ dir
+logging.info(f"HTML: {workdir_url}/dummy_plan/execute/data/guest/default-0/dummy_test-1/data/index.html?q=TRUE")
+