chore(core): cse mitigation CVE 20251226 #24859
# Copyright 2024 Flant JSC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
name: Build and push for dev
env:
MODULES_REGISTRY: ${{ vars.DEV_REGISTRY }}
CI_COMMIT_REF_NAME: ${{ github.ref_name }}
MODULES_MODULE_NAME: ${{ vars.MODULE_NAME }}
MODULES_MODULE_SOURCE: ${{ vars.DEV_MODULE_SOURCE }}
MODULES_REGISTRY_LOGIN: ${{ vars.DEV_MODULES_REGISTRY_LOGIN }}
MODULES_REGISTRY_PASSWORD: ${{ secrets.DEV_MODULES_REGISTRY_PASSWORD }}
GO_VERSION: "1.24.6"
GOLANGCI_LINT_VERSION: "1.64.8"
SOURCE_REPO: "${{secrets.SOURCE_REPO}}"
SOURCE_REPO_GIT: "${{secrets.SOURCE_REPO_GIT}}"
TRIVY_DISABLE_VEX_NOTICE: "true"
on:
workflow_dispatch:
inputs:
pr_number:
description: |
Pull request number, e.g. 563, or leave empty and choose a branch.
For the main and release-* branches, the tag is generated from the branch name.
required: false
type: number
svace_enabled:
description: "Enable svace build"
type: boolean
required: false
pull_request:
types: [opened, reopened, synchronize, labeled, unlabeled]
push:
branches:
- main
- release-*
defaults:
run:
shell: bash
concurrency:
group: "${{ github.workflow }}-${{ github.event.number || github.ref }}"
cancel-in-progress: true
jobs:
set_vars:
runs-on: ubuntu-latest
name: Get PR info and set vars
outputs:
modules_module_tag: ${{ steps.modules_module_tag.outputs.MODULES_MODULE_TAG }}
module_edition: ${{ steps.modules_module_tag.outputs.MODULE_EDITION }}
runner_type: ${{ steps.modules_module_tag.outputs.RUNNER_TYPE }}
debug_component: ${{ steps.get-labels.outputs.debug_component }}
steps:
- name: Get Pull Request Labels
id: get-labels
uses: actions/github-script@v7
with:
script: |
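// Collect the PR labels and derive the debug component from any label
// containing "delve"; exactly one such label is allowed, and more than
// one fails the job so the debug target stays unambiguous.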
function processDelveLabels(labelList) {
const delveLabels = labelList.filter(label => label.includes('delve'));
if (delveLabels.length === 0) {
console.log("No delve labels found");
return null;
} else if (delveLabels.length === 1) {
return delveLabels[0];
} else {
const errorMsg = "Error: Multiple delve labels found - " + delveLabels.join(', ');
console.error(errorMsg);
core.setFailed(errorMsg);
return null;
}
}
let allLabels = [];
let debug_component = '';
if (context.eventName === "pull_request" || context.eventName === "pull_request_target" ) {
let prNumber = context.payload.pull_request.number;
try {
let { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
});
allLabels = labels.map(label => label.name);
debug_component = processDelveLabels(allLabels);
// Set the outputs
core.setOutput('debug_component', debug_component);
core.setOutput('prLabels', allLabels);
} catch (error) {
core.setFailed(`Script failed: ${error.message}`);
}
} else {
core.setOutput('debug_component', debug_component);
core.setOutput('prLabels', allLabels);
}
result-encoding: string
- name: Set vars
id: modules_module_tag
run: |
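# Derive the module image tag: main and release-* builds use the branch
# name, pull request builds use "pr<number>" (from the PR event or the
# pr_number workflow_dispatch input); anything else is an error.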
if [[ "${{ github.ref_name }}" == 'main' ]]; then
MODULES_MODULE_TAG="${{ github.ref_name }}"
elif [[ "${{ github.ref_name }}" =~ ^release-[0-9]+\.[0-9]+ ]]; then
MODULES_MODULE_TAG="${{ github.ref_name }}"
elif [[ -n "${{ github.event.pull_request.number }}" ]]; then
MODULES_MODULE_TAG="pr${{ github.event.pull_request.number }}"
elif [[ -n "${{ github.event.inputs.pr_number }}" ]]; then
MODULES_MODULE_TAG="pr${{ github.event.inputs.pr_number }}"
else
echo "::error title=Module image tag is required::Can't detect module tag from workflow context. Dev build uses branch name as tag for main and release branches, and PR number for builds from pull requests. Check workflow for correctness."
exit 1
fi
echo "MODULES_MODULE_TAG=$MODULES_MODULE_TAG" >> "$GITHUB_OUTPUT"
# Select edition for build, default EE
if echo "${{ steps.get-labels.outputs.prLabels }}" | grep -q "edition/ce"; then
echo "MODULE_EDITION=CE" >> $GITHUB_OUTPUT
else
echo "MODULE_EDITION=EE" >> "$GITHUB_OUTPUT"
fi
# Select runner
if echo "${{ steps.get-labels.outputs.prLabels }}" | grep -q "build/github/ubuntu"; then
echo "RUNNER_TYPE=[\"ubuntu-22.04\"]" >> "$GITHUB_OUTPUT"
elif echo "${{ steps.get-labels.outputs.prLabels }}" | grep -q "build/self-hosted/regular"; then
echo "RUNNER_TYPE=[\"self-hosted\", \"regular\"]" >> "$GITHUB_OUTPUT"
else
echo "RUNNER_TYPE=[\"self-hosted\", \"large\"]" >> "$GITHUB_OUTPUT"
fi
show_dev_manifest:
runs-on: ubuntu-latest
name: Show manifest
needs: set_vars
env:
MODULES_MODULE_TAG: ${{needs.set_vars.outputs.modules_module_tag}}
steps:
- name: Show dev config
run: |
cat << OUTER
Create ModuleConfig and ModulePullOverride resources to test this PR:
cat <<EOF | kubectl apply -f -
---
apiVersion: deckhouse.io/v1alpha1
kind: ModulePullOverride
metadata:
name: ${MODULES_MODULE_NAME}
spec:
imageTag: ${MODULES_MODULE_TAG}
source: deckhouse
---
apiVersion: deckhouse.io/v1alpha1
kind: ModuleConfig
metadata:
name: ${MODULES_MODULE_NAME}
spec:
enabled: true
settings:
dvcr:
storage:
type: PersistentVolumeClaim
persistentVolumeClaim:
size: 50G
virtualMachineCIDRs:
- 10.66.10.0/24
- 10.66.20.0/24
- 10.66.30.0/24
version: 1
EOF
Or patch an existing ModulePullOverride:
kubectl patch mpo ${MODULES_MODULE_NAME} --type merge -p '{"spec":{"imageTag":"${MODULES_MODULE_TAG}"}}'
OUTER
lint_dmt:
runs-on: ubuntu-latest
continue-on-error: true
name: Run DMT linter
steps:
- uses: actions/checkout@v4
- uses: deckhouse/modules-actions/lint@v2
env:
DMT_METRICS_URL: ${{ secrets.DMT_METRICS_URL }}
DMT_METRICS_TOKEN: ${{ secrets.DMT_METRICS_TOKEN }}
lint_go:
runs-on: ubuntu-22.04
name: Run go linter
steps:
- name: Set up Go ${{ env.GO_VERSION }}
uses: actions/setup-go@v5
with:
go-version: "${{ env.GO_VERSION }}"
- name: Install Task
uses: arduino/setup-task@v2
with:
version: 3.37.2
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Install golangci-lint
run: |
echo "Installing golangci-lint..."
curl -sSfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(go env GOPATH)/bin v${{ env.GOLANGCI_LINT_VERSION}}
echo "$(go env GOPATH)/bin" >> $GITHUB_PATH
echo "golangci-lint v${{ env.GOLANGCI_LINT_VERSION}} installed successfully!"
- name: Lint all directories with golangci-lint
shell: bash
run: |
# set -eo pipefail
set -e
# Find directories containing .golangci.yaml
mapfile -t config_dirs < <(
find . \
-path ./images/cdi-cloner/cloner-startup -prune -o \
-path ./images/dvcr-artifact -prune -o \
-path ./tests/performance/shatal -prune -o \
-type f -name '.golangci.yaml' -printf '%h\0' | \
xargs -0 -n1 | sort -u
)
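# Each matched directory is linted with its own .golangci.yaml; the paths
# pruned above are deliberately excluded from this pass.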
count=${#config_dirs[@]}
echo "::notice title=Lint Setup::🔍 Found $count directories with linter configurations"
report=""
error_count=0
for dir in "${config_dirs[@]}"; do
cd "$dir" || { echo "::error::Failed to access directory $dir"; continue; }
if ! output=$(golangci-lint run --sort-results); then
error_count=$(( error_count + 1 ))
echo "::group::📂 Linting directory ❌: $dir"
echo -e "❌ Errors:\n$output\n"
golangci-lint run --sort-results || true
else
echo "::group::📂 Linting directory ✅: $dir"
echo -e "✅ All check passed\n"
fi
cd - &>/dev/null
echo "::endgroup::"
done
has_errors=$( [[ "$error_count" -gt 0 ]] && echo true || echo false)
echo "has_errors=$has_errors" >> "$GITHUB_OUTPUT"
if [ $error_count -gt 0 ]; then
echo "$error_count error more than 0, exit 1"
exit 1
fi
lint_yaml:
runs-on: ubuntu-latest
name: Run yaml linter
steps:
- name: Install Task
uses: arduino/setup-task@v2
with:
version: 3.37.2
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Lint yaml with prettier
run: task -p lint:prettier:yaml
test:
runs-on: ubuntu-22.04
name: Run unit test
steps:
- name: Set up Go ${{ env.GO_VERSION }}
uses: actions/setup-go@v5
with:
go-version: "${{ env.GO_VERSION }}"
- name: Install Task
uses: arduino/setup-task@v2
with:
version: 3.37.2
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Run test GO hooks
run: |
task gohooks:test
- name: Run unit test virtualization-controller
run: |
task virtualization-controller:init
task virtualization-controller:test:unit
dev_setup_build:
runs-on: ${{ fromJSON(needs.set_vars.outputs.runner_type)}}
name: Build and Push images
needs: set_vars
env:
MODULES_MODULE_TAG: ${{needs.set_vars.outputs.modules_module_tag}}
MODULE_EDITION: ${{needs.set_vars.outputs.module_edition}}
WERF_VIRTUAL_MERGE: 0
DEBUG_COMPONENT: ${{needs.set_vars.outputs.debug_component}}
steps:
- name: Print vars
run: |
echo MODULES_REGISTRY=$MODULES_REGISTRY
echo CI_COMMIT_REF_NAME=$CI_COMMIT_REF_NAME
echo MODULES_MODULE_NAME=$MODULES_MODULE_NAME
echo MODULES_MODULE_SOURCE=$MODULES_MODULE_SOURCE
echo MODULES_MODULE_TAG=$MODULES_MODULE_TAG
echo MODULE_EDITION=$MODULE_EDITION
echo DEBUG_COMPONENT=$DEBUG_COMPONENT
- uses: actions/checkout@v4
with:
fetch-depth: 0
ref: ${{ github.event.pull_request.head.sha || github.sha }}
- name: Remove unwanted software
if: ${{ !contains(needs.set_vars.outputs.runner_type, 'self-hosted') }}
uses: ./.github/actions/remove-unwanted-software
- name: Start ssh-agent
uses: webfactory/[email protected]
with:
ssh-private-key: |
${{secrets.SOURCE_REPO_SSH_KEY}}
${{ secrets.SVACE_ANALYZE_SSH_PRIVATE_KEY }}
- name: Add ssh_known_hosts
run: |
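# Extract the host from the SOURCE_REPO_GIT URL, mask it and its IPs in the
# job log, and append only host keys that are not already in known_hosts
# (deduplicated by key type and value).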
HOST=$(grep -oP '(?<=@)[^/:]+' <<< "${{secrets.SOURCE_REPO_GIT}}")
echo "::add-mask::$HOST"
IPS=$(nslookup "$HOST" | awk '/^Address: / { print $2 }')
for IP in $IPS; do
echo "::add-mask::$IP"
done
mkdir -p ~/.ssh
touch ~/.ssh/known_hosts
HOST_KEYS=$(ssh-keyscan -H "$HOST" 2>/dev/null)
while IFS= read -r KEY_LINE; do
CONSTANT_PART=$(awk '{print $2, $3}' <<< "$KEY_LINE")
if ! grep -q "$CONSTANT_PART" ~/.ssh/known_hosts; then
echo "$KEY_LINE" >> ~/.ssh/known_hosts
fi
done <<< "$HOST_KEYS"
- name: Add svace analyze server to ssh_known_hosts
continue-on-error: true
run: |
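# Same known_hosts handling for the Svace analyze host: mask its IP and
# replace any stale keys before appending the freshly scanned ones.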
host=${{ secrets.SVACE_ANALYZE_HOST }}
host_ip=$(nslookup "$host" | awk '/^Address: / { print $2 }')
echo "::add-mask::$host_ip"
mkdir -p ~/.ssh
touch ~/.ssh/known_hosts
HOST_KEYS=$(ssh-keyscan -H "$host" 2>/dev/null)
while IFS= read -r KEY_LINE; do
CONSTANT_PART=$(awk '{print $2, $3}' <<< "$KEY_LINE")
if grep -q "$CONSTANT_PART" ~/.ssh/known_hosts; then
ssh-keygen -R $host
ssh-keygen -R $host_ip
fi
echo "$KEY_LINE" >> ~/.ssh/known_hosts
done <<< "$HOST_KEYS"
- uses: deckhouse/modules-actions/setup@v2
with:
registry: ${{ vars.DEV_REGISTRY }}
registry_login: ${{ vars.DEV_MODULES_REGISTRY_LOGIN }}
registry_password: ${{ secrets.DEV_MODULES_REGISTRY_PASSWORD }}
- uses: deckhouse/modules-actions/build@v4
with:
module_source: ${{ vars.DEV_MODULE_SOURCE}}
module_name: ${{ vars.MODULE_NAME }}
module_tag: ${{needs.set_vars.outputs.modules_module_tag}}
svace_enabled: ${{ inputs.svace_enabled || contains(github.event.pull_request.labels.*.name, 'analyze/svace') }}
svace_analyze_host: "${{ secrets.SVACE_ANALYZE_HOST }}"
svace_analyze_ssh_user: "${{ secrets.SVACE_ANALYZE_SSH_USER }}"
pull_request_info:
name: Get PR info
if: ${{ github.event_name == 'pull_request' }}
runs-on: ubuntu-latest
outputs:
labels: ${{ steps.pr_labels.outputs.labels }}
steps:
- name: Get PR labels
id: pr_labels
uses: actions/[email protected]
with:
script: |
const prNumber = context.payload.pull_request.number;
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber
});
core.setOutput('labels', JSON.stringify(labels));
set_e2e_requirement_status:
name: Set 'waiting for e2e' commit status
needs:
- pull_request_info
runs-on: ubuntu-latest
if: ${{ github.event.action != 'unlabeled' }}
steps:
- name: Checkout sources
uses: actions/[email protected]
- name: Set commit status after e2e run
id: set_e2e_requirement_status
uses: actions/[email protected]
env:
STATUS_TARGET_COMMIT: ${{ github.event.pull_request.head.sha }}
PR_LABELS: ${{ needs.pull_request_info.outputs.labels }}
with:
github-token: ${{secrets.RELEASE_PLEASE_TOKEN}}
script: |
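// Set the initial "waiting for e2e" status on the head commit
// (STATUS_TARGET_COMMIT) via the shared e2e-commit-status helper.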
const e2eStatus = require('./.github/scripts/js/e2e-commit-status');
await e2eStatus.setInitialStatus({github, context, core});
cve_scan_on_pr:
name: Trivy images check
runs-on: ${{ fromJSON(needs.set_vars.outputs.runner_type)}}
needs:
- set_vars
- dev_setup_build
steps:
- uses: actions/checkout@v4
- uses: deckhouse/modules-actions/cve_scan@v5
with:
tag: ${{needs.set_vars.outputs.modules_module_tag}}
tag_type: dev
module_name: ${{ vars.MODULE_NAME }}
dd_url: ${{vars.DEFECTDOJO_HOST}}
dd_token: ${{secrets.DEFECTDOJO_API_TOKEN}}
prod_registry: ${{vars.TRIVY_REGISTRY}}
prod_registry_user: ${{ secrets.PROD_READ_REGISTRY_USER }}
prod_registry_password: ${{ secrets.PROD_READ_REGISTRY_PASSWORD }}
dev_registry: ${{ vars.DEV_REGISTRY }}
dev_registry_user: ${{ vars.DEV_MODULES_REGISTRY_LOGIN }}
dev_registry_password: ${{ secrets.DEV_MODULES_REGISTRY_PASSWORD }}
deckhouse_private_repo: ${{vars.DECKHOUSE_PRIVATE_REPO}}
analyze_build:
if: ${{ github.event.inputs.svace_enabled == 'true' || contains(github.event.pull_request.labels.*.name, 'analyze/svace') }}
name: Analyze build
runs-on: ${{ fromJSON(needs.set_vars.outputs.runner_type)}}
needs:
- set_vars
- dev_setup_build
steps:
- uses: deckhouse/modules-actions/svace_analyze@v4
with:
project_group: ${{ github.event.repository.name }}
ci_commit_ref_name: ${{ github.event.pull_request.head.ref || github.ref_name }}
ci_commit_hash: ${{ github.event.pull_request.head.sha || github.sha }}
svace_analyze_host: "${{ secrets.SVACE_ANALYZE_HOST }}"
svace_analyze_ssh_user: "${{ secrets.SVACE_ANALYZE_SSH_USER }}"
svacer_url: "${{ secrets.SVACER_URL }}"
svacer_import_user: "${{ secrets.SVACER_IMPORT_USER }}"
svacer_import_password: "${{ secrets.SVACER_IMPORT_PASSWORD }}"
svace_analyze_ssh_private_key: "${{ secrets.SVACE_ANALYZE_SSH_PRIVATE_KEY }}"
skip_e2e:
if: ${{ github.event.label.name == 'skip/e2e' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Skip E2E tests
id: skip_e2e
uses: actions/github-script@v6
with:
script: |
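// Triggered by the 'skip/e2e' label: mark the e2e status on the head
// commit as skipped via the shared e2e-commit-status helper.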
const e2eStatus = require('./.github/scripts/js/e2e-commit-status');
e2eStatus.onLabeledForSkip({
github,
context,
core,
labeled: true,
commitSha: context.payload.pull_request.head.sha
})
run_e2e:
if: ${{ github.event_name == 'pull_request' && github.event.action == 'labeled' && github.event.label.name == 'e2e/run' }}
name: Run E2E tests
runs-on: ubuntu-latest
needs:
- dev_setup_build
- set_e2e_requirement_status
- set_vars
outputs:
comment_id: ${{ steps.create_comment.outputs.comment_id}}
steps:
- name: Create Initial PR Comment
id: create_comment
uses: actions/github-script@v7
with:
github-token: ${{secrets.RELEASE_PLEASE_TOKEN}}
script: |
const { data: comment } = await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: context.payload.pull_request.number,
body: `Workflow has started.
Follow the progress here: [Workflow Run](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})`
});
core.setOutput('comment_id', comment.id);
- uses: actions/checkout@v4
- name: Select user
id: select_user
uses: actions/github-script@v6
env:
KUBECONFIGS: ${{ secrets.K8S_CLUSTER_SECRET }}
USER_CLUSTER_LABELS: ${{ vars.USER_CLUSTER_LABELS }}
with:
script: |
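// Map this PR to a cluster user via USER_CLUSTER_LABELS, pick the matching
// kubeconfig from the K8S_CLUSTER_SECRET bundle, write it to a temp file,
// and export it as KUBECONFIG for the e2e run.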
const ci = require('./.github/scripts/js/ci');
let userClusterLabels;
try {
if (!process.env.USER_CLUSTER_LABELS || process.env.USER_CLUSTER_LABELS.trim() === '') {
throw new Error('USER_CLUSTER_LABELS is empty or not provided.');
}
userClusterLabels = JSON.parse(process.env.USER_CLUSTER_LABELS);
} catch (error) {
core.setFailed(`Failed to parse USER_CLUSTER_LABELS: ${error.message}`);
return;
}
const userId = await ci.getClusterUser({context, core, userClusterLabels});
const fs = require('fs');
const path = require('path');
const kubeconfigs = JSON.parse(process.env.KUBECONFIGS);
const kubeconfig = kubeconfigs.find(config => config.id === userId)?.kubeconfig;
if (!kubeconfig) {
core.setFailed(`No kubeconfig found for user with ID ${userId}.`);
} else {
core.info(`Found kubeconfig for user with ID ${userId}`);
const runnerTempDir = process.env['RUNNER_TEMP'];
const kubeconfigFile = path.join(runnerTempDir, `kubeconfig_${Date.now()}`);
fs.writeFileSync(kubeconfigFile, kubeconfig);
fs.chmodSync(kubeconfigFile, '600');
core.exportVariable('KUBECONFIG', kubeconfigFile)
}
- name: Install Deckhouse-cli
run: |
echo "Install d8"
curl -fsSL -o d8-install.sh https://raw.githubusercontent.com/deckhouse/deckhouse-cli/main/d8-install.sh
bash d8-install.sh
- name: Set up Go ${{ env.GO_VERSION }}
uses: actions/setup-go@v5
with:
go-version: "${{ env.GO_VERSION }}"
- name: Install Task
uses: arduino/setup-task@v2
- name: Install ginkgo
working-directory: ./tests/e2e/
run: |
echo "Install ginkgo"
GINKGO_VERSION=$(go list -f '{{.Version}}' -m github.com/onsi/ginkgo/v2)
go install "github.com/onsi/ginkgo/v2/ginkgo@${GINKGO_VERSION}"
- uses: deckhouse/modules-actions/setup@v2
with:
registry: ${{ vars.DEV_REGISTRY }}
registry_login: ${{ vars.DEV_MODULES_REGISTRY_LOGIN }}
registry_password: ${{ secrets.DEV_MODULES_REGISTRY_PASSWORD }}
- name: Checkout cluster to revision
env:
v12n_tag: pr${{ github.event.pull_request.number }}
working-directory: ./tests/e2e/
run: |
task checkout-to-mpo
- name: Download dependencies
working-directory: ./tests/e2e/
run: |
echo "Download dependencies"
go mod download
- name: Run E2E
id: e2e-tests
working-directory: ./tests/e2e/
run: |
task run -v
- uses: actions/upload-artifact@v4
if: always()
with:
name: resources_from_failed_tests
retention-days: 2
path: /tmp/e2e_failed__*
if-no-files-found: ignore
- name: Cleanup E2E resources on cancel
if: always() && steps.e2e-tests.outcome == 'cancelled'
timeout-minutes: 60
id: e2e-tests-cleanup
working-directory: ./tests/e2e/
run: |
task cleanup
update_comment_on_finish:
name: Update comment on finish
needs:
- run_e2e
runs-on: ubuntu-latest
steps:
- name: Checkout sources
uses: actions/[email protected]
- name: Set commit status after e2e run and remove label
if: ${{ always() }}
id: set_e2e_requirement_status
uses: actions/[email protected]
env:
JOB_STATUS: ${{ job.status }}
STATUS_TARGET_COMMIT: ${{ github.event.pull_request.head.sha }}
with:
github-token: ${{secrets.RELEASE_PLEASE_TOKEN}}
script: |
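// Set the final e2e commit status on the head commit based on JOB_STATUS
// via the shared e2e-commit-status helper.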
const e2eStatus = require('./.github/scripts/js/e2e-commit-status');
await e2eStatus.setStatusAfterE2eRun({github, context, core});
remove_label:
name: Remove label
runs-on: ubuntu-latest
# run the job at the end
if: ${{ always() && github.event_name == 'pull_request' && github.event.action == 'labeled' }}
needs:
- dev_setup_build
- set_e2e_requirement_status
- set_vars
- run_e2e
- update_comment_on_finish
- analyze_build
steps:
- uses: actions/checkout@v4
- name: Remove labels and add comment
id: remove-label
uses: actions/github-script@v6
with:
github-token: ${{secrets.RELEASE_PLEASE_TOKEN}}
script: |
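// Remove the 'e2e/run' and 'analyze/svace' trigger labels; if 'e2e/run' was
// present, also update the progress comment with the final e2e result.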
const ci = require('./.github/scripts/js/ci');
const issueNumber = context.issue.number;
const owner = context.repo.owner;
const repo = context.repo.repo;
const labelToRemoveE2E = 'e2e/run';
const labelToRemoveSvace = 'analyze/svace';
const { data: labels } = await github.rest.issues.listLabelsOnIssue({
owner,
repo,
issue_number: issueNumber,
});
if (ci.removeLabel({github, context, labels, labelToRemove: labelToRemoveE2E})) {
const commentIdStr = '${{ needs.run_e2e.outputs.comment_id }}';
const commentId = parseInt(commentIdStr, 10);
const stepOutcome = '${{ needs.run_e2e.result }}';
const body = `Workflow has started.
Follow the progress here: [Workflow Run](${context.serverUrl}/${context.repo.owner}/${context.repo.repo}/actions/runs/${context.runId})
The target step completed with status: **${stepOutcome}**.`;
if (!isNaN(commentId)) {
// Valid integer, update the existing comment
await github.rest.issues.updateComment({
owner: context.repo.owner,
repo: context.repo.repo,
comment_id: commentId, // Now an integer
body: body
});
}
}
ci.removeLabel({github, context, labels, labelToRemove: labelToRemoveSvace});