|
| 1 | +name: AquaSec Full Repository Scan |
| 2 | + |
| 3 | +on: |
| 4 | + workflow_dispatch: |
| 5 | + pull_request: |
| 6 | + types: [ opened, synchronize ] |
| 7 | + |
| 8 | +permissions: |
| 9 | + contents: read |
| 10 | + issues: write |
| 11 | + pull-requests: write |
| 12 | + security-events: write |
| 13 | + |
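|  | +# One scan per ref at a time; a newer run cancels any scan still in progress |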
| 14 | +concurrency: |
| 15 | + group: aquasec-scan-${{ github.ref }} |
| 16 | + cancel-in-progress: true |
| 17 | + |
| 18 | +jobs: |
| 19 | + aquasec-scanning: |
| 20 | + name: AquaSec Full Repository Scan |
| 21 | + runs-on: ubuntu-latest |
| 22 | + steps: |
| 23 | + - name: Checkout repository |
| 24 | + uses: actions/checkout@8e8c483db84b4bee98b60c0593521ed34d9990e8 |
| 25 | + with: |
| 26 | + persist-credentials: false |
| 27 | + fetch-depth: 0 |
| 28 | + |
| 29 | + - name: Retrieve AquaSec Scan Results |
| 30 | + env: |
| 31 | + AQUA_KEY: ${{ secrets.AQUA_KEY }} |
| 32 | + AQUA_SECRET: ${{ secrets.AQUA_SECRET }} |
| 33 | + REPOSITORY_ID: ${{ secrets.AQUA_REPOSITORY_ID }} |
| 34 | + run: | |
| 35 | + set -euo pipefail |
| 36 | + |
| 37 | + echo "=== Validating secret variables ===" |
| 38 | + |
| 39 | + if ! [[ "$REPOSITORY_ID" =~ ^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$ ]]; then |
| 40 | + echo "Error: AQUA_REPOSITORY_ID is not a valid UUID format" |
| 41 | + exit 1 |
| 42 | + fi |
| 43 | + |
| 44 | + echo "=== Authenticating with AquaSec ===" |
| 45 | + |
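|  | + # Sign the token request for the Aqua API: an HMAC-SHA256 digest of |
|  | + # "<timestamp><method><path><body>" computed with the API secret, sent alongside the API key |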
| 46 | + METHOD="POST" |
| 47 | + AUTH_ENDPOINT="https://eu-1.api.cloudsploit.com/v2/tokens" |
| 48 | + TIMESTAMP=$(date -u +%s) |
| 49 | + POST_BODY='{"group_id":1228,"allowed_endpoints":["GET"],"validity":240}' |
| 50 | + STRING_TO_SIGN="${TIMESTAMP}${METHOD}/v2/tokens${POST_BODY}" |
| 51 | + SIGNATURE=$(echo -n "$STRING_TO_SIGN" | openssl dgst -sha256 -hmac "${AQUA_SECRET}" -hex | sed 's/.*= //g') |
| 52 | +
|
| 53 | + AUTH_RESPONSE=$(curl -s --max-time 30 -X $METHOD "$AUTH_ENDPOINT" \ |
| 54 | + -H "Content-Type: application/json" \ |
| 55 | + -H "X-API-Key: $AQUA_KEY" \ |
| 56 | + -H "X-Timestamp: $TIMESTAMP" \ |
| 57 | + -H "X-Signature: $SIGNATURE" \ |
| 58 | + -d "$POST_BODY") |
| 59 | + |
| 60 | + RESPONSE_STATUS=$(echo "$AUTH_RESPONSE" | jq -r '.status') |
| 61 | + |
| 62 | + if [ "$RESPONSE_STATUS" = "200" ]; then |
| 63 | + echo "Login successful." |
| 64 | + BEARER_TOKEN=$(echo "$AUTH_RESPONSE" | jq -r '.data') |
| 65 | + else |
| 66 | + echo "Login failed with error message: $(echo "$AUTH_RESPONSE" | jq -r '.errors')" |
| 67 | + exit 1 |
| 68 | + fi |
| 69 | + |
| 70 | + echo "=== Receiving AquaSec Scan Results ===" |
| 71 | + |
| 72 | + SCAN_RESULTS_ENDPOINT="https://eu-1.codesec.aquasec.com/api/v1/scans/results" |
| 73 | + FINDINGS_JSON="[]" |
| 74 | + PAGE_NUM=1 |
| 75 | + PAGE_SIZE=100 |
| 76 | + TOTAL_EXPECTED=0 |
| 77 | + |
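|  | + # Page through the results API, concatenating each page until the collected |
|  | + # count reaches the reported total or an empty page is returned |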
| 78 | + while true; do |
| 79 | + echo "Fetching page $PAGE_NUM..." |
| 80 | + |
| 81 | + REQUEST_URL="${SCAN_RESULTS_ENDPOINT}?repositoryIds=${REPOSITORY_ID}&size=${PAGE_SIZE}&page=${PAGE_NUM}" |
| 82 | + |
| 83 | + PAGE_RESPONSE=$(curl -s --max-time 30 -X GET "$REQUEST_URL" \ |
| 84 | + -H "Authorization: Bearer $BEARER_TOKEN" \ |
| 85 | + -H "Accept: application/json") |
| 86 | + |
| 87 | + if [ -z "$PAGE_RESPONSE" ]; then |
| 88 | + echo "Failed to retrieve scan results on page $PAGE_NUM" |
| 89 | + exit 1 |
| 90 | + fi |
| 91 | + |
| 92 | + if [ $PAGE_NUM -eq 1 ]; then |
| 93 | + TOTAL_EXPECTED=$(echo "$PAGE_RESPONSE" | jq -r '.total // 0') |
| 94 | + echo "Total findings expected: $TOTAL_EXPECTED" |
| 95 | + fi |
| 96 | + |
| 97 | + PAGE_DATA=$(echo "$PAGE_RESPONSE" | jq -c '.data // []') |
| 98 | + PAGE_COUNT=$(echo "$PAGE_DATA" | jq 'length') |
| 99 | + echo "Retrieved $PAGE_COUNT findings on page $PAGE_NUM" |
| 100 | + |
| 101 | + FINDINGS_JSON=$(echo "$FINDINGS_JSON" "$PAGE_DATA" | jq -s 'add') |
| 102 | + |
| 103 | + FINDINGS_COUNT=$(echo "$FINDINGS_JSON" | jq 'length') |
| 104 | + |
| 105 | + if [ "$FINDINGS_COUNT" -ge "$TOTAL_EXPECTED" ] || [ "$PAGE_COUNT" -eq 0 ]; then |
| 106 | + break |
| 107 | + fi |
| 108 | + |
| 109 | + PAGE_NUM=$((PAGE_NUM + 1)) |
| 110 | + sleep 2 |
| 111 | + done |
| 112 | + |
| 113 | + FINDINGS_COUNT=$(echo "$FINDINGS_JSON" | jq 'length') |
| 114 | + echo "Total findings retrieved: $FINDINGS_COUNT" |
| 115 | + |
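|  | + # Re-wrap the merged findings in the original response shape for the conversion step |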
| 116 | + jq -n --argjson total "$FINDINGS_COUNT" --argjson data "$FINDINGS_JSON" \ |
| 117 | + '{"total": $total, "size": $total, "page": 1, "data": $data}' > aquasec_scan_results.json |
| 118 | + |
| 119 | + echo "Full repository scan retrieved successfully" |
| 120 | +
|
| 121 | + - name: Convert to SARIF 2.1.0 |
| 122 | + shell: python |
| 123 | + run: | |
| 124 | + import json |
| 125 | + |
| 126 | + print("=== Converting Scan Result to SARIF Format ===") |
| 127 | + |
| 128 | + # Severity mapping: SARIF level, security-severity, severity tag |
| 129 | + SEVERITY_MAP = { |
| 130 | + 1: ("note", "2.0", "LOW"), |
| 131 | + 2: ("warning", "5.5", "MEDIUM"), |
| 132 | + 3: ("error", "8.0", "HIGH"), |
| 133 | + 4: ("error", "9.5", "CRITICAL"), |
| 134 | + } |
| 135 | + |
| 136 | + # Truncate text to stay within GitHub's SARIF field length limits |
| 137 | + def truncate(text, max_len=1024): |
| 138 | + if not text: |
| 139 | + return "Security issue detected" |
| 140 | + return text[:max_len] if len(text) > max_len else text |
| 141 | +
|
| 142 | + with open("aquasec_scan_results.json", "r") as f: |
| 143 | + data = json.load(f) |
| 144 | + |
| 145 | + aquasec_findings = data.get("data", []) |
| 146 | + rule_index_lookup = {} |
| 147 | + sarif_unique_rules = [] |
| 148 | + sarif_findings = [] |
| 149 | + |
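|  | + # Emit one SARIF rule per unique avd_id (deduplicated via rule_index_lookup), |
|  | + # plus one result per finding that references its rule by index |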
| 150 | + for finding in aquasec_findings: |
| 151 | + target_file = finding.get("target_file", "") |
| 152 | + avd_id = finding.get("avd_id", "") |
| 153 | + severity = finding.get("severity", 1) |
| 154 | + level, sec_severity, sev_tag = SEVERITY_MAP.get(severity, SEVERITY_MAP[1]) |
| 155 | + title = finding.get("title", "") |
| 156 | + message = finding.get("message", "") |
| 157 | + extra = finding.get("extraData", {}) |
| 158 | + category = finding.get("category", "") |
| 159 | + |
| 160 | + if avd_id not in rule_index_lookup: |
| 161 | + tags = [category, "security", sev_tag] |
| 162 | + |
| 163 | + refs = extra.get("references", []) |
| 164 | + remediation = extra.get("remediation", "") |
| 165 | + |
| 166 | + rule = { |
| 167 | + "id": avd_id, |
| 168 | + "name": category, |
| 169 | + "shortDescription": {"text": truncate(title)}, |
| 170 | + "fullDescription": {"text": truncate(message)}, |
| 171 | + "defaultConfiguration": {"level": level}, |
| 172 | + "help": { |
| 173 | + "text": truncate(remediation), |
| 174 | + "markdown": f"**{category} {avd_id}**\n| Severity | Check | Message |\n| --- | --- | --- |\n|{sev_tag}|{truncate(title, 100)}|{truncate(message, 200)}|" |
| 175 | + }, |
| 176 | + "properties": { |
| 177 | + "precision": "very-high", |
| 178 | + "security-severity": sec_severity, |
| 179 | + "tags": tags |
| 180 | + } |
| 181 | + } |
| 182 | + |
| 183 | + if refs: |
| 184 | + rule["helpUri"] = refs[0] |
| 185 | + |
| 186 | + rule_index_lookup[avd_id] = len(sarif_unique_rules) |
| 187 | + sarif_unique_rules.append(rule) |
| 188 | + |
| 189 | + # Sanitize finding line numbers so the region satisfies the SARIF schema (startLine >= 1, endLine >= startLine) |
| 190 | + start_line = finding.get("target_start_line") |
| 191 | + if not start_line or start_line < 1: |
| 192 | + start_line = 1 |
| 193 | + end_line = finding.get("target_end_line") |
| 194 | + if not end_line or end_line < start_line: |
| 195 | + end_line = start_line |
| 196 | + |
| 197 | + sarif_finding = { |
| 198 | + "ruleId": avd_id, |
| 199 | + "ruleIndex": rule_index_lookup[avd_id], |
| 200 | + "level": level, |
| 201 | + "message": {"text": truncate(message)}, |
| 202 | + "locations": [{ |
| 203 | + "physicalLocation": { |
| 204 | + "artifactLocation": {"uri": target_file}, |
| 205 | + "region": {"startLine": start_line, "endLine": end_line} |
| 206 | + } |
| 207 | + }] |
| 208 | + } |
| 209 | + |
| 210 | + sarif_findings.append(sarif_finding) |
| 211 | + |
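|  | + # Assemble a single-run SARIF 2.1.0 log from the deduplicated rules and all results |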
| 212 | + sarif_output = { |
| 213 | + "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/main/sarif-2.1/schema/sarif-schema-2.1.0.json", |
| 214 | + "version": "2.1.0", |
| 215 | + "runs": [{ |
| 216 | + "tool": { |
| 217 | + "driver": { |
| 218 | + "fullName": "AquaSec Security Scanner", |
| 219 | + "informationUri": "https://www.aquasec.com/", |
| 220 | + "name": "AquaSec", |
| 221 | + "version": "1.0.0", |
| 222 | + "rules": sarif_unique_rules |
| 223 | + } |
| 224 | + }, |
| 225 | + "results": sarif_findings |
| 226 | + }] |
| 227 | + } |
| 228 | + |
| 229 | + with open("aquasec_scan.sarif", "w") as f: |
| 230 | + json.dump(sarif_output, f, indent=2) |
| 231 | + |
| 232 | + print(f"Converted {len(sarif_findings)} findings to SARIF 2.1.0 format") |
| 233 | +
|
| 234 | + - name: Upload Scan Results to GitHub Security |
| 235 | + uses: github/codeql-action/upload-sarif@4e94bd11f71e507f7f87df81788dff88d1dacbfb |
| 236 | + with: |
| 237 | + sarif_file: aquasec_scan.sarif |
| 238 | + category: aquasec |
| 239 | + |
| 240 | + - name: Create Scan Summary Table |
| 241 | + id: scan_summary_table |
| 242 | + shell: python |
| 243 | + run: | |
| 244 | + import os |
| 245 | + import json |
| 246 | + import sys |
| 247 | + from collections import Counter |
| 248 | +
|
| 249 | + SARIF_PATH = "aquasec_scan.sarif" |
| 250 | + SEVERITIES = ["CRITICAL", "HIGH", "MEDIUM", "LOW"] |
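|  | + # Assumed to match the category values Aqua reports (carried through as each SARIF rule's name) |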
| 251 | + CATEGORIES = ["sast", "vulnerabilities", "iacMisconfigurations", "secrets", "pipelineMisconfigurations", "license"] |
| 252 | +
|
| 253 | + print("=== Generating Scan Summary Table ===") |
| 254 | + |
| 255 | + try: |
| 256 | + with open(SARIF_PATH, "r", encoding="utf-8") as f: |
| 257 | + sarif = json.load(f) |
| 258 | +
|
| 259 | + if "runs" not in sarif or not sarif["runs"]: |
| 260 | + raise ValueError("SARIF file contains no runs") |
| 261 | +
|
| 262 | + run = sarif["runs"][0] |
| 263 | + rules = run.get("tool", {}).get("driver", {}).get("rules", []) |
| 264 | + results = run.get("results", []) |
| 265 | +
|
| 266 | + except (IOError, json.JSONDecodeError, ValueError) as e: |
| 267 | + print(f"Error processing SARIF file: {e}", file=sys.stderr) |
| 268 | + sys.exit(1) |
| 269 | +
|
| 270 | + # Initialize counters for each category |
| 271 | + category_severity_counts = {cat: Counter() for cat in CATEGORIES} |
| 272 | +
|
| 273 | + # Count results by category and severity |
| 274 | + for result in results: |
| 275 | + rule_idx = result.get("ruleIndex") |
| 276 | + if rule_idx is None or rule_idx >= len(rules): |
| 277 | + continue |
| 278 | + |
| 279 | + rule = rules[rule_idx] |
| 280 | + category = rule.get("name", "") |
| 281 | + tags = rule.get("properties", {}).get("tags", []) |
| 282 | + severity = next((s for s in SEVERITIES if s in tags), None) |
| 283 | + |
| 284 | + if category in CATEGORIES and severity: |
| 285 | + category_severity_counts[category][severity] += 1 |
| 286 | +
|
| 287 | + # Build Markdown summary table |
| 288 | + headers = ["AQUASEC"] + SEVERITIES + ["TOTAL"] |
| 289 | + summary_table = "| " + " | ".join(headers) + " |\n" |
| 290 | + summary_table += "|---|---|---|---|---|---|\n" |
| 291 | +
|
| 292 | + total_severity = Counter() |
| 293 | + total_all = 0 |
| 294 | + for category in CATEGORIES: |
| 295 | + row = [category] |
| 296 | + category_total = 0 |
| 297 | + for severity in SEVERITIES: |
| 298 | + count = category_severity_counts[category][severity] |
| 299 | + row.append(str(count)) |
| 300 | + total_severity[severity] += count |
| 301 | + category_total += count |
| 302 | + row.append(f"**{category_total}**") |
| 303 | + total_all += category_total |
| 304 | + summary_table += "| " + " | ".join(row) + " |\n" |
| 305 | +
|
| 306 | + total_row = ["**➡️ Total**"] + [f"**{total_severity[sev]}**" for sev in SEVERITIES] + [f"**{total_all}**"] |
| 307 | + summary_table += "| " + " | ".join(total_row) + " |" |
| 308 | +
|
| 309 | + try: |
| 310 | + if "GITHUB_OUTPUT" in os.environ: |
| 311 | + with open(os.environ["GITHUB_OUTPUT"], "a", encoding="utf-8") as f: |
| 312 | + f.write("table<<EOF\n") |
| 313 | + f.write(summary_table + "\n") |
| 314 | + f.write("EOF\n") |
| 315 | + else: |
| 316 | + print("Warning: GITHUB_OUTPUT not set", file=sys.stderr) |
| 317 | + except IOError as e: |
| 318 | + print(f"Error writing output: {e}", file=sys.stderr) |
| 319 | + sys.exit(1) |
| 320 | +
|
| 321 | + - name: GitHub scan summary comment |
| 322 | + if: github.event_name == 'pull_request' |
| 323 | + uses: actions/github-script@ed597411d8f924073f98dfc5c65a23a2325f34cd |
| 324 | + env: |
| 325 | + SUMMARY_TABLE: ${{ steps.scan_summary_table.outputs.table }} |
| 326 | + with: |
| 327 | + github-token: ${{ secrets.GITHUB_TOKEN }} |
| 328 | + script: | |
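|  | + // The hidden marker identifies this workflow's comment so reruns update it |
|  | + // in place instead of posting a duplicate |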
| 329 | + const marker = '<!-- aquasec-scan-comment -->'; |
| 330 | + const link = `https://github.com/${context.repo.owner}/${context.repo.repo}/security/code-scanning?query=pr%3A${context.issue.number}+is%3Aopen`; |
| 331 | + const sentence = `AquaSec has completed a full repository security scan ✅. You can find the analysis results for this PR branch in [this overview](${link}).\nBelow is a summary of the findings:`; |
| 332 | + const summaryTable = process.env.SUMMARY_TABLE; |
| 333 | + const body = marker + "\n" + sentence + "\n\n" + summaryTable; |
| 334 | + |
| 335 | + // Find existing comment |
| 336 | + const { data: comments } = await github.rest.issues.listComments({ |
| 337 | + owner: context.repo.owner, |
| 338 | + repo: context.repo.repo, |
| 339 | + issue_number: context.issue.number |
| 340 | + }); |
| 341 | + |
| 342 | + const existingComment = comments.find(c => c.body.includes(marker)); |
| 343 | + |
| 344 | + // Create a new comment or update existing one |
| 345 | + if (existingComment) { |
| 346 | + await github.rest.issues.updateComment({ |
| 347 | + owner: context.repo.owner, |
| 348 | + repo: context.repo.repo, |
| 349 | + comment_id: existingComment.id, |
| 350 | + body |
| 351 | + }); |
| 352 | + } else { |
| 353 | + await github.rest.issues.createComment({ |
| 354 | + issue_number: context.issue.number, |
| 355 | + owner: context.repo.owner, |
| 356 | + repo: context.repo.repo, |
| 357 | + body |
| 358 | + }); |
| 359 | + } |