-
Notifications
You must be signed in to change notification settings - Fork 28
Expand file tree
/
Copy pathintegration_up.sh
More file actions
executable file
·188 lines (161 loc) · 6.41 KB
/
integration_up.sh
File metadata and controls
executable file
·188 lines (161 loc) · 6.41 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
#!/bin/bash
# Integration test bring-up: start the Squid proxy, fetch ScaResolver, run the
# integration suite with coverage, rerun failures, and print a summary.
# (bash required: the script relies on PIPESTATUS and arrays further down.)

# Accept optional test filter as first argument (regex pattern for -run flag)
TEST_FILTER=${1:-""}

# Start the Squid proxy in a Docker container.
# NOTE(review): PROXY_PORT is never assigned in this script — it is presumably
# exported by the CI job environment; confirm before running locally.
docker run \
  --name squid \
  -d \
  -p "${PROXY_PORT}:3128" \
  -v "$(pwd)/internal/commands/.scripts/squid/squid.conf:/etc/squid/squid.conf" \
  -v "$(pwd)/internal/commands/.scripts/squid/passwords:/etc/squid/passwords" \
  ubuntu/squid:5.2-22.04_beta

# Download and extract the ScaResolver tool. Fail fast on a broken download:
# the tests depend on the extracted binary, and tar would only fail later with
# a less obvious error.
wget https://sca-downloads.s3.amazonaws.com/cli/latest/ScaResolver-linux64.tar.gz || {
  echo "Failed to download ScaResolver" >&2
  exit 1
}
tar -xzvf ScaResolver-linux64.tar.gz -C /tmp
rm -rf ScaResolver-linux64.tar.gz
# Assemble the optional `-run <pattern>` argument for go test. Left as a plain
# string (word-split deliberately at the call site) and empty when no filter
# was given.
RUN_ARG=""
if [ -n "$TEST_FILTER" ]; then
  echo "Running tests matching filter: $TEST_FILTER"
  RUN_ARG="-run $TEST_FILTER"
fi

# Bookkeeping: the file that collects failed test names, and the rerun flag.
FAILED_TESTS_FILE="failedTests"
rerun_status=0

# Step 1: Create tracking files (ensure they exist for CI artifact upload)
echo "Creating tracking files..."
touch "$FAILED_TESTS_FILE" test_output.log
# Step 3: Run all tests and write failed test names to failedTests file
echo "Running all tests..."
# shellcheck disable=SC2086 -- RUN_ARG is intentionally word-split into "-run <pattern>"
go test \
  -tags integration \
  -v \
  -timeout 210m \
  $RUN_ARG \
  -coverpkg github.com/checkmarx/ast-cli/internal/commands,github.com/checkmarx/ast-cli/internal/services,github.com/checkmarx/ast-cli/internal/wrappers \
  -coverprofile cover.out \
  github.com/checkmarx/ast-cli/test/integration 2>&1 | tee test_output.log
# Capture the exit status of `go test` itself, immediately after the pipeline.
# `$?` here would be tee's status, and the previous placement (after the grep
# below) captured the grep pipeline's status instead — so `status` was almost
# always 0 even when the suite failed. PIPESTATUS[0] (bash) is the first
# pipeline stage's status; this also catches build failures that produce no
# "--- FAIL:" lines at all.
# NOTE(review): with this fix, an initial failure now fails the job at the end
# even if the rerun pass succeeds — confirm that is the desired CI policy.
status=${PIPESTATUS[0]}
echo "status value after tests $status"

# Generate the initial HTML coverage report
go tool cover -html=cover.out -o coverage.html

# Extract names of failed tests and save them in the failedTests file
grep -E "^--- FAIL: " test_output.log | awk '{print $3}' > "$FAILED_TESTS_FILE"

if [ $status -ne 0 ]; then
  echo "Integration tests failed"
fi
# Step 4: Check if failedTests file is empty
if [ ! -s "$FAILED_TESTS_FILE" ]; then
  # If the file is empty, all tests passed
  echo "All tests passed."
  rm -f "$FAILED_TESTS_FILE"
else
  # If the file is not empty, rerun the failed tests, one `go test -run` per
  # test name. Each rerun writes its OWN coverage profile: writing them all to
  # a single cover_rerun.out meant every iteration overwrote the previous one,
  # so only the last rerun's coverage ever reached the merge below.
  echo "Rerunning failed tests..."
  rerun_status=0
  rerun_profiles=()   # bash array of per-test coverage profiles
  rerun_index=0
  while IFS= read -r testName; do
    # Skip blank lines defensively; an empty -run pattern would match everything.
    [ -n "$testName" ] || continue
    rerun_index=$((rerun_index + 1))
    profile="cover_rerun_${rerun_index}.out"
    go test \
      -tags integration \
      -v \
      -timeout 30m \
      -coverpkg github.com/checkmarx/ast-cli/internal/commands,github.com/checkmarx/ast-cli/internal/services,github.com/checkmarx/ast-cli/internal/wrappers \
      -coverprofile "$profile" \
      -run "^$testName$" \
      github.com/checkmarx/ast-cli/test/integration || rerun_status=1
    # Only merge profiles that were actually produced (a build failure writes none).
    if [ -f "$profile" ]; then
      rerun_profiles+=("$profile")
    fi
  done < "$FAILED_TESTS_FILE"

  # Step 5: Merge the original profile with ALL rerun profiles
  if [ ${#rerun_profiles[@]} -gt 0 ]; then
    echo "Merging coverage profiles..."
    gocovmerge cover.out "${rerun_profiles[@]}" > merged_coverage.out
    mv merged_coverage.out cover.out
    go tool cover -html=cover.out -o coverage.html
    rm -f "${rerun_profiles[@]}"
  fi

  # Step 6: Check if any tests failed again
  if [ $rerun_status -eq 1 ]; then
    echo "Some tests are still failing."
  else
    echo "All failed tests passed on rerun."
  fi
fi
# Step 7: Generate test summary table
echo ""
echo "=============================================="
echo "            TEST EXECUTION SUMMARY            "
echo "=============================================="

# Count result lines in the test log. `grep -c` prints exactly one number for
# a readable file (0 when nothing matches), so a single digits-only pass —
# which also strips any stray CR/LF from the log — replaces the three layers
# of sanitization these counts previously went through. A missing/unreadable
# log yields an empty string, defaulted to 0 below.
count_result_lines() {
  grep -c "$1" test_output.log 2>/dev/null | tr -cd '0-9'
}
TOTAL_PASSED=$(count_result_lines "^--- PASS:")
TOTAL_FAILED=$(count_result_lines "^--- FAIL:")
TOTAL_SKIPPED=$(count_result_lines "^--- SKIP:")
TOTAL_PASSED=${TOTAL_PASSED:-0}
TOTAL_FAILED=${TOTAL_FAILED:-0}
TOTAL_SKIPPED=${TOTAL_SKIPPED:-0}
TOTAL_TESTS=$((TOTAL_PASSED + TOTAL_FAILED + TOTAL_SKIPPED))

# Pass rate with one decimal place. Values are handed to awk via -v rather
# than interpolated into the program text.
if [ "$TOTAL_TESTS" -gt 0 ]; then
  PASS_RATE=$(awk -v p="$TOTAL_PASSED" -v t="$TOTAL_TESTS" 'BEGIN {printf "%.1f", (p / t) * 100}')
else
  PASS_RATE="0.0"
fi
# Duration: take the first field shaped like "12.345s" from the package result
# line ("ok"/"FAIL" followed by the integration package path); "N/A" when the
# log has no such line.
DURATION=$(awk '/(ok|FAIL)[[:space:]]+github\.com\/checkmarx\/ast-cli\/test\/integration[[:space:]]/ {
  for (i = 1; i <= NF; i++)
    if ($i ~ /^[0-9]+\.[0-9]+s$/) { print $i; exit }
}' test_output.log 2>/dev/null)
DURATION=${DURATION:-"N/A"}

# Test-filter column: at most 18 characters, with an ellipsis when truncated.
if [ -z "$TEST_FILTER" ]; then
  FILTER_INFO="All tests"
elif [ ${#TEST_FILTER} -gt 18 ]; then
  FILTER_INFO="${TEST_FILTER:0:18}..."   # bash substring, same as cut -c1-18
else
  FILTER_INFO=$TEST_FILTER
fi
# Print summary table (no ANSI colors for CI compatibility)
printf "\n"
printf "+---------------------+---------------------+\n"
printf "| %-19s | %-19s |\n" "Metric" "Value"
printf "+---------------------+---------------------+\n"
printf "| %-19s | %-19s |\n" "Test Filter" "$FILTER_INFO"
printf "| %-19s | %-19s |\n" "Total Tests" "$TOTAL_TESTS"
printf "| %-19s | %-19s |\n" "Passed" "$TOTAL_PASSED"
printf "| %-19s | %-19s |\n" "Failed" "$TOTAL_FAILED"
printf "| %-19s | %-19s |\n" "Skipped" "$TOTAL_SKIPPED"
printf "| %-19s | %-19s |\n" "Pass Rate" "${PASS_RATE}%"
printf "| %-19s | %-19s |\n" "Duration" "$DURATION"
printf "+---------------------+---------------------+\n"

# Print failed test names if any, capped at 20 entries.
# TOTAL_FAILED was already counted from the same "^--- FAIL:" grep (and
# sanitized to a plain number) above, so there is no need to re-count the
# FAIL lines into a separate FAIL_COUNT here.
if [ "$TOTAL_FAILED" -gt 0 ]; then
  echo ""
  echo "Failed Tests:"
  echo "-------------"
  grep "^--- FAIL:" test_output.log | awk '{print "  [X] " $3}' | head -20
  if [ "$TOTAL_FAILED" -gt 20 ]; then
    echo "  ... and $((TOTAL_FAILED - 20)) more"
  fi
fi
echo ""
echo "=============================================="
# Step 8: Run the cleandata package to delete projects
echo "Running cleandata to clean up projects..."
go test -v github.com/checkmarx/ast-cli/test/cleandata

# Step 9: Final cleanup and exit
# Note: Keep test_output.log for CI artifact upload
rm -f "$FAILED_TESTS_FILE"

# Fail the job when either the initial run or the rerun pass reported failure;
# otherwise exit cleanly. ($status / $rerun_status are set earlier in the file;
# they are deliberately left unquoted here to match the original test semantics.)
if [ $status -ne 0 ] || [ $rerun_status -eq 1 ]; then
  exit 1
fi
exit 0