diff --git a/.github/workflows/load.tests.yml b/.github/workflows/load.tests.yml
new file mode 100644
index 000000000..f3f70a503
--- /dev/null
+++ b/.github/workflows/load.tests.yml
@@ -0,0 +1,227 @@
+name: Load Tests
+
+on:
+  pull_request:
+    branches: [main, development]
+
+jobs:
+  test-base:
+    runs-on: ubuntu-latest
+
+    services:
+      redis:
+        image: redis
+        ports:
+          - 6379:6379
+        options: >-
+          --health-cmd "redis-cli ping"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
+      timescaledb:
+        image: timescale/timescaledb-ha:pg16
+        env:
+          POSTGRES_USER: timescaledb
+          POSTGRES_PASSWORD: password
+        ports:
+          - 5432:5432
+        options: >-
+          --health-cmd pg_isready
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
+      mongodb:
+        image: mongo:latest
+        env:
+          MONGO_INITDB_ROOT_USERNAME: admin
+          MONGO_INITDB_ROOT_PASSWORD: admin
+        ports:
+          - 27017:27017
+        options: >-
+          --health-cmd "mongosh --eval 'db.runCommand({ ping: 1 })'"
+          --health-interval 10s
+          --health-timeout 5s
+          --health-retries 5
+
+    steps:
+      - uses: actions/checkout@v2
+        with:
+          ref: ${{ github.event.pull_request.base.sha }}
+
+      - name: Set up Node.js
+        uses: actions/setup-node@v2
+        with:
+          node-version: '18'
+
+      - name: Install dependencies
+        run: npm ci
+
+      - name: Copy config
+        run: npm run copy-env
+
+      - name: Build
+        run: npm run build
+
+      - name: Start Node.js API
+        run: GH_ACTIONS=true node ./dist/main.js &
+
+      - name: Install k6
+        run: |
+          sudo gpg -k
+          sudo gpg --no-default-keyring --keyring /usr/share/keyrings/k6-archive-keyring.gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys C5AD17C747E3415A3642D57D77C6C491D6AC1D69
+          echo "deb [signed-by=/usr/share/keyrings/k6-archive-keyring.gpg] https://dl.k6.io/deb stable main" | sudo tee /etc/apt/sources.list.d/k6.list
+          sudo apt-get update
+          sudo apt-get install k6
+
+      - name: Wait for API to be ready
+        run: |
+          until curl --output /dev/null --silent --max-time 60 http://localhost:3005/graphql; do
+            echo 'Waiting for API...'
+ sleep 5 + done + + - name: Preload cache + run: k6 run ./k6/preload.js + + - name: Run k6 Load Test + run: k6 run ./k6/script.js + + - name: Upload result file for base branch + uses: actions/upload-artifact@v4 + with: + name: base-results + path: k6/output/summary.json + + test-head: + runs-on: ubuntu-latest + + services: + redis: + image: redis + ports: + - 6379:6379 + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + timescaledb: + image: timescale/timescaledb-ha:pg16 + env: + POSTGRES_USER: timescaledb + POSTGRES_PASSWORD: password + ports: + - 5432:5432 + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + mongodb: + image: mongo:latest + env: + MONGO_INITDB_ROOT_USERNAME: admin + MONGO_INITDB_ROOT_PASSWORD: admin + ports: + - 27017:27017 + options: >- + --health-cmd "mongosh --eval 'db.runCommand({ ping: 1 })'" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + + steps: + - uses: actions/checkout@v2 + with: + ref: ${{ github.event.pull_request.head.sha }} + + - name: Set up Node.js + uses: actions/setup-node@v2 + with: + node-version: '18' + + - name: Install dependencies + run: npm ci + + - name: Copy config + run: npm run copy-env + + - name: Build + run: npm run build + + - name: Start Node.js API + run: GH_ACTIONS=true node ./dist/main.js & + + - name: Install k6 + run: | + sudo gpg -k + sudo gpg --no-default-keyring --keyring /usr/share/keyrings/k6-archive-keyring.gpg --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys C5AD17C747E3415A3642D57D77C6C491D6AC1D69 + echo "deb [signed-by=/usr/share/keyrings/k6-archive-keyring.gpg] https://dl.k6.io/deb stable main" | sudo tee /etc/apt/sources.list.d/k6.list + sudo apt-get update + sudo apt-get install k6 + + - name: Wait for API to be ready + run: | + until curl --output /dev/null --silent --max-time 60 http://localhost:3005/graphql; do + echo 'Waiting for API...' 
+ sleep 5 + done + + - name: Preload cache + run: k6 run ./k6/preload.js + + - name: Run k6 Load Test + run: k6 run ./k6/script.js + + - name: Upload result file for head branch + uses: actions/upload-artifact@v4 + with: + name: head-results + path: k6/output/summary.json + + compare-results: + runs-on: ubuntu-latest + needs: [test-base, test-head] + + steps: + - uses: actions/checkout@v2 + + - name: Download all artifacts + uses: actions/download-artifact@v4 + with: + path: artifacts + + - name: Set up Node.js + uses: actions/setup-node@v2 + with: + node-version: '18' + + - name: Compare test results + run: | + node ./k6/compare-results.js ${{ github.event.pull_request.base.sha }} artifacts/base-results/summary.json ${{ github.event.pull_request.head.sha }} artifacts/head-results/summary.json report.md + + - name: Find Comment + uses: peter-evans/find-comment@v2 + id: fc + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: 'github-actions[bot]' + body-includes: k6 load testing comparison + + - name: Display Report Contents + run: | + echo "=== Load Test Comparison Report ===" + cat report.md + echo "================================" + + - name: Create or update comment + uses: peter-evans/create-or-update-comment@v2 + with: + comment-id: ${{ steps.fc.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body-file: report.md + edit-mode: replace diff --git a/.gitignore b/.gitignore index 64db8786a..ad0cd74e2 100644 --- a/.gitignore +++ b/.gitignore @@ -32,3 +32,6 @@ workspace-dex-service.code-workspace # Env .env + +# k6 output +k6/output/ diff --git a/k6/compare-results.js b/k6/compare-results.js new file mode 100644 index 000000000..2c1566778 --- /dev/null +++ b/k6/compare-results.js @@ -0,0 +1,73 @@ +const fs = require('fs'); + +function readResults(filePath) { + const data = JSON.parse(fs.readFileSync(filePath, 'utf8')); + return { + http_req_duration: { + avg: data.metrics.http_req_duration.values.avg, + 
p95: data.metrics.http_req_duration.values['p(95)'],
+    },
+    http_reqs: data.metrics.http_reqs.values.count,
+    http_req_failed: data.metrics.http_req_failed.values.rate,
+    checks_passed: data.metrics.checks.values.passes,
+    checks_failed: data.metrics.checks.values.fails,
+  };
+}
+
+function calculateDiff(base, head) {
+  return {
+    http_req_duration: {
+      avg: ((head.http_req_duration.avg - base.http_req_duration.avg) / base.http_req_duration.avg * 100).toFixed(2),
+      p95: ((head.http_req_duration.p95 - base.http_req_duration.p95) / base.http_req_duration.p95 * 100).toFixed(2),
+    },
+    http_reqs: ((head.http_reqs - base.http_reqs) / base.http_reqs * 100).toFixed(2),
+    http_req_failed: (head.http_req_failed - base.http_req_failed).toFixed(4),
+    checks_passed: ((head.checks_passed - base.checks_passed) / base.checks_passed * 100).toFixed(2),
+    checks_failed: head.checks_failed - base.checks_failed,
+  };
+}
+
+function generateReport(baseSha, baseResults, headSha, headResults, diff) {
+  return `# k6 load testing comparison
+## Base Branch (${baseSha})
+- Average Response Time: ${baseResults.http_req_duration.avg.toFixed(2)}ms
+- P95 Response Time: ${baseResults.http_req_duration.p95.toFixed(2)}ms
+- Total Requests: ${baseResults.http_reqs}
+- Failed Requests Rate: ${(baseResults.http_req_failed * 100).toFixed(2)}%
+- Checks Passed: ${baseResults.checks_passed}
+- Checks Failed: ${baseResults.checks_failed}
+
+## Head Branch (${headSha})
+- Average Response Time: ${headResults.http_req_duration.avg.toFixed(2)}ms
+- P95 Response Time: ${headResults.http_req_duration.p95.toFixed(2)}ms
+- Total Requests: ${headResults.http_reqs}
+- Failed Requests Rate: ${(headResults.http_req_failed * 100).toFixed(2)}%
+- Checks Passed: ${headResults.checks_passed}
+- Checks Failed: ${headResults.checks_failed}
+
+## Changes
+- Average Response Time: ${diff.http_req_duration.avg}% ${diff.http_req_duration.avg > 20 ? '⚠️' : '✅'}
+- P95 Response Time: ${diff.http_req_duration.p95}% ${diff.http_req_duration.p95 > 20 ? '⚠️' : '✅'}
+- Total Requests: ${diff.http_reqs}%
+- Failed Requests Rate Change: ${diff.http_req_failed}% ${diff.http_req_failed > 0 ? '⚠️' : '✅'}
+- Checks Passed: ${diff.checks_passed}%
+- Checks Failed Change: ${diff.checks_failed} ${diff.checks_failed > 0 ? '⚠️' : '✅'}
+${
+  diff.http_req_duration.avg > 20 ||
+  diff.http_req_duration.p95 > 20 ||
+  diff.http_req_failed > 0.01 ||
+  diff.checks_failed > 0
+    ? '⚠️ **Performance regression detected!** Please review the changes.'
+    : '✅ **No significant performance regression detected.**'
+}`;
+}
+
+// Main execution
+const [baseSha, baseFile, headSha, headFile, outputFile] = process.argv.slice(2);
+
+const baseResults = readResults(baseFile);
+const headResults = readResults(headFile);
+const diff = calculateDiff(baseResults, headResults);
+
+const report = generateReport(baseSha, baseResults, headSha, headResults, diff);
+fs.writeFileSync(outputFile, report);
\ No newline at end of file
diff --git a/k6/preload.js b/k6/preload.js
new file mode 100644
index 000000000..007b7b524
--- /dev/null
+++ b/k6/preload.js
@@ -0,0 +1,212 @@
+import http from 'k6/http';
+import { check, sleep } from 'k6';
+import { textSummary } from 'https://jslib.k6.io/k6-summary/0.0.1/index.js';
+
+export const options = {
+  vus: 1,
+  iterations: 1,
+  // Much longer duration for preload
+  maxDuration: '5m',
+  // Add output configuration
+  summaryTrendStats: ['avg', 'min', 'med', 'max', 'p(95)', 'p(99)', 'count'],
+  summaryTimeUnit: 'ms',
+};
+
+const BASE_URL = 'http://localhost:3005';
+const MAX_RETRIES = 5;
+const RETRY_DELAY = 10; // Increased to 10 seconds between retries
+const SERVER_CHECK_RETRIES = 12; // 2 minutes total with 10-second delay
+const REQUEST_TIMEOUT = '120s'; // 2 minutes timeout for initial data load
+
+const GET_PAIRS_QUERY = `
+query {
+  pairs {
+    address
+    firstToken {
+      identifier
+    }
+    secondToken {
+      identifier
+    }
+  }
+}`;
+
+function waitForServer() {
+  const headers = {
+    'Content-Type': 'application/json',
+    'Accept': 'application/json',
+  };
+  const introspectionQuery = JSON.stringify({
+    query: `
+      query {
+        __schema {
+          types {
+            name
+          }
+        }
+      }
+    `
+  });
+
+  for (let i = 0; i < SERVER_CHECK_RETRIES; i++) {
+    try {
+      const response = http.post(
+        `${BASE_URL}/graphql`,
+        introspectionQuery,
+        {
+          headers,
+          timeout: REQUEST_TIMEOUT,
+          tags: { name: 'ServerCheck' }
+        }
+      );
+
+      if (response.status === 200) {
+        try {
+          const body = JSON.parse(response.body);
+          if (body.data && body.data.__schema) {
+            console.log('GraphQL server is ready');
+            sleep(5); // Increased to 5 seconds to ensure server is fully ready
+            return true;
+          }
+        } catch (e) {
+          console.log('Invalid response from server:', e.message);
+        }
+      }
+      console.log(`Server not ready, status: ${response.status}, attempt ${i + 1}/${SERVER_CHECK_RETRIES}`);
+    } catch (e) {
+      console.log(`Server not ready, attempt ${i + 1}/${SERVER_CHECK_RETRIES}:`, e.message);
+    }
+    sleep(RETRY_DELAY);
+  }
+  console.log('Server failed to become ready');
+  return false;
+}
+
+export default function () {
+  if (!waitForServer()) {
+    console.log('Aborting tests as server is not ready');
+    return;
+  }
+
+  const headers = {
+    'Content-Type': 'application/json',
+    'Accept': 'application/json',
+  };
+
+  let lastResponse;
+  for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) {
+    try {
+      console.log(`Attempting to get pairs (Attempt ${attempt}/${MAX_RETRIES})`);
+      const pairsResponse = http.post(
+        `${BASE_URL}/graphql`,
+        JSON.stringify({ query: GET_PAIRS_QUERY }),
+        {
+          headers,
+          timeout: REQUEST_TIMEOUT,
+          tags: { name: 'GetPairs' }
+        }
+      );
+
+      console.log(`Pairs Response (Attempt ${attempt}/${MAX_RETRIES}):`, {
+        status: pairsResponse.status,
+        body: pairsResponse.body?.substring(0, 100) + '...'
+      });
+
+      let pairAddress;
+      try {
+        const pairsData = JSON.parse(pairsResponse.body);
+        if (pairsData.data && pairsData.data.pairs && pairsData.data.pairs.length > 0) {
+          pairAddress = pairsData.data.pairs[0].address;
+          console.log('Using pair address:', pairAddress);
+        } else {
+          throw new Error('No pairs found in response');
+        }
+      } catch (e) {
+        console.log('Failed to parse pairs response:', e.message);
+        if (attempt < MAX_RETRIES) {
+          console.log(`Retrying in ${RETRY_DELAY} seconds... (Attempt ${attempt}/${MAX_RETRIES})`);
+          sleep(RETRY_DELAY);
+          continue;
+        }
+        throw e;
+      }
+
+      sleep(5); // Increased to 5 seconds between requests
+
+      console.log(`Attempting to get trading activity (Attempt ${attempt}/${MAX_RETRIES})`);
+      const TRADING_ACTIVITY_QUERY = `
+      query {
+        tradingActivity(series: "${pairAddress}") {
+          hash
+          timestamp
+          action
+          inputToken {
+            identifier
+            name
+            decimals
+            balance
+          }
+          outputToken {
+            identifier
+            name
+            decimals
+            balance
+          }
+        }
+      }`;
+
+      const tradingActivityResponse = http.post(
+        `${BASE_URL}/graphql`,
+        JSON.stringify({ query: TRADING_ACTIVITY_QUERY }),
+        {
+          headers,
+          timeout: REQUEST_TIMEOUT,
+          tags: { name: 'TradingActivity' }
+        }
+      );
+
+      console.log(`Trading Activity Response (Attempt ${attempt}/${MAX_RETRIES}):`, {
+        status: tradingActivityResponse.status,
+        body: tradingActivityResponse.body?.substring(0, 100) + '...'
+      });
+
+      const tradingActivityChecks = check(tradingActivityResponse, {
+        'TradingActivity status is 200': (r) => r.status === 200,
+        'TradingActivity has data': (r) => {
+          try {
+            const response = JSON.parse(r.body);
+            console.log('Trading Activity data:', response);
+            return response.data?.tradingActivity != null;
+          } catch (e) {
+            console.log('Failed to parse Trading Activity response:', e.message);
+            return false;
+          }
+        }
+      });
+
+      if (Object.values(tradingActivityChecks).every(check => check === true)) {
+        console.log('All checks passed on attempt', attempt);
+        return;
+      }
+
+      lastResponse = tradingActivityResponse;
+
+    } catch (e) {
+      console.log(`Request failed on attempt ${attempt}:`, e.message);
+    }
+
+    if (attempt < MAX_RETRIES) {
+      console.log(`Retrying in ${RETRY_DELAY} seconds... (Attempt ${attempt}/${MAX_RETRIES})`);
+      sleep(RETRY_DELAY);
+    }
+  }
+
+  console.log('All retries failed. Last response:', lastResponse?.body);
+}
+
+export function handleSummary(data) {
+  return {
+    'k6/output/summary.json': JSON.stringify(data),
+    stdout: textSummary(data, { indent: ' ', enableColors: true }),
+  };
+}
\ No newline at end of file
diff --git a/k6/script.js b/k6/script.js
new file mode 100644
index 000000000..7eef0d00b
--- /dev/null
+++ b/k6/script.js
@@ -0,0 +1,147 @@
+import http from 'k6/http';
+import { check, sleep } from 'k6';
+import { textSummary } from "https://jslib.k6.io/k6-summary/0.0.1/index.js";
+
+export const options = {
+  scenarios: {
+    // Constant load scenario for trading activity
+    constant_trading_load: {
+      executor: 'constant-vus',
+      vus: 45, // Increased from 30 to 45 concurrent users
+      duration: '2m', // Keep 2 minutes duration
+    },
+    // Ramping scenario for trading activity scalability
+    ramp_trading_load: {
+      executor: 'ramping-vus',
+      startVUs: 0,
+      stages: [
+        { duration: '30s', target: 50 }, // Ramp up to 50 VUs over 30s
+        { duration: '1m', target: 50 }, // Stay at 50 VUs for 1m
+        { duration: '30s', target: 75 }, // Ramp up to 75 VUs over 30s
+        { duration: '1m', target: 75 }, // Stay at 75 VUs for 1m
+        { duration: '30s', target: 0 }, // Ramp down to 0
+      ],
+    },
+  },
+  thresholds: {
+    http_req_duration: ['p(95)<1000', 'p(99)<1500'], // Stricter response time thresholds
+    http_req_failed: ['rate<0.01'], // Keep the 1% failure rate
+    checks: ['rate>=0.99'], // Keep 99% check pass rate
+    http_reqs: ['rate>150'], // Increased request rate requirement
+  },
+  timeout: '10s',
+  summaryTrendStats: ['avg', 'min', 'med', 'max', 'p(95)', 'p(99)', 'count'],
+  summaryTimeUnit: 'ms',
+};
+
+const BASE_URL = 'http://localhost:3005';
+const REQUEST_TIMEOUT = '5s';
+
+// Reuse the pair address across VUs to avoid unnecessary lookups
+let CACHED_PAIR_ADDRESS = null;
+
+export function setup() {
+  // Get a pair address once during setup
+  const headers = {
+    'Content-Type': 'application/json',
+    'Accept': 'application/json',
+  };
+
+  const GET_PAIRS_QUERY = `
+  query {
+    pairs {
+      address
+      firstToken {
+        identifier
+      }
+      secondToken {
+        identifier
+      }
+    }
+  }`;
+
+  const pairsResponse = http.post(
+    `${BASE_URL}/graphql`,
+    JSON.stringify({ query: GET_PAIRS_QUERY }),
+    {
+      headers,
+      timeout: '30s', // Longer timeout for setup
+      tags: { name: 'Setup_GetPairs' }
+    }
+  );
+
+  const pairsData = JSON.parse(pairsResponse.body);
+  if (pairsData.data?.pairs?.length > 0) {
+    CACHED_PAIR_ADDRESS = pairsData.data.pairs[0].address;
+    console.log('Using pair address:', CACHED_PAIR_ADDRESS);
+    return { pairAddress: CACHED_PAIR_ADDRESS };
+  }
+
+  throw new Error('Failed to get pair address during setup');
+}
+
+export default function (data) {
+  const headers = {
+    'Content-Type': 'application/json',
+    'Accept': 'application/json',
+  };
+
+  const TRADING_ACTIVITY_QUERY = `
+  query {
+    tradingActivity(series: "${data.pairAddress}") {
+      hash
+      timestamp
+      action
+      inputToken {
+        identifier
+        name
+        decimals
+        balance
+      }
+      outputToken {
+        identifier
+        name
+        decimals
+        balance
+      }
+    }
+  }`;
+
+  const tradingActivityResponse = http.post(
+    `${BASE_URL}/graphql`,
+    JSON.stringify({ query: TRADING_ACTIVITY_QUERY }),
+    {
+      headers,
+      timeout: REQUEST_TIMEOUT,
+      tags: { name: 'TradingActivity' }
+    }
+  );
+
+  check(tradingActivityResponse, {
+    'trading activity status is 200': (r) => r.status === 200,
+    'has trading activity data': (r) => {
+      const response = JSON.parse(r.body);
+      return response.data?.tradingActivity != null;
+    },
+    'response time is acceptable': (r) => r.timings.duration < 1000,
+    'response is valid JSON': (r) => {
+      try {
+        JSON.parse(r.body);
+        return true;
+      } catch (e) {
+        console.error('Parse error:', e);
+        return false;
+      }
+    }
+  });
+
+  // Reduce sleep time to increase request rate
+  sleep(Math.random() * 0.5); // Reduced from 1s to 0.5s max sleep
+}
+
+export function handleSummary(data) {
+  return {
+    'k6/output/summary.json': JSON.stringify(data),
+    stdout: textSummary(data, { indent: ' ', enableColors: true }),
+  };
+}
diff --git a/src/services/analytics/timescaledb/migration/typeOrm.config.ts b/src/services/analytics/timescaledb/migration/typeOrm.config.ts
index b4509f520..4712e6ba3 100644
--- a/src/services/analytics/timescaledb/migration/typeOrm.config.ts
+++ b/src/services/analytics/timescaledb/migration/typeOrm.config.ts
@@ -32,6 +32,14 @@ export default new DataSource({
     username: configService.get('TIMESCALEDB_USERNAME'),
     password: configService.get('TIMESCALEDB_PASSWORD'),
     database: configService.get('TIMESCALEDB_DATABASE'),
+    ...(process.env.GH_ACTIONS !== 'true' && {
+        ssl: true,
+        extra: {
+            ssl: {
+                rejectUnauthorized: false,
+            },
+        },
+    }),
     migrationsTransactionMode: 'each',
     entities: [
         XExchangeAnalyticsEntity,
diff --git a/src/services/analytics/timescaledb/timescaledb.module.ts b/src/services/analytics/timescaledb/timescaledb.module.ts
index 0500e1e79..d19d51051 100644
--- a/src/services/analytics/timescaledb/timescaledb.module.ts
+++ b/src/services/analytics/timescaledb/timescaledb.module.ts
@@ -41,12 +41,14 @@ import { DynamicModuleUtils } from 'src/utils/dynamic.module.utils';
                 username: apiConfig.getTimescaleDbUsername(),
                 password: apiConfig.getTimescaleDbPassword(),
                 applicationName: 'xExchangeService',
-                ssl: true,
-                extra: {
-                    ssl: {
-                        rejectUnauthorized: false,
+                ...(process.env.GH_ACTIONS !== 'true' && {
+                    ssl: true,
+                    extra: {
+                        ssl: {
+                            rejectUnauthorized: false,
+                        },
                     },
-                },
+                }),
                 entities: ['dist/**/*.entities.{ts,js}'],
             }),
             inject: [ApiConfigService],