From c21900d203cd0056f2e409500ffae5ec37819235 Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 10:59:12 +0100 Subject: [PATCH 1/9] add test docs, update workflows --- .github/workflows/doc-style-checker.yml | 300 ++++++++++++++++++++++++ .github/workflows/docs-review.yml | 19 ++ .github/workflows/fi-test.yml | 16 -- test-doc.adoc | 110 +++++++++ 4 files changed, 429 insertions(+), 16 deletions(-) create mode 100644 .github/workflows/doc-style-checker.yml create mode 100644 .github/workflows/docs-review.yml delete mode 100644 .github/workflows/fi-test.yml create mode 100644 test-doc.adoc diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml new file mode 100644 index 0000000..fa683f4 --- /dev/null +++ b/.github/workflows/doc-style-checker.yml @@ -0,0 +1,300 @@ +name: Doc Style Checker + +on: + workflow_call: + inputs: + repository: + description: "The repository to check out" + required: true + type: string + path: + type: string + description: "The startPath pointing to the folder containing documentation" + required: false + default: "." + pull_request_number: + type: string + description: "The pull request number to check out" + required: true + base_sha: + type: string + description: "The base sha to diff against" + required: true + head_sha: + type: string + description: "The head sha to comment against" + required: true + secrets: + GEMINI_API_KEY: + description: "Google Gemini API key" + required: true + GEMINI_API_KEY_2: + description: "Google Gemini API key" + required: true + GEMINI_API_KEY_3: + description: "Google Gemini API key" + required: true + ACTION_TOKEN: + description: "GitHub token for posting comments" + required: true + +jobs: + doc-style-check: + runs-on: ubuntu-latest + steps: + - name: Checkout repository + uses: actions/checkout@v4 + with: + repository: ${{ inputs.repository }} + ref: ${{ inputs.head_sha }} + fetch-depth: 100 + path: content-repo + + - name: Get changed documentation files + id: changed-files + working-directory: ./content-repo + run: | + echo "Getting changed files between ${{ inputs.base_sha }} and ${{ inputs.head_sha }}" + CHANGED_FILES=$(git diff --name-only ${{ inputs.base_sha }} ${{ inputs.head_sha }} | grep -E '\.(adoc|md)$' || true) + echo "Changed documentation files:" + echo "$CHANGED_FILES" + echo "files<> $GITHUB_OUTPUT + echo "$CHANGED_FILES" >> $GITHUB_OUTPUT + echo "EOF" >> $GITHUB_OUTPUT + + - name: Process changed documentation files + if: steps.changed-files.outputs.files != '' + working-directory: ./content-repo + run: | + mkdir -p /tmp/doc-processing + echo "Processing files..." + + echo '${{ steps.changed-files.outputs.files }}' | while IFS= read -r file; do + if [[ -n "$file" && -f "$file" ]]; then + echo "Preparing: $file" + SAFE_NAME=$(echo "$(basename "$file")" | sed 's/[^a-zA-Z0-9._-]/_/g') + CONTENT=$(cat "$file" | jq -Rs .) 
+ + cat > "/tmp/doc-processing/${SAFE_NAME}.meta" << EOF + { + "filename": "$file", + "content": $CONTENT + } + EOF + echo "βœ“ Prepared $file" + fi + done + + - name: Setup Node.js + if: steps.changed-files.outputs.files != '' + uses: actions/setup-node@v4 + with: + node-version: '18' + + - name: Run Doc Style Checker + if: steps.changed-files.outputs.files != '' + run: | + cat > check_docs.js << 'EOF' + const fs = require('fs'); + const path = require('path'); + const https = require('https'); + + async function checkDocStyle(content, filename) { + const data = JSON.stringify({ + content: content, + filename: filename + }); + + const options = { + hostname: 'doc-style-checker.vercel.app', + path: '/api/check-style', + method: 'POST', + headers: { + 'Content-Type': 'application/json', + 'Content-Length': data.length, + 'User-Agent': 'GitHub-Actions-Doc-Checker/1.0' + } + }; + + return new Promise((resolve, reject) => { + const req = https.request(options, (res) => { + let responseData = ''; + res.on('data', (chunk) => { + responseData += chunk; + }); + res.on('end', () => { + try { + if (res.statusCode !== 200) { + throw new Error(`HTTP ${res.statusCode}: ${responseData}`); + } + const result = JSON.parse(responseData); + resolve(result); + } catch (e) { + console.error('Response parsing error:', e.message); + reject(e); + } + }); + }); + + req.on('error', (e) => { + reject(e); + }); + + req.setTimeout(30000, () => { + req.destroy(); + reject(new Error('Request timeout')); + }); + + req.write(data); + req.end(); + }); + } + + async function processAllFiles() { + const processingDir = '/tmp/doc-processing'; + const results = {}; + + if (!fs.existsSync(processingDir)) { + console.log('No processing directory found'); + return results; + } + + const metaFiles = fs.readdirSync(processingDir).filter(f => f.endsWith('.meta')); + console.log(`Found ${metaFiles.length} file(s) to process`); + + for (const metaFile of metaFiles) { + const metaPath = path.join(processingDir, metaFile); + + try { + const metadata = JSON.parse(fs.readFileSync(metaPath, 'utf8')); + console.log(`πŸ” Checking: ${metadata.filename}`); + + const result = await checkDocStyle(metadata.content, metadata.filename); + results[metadata.filename] = result; + + const issueCount = result.issues ? 
result.issues.length : 0; + console.log(`βœ“ ${metadata.filename}: ${issueCount} issue(s) found`); + + await new Promise(resolve => setTimeout(resolve, 2000)); + + } catch (error) { + console.error(`βœ— Error processing ${metaFile}:`, error.message); + const filename = metaFile.replace('.meta', ''); + results[filename] = { error: error.message }; + } + } + + return results; + } + + processAllFiles() + .then(results => { + fs.writeFileSync('/tmp/style-check-results.json', JSON.stringify(results, null, 2)); + console.log('πŸ“Š Style check completed'); + }) + .catch(error => { + console.error('❌ Style check failed:', error); + process.exit(1); + }); + EOF + + node check_docs.js + + - name: Post PR Comment + if: steps.changed-files.outputs.files != '' + uses: actions/github-script@v7 + with: + github-token: ${{ secrets.ACTION_TOKEN }} + script: | + const fs = require('fs'); + + let results = {}; + try { + const resultsData = fs.readFileSync('/tmp/style-check-results.json', 'utf8'); + results = JSON.parse(resultsData); + } catch (error) { + console.log('No results to process'); + return; + } + + console.log(`Processing results for ${Object.keys(results).length} files`); + + let comment = '## πŸ“ Doc Style Checker Results\n\n'; + let hasIssues = false; + let totalIssues = 0; + + for (const [filename, result] of Object.entries(results)) { + if (result.error) { + comment += `### ❌ ${filename}\n**Error:** \`${result.error}\`\n\n`; + continue; + } + + if (!result.issues || result.issues.length === 0) { + comment += `### βœ… ${filename}\nNo style issues found!\n\n`; + continue; + } + + hasIssues = true; + totalIssues += result.issues.length; + comment += `### πŸ“‹ ${filename}\n**${result.issues.length} issue${result.issues.length > 1 ? 's' : ''} found**\n\n`; + + const byCategory = {}; + result.issues.forEach(issue => { + const cat = issue.category || 'General'; + if (!byCategory[cat]) byCategory[cat] = []; + byCategory[cat].push(issue); + }); + + for (const [category, issues] of Object.entries(byCategory)) { + comment += `#### ${category}\n\n`; + + issues.forEach((issue, i) => { + comment += `
<details>\n<summary>Issue ${i + 1}</summary>\n\n`;
+
+                if (issue.problem) {
+                  comment += `**Problem:** ${issue.problem}\n\n`;
+                }
+
+                if (issue.problematicText) {
+                  comment += `**Text:**\n\`\`\`\n${issue.problematicText}\n\`\`\`\n\n`;
+                }
+
+                if (issue.location) {
+                  comment += `**Location:** ${issue.location}\n\n`;
+                }
+
+                if (issue.suggestion) {
+                  comment += `**Suggestion:** ${issue.suggestion}\n\n`;
+                }
+
+                if (issue.guideline) {
+                  comment += `**Guideline:** ${issue.guideline}\n\n`;
+                }
+
+                comment += `</details>
\n\n`; + }); + } + } + + if (hasIssues) { + const summary = `πŸ” **Summary:** ${totalIssues} issue${totalIssues > 1 ? 's' : ''} found across ${Object.keys(results).length} file${Object.keys(results).length > 1 ? 's' : ''}\n\n`; + comment = comment.replace('## πŸ“ Doc Style Checker Results\n\n', `## πŸ“ Doc Style Checker Results\n\n${summary}`); + } else if (Object.keys(results).length > 0) { + comment += 'πŸŽ‰ **All documentation looks great!** No style issues found.\n\n'; + } + + comment += '---\n*Automated by [Doc Style Checker](https://doc-style-checker.vercel.app/) β€’ Couchbase Documentation Style Guide*'; + + try { + const [owner, repo] = '${{ inputs.repository }}'.split('/'); + await github.rest.issues.createComment({ + issue_number: parseInt('${{ inputs.pull_request_number }}'), + owner: owner, + repo: repo, + body: comment + }); + console.log('βœ… Comment posted successfully'); + } catch (error) { + console.error('❌ Failed to post comment:', error); + throw error; + } \ No newline at end of file diff --git a/.github/workflows/docs-review.yml b/.github/workflows/docs-review.yml new file mode 100644 index 0000000..536a6a4 --- /dev/null +++ b/.github/workflows/docs-review.yml @@ -0,0 +1,19 @@ +name: Documentation Review +on: + pull_request: + types: [opened, synchronize, reopened] + +jobs: + doc-style-check: + uses: couchbaselabs/docs-runner/.github/workflows/doc-style-checker.yml@fi-docs-style-checker + with: + repository: ${{ github.repository }} + path: "." + pull_request_number: ${{ github.event.pull_request.number }} + base_sha: ${{ github.event.pull_request.base.sha }} + head_sha: ${{ github.event.pull_request.head.sha }} + secrets: + GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }} + GEMINI_API_KEY_2: ${{ secrets.GEMINI_API_KEY_2 }} + GEMINI_API_KEY_3: ${{ secrets.GEMINI_API_KEY_3 }} + ACTION_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/workflows/fi-test.yml b/.github/workflows/fi-test.yml deleted file mode 100644 index ff778e3..0000000 --- a/.github/workflows/fi-test.yml +++ /dev/null @@ -1,16 +0,0 @@ -name: Test workflow for FI - -on: - workflow_call: - workflow_dispatch: - -jobs: - test: - runs-on: ubuntu-latest - - steps: - - name: test - run: | - echo worlld - - diff --git a/test-doc.adoc b/test-doc.adoc new file mode 100644 index 0000000..6a8409b --- /dev/null +++ b/test-doc.adoc @@ -0,0 +1,110 @@ +Data Modelling + +couchbase Sync Gateway’s data model; for secure cloud-to-edge synchronization of enterprise data. + +introduction + +This page includes guidance and constraints relating to the design of data buckets and documents that you want to replicate using Sync Gateway. They do not necessarily align with constraints on the local storage and use of such documents. + +property naming + +You can use an underscore prefix (_, ASCII _) for property naming, but your name cannot match any of the Document system properties reserved by Sync Gateway: + +_sync + +_id + +_rev + +_deleted + +_attachments + +_revisions + +_exp + +_purged + +_removed + +Any document that matches the reserved property names listed will be rejected by Sync Gateway — see Example 1 for the error details. + +Example 1. Property prefix error message +text +Copy +"{"error":"Bad Request","reason":"user defined top level properties beginning with '_' are not allowed in document body"}" +Where it applies +This rule applies to writes performed through: + +Couchbase Lite SDKs + +Sync Gateway REST APIs + +Couchbase Server SDKs when shared bucket access is enabled. 
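As a quick, illustrative sketch of the rule above (the document bodies and the `_type` field are made-up examples, not content from this repository), the first body below would be rejected with the Example 1 error because it has a user-defined top-level property with a leading underscore, while the other two shapes are accepted because no top-level user key starts with `_`:

[source,javascript]
----
// Made-up document bodies, used only to illustrate the rule described above.

// Rejected: "_type" is a user-defined top-level property with a leading
// underscore, so Sync Gateway returns the "Bad Request" error in Example 1.
const rejectedBody = {
  _type: 'hotel',
  name: 'Ocean Pearl'
};

// Accepted: the same information with no top-level underscore key, either by
// renaming the property or by nesting it under a non-underscore wrapper key.
const renamedBody = { type: 'hotel', name: 'Ocean Pearl' };
const wrappedBody = { meta: { _type: 'hotel' }, name: 'Ocean Pearl' };
----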
+ +When you might encounter the error +You may encounter the error in the following deployment situations: + +In Mobile-to-Web Data Sync with Node.js Server SDK and Ottoman.js (the Node.js ODM for Couchbase), where the rule conflicts with the _type property that is automatically added by Ottoman.js. + +A suggested workaround in this scenario is to fork the Ottoman.js library, perform a search-replace for the _type property and replace it without a leading underscore. + +For versions 2.x of Sync Gateway, you can encounter the following error: + +In Mobile-to-Web Data Sync with Field-level Encryption enabled, because the rule conflicts with the default field encryption format. + +How to avoid the error +You should change any top-level user properties that have a key with a leading underscore , by either: + +Renaming them to remove the underscore, or, + +Wrapping them inside another object with a key that doesn’t have a leading underscore. + +Document Structure +Couchbase’s unit of data is a document, this is the NOSQL equivalent of a row or record. + +Documents are stored as a key-value pair, which comprises a unique and immutable key, the Id, and a value representing the users' data (a JSON-object or binary blob). + +Key +The document key, the Id, is: + +A UTF-8 string with no spaces, although it may contain special characters, such as (, %, /, ", and _ + +No longer than 250 bytes + +Unique within the bucket + +Automatically generated (as a UUID) or be set by the user or application when saved + +Immutable; that is, once saved the Id cannot be changed. + +Value +The document value is either: + +A JSON value, termed a Document. + +This JSON object is a collection of key/value pairs. The values may be numbers, strings, arrays, or even nested objects. As a result, documents can represent complex data structures in a readily parsable and self-organizing manner. + +a binary object (also known as a blob or attachment) + +These attachments provide a means to store large media files or any other non-textual data. Couchbase Lite supports attachments of unlimited size, although the Sync Gateway imposes a 20MB limit for attachments synced to it. + +Document Attributes +Each Document has the following attributes: + +A document ID + +A current revision ID (which changes when the document is updated) + +A history of past revision IDs (usually linear, but will form a branching tree if the document has or has had conflicts) + +A body in the form of a JSON object (a set of key/value pairs) + +Zero or more named binary attachments + +Document Change History +Couchbase Lite tracks the change history of every document as a series of revisions, like version control systems such as Git or Subversion. Its main purpose is to enable the replicator to determine which data to sync and any conflicts arising. + +Each document change is assigned a unique revision ID. The IDs of past revisions are available. The content of past revisions may be available if the revision was created locally and the database has not yet been compacted. 
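As a rough sketch of the key constraints listed under Key above (the function name and the use of Node's crypto.randomUUID are illustrative choices, not part of any Couchbase API), the following checks a candidate document ID for spaces and the 250-byte limit, and generates a UUID when no ID is supplied:

[source,javascript]
----
const crypto = require('crypto');

// Illustrative only: checks a candidate ID against the constraints above
// (UTF-8 string, no spaces, no longer than 250 bytes). Uniqueness within
// the bucket cannot be verified locally, so it is not covered here.
function isValidDocumentId(id) {
  if (typeof id !== 'string' || id.length === 0) return false;
  if (/\s/.test(id)) return false;                // no spaces allowed
  return Buffer.byteLength(id, 'utf8') <= 250;    // at most 250 bytes
}

// Special characters such as (, %, /, " and _ are fine.
console.log(isValidDocumentId('hotel_(ocean%pearl)/"42"')); // true
console.log(isValidDocumentId('has a space'));              // false

// When no ID is set by the user or application, one can be generated as a UUID.
const generatedId = crypto.randomUUID();
console.log(isValidDocumentId(generatedId));                // true
----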
+ From 4648c2dd65d4615e51b82bf298f542439df7acd9 Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:23:26 +0100 Subject: [PATCH 2/9] updated actions to reflect api handling --- .github/workflows/doc-style-checker.yml | 41 +++++++++++++++++++------ 1 file changed, 32 insertions(+), 9 deletions(-) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index fa683f4..e9f36a4 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -28,15 +28,15 @@ on: GEMINI_API_KEY: description: "Google Gemini API key" required: true - GEMINI_API_KEY_2: - description: "Google Gemini API key" - required: true - GEMINI_API_KEY_3: - description: "Google Gemini API key" - required: true ACTION_TOKEN: description: "GitHub token for posting comments" required: true + workflow_dispatch: # Add this for manual testing + inputs: + test_mode: + description: "Run in test mode" + default: "true" + type: boolean jobs: doc-style-check: @@ -124,6 +124,9 @@ jobs: }); res.on('end', () => { try { + console.log(`Response status: ${res.statusCode}`); + console.log(`Response body: ${responseData}`); + if (res.statusCode !== 200) { throw new Error(`HTTP ${res.statusCode}: ${responseData}`); } @@ -131,6 +134,7 @@ jobs: resolve(result); } catch (e) { console.error('Response parsing error:', e.message); + console.error('Raw response:', responseData); reject(e); } }); @@ -234,12 +238,31 @@ jobs: continue; } + // Process the nested structure from your API + let fileIssues = []; + result.issues.forEach(categoryGroup => { + if (categoryGroup.issues && Array.isArray(categoryGroup.issues)) { + categoryGroup.issues.forEach(issue => { + fileIssues.push({ + category: categoryGroup.category, + ...issue + }); + }); + } + }); + + if (fileIssues.length === 0) { + comment += `### βœ… ${filename}\nNo style issues found!\n\n`; + continue; + } + hasIssues = true; - totalIssues += result.issues.length; - comment += `### πŸ“‹ ${filename}\n**${result.issues.length} issue${result.issues.length > 1 ? 's' : ''} found**\n\n`; + totalIssues += fileIssues.length; + comment += `### πŸ“‹ ${filename}\n**${fileIssues.length} issue${fileIssues.length > 1 ? 
's' : ''} found**\n\n`; + // Group by category const byCategory = {}; - result.issues.forEach(issue => { + fileIssues.forEach(issue => { const cat = issue.category || 'General'; if (!byCategory[cat]) byCategory[cat] = []; byCategory[cat].push(issue); From b0335d5d22fc0e55dfc6de7673aa1b2b129d3b9e Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:25:44 +0100 Subject: [PATCH 3/9] add extra gemini keys --- .github/workflows/doc-style-checker.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index e9f36a4..5610b4c 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -28,6 +28,12 @@ on: GEMINI_API_KEY: description: "Google Gemini API key" required: true + GEMINI_API_KEY_2: + description: "Google Gemini API key" + required: true + GEMINI_API_KEY_3: + description: "Google Gemini API key" + required: true ACTION_TOKEN: description: "GitHub token for posting comments" required: true From a92b7754b39ab53cdc250601ba5e95354acdd3da Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:30:22 +0100 Subject: [PATCH 4/9] fix content encoding for github api --- .github/workflows/doc-style-checker.yml | 33 +++++++++++++++++++++---- 1 file changed, 28 insertions(+), 5 deletions(-) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index 5610b4c..316ee01 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -79,14 +79,37 @@ jobs: if [[ -n "$file" && -f "$file" ]]; then echo "Preparing: $file" SAFE_NAME=$(echo "$(basename "$file")" | sed 's/[^a-zA-Z0-9._-]/_/g') - CONTENT=$(cat "$file" | jq -Rs .) - cat > "/tmp/doc-processing/${SAFE_NAME}.meta" << EOF + # Read file content and create JSON properly + cat > "/tmp/doc-processing/${SAFE_NAME}.meta" << 'METAEOF' { - "filename": "$file", - "content": $CONTENT + "filename": "FILE_PLACEHOLDER", + "content": "CONTENT_PLACEHOLDER" } - EOF + METAEOF + + # Replace placeholders with actual values using proper escaping + sed -i "s|FILE_PLACEHOLDER|$file|g" "/tmp/doc-processing/${SAFE_NAME}.meta" + + # Use Python to properly escape JSON content + python3 -c " +import json +import sys + +with open('$file', 'r', encoding='utf-8') as f: + content = f.read() + +# Read the template +with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'r') as f: + template = f.read() + +# Replace content placeholder with properly escaped JSON +escaped_content = json.dumps(content) +final_json = template.replace('\"CONTENT_PLACEHOLDER\"', escaped_content) + +with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'w') as f: + f.write(final_json) +" echo "βœ“ Prepared $file" fi done From 965580ced2ee8cb6c4c992f80c14159c195fa27b Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:37:39 +0100 Subject: [PATCH 5/9] fix indentation with python in yaml --- .github/workflows/doc-style-checker.yml | 36 ++++++++++++------------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index 316ee01..341f42e 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -41,7 +41,7 @@ on: inputs: test_mode: description: "Run in test mode" - default: "true" + default: true type: boolean jobs: @@ -93,23 +93,23 @@ jobs: # Use Python to properly escape JSON content python3 -c " -import json -import sys - 
-with open('$file', 'r', encoding='utf-8') as f: - content = f.read() - -# Read the template -with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'r') as f: - template = f.read() - -# Replace content placeholder with properly escaped JSON -escaped_content = json.dumps(content) -final_json = template.replace('\"CONTENT_PLACEHOLDER\"', escaped_content) - -with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'w') as f: - f.write(final_json) -" + import json + import sys + + with open('$file', 'r', encoding='utf-8') as f: + content = f.read() + + # Read the template + with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'r') as f: + template = f.read() + + # Replace content placeholder with properly escaped JSON + escaped_content = json.dumps(content) + final_json = template.replace('\"CONTENT_PLACEHOLDER\"', escaped_content) + + with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'w') as f: + f.write(final_json) + " echo "βœ“ Prepared $file" fi done From a4d7828281d741e00dd01260e86c517a6e73a0ae Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:40:34 +0100 Subject: [PATCH 6/9] fix indentation with python in yaml --- .github/workflows/doc-style-checker.yml | 68 +++++++++++++++---------- 1 file changed, 41 insertions(+), 27 deletions(-) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index 341f42e..5c45193 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -75,41 +75,55 @@ jobs: mkdir -p /tmp/doc-processing echo "Processing files..." - echo '${{ steps.changed-files.outputs.files }}' | while IFS= read -r file; do - if [[ -n "$file" && -f "$file" ]]; then - echo "Preparing: $file" - SAFE_NAME=$(echo "$(basename "$file")" | sed 's/[^a-zA-Z0-9._-]/_/g') - - # Read file content and create JSON properly - cat > "/tmp/doc-processing/${SAFE_NAME}.meta" << 'METAEOF' - { - "filename": "FILE_PLACEHOLDER", - "content": "CONTENT_PLACEHOLDER" - } - METAEOF - - # Replace placeholders with actual values using proper escaping - sed -i "s|FILE_PLACEHOLDER|$file|g" "/tmp/doc-processing/${SAFE_NAME}.meta" - - # Use Python to properly escape JSON content - python3 -c " - import json - import sys - - with open('$file', 'r', encoding='utf-8') as f: + # Create Python script for processing content + python3 -c " + import os + script_content = '''import json + import sys + + if len(sys.argv) != 3: + print(\"Usage: python process_content.py \") + sys.exit(1) + + file_path = sys.argv[1] + meta_path = sys.argv[2] + + try: + with open(file_path, \"r\", encoding=\"utf-8\") as f: content = f.read() - # Read the template - with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'r') as f: + with open(meta_path, \"r\") as f: template = f.read() - # Replace content placeholder with properly escaped JSON escaped_content = json.dumps(content) final_json = template.replace('\"CONTENT_PLACEHOLDER\"', escaped_content) - with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'w') as f: + with open(meta_path, \"w\") as f: f.write(final_json) - " + + print(f\"Successfully processed {file_path}\") + except Exception as e: + print(f\"Error processing {file_path}: {e}\") + sys.exit(1) + ''' + + with open('/tmp/process_content.py', 'w') as f: + f.write(script_content) + " + + echo '${{ steps.changed-files.outputs.files }}' | while IFS= read -r file; do + if [[ -n "$file" && -f "$file" ]]; then + echo "Preparing: $file" + SAFE_NAME=$(echo "$(basename "$file")" | sed 's/[^a-zA-Z0-9._-]/_/g') + + # Create JSON template + echo '{"filename": 
"FILE_PLACEHOLDER", "content": "CONTENT_PLACEHOLDER"}' > "/tmp/doc-processing/${SAFE_NAME}.meta" + + # Replace filename placeholder + sed -i "s|FILE_PLACEHOLDER|$file|g" "/tmp/doc-processing/${SAFE_NAME}.meta" + + # Execute the Python script to handle content escaping + python3 /tmp/process_content.py "$file" "/tmp/doc-processing/${SAFE_NAME}.meta" echo "βœ“ Prepared $file" fi done From eb6cd87212871ce154162d3f1e794decdbdf3c32 Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:45:55 +0100 Subject: [PATCH 7/9] checking what files are push for actions --- .github/workflows/doc-style-checker.yml | 100 +++++++++++------------- 1 file changed, 45 insertions(+), 55 deletions(-) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index 5c45193..5a29f99 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -28,12 +28,6 @@ on: GEMINI_API_KEY: description: "Google Gemini API key" required: true - GEMINI_API_KEY_2: - description: "Google Gemini API key" - required: true - GEMINI_API_KEY_3: - description: "Google Gemini API key" - required: true ACTION_TOKEN: description: "GitHub token for posting comments" required: true @@ -75,56 +69,35 @@ jobs: mkdir -p /tmp/doc-processing echo "Processing files..." - # Create Python script for processing content - python3 -c " - import os - script_content = '''import json + echo '${{ steps.changed-files.outputs.files }}' | while IFS= read -r file; do + if [[ -n "$file" && -f "$file" ]]; then + echo "Preparing: $file" + SAFE_NAME=$(echo "$(basename "$file")" | sed 's/[^a-zA-Z0-9._-]/_/g') + + # Create a simple JSON file with proper escaping + python3 -c " + import json import sys - if len(sys.argv) != 3: - print(\"Usage: python process_content.py \") - sys.exit(1) - - file_path = sys.argv[1] - meta_path = sys.argv[2] + filename = '$file' try: - with open(file_path, \"r\", encoding=\"utf-8\") as f: + with open(filename, 'r', encoding='utf-8') as f: content = f.read() - with open(meta_path, \"r\") as f: - template = f.read() - - escaped_content = json.dumps(content) - final_json = template.replace('\"CONTENT_PLACEHOLDER\"', escaped_content) + data = { + 'filename': filename, + 'content': content + } - with open(meta_path, \"w\") as f: - f.write(final_json) + with open('/tmp/doc-processing/${SAFE_NAME}.meta', 'w') as f: + json.dump(data, f) - print(f\"Successfully processed {file_path}\") + print(f'βœ“ Prepared {filename}') except Exception as e: - print(f\"Error processing {file_path}: {e}\") + print(f'βœ— Error processing {filename}: {e}') sys.exit(1) - ''' - - with open('/tmp/process_content.py', 'w') as f: - f.write(script_content) " - - echo '${{ steps.changed-files.outputs.files }}' | while IFS= read -r file; do - if [[ -n "$file" && -f "$file" ]]; then - echo "Preparing: $file" - SAFE_NAME=$(echo "$(basename "$file")" | sed 's/[^a-zA-Z0-9._-]/_/g') - - # Create JSON template - echo '{"filename": "FILE_PLACEHOLDER", "content": "CONTENT_PLACEHOLDER"}' > "/tmp/doc-processing/${SAFE_NAME}.meta" - - # Replace filename placeholder - sed -i "s|FILE_PLACEHOLDER|$file|g" "/tmp/doc-processing/${SAFE_NAME}.meta" - - # Execute the Python script to handle content escaping - python3 /tmp/process_content.py "$file" "/tmp/doc-processing/${SAFE_NAME}.meta" - echo "βœ“ Prepared $file" fi done @@ -143,11 +116,17 @@ jobs: const https = require('https'); async function checkDocStyle(content, filename) { + console.log(`πŸ“€ Sending request for: ${filename}`); + 
console.log(`πŸ“ Content preview: ${content.substring(0, 100)}...`); + console.log(`πŸ“ Content length: ${content.length}`); + const data = JSON.stringify({ content: content, filename: filename }); + console.log(`πŸ“¦ Request data size: ${data.length} bytes`); + const options = { hostname: 'doc-style-checker.vercel.app', path: '/api/check-style', @@ -167,31 +146,38 @@ jobs: }); res.on('end', () => { try { - console.log(`Response status: ${res.statusCode}`); - console.log(`Response body: ${responseData}`); + console.log(`πŸ“¨ Response status: ${res.statusCode}`); + console.log(`πŸ“¨ Response headers:`, JSON.stringify(res.headers)); + console.log(`πŸ“¨ Response body preview: ${responseData.substring(0, 500)}...`); if (res.statusCode !== 200) { + console.error(`❌ Non-200 status code: ${res.statusCode}`); + console.error(`❌ Full response: ${responseData}`); throw new Error(`HTTP ${res.statusCode}: ${responseData}`); } const result = JSON.parse(responseData); + console.log(`βœ… Successfully parsed response for ${filename}`); resolve(result); } catch (e) { - console.error('Response parsing error:', e.message); - console.error('Raw response:', responseData); + console.error('❌ Response parsing error:', e.message); + console.error('❌ Raw response:', responseData); reject(e); } }); }); req.on('error', (e) => { + console.error(`❌ Request error for ${filename}:`, e.message); reject(e); }); - req.setTimeout(30000, () => { + req.setTimeout(45000, () => { + console.error(`⏰ Request timeout for ${filename}`); req.destroy(); - reject(new Error('Request timeout')); + reject(new Error('Request timeout after 45 seconds')); }); + console.log(`πŸš€ Sending request to API...`); req.write(data); req.end(); }); @@ -214,18 +200,22 @@ jobs: try { const metadata = JSON.parse(fs.readFileSync(metaPath, 'utf8')); - console.log(`πŸ” Checking: ${metadata.filename}`); + console.log(`πŸ” Processing: ${metadata.filename}`); + console.log(`πŸ“„ Content preview: "${metadata.content.substring(0, 150)}..."`); + console.log(`πŸ“ Content length: ${metadata.content.length} characters`); + console.log(`πŸ”€ Content type: ${typeof metadata.content}`); const result = await checkDocStyle(metadata.content, metadata.filename); results[metadata.filename] = result; const issueCount = result.issues ? 
result.issues.length : 0; - console.log(`βœ“ ${metadata.filename}: ${issueCount} issue(s) found`); + console.log(`βœ… ${metadata.filename}: ${issueCount} issue(s) found`); - await new Promise(resolve => setTimeout(resolve, 2000)); + await new Promise(resolve => setTimeout(resolve, 3000)); } catch (error) { - console.error(`βœ— Error processing ${metaFile}:`, error.message); + console.error(`❌ Error processing ${metaFile}:`, error.message); + console.error(`❌ Error stack:`, error.stack); const filename = metaFile.replace('.meta', ''); results[filename] = { error: error.message }; } From c4465a7c80796188cba8efcda66ef2c45711e93c Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 11:47:15 +0100 Subject: [PATCH 8/9] checking what files are push for actions --- .github/workflows/doc-style-checker.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index 5a29f99..aaf24df 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -28,6 +28,12 @@ on: GEMINI_API_KEY: description: "Google Gemini API key" required: true + GEMINI_API_KEY_2: + description: "Optional second Google Gemini API key" + required: false + GEMINI_API_KEY_3: + description: "Optional third Google Gemini API key" + required: false ACTION_TOKEN: description: "GitHub token for posting comments" required: true From 6b89bb40e0c693172d3ce016ad5d130d328e3c5b Mon Sep 17 00:00:00 2001 From: Fortune Ikechi Date: Mon, 23 Jun 2025 12:02:57 +0100 Subject: [PATCH 9/9] add node stringify, fix functions error --- .github/workflows/doc-style-checker.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/doc-style-checker.yml b/.github/workflows/doc-style-checker.yml index aaf24df..c704b7d 100644 --- a/.github/workflows/doc-style-checker.yml +++ b/.github/workflows/doc-style-checker.yml @@ -27,8 +27,8 @@ on: secrets: GEMINI_API_KEY: description: "Google Gemini API key" - required: true - GEMINI_API_KEY_2: + required: true + GEMINI_API_KEY_2: description: "Optional second Google Gemini API key" required: false GEMINI_API_KEY_3: